code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
from mopidy import listener
class AudioListener(listener.Listener):
    """
    Marker interface for recipients of events sent by the audio actor.

    Any Pykka actor that mixes in this class will receive calls to the methods
    defined here when the corresponding events happen in the core actor. This
    interface is used both for looking up what actors to notify of the events,
    and for providing default implementations for those listeners that are not
    interested in all events.
    """

    @staticmethod
    def send(event, **kwargs):
        """Helper to allow calling of audio listener events"""
        listener.send(AudioListener, event, **kwargs)

    def reached_end_of_stream(self):
        """
        Called whenever the end of the audio stream is reached.

        *MAY* be implemented by actor.
        """
        pass

    def stream_changed(self, uri):
        """
        Called whenever the audio stream changes.

        *MAY* be implemented by actor.

        :param string uri: URI the stream has started playing.
        """
        pass

    def position_changed(self, position):
        """
        Called whenever the position of the stream changes.

        *MAY* be implemented by actor.

        :param int position: Position in milliseconds.
        """
        pass

    def state_changed(self, old_state, new_state, target_state):
        """
        Called after the playback state have changed.

        Will be called for both immediate and async state changes in GStreamer.

        Target state is used to when we should be in the target state, but
        temporarily need to switch to an other state. A typical example of this
        is buffering. When this happens an event with
        `old=PLAYING, new=PAUSED, target=PLAYING` will be emitted. Once we have
        caught up a `old=PAUSED, new=PLAYING, target=None` event will be
        generated.

        Regular state changes will not have target state set as they are final
        states which should be stable.

        *MAY* be implemented by actor.

        :param old_state: the state before the change
        :type old_state: string from :class:`mopidy.core.PlaybackState` field
        :param new_state: the state after the change
        :type new_state: string from :class:`mopidy.core.PlaybackState` field
        :param target_state: the intended state
        :type target_state: string from :class:`mopidy.core.PlaybackState`
            field or :class:`None` if this is a final state.
        """
        pass

    def tags_changed(self, tags):
        """
        Called whenever the current audio stream's tags change.

        This event signals that some track metadata has been updated. This can
        be metadata such as artists, titles, organization, or details about the
        actual audio such as bit-rates, numbers of channels etc.

        For the available tag keys please refer to GStreamer documentation for
        tags.

        *MAY* be implemented by actor.

        :param tags: The tags that have just been updated.
        :type tags: :class:`set` of strings
        """
        pass
|
kingosticks/mopidy
|
mopidy/audio/listener.py
|
Python
|
apache-2.0
| 3,210
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
import pytest
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook
from airflow.providers.amazon.aws.sensors.emr import EmrContainerSensor
class TestEmrContainerSensor(unittest.TestCase):
    """Unit tests for EmrContainerSensor.poke() across EMR container job states."""

    def setUp(self):
        self.sensor = EmrContainerSensor(
            task_id='test_emrcontainer_sensor',
            virtual_cluster_id='vzwemreks',
            job_id='job1234',
            poll_interval=5,
            max_retries=1,
            aws_conn_id='aws_default',
        )

    def _assert_poke_raises(self):
        # Shared helper: terminal failure states must raise AirflowException.
        with pytest.raises(AirflowException) as ctx:
            self.sensor.poke(None)
        assert 'EMR Containers sensor failed' in str(ctx.value)

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("PENDING",))
    def test_poke_pending(self, _status_mock):
        self.assertFalse(self.sensor.poke(None))

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("SUBMITTED",))
    def test_poke_submitted(self, _status_mock):
        self.assertFalse(self.sensor.poke(None))

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("RUNNING",))
    def test_poke_running(self, _status_mock):
        self.assertFalse(self.sensor.poke(None))

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("COMPLETED",))
    def test_poke_completed(self, _status_mock):
        self.assertTrue(self.sensor.poke(None))

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("FAILED",))
    def test_poke_failed(self, _status_mock):
        self._assert_poke_raises()

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("CANCELLED",))
    def test_poke_cancelled(self, _status_mock):
        self._assert_poke_raises()

    @mock.patch.object(EmrContainerHook, 'check_query_status', side_effect=("CANCEL_PENDING",))
    def test_poke_cancel_pending(self, _status_mock):
        self._assert_poke_raises()
|
Acehaidrey/incubator-airflow
|
tests/providers/amazon/aws/sensors/test_emr_containers.py
|
Python
|
apache-2.0
| 3,055
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from st2client import models
from st2client.commands import resource
LOG = logging.getLogger(__name__)
class PolicyTypeBranch(resource.ResourceBranch):
    """CLI branch for the read-only policy-type resource."""

    def __init__(self, description, app, subparsers, parent_parser=None):
        subcommands = {
            'list': PolicyTypeListCommand,
            'get': PolicyTypeGetCommand,
        }
        super(PolicyTypeBranch, self).__init__(
            models.PolicyType, description, app, subparsers,
            parent_parser=parent_parser,
            read_only=True,
            commands=subcommands)
class PolicyTypeListCommand(resource.ResourceListCommand):
    """List policy types, optionally filtered by resource type."""

    display_attributes = ['id', 'resource_type', 'name', 'description']

    def __init__(self, resource, *args, **kwargs):
        super(PolicyTypeListCommand, self).__init__(resource, *args, **kwargs)
        self.parser.add_argument('-r', '--resource-type', type=str, dest='resource_type',
                                 help='Return policy types for the resource type.')

    @resource.add_auth_token_to_kwargs_from_cli
    def run(self, args, **kwargs):
        if not args.resource_type:
            return self.manager.get_all(**kwargs)
        # Seed the filter with the CLI value, then merge kwargs on top
        # (kwargs win on collision, matching the original precedence).
        criteria = {'resource_type': args.resource_type}
        criteria.update(**kwargs)
        return self.manager.query(**criteria)
class PolicyTypeGetCommand(resource.ResourceGetCommand):
    """Fetch a single policy type by reference or database id."""
    # Name of the positional CLI argument used as the primary key.
    pk_argument_name = 'ref_or_id'

    def get_resource(self, ref_or_id, **kwargs):
        # Delegate to the base-class helper that accepts either a ref or an id.
        return self.get_resource_by_ref_or_id(ref_or_id=ref_or_id, **kwargs)
class PolicyBranch(resource.ResourceBranch):
    """CLI branch exposing list/get/update/delete commands for policies."""

    def __init__(self, description, app, subparsers, parent_parser=None):
        handlers = {
            'list': PolicyListCommand,
            'get': PolicyGetCommand,
            'update': PolicyUpdateCommand,
            'delete': PolicyDeleteCommand,
        }
        super(PolicyBranch, self).__init__(
            models.Policy, description, app, subparsers,
            parent_parser=parent_parser,
            commands=handlers)
class PolicyListCommand(resource.ContentPackResourceListCommand):
    """List policies, optionally filtered by resource ref and/or policy type."""

    display_attributes = ['ref', 'resource_ref', 'policy_type']

    def __init__(self, resource, *args, **kwargs):
        super(PolicyListCommand, self).__init__(resource, *args, **kwargs)
        self.parser.add_argument('-r', '--resource-ref', type=str, dest='resource_ref',
                                 help='Return policies for the resource ref.')
        self.parser.add_argument('-pt', '--policy-type', type=str, dest='policy_type',
                                 help='Return policies of the policy type.')

    @resource.add_auth_token_to_kwargs_from_cli
    def run(self, args, **kwargs):
        # Collect only the CLI filters the user actually supplied.
        criteria = {}
        for attr in ('resource_ref', 'policy_type'):
            value = getattr(args, attr)
            if value:
                criteria[attr] = value
        if not criteria:
            return self.manager.get_all(**kwargs)
        criteria.update(**kwargs)
        return self.manager.query(**criteria)
class PolicyGetCommand(resource.ContentPackResourceGetCommand):
    """Fetch a single policy; shows all attributes in the listed order."""
    display_attributes = ['all']
    attribute_display_order = ['id', 'ref', 'pack', 'name', 'description',
                               'enabled', 'resource_ref', 'policy_type',
                               'parameters']
class PolicyUpdateCommand(resource.ContentPackResourceUpdateCommand):
    """Update a policy; all behavior comes from the base class."""
    pass
class PolicyDeleteCommand(resource.ContentPackResourceDeleteCommand):
    """Delete a policy; all behavior comes from the base class."""
    pass
|
dennybaa/st2
|
st2client/st2client/commands/policy.py
|
Python
|
apache-2.0
| 4,341
|
"""
bbofuser
FILE: __init__.py
Created: 8/12/15 9:46 PM
"""
__author__ = 'Mark Scrimshire:@ekivemark'
|
ekivemark/BlueButtonDev
|
accounts/templates/accounts/messages/__init__.py
|
Python
|
apache-2.0
| 104
|
# -*- coding: utf-8 -*-
#
# This module (which must have the name queryfunc.py) is responsible
# for converting incoming queries to a database query understood by
# this particular node's database schema.
#
# This module must contain a function setupResults, taking a sql object
# as its only argument.
#
# library imports
import sys
from itertools import chain
from django.conf import settings
from django.db.models import Q
from vamdctap.sqlparse import sql2Q
import dictionaries
import models # this imports models.py from the same directory as this file
def LOG(s):
    "Simple logger function"
    # Python 2 print-to-stderr; only emits when Django's DEBUG setting is on.
    if settings.DEBUG: print >> sys.stderr, s
#------------------------------------------------------------
# Helper functions (called from setupResults)
#------------------------------------------------------------
def getSpeciesWithStates(transs, sql):
    """
    Use the Transition matches to obtain the related Species (only atoms in this example)
    and the states related to each transition.
    We also return some statistics of the result

    :param transs: queryset of matched Transitions
    :param sql: parsed query object; used to decide whether states are wanted
    :returns: (species queryset, species count, state count)

    NOTE(review): ``nstates`` is initialised to 0 and never updated, so the
    returned state count is always 0 even when ``spec.States`` is populated --
    presumably the header count is treated as optional; confirm.
    """
    # get the reference ids for the 'species' ForeignKey field
    # (see getRefs comment for more info)
    spids = set( transs.values_list('finalstateindex__species',flat=True) )
    # use the reference ids to query the Species database table
    species = models.Species.objects.filter(pk__in=spids)
    nspecies = species.count() # get some statistics
    # List the IDs (i.e. keys from the states table) of all the states
    # connected with all the selected transitions.
    stateIds = set().union(transs.values_list('initialstateindex', flat=True), transs.values_list('finalstateindex', flat=True))
    # get all states. Note that when building a queryset like this,
    # (using objects.filter() etc) will usually not hit the database
    # until it's really necessary, making this very efficient.
    LOG("Getting states")
    nstates = 0
    if statesRequired(sql):
        for spec in species:
            # use the found reference ids to search the State database table
            # Note that we store a new queryset called 'States' on the species queryset.
            # This is important and a requirement looked for by the node
            # software (all RETURNABLES AtomState* will try to loop over this
            # nested queryset).
            spec.States = models.States.objects.filter(species=spec).filter(pk__in=stateIds)
    return species, nspecies, nstates
def everythingRequired(sql):
    """Return True when the query names no requestables, i.e. wants everything."""
    return not sql.requestables
def transitionsRequired(sql):
    """Return True when radiative transitions should be part of the result."""
    if everythingRequired(sql):
        return True
    return 'radiativetransitions' in sql.requestables
def statesRequired(sql):
    """Return True when atomic states should be part of the result."""
    if everythingRequired(sql):
        return True
    return 'atomstates' in sql.requestables
def constraintsPresent(sql):
    """Return True when the query carries a non-empty WHERE clause."""
    return bool(sql.where)
#------------------------------------------------------------
# Main function
#------------------------------------------------------------
def setupResults(sql, limit=100000):
    """Entry point called by the node software; delegates to query().

    :param sql: parsed TAP query object
    :param limit: maximum number of transitions to return before truncation
    :raises: whatever query() raises, after logging it
    """
    try:
        return query(sql, limit)
    except Exception as oops:
        LOG(oops)
        # BUG FIX: re-raise with a bare `raise` so the original traceback is
        # preserved; `raise oops` starts a fresh traceback on Python 2,
        # hiding where the error actually occurred.
        raise
def query(sql, limit):
    """Execute the parsed query and assemble the VAMDC result dictionary.

    :param sql: parsed TAP query object (exposes .requestables and .where)
    :param limit: maximum number of transitions before truncation
    :returns: dict with 'RadTrans', 'Atoms' and 'HeaderInfo' keys, or an
        empty dict (which makes the node software reply 204 "No content").
    """
    # log the incoming query
    LOG(sql)
    # convert the incoming sql to a correct django query syntax object
    # based on the RESTRICTABLES dictionary in dictionaries.py
    q = sql2Q(sql)
    if constraintsPresent(sql) or transitionsRequired(sql) or statesRequired(sql):
        species, nstates, transs, percentage = genericQuery(sql, q, limit)
        nspecies = species.count()
        ntranss = transs.count()
    else:
        # No constraints and only species-level data requested:
        # return every species, no states or transitions.
        species = allSpeciesQuery(sql, q, limit)
        nspecies = species.count()
        nstates = 0
        ntranss = 0
        transs = {}
        percentage = None
    # Adjust the counts of things returned according to the requestables.
    # The caller will choose what actually to return, but we have to set
    # the counts in the header ourselves.
    if not transitionsRequired(sql):
        ntranss = 0
    if not statesRequired(sql):
        nstates = 0
    # Create the header with some useful info. The key names here are
    # standardized and shouldn't be changed.
    # NOTE(review): 'COUNT-species' vs 'count-states' casing is inconsistent;
    # presumably the node software matches keys case-insensitively -- confirm.
    headerinfo={\
        'Truncated':percentage,
        'COUNT-species':nspecies,
        'count-states':nstates,
        'count-radiative':ntranss,
        'last-modified' : '2013-08-31T21:30:00+00:00'
        }
    LOG(headerinfo)
    # Return the data. The keynames are standardized.
    if (nspecies > 0 or nstates > 0 or ntranss > 0):
        return {'RadTrans' : transs,
                'Atoms' : species,
                'HeaderInfo': headerinfo,
               }
    # As a special case, if there are no data, return an empty structure.
    # This causes the node software to send a 204 "No content" response.
    else:
        return {}
def genericQuery(sql, q, limit):
    """
    When query constraints are present, this form of query is used.
    The query initially selects the transitions and then builds matching
    sets of species and states. It has to be done this way because the
    restrictables dictionary contains object references from the Transitions
    table; the query sets cannot work directly on the other tables.

    :param sql: parsed TAP query object
    :param q: Django Q object built from the query's constraints
    :param limit: maximum number of transitions to return
    :returns: (species, nstates, transs, percentage) where percentage is the
        coverage string when the result was truncated, else None.
    """
    LOG("Generic query")
    # We build a queryset of database matches on the Transitions model
    # since through this model (in our example) we are able to
    # reach all other models. Note that a queryset is actually not yet
    # hitting the database, making it very efficient.
    LOG("getting transitions")
    transs = models.Transitions.objects.select_related(depth=2).filter(q)
    # count the number of matches, make a simple truncation if there are
    # too many (record the coverage in the returned header)
    # If we are constraining by transitions but not returning them,
    # do not truncate.
    ntranss=transs.count()
    if limit < ntranss and transitionsRequired(sql):
        transs = transs[:limit]
        percentage='%.1f' % (float(limit) / ntranss * 100)
    else:
        percentage=None
    # Through the transition-matches, use our helper functions to extract
    # all the relevant database data for our query.
    #sources = getRefs(transs)
    LOG("Getting species")
    species, nspecies, nstates = getSpeciesWithStates(transs, sql)
    LOG(species)
    return species, nstates, transs, percentage
def allSpeciesQuery(sql, q, limit):
    """Return every species; used when the query carries no constraints.

    The sql, q and limit arguments are accepted for signature symmetry with
    genericQuery() but are not used here.
    """
    LOG("All-species query")
    return models.Species.objects.all()
|
guyrixon/VAMDC-tutorials
|
source/tuition/build-a-node/spectroscopic/queryfunc.py
|
Python
|
bsd-2-clause
| 6,507
|
# The following script performs a database-level migration from
# an old server (pre 8/1/2009) to a new server (post 8/1/2009).
#
# This script assumes it is running off an exact copy of the
# OLD database, e.g. if a dumpscript was run and used to create
# this database exactly.
#
# Many projects were renamed resulting in the renaming of db
# tables that needs to be reflected here.
#
# This script will also clear the entire contents of the
# submission and xformmanager applications. It is working
# under the assumption that the forms will be reimported
# manually through one of the other means (REST API), import/
# export scripts, etc.
#
# What will be left at the end of this are properly named tables
# filled with data, except for xformmanager and submission
# which will have no data.
from django.db import connection
from django.core.management.commands.syncdb import Command
def run():
print "starting migration"
from receiver.models import Submission, Attachment
from xformmanager.models import FormDefModel, ElementDefModel, Metadata
# this part of the script walks through all the registered
# form definitions and deletes them.
from xformmanager.storageutility import StorageUtility
from graphing import dbhelper
# let's do some sanity checks to make sure everything is working
# as planned.
print "checking original database format"
all_formdefs = FormDefModel.objects.all()
all_tablenames = []
all_elements = ElementDefModel.objects.all()
print "found %s existing forms, %s elements" % (len(all_formdefs), len(all_elements))
# first walk through all the expected tables and make sure they exist
_check_data_tables(all_formdefs, all_tablenames)
# this is temporarily commented out because the child tables are still acting
# a bit funky.
#_check_element_tables(all_elements)
print "all tables exist pre-migration"
# alright, let's clear them all now
print "clearing xformmanager application"
su = StorageUtility()
su.clear(False)
print "checking deletion of tables xformmanager tables"
# now let's check make sure the forms and tables are gone
form_count = len(FormDefModel.objects.all())
if form_count != 0:
raise Exception("Not all forms were deleted! %s remain." % (table, form))
elem_count = len(ElementDefModel.objects.all())
if elem_count != 0:
raise Exception("Not all elements were deleted! %s remain." % (table, form))
for tablename in all_tablenames:
if _exists(tablename):
raise Exception("Expected table %s to be deleted but it wasn't!" % (tablename))
print "xformmanager cleanup verified"
print "Migrating tables..."
_perform_table_migration()
print "Table migration verified"
# now sync db. we have to do this before submissions and attachments
# are deleted because the new models expect foreign keys back to them
print "syncdb"
_syncdb()
print "done syncdb"
all_submissions = Submission.objects.all()
all_attachments = Attachment.objects.all()
print "Cleaning up %s submissions and %s attachments" % (len(all_submissions), len(all_attachments))
all_submissions.delete()
all_attachments.delete()
submission_count = len(Submission.objects.all())
attachment_count = len(Attachment.objects.all())
if submission_count != 0:
raise Exception("Tried to delete all submissions but %s remained" % submission_count)
if attachment_count != 0:
raise Exception("Tried to delete all submissions but %s remained" % attachment_count)
print "Submission cleanup verified"
def _check_data_tables(all_elements, all_tablenames):
    '''Makes sure the table for each element exists, and adds
    it to the passed in list of names. This works on both
    formdef and elementdef objects, since they both support
    .table_name

    :param all_elements: iterable of formdef/elementdef model objects
    :param all_tablenames: list that is appended to in place (output param)
    '''
    for elem in all_elements:
        # NOTE: uses .table_name (the docstring originally said .tablename).
        if not _exists(elem.table_name):
            raise Exception("Expected to find table %s for %s but did not!" % (elem.table_name, elem))
        all_tablenames.append(elem.table_name)
def _exists(table):
    # Return True if exactly one table with this name exists.
    # MySQL-specific ("show tables like ...").  The table name is
    # interpolated directly into the SQL; names come from our own models
    # and the remap dict, not from untrusted input -- do not pass user data.
    cursor = connection.cursor()
    cursor.execute("show tables like '%s'" % table)
    return len(cursor.fetchall()) == 1
def _perform_table_migration():
    # moves any tables that have been renamed, but don't require
    # structural changes.
    # Mapping of old (organization_*/dbanalyzer_*) table names to the new
    # hq_*/graphing_* names they were moved to.
    table_remapping = {"organization_domain": "hq_domain",
                       "organization_extrole": "hq_extrole",
                       "organization_extuser": "hq_extuser",
                       "organization_organization": "hq_organization",
                       "organization_organization_members": "hq_organization_members",
                       "organization_organization_organization_type": "hq_organization_organization_type",
                       "organization_organization_supervisors": "hq_organization_supervisors",
                       "organization_organizationtype": "hq_organizationtype",
                       "organization_reporterprofile": "hq_reporterprofile",
                       "organization_reportschedule": "hq_reportschedule",
                       "dbanalyzer_basegraph": "graphing_basegraph",
                       "dbanalyzer_graphgroup": "graphing_graphgroup",
                       "dbanalyzer_graphgroup_graphs": "graphing_graphgroup_graphs",
                       "dbanalyzer_graphpref": "graphing_graphpref",
                       "dbanalyzer_graphpref_root_graphs": "graphing_graphpref_root_graphs",
                       "dbanalyzer_rawgraph": "graphing_rawgraph",
                       }
    for oldname, newname in table_remapping.items():
        _rename_table(oldname, newname)
    cursor = connection.cursor()
    # for some reason mysql insists on using these special slanted quote marks
    # for this command.
    # Add columns the new models expect on the renamed tables.
    cursor.execute("ALTER TABLE `hq_domain` ADD COLUMN `timezone` VARCHAR(64) AFTER `description`;")
    cursor.execute("ALTER TABLE `receiver_submission` ADD COLUMN `content_type` VARCHAR(100) AFTER `bytes_received`;")
    # update null constraints
    cursor.execute("ALTER TABLE `xformmanager_metadata` MODIFY COLUMN `formname` VARCHAR(255) DEFAULT NULL;")
    cursor.execute("ALTER TABLE `xformmanager_metadata` MODIFY COLUMN `formversion` VARCHAR(255) DEFAULT NULL;")
    cursor.execute("ALTER TABLE `xformmanager_metadata` MODIFY COLUMN `deviceid` VARCHAR(255) DEFAULT NULL;")
    cursor.execute("ALTER TABLE `xformmanager_metadata` MODIFY COLUMN `username` VARCHAR(255) DEFAULT NULL;")
    cursor.execute("ALTER TABLE `xformmanager_metadata` MODIFY COLUMN `chw_id` VARCHAR(255) DEFAULT NULL;")
    cursor.execute("ALTER TABLE `xformmanager_metadata` MODIFY COLUMN `uid` VARCHAR(32) DEFAULT NULL;")
def _rename_table(oldname, newname):
    '''Renames a table, with some sanity checks

    Verifies the old table exists and the new one does not before renaming,
    then verifies the rename actually took effect.  Raises Exception on any
    failed check.  Table names are interpolated into SQL directly; they come
    from the hard-coded remap dict, never from user input.
    '''
    cursor = connection.cursor()
    if not _exists(oldname):
        raise Exception("Tried to rename %s but it didn't exist!" % oldname)
    if _exists(newname):
        raise Exception("Tried to rename %s to %s but the second already exists!" % (oldname, newname))
    cursor.execute("ALTER TABLE %s RENAME TO %s" % (oldname, newname))
    if not _exists(newname):
        raise Exception("Tried to rename %s to %s but it didn't work!" % (oldname, newname))
    if _exists(oldname):
        raise Exception("Tried to rename %s to %s but the old table was still there!" % (oldname, newname))
def _syncdb():
    # Run Django's syncdb command programmatically so the new models'
    # tables are created before the submission/attachment purge.
    sync = Command()
    sync.handle()
|
icomms/wqmanager
|
utilities/data_migration/db_migration_aug_04_2009.py
|
Python
|
bsd-3-clause
| 7,661
|
from random import random
from bokeh.layouts import row
from bokeh.models import CustomJS, ColumnDataSource
from bokeh.plotting import figure, output_file, show
output_file("callback.html")
x = [random() for x in range(500)]
y = [random() for y in range(500)]
s1 = ColumnDataSource(data=dict(x=x, y=y))
p1 = figure(plot_width=400, plot_height=400, tools="lasso_select", title="Select Here")
p1.circle('x', 'y', source=s1, alpha=0.6)
s2 = ColumnDataSource(data=dict(x=[], y=[]))
p2 = figure(plot_width=400, plot_height=400, x_range=(0, 1), y_range=(0, 1),
tools="", title="Watch Here")
p2.circle('x', 'y', source=s2, alpha=0.6)
s1.callback = CustomJS(args=dict(s2=s2), code="""
var inds = cb_obj.selected['1d'].indices;
var d1 = cb_obj.data;
var d2 = s2.data;
d2['x'] = []
d2['y'] = []
for (i = 0; i < inds.length; i++) {
d2['x'].push(d1['x'][inds[i]])
d2['y'].push(d1['y'][inds[i]])
}
s2.change.emit();
""")
layout = row(p1, p2)
show(layout)
|
philippjfr/bokeh
|
sphinx/source/docs/user_guide/examples/interaction_callbacks_for_selections.py
|
Python
|
bsd-3-clause
| 1,054
|
from __future__ import unicode_literals
import datetime
import re
import sys
from unittest import skipIf
import warnings
from xml.dom.minidom import parseString
try:
import pytz
except ImportError:
pytz = None
from django.core import serializers
from django.core.urlresolvers import reverse
from django.db.models import Min, Max
from django.http import HttpRequest
from django.template import Context, RequestContext, Template, TemplateSyntaxError
from django.test import TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import requires_tz_support
from django.utils import six
from django.utils import timezone
from .forms import EventForm, EventSplitForm, EventLocalizedForm, EventModelForm, EventLocalizedModelForm
from .models import Event, MaybeEvent, Session, SessionEvent, Timestamp, AllDayEvent
# These tests use the EAT (Eastern Africa Time) and ICT (Indochina Time)
# timezones, which don't have Daylight Saving Time, so we can represent them
# easily with FixedOffset, and use them directly as tzinfo in the constructors.
# settings.TIME_ZONE is forced to EAT. Most tests use a variant of
# datetime.datetime(2011, 9, 1, 13, 20, 30), which translates to
# 10:20:30 in UTC and 17:20:30 in ICT.
UTC = timezone.utc
EAT = timezone.get_fixed_timezone(180)      # Africa/Nairobi (UTC+3, in minutes)
ICT = timezone.get_fixed_timezone(420)      # Asia/Bangkok (UTC+7, in minutes)
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=False)
class LegacyDatabaseTests(TestCase):
def test_naive_datetime(self):
    """A naive datetime survives a DB round-trip unchanged with USE_TZ=False."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond(self):
    """Microseconds survive the round-trip when the backend supports them."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertEqual(event.dt, dt)
@skipIfDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond_unsupported(self):
    """Backends without microsecond precision truncate microseconds to 0."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    # microseconds are lost during a round-trip in the database
    self.assertEqual(event.dt, dt.replace(microsecond=0))
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_local_timezone(self):
    """An EAT-aware datetime comes back naive but equal once re-tagged EAT."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # interpret the naive datetime in local time to get the correct value
    self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond(self):
    """Same as above, with microseconds preserved by the backend."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # interpret the naive datetime in local time to get the correct value
    self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination actually never happens.
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond_unsupported(self):
    """Aware datetime comes back naive AND with microseconds truncated."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # interpret the naive datetime in local time to get the correct value
    # microseconds are lost during a round-trip in the database
    self.assertEqual(event.dt.replace(tzinfo=EAT), dt.replace(microsecond=0))
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_utc(self):
    """A UTC-aware datetime is converted to local (EAT) on the round-trip."""
    dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # interpret the naive datetime in local time to get the correct value
    self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination is no longer possible since timezone support
# was removed from the SQLite backend -- it didn't work.
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_utc_unsupported(self):
    """String-cast backends drop the tzinfo, so the stored value is wrong."""
    dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # django.db.backends.utils.typecast_dt will just drop the
    # timezone, so a round-trip in the database alters the data (!)
    # interpret the naive datetime in local time and you get a wrong value
    self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt)
    # interpret the naive datetime in original time to get the correct value
    self.assertEqual(event.dt.replace(tzinfo=UTC), dt)
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_other_timezone(self):
    """An ICT-aware datetime is converted to local (EAT) on the round-trip."""
    dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # interpret the naive datetime in local time to get the correct value
    self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination is no longer possible since timezone support
# was removed from the SQLite backend -- it didn't work.
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_other_timezone_unsupported(self):
    """String-cast backends drop ICT tzinfo, altering the stored value."""
    dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
    Event.objects.create(dt=dt)
    event = Event.objects.get()
    self.assertIsNone(event.dt.tzinfo)
    # django.db.backends.utils.typecast_dt will just drop the
    # timezone, so a round-trip in the database alters the data (!)
    # interpret the naive datetime in local time and you get a wrong value
    self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt)
    # interpret the naive datetime in original time to get the correct value
    self.assertEqual(event.dt.replace(tzinfo=ICT), dt)
@skipIfDBFeature('supports_timezones')
def test_aware_datetime_unspported(self):
    """Storing an aware datetime raises when the backend lacks tz support.

    NOTE(review): method name has a typo ("unspported"); left unchanged
    because renaming would change test discovery.
    """
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
    with self.assertRaises(ValueError):
        Event.objects.create(dt=dt)
def test_auto_now_and_auto_now_add(self):
    """auto_now/auto_now_add timestamps fall within +/- 2 seconds of now.

    BUG FIX: the original asserted ``future > ts.updated`` twice and never
    bounded ``ts.created`` from above; the duplicate is replaced with a
    check on ``ts.created``.
    """
    now = datetime.datetime.now()
    past = now - datetime.timedelta(seconds=2)
    future = now + datetime.timedelta(seconds=2)
    Timestamp.objects.create()
    ts = Timestamp.objects.get()
    self.assertLess(past, ts.created)
    self.assertLess(past, ts.updated)
    self.assertGreater(future, ts.created)
    self.assertGreater(future, ts.updated)
def test_query_filter(self):
    """gt/gte lookups on naive datetimes behave as expected."""
    dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30)
    dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30)
    Event.objects.create(dt=dt1)
    Event.objects.create(dt=dt2)
    self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
    self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
    self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1)
    self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0)
def test_query_datetime_lookups(self):
    """year/month/day/week_day/hour/minute/second lookups on naive datetimes."""
    Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
    Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
    self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
    self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
    self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
    self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
    self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
    self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
    self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
def test_query_aggregation(self):
    """Min/Max aggregation over naive datetimes returns naive datetimes."""
    # Only min and max make sense for datetimes.
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20))
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30))
    Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40))
    result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
    self.assertEqual(result, {
        'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40),
        'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20),
    })
    def test_query_annotation(self):
        """Annotating with Min over a related datetime, then ordering/filtering on it."""
        # Only min and max make sense for datetimes.
        morning = Session.objects.create(name='morning')
        afternoon = Session.objects.create(name='afternoon')
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning)
        # Earliest event per session.
        morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
        afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
            [morning_min_dt, afternoon_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
            [morning_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
            [afternoon_min_dt],
            transform=lambda d: d.dt)
    def test_query_datetimes(self):
        """datetimes() truncates stored values to the requested resolution."""
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
        # year/month/day collapse both events into a single truncated value.
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'year'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'month'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'day'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
            transform=lambda d: d)
        # hour and below keep the events distinct.
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'hour'),
            [datetime.datetime(2011, 1, 1, 1, 0, 0),
             datetime.datetime(2011, 1, 1, 4, 0, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'minute'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0),
             datetime.datetime(2011, 1, 1, 4, 30, 0)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'second'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0),
             datetime.datetime(2011, 1, 1, 4, 30, 0)],
            transform=lambda d: d)
    def test_raw_sql(self):
        """A naive datetime can be passed as a raw-SQL query parameter."""
        # Regression test for #17755
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        event = Event.objects.create(dt=dt)
        self.assertQuerysetEqual(
            Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
            [event],
            transform=lambda d: d)
    def test_filter_date_field_with_aware_datetime(self):
        """Filtering a DateField against an aware datetime (legacy/USE_TZ=False mode)."""
        # Regression test for #17742
        day = datetime.date(2011, 9, 1)
        AllDayEvent.objects.create(day=day)
        # This is 2011-09-02T01:30:00+03:00 in EAT
        dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
        # In legacy mode the tzinfo is ignored, so the comparison matches.
        self.assertTrue(AllDayEvent.objects.filter(day__gte=dt).exists())
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=True)
class NewDatabaseTests(TestCase):
    @requires_tz_support
    def test_naive_datetime(self):
        """Saving a naive datetime warns and interprets it in the local time zone."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            # Exactly one naive-datetime warning is emitted.
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        # naive datetimes are interpreted in local time
        self.assertEqual(event.dt, dt.replace(tzinfo=EAT))
    @requires_tz_support
    def test_datetime_from_date(self):
        """Saving a plain date into a DateTimeField warns and becomes local midnight."""
        dt = datetime.date(2011, 9, 1)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        self.assertEqual(event.dt, datetime.datetime(2011, 9, 1, tzinfo=EAT))
    @requires_tz_support
    @skipUnlessDBFeature('supports_microsecond_precision')
    def test_naive_datetime_with_microsecond(self):
        """Naive datetimes with microseconds warn and round-trip (microseconds kept)."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        # naive datetimes are interpreted in local time
        self.assertEqual(event.dt, dt.replace(tzinfo=EAT))
    @requires_tz_support
    @skipIfDBFeature('supports_microsecond_precision')
    def test_naive_datetime_with_microsecond_unsupported(self):
        """Same as above, but the backend drops microseconds on the round-trip."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            Event.objects.create(dt=dt)
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt received "
                                           "a naive datetime"))
        event = Event.objects.get()
        # microseconds are lost during a round-trip in the database
        # naive datetimes are interpreted in local time
        self.assertEqual(event.dt, dt.replace(microsecond=0, tzinfo=EAT))
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
    @skipIfDBFeature('supports_microsecond_precision')
    def test_aware_datetime_in_local_timezone_with_microsecond_unsupported(self):
        """Backends without microsecond precision drop microseconds on round-trip."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
        Event.objects.create(dt=dt)
        event = Event.objects.get()
        # microseconds are lost during a round-trip in the database
        self.assertEqual(event.dt, dt.replace(microsecond=0))
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_auto_now_and_auto_now_add(self):
now = timezone.now()
past = now - datetime.timedelta(seconds=2)
future = now + datetime.timedelta(seconds=2)
Timestamp.objects.create()
ts = Timestamp.objects.get()
self.assertLess(past, ts.created)
self.assertLess(past, ts.updated)
self.assertGreater(future, ts.updated)
self.assertGreater(future, ts.updated)
    def test_query_filter(self):
        """Comparison lookups (gte/gt) behave correctly on aware datetimes."""
        dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)
        dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30, tzinfo=EAT)
        Event.objects.create(dt=dt1)
        Event.objects.create(dt=dt2)
        self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
        self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
        self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1)
        self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0)
    @skipIf(pytz is None, "this test requires pytz")
    def test_query_filter_with_pytz_timezones(self):
        """exact/in/range lookups work with pytz-aware datetimes."""
        tz = pytz.timezone('Europe/Paris')
        dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=tz)
        Event.objects.create(dt=dt)
        next = dt + datetime.timedelta(seconds=3)
        prev = dt - datetime.timedelta(seconds=3)
        self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1)
        self.assertEqual(Event.objects.filter(dt__exact=next).count(), 0)
        self.assertEqual(Event.objects.filter(dt__in=(prev, next)).count(), 0)
        self.assertEqual(Event.objects.filter(dt__in=(prev, dt, next)).count(), 1)
        self.assertEqual(Event.objects.filter(dt__range=(prev, next)).count(), 1)
    @requires_tz_support
    def test_query_filter_with_naive_datetime(self):
        """Filtering with a naive datetime warns once per lookup and uses local time."""
        dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)
        Event.objects.create(dt=dt)
        dt = dt.replace(tzinfo=None)
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            # naive datetimes are interpreted in local time
            self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1)
            self.assertEqual(Event.objects.filter(dt__lte=dt).count(), 1)
            self.assertEqual(Event.objects.filter(dt__gt=dt).count(), 0)
        # One warning per filter call above.
        self.assertEqual(len(recorded), 3)
        for warning in recorded:
            msg = str(warning.message)
            self.assertTrue(msg.startswith("DateTimeField Event.dt "
                                           "received a naive datetime"))
    @skipUnlessDBFeature('has_zoneinfo_database')
    def test_query_datetime_lookups(self):
        """Component lookups are computed in the current (EAT) time zone."""
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
        self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
        self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
        # 2011-01-01 is a Saturday; week_day counts Sunday=1 .. Saturday=7.
        self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
        self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
        self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
        self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
    @skipUnlessDBFeature('has_zoneinfo_database')
    def test_query_datetime_lookups_in_other_timezone(self):
        """Component lookups follow the active time zone, not the stored one."""
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        with timezone.override(UTC):
            # These two dates fall in the same day in EAT, but in different days,
            # years and months in UTC.
            self.assertEqual(Event.objects.filter(dt__year=2011).count(), 1)
            self.assertEqual(Event.objects.filter(dt__month=1).count(), 1)
            self.assertEqual(Event.objects.filter(dt__day=1).count(), 1)
            self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 1)
            self.assertEqual(Event.objects.filter(dt__hour=22).count(), 1)
            # Minute/second components are unaffected by whole-hour offsets.
            self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
            self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
    def test_query_aggregation(self):
        """Min/Max aggregation over an aware datetime column."""
        # Only min and max make sense for datetimes.
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT))
        result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
        self.assertEqual(result, {
            'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT),
            'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT),
        })
    def test_query_annotation(self):
        """Annotating with Min over a related aware datetime, then ordering/filtering."""
        # Only min and max make sense for datetimes.
        morning = Session.objects.create(name='morning')
        afternoon = Session.objects.create(name='afternoon')
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon)
        SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning)
        # Earliest event per session.
        morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
        afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
            [morning_min_dt, afternoon_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
            [morning_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
            [afternoon_min_dt],
            transform=lambda d: d.dt)
    @skipUnlessDBFeature('has_zoneinfo_database')
    def test_query_datetimes(self):
        """datetimes() truncates in the current (EAT) time zone and returns aware values."""
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        # year/month/day collapse both events into a single truncated value.
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'year'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'month'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'day'),
            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        # hour and below keep the events distinct.
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'hour'),
            [datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
             datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'minute'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
            transform=lambda d: d)
        self.assertQuerysetEqual(
            Event.objects.datetimes('dt', 'second'),
            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
            transform=lambda d: d)
    @skipUnlessDBFeature('has_zoneinfo_database')
    def test_query_datetimes_in_other_timezone(self):
        """datetimes() truncates in the overridden (UTC) zone; events split across days."""
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        with timezone.override(UTC):
            # In UTC the events fall on 2010-12-31 22:30 and 2011-01-01 01:30,
            # so every resolution yields two distinct truncated values.
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'year'),
                [datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'month'),
                [datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'day'),
                [datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'hour'),
                [datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'minute'),
                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
                transform=lambda d: d)
            self.assertQuerysetEqual(
                Event.objects.datetimes('dt', 'second'),
                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
                transform=lambda d: d)
    def test_raw_sql(self):
        """An aware datetime can be passed as a raw-SQL query parameter."""
        # Regression test for #17755
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        event = Event.objects.create(dt=dt)
        self.assertQuerysetEqual(
            Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
            [event],
            transform=lambda d: d)
    @requires_tz_support
    def test_filter_date_field_with_aware_datetime(self):
        """With USE_TZ, the aware datetime converts to 2011-09-02 locally, so no match."""
        # Regression test for #17742
        day = datetime.date(2011, 9, 1)
        AllDayEvent.objects.create(day=day)
        # This is 2011-09-02T01:30:00+03:00 in EAT
        dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
        self.assertFalse(AllDayEvent.objects.filter(day__gte=dt).exists())
def test_null_datetime(self):
# Regression test for #17294
e = MaybeEvent.objects.create()
self.assertEqual(e.dt, None)
@override_settings(TIME_ZONE='Africa/Nairobi')
class SerializationTests(TestCase):
# Backend-specific notes:
# - JSON supports only milliseconds, microseconds will be truncated.
    # - PyYAML dumps the UTC offset correctly for timezone-aware datetimes,
    #   but when it loads this representation, it subtracts the offset and
    #   returns a naive datetime object in UTC (http://pyyaml.org/ticket/202).
# Tests are adapted to take these quirks into account.
    def assert_python_contains_datetime(self, objects, dt):
        """Check the 'dt' field of the first python-serialized object equals *dt*."""
        self.assertEqual(objects[0]['fields']['dt'], dt)
    def assert_json_contains_datetime(self, json, dt):
        """Check the JSON serialization contains *dt* rendered exactly as given."""
        self.assertIn('"fields": {"dt": "%s"}' % dt, json)
    def assert_xml_contains_datetime(self, xml, dt):
        """Check the first <field> element of the XML serialization holds *dt*."""
        field = parseString(xml).getElementsByTagName('field')[0]
        self.assertXMLEqual(field.childNodes[0].wholeText, dt)
    def assert_yaml_contains_datetime(self, yaml, dt):
        """Check the YAML serialization contains *dt* (timestamp tag may vary)."""
        # Depending on the yaml dumper, '!timestamp' might be absent
        six.assertRegex(self, yaml,
            r"- fields: {dt: !(!timestamp)? '%s'}" % re.escape(dt))
    def test_naive_datetime(self):
        """Naive datetimes round-trip through every serializer format."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        data = serializers.serialize('python', [Event(dt=dt)])
        self.assert_python_contains_datetime(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('json', [Event(dt=dt)])
        self.assert_json_contains_datetime(data, "2011-09-01T13:20:30")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('xml', [Event(dt=dt)])
        self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)
        # YAML is optional: skip when PyYAML isn't installed.
        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30")
            obj = next(serializers.deserialize('yaml', data)).object
            self.assertEqual(obj.dt, dt)
    def test_naive_datetime_with_microsecond(self):
        """Microsecond handling per format: JSON truncates to milliseconds."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
        data = serializers.serialize('python', [Event(dt=dt)])
        self.assert_python_contains_datetime(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('json', [Event(dt=dt)])
        self.assert_json_contains_datetime(data, "2011-09-01T13:20:30.405")
        obj = next(serializers.deserialize('json', data)).object
        # JSON keeps only milliseconds, hence 405000 rather than 405060.
        self.assertEqual(obj.dt, dt.replace(microsecond=405000))
        data = serializers.serialize('xml', [Event(dt=dt)])
        self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30.405060")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)
        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30.405060")
            obj = next(serializers.deserialize('yaml', data)).object
            self.assertEqual(obj.dt, dt)
    def test_aware_datetime_with_microsecond(self):
        """Aware datetimes with microseconds serialize with their UTC offset."""
        dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT)
        data = serializers.serialize('python', [Event(dt=dt)])
        self.assert_python_contains_datetime(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('json', [Event(dt=dt)])
        self.assert_json_contains_datetime(data, "2011-09-01T17:20:30.405+07:00")
        obj = next(serializers.deserialize('json', data)).object
        # JSON keeps only milliseconds.
        self.assertEqual(obj.dt, dt.replace(microsecond=405000))
        data = serializers.serialize('xml', [Event(dt=dt)])
        self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30.405060+07:00")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)
        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30.405060+07:00")
            obj = next(serializers.deserialize('yaml', data)).object
            # PyYAML loads a naive UTC datetime; reattach UTC before comparing.
            self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
    def test_aware_datetime_in_utc(self):
        """Aware UTC datetimes round-trip; JSON uses the 'Z' suffix."""
        dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
        data = serializers.serialize('python', [Event(dt=dt)])
        self.assert_python_contains_datetime(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('json', [Event(dt=dt)])
        self.assert_json_contains_datetime(data, "2011-09-01T10:20:30Z")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('xml', [Event(dt=dt)])
        self.assert_xml_contains_datetime(data, "2011-09-01T10:20:30+00:00")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)
        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.assert_yaml_contains_datetime(data, "2011-09-01 10:20:30+00:00")
            obj = next(serializers.deserialize('yaml', data)).object
            # PyYAML loads a naive UTC datetime; reattach UTC before comparing.
            self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
    def test_aware_datetime_in_local_timezone(self):
        """Aware datetimes in the local (EAT, +03:00) time zone round-trip."""
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        data = serializers.serialize('python', [Event(dt=dt)])
        self.assert_python_contains_datetime(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('json', [Event(dt=dt)])
        self.assert_json_contains_datetime(data, "2011-09-01T13:20:30+03:00")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('xml', [Event(dt=dt)])
        self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30+03:00")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)
        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30+03:00")
            obj = next(serializers.deserialize('yaml', data)).object
            # PyYAML loads a naive UTC datetime; reattach UTC before comparing.
            self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
    def test_aware_datetime_in_other_timezone(self):
        """Aware datetimes in a non-local zone (ICT, +07:00) round-trip."""
        dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
        data = serializers.serialize('python', [Event(dt=dt)])
        self.assert_python_contains_datetime(data, dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('json', [Event(dt=dt)])
        self.assert_json_contains_datetime(data, "2011-09-01T17:20:30+07:00")
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)
        data = serializers.serialize('xml', [Event(dt=dt)])
        self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30+07:00")
        obj = next(serializers.deserialize('xml', data)).object
        self.assertEqual(obj.dt, dt)
        if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
            data = serializers.serialize('yaml', [Event(dt=dt)])
            self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30+07:00")
            obj = next(serializers.deserialize('yaml', data)).object
            # PyYAML loads a naive UTC datetime; reattach UTC before comparing.
            self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True)
class TemplateTests(TestCase):
    @requires_tz_support
    def test_localtime_templatetag_and_filters(self):
        """
        Test the {% localtime %} templatetag and related filters.
        """
        # Sample values: the same instant expressed in three zones, plus a
        # naive datetime of the same local wall-clock time.
        datetimes = {
            'utc': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
            'eat': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
            'ict': datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT),
            'naive': datetime.datetime(2011, 9, 1, 13, 20, 30),
        }
        # Four template variants: no tag, and {% localtime %} with no arg /
        # explicitly on / explicitly off.
        templates = {
            'notag': Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}"),
            'noarg': Template("{% load tz %}{% localtime %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"),
            'on': Template("{% load tz %}{% localtime on %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"),
            'off': Template("{% load tz %}{% localtime off %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"),
        }
        # Transform a list of keys in 'datetimes' to the expected template
        # output. This makes the definition of 'results' more readable.
        def t(*result):
            return '|'.join(datetimes[key].isoformat() for key in result)
        # Results for USE_TZ = True
        results = {
            'utc': {
                'notag': t('eat', 'eat', 'utc', 'ict'),
                'noarg': t('eat', 'eat', 'utc', 'ict'),
                'on': t('eat', 'eat', 'utc', 'ict'),
                'off': t('utc', 'eat', 'utc', 'ict'),
            },
            'eat': {
                'notag': t('eat', 'eat', 'utc', 'ict'),
                'noarg': t('eat', 'eat', 'utc', 'ict'),
                'on': t('eat', 'eat', 'utc', 'ict'),
                'off': t('eat', 'eat', 'utc', 'ict'),
            },
            'ict': {
                'notag': t('eat', 'eat', 'utc', 'ict'),
                'noarg': t('eat', 'eat', 'utc', 'ict'),
                'on': t('eat', 'eat', 'utc', 'ict'),
                'off': t('ict', 'eat', 'utc', 'ict'),
            },
            'naive': {
                'notag': t('naive', 'eat', 'utc', 'ict'),
                'noarg': t('naive', 'eat', 'utc', 'ict'),
                'on': t('naive', 'eat', 'utc', 'ict'),
                'off': t('naive', 'eat', 'utc', 'ict'),
            }
        }
        # Render every datetime through every template and compare.
        for k1, dt in six.iteritems(datetimes):
            for k2, tpl in six.iteritems(templates):
                ctx = Context({'dt': dt, 'ICT': ICT})
                actual = tpl.render(ctx)
                expected = results[k1][k2]
                self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected))
        # Changes for USE_TZ = False
        results['utc']['notag'] = t('utc', 'eat', 'utc', 'ict')
        results['ict']['notag'] = t('ict', 'eat', 'utc', 'ict')
        with self.settings(USE_TZ=False):
            for k1, dt in six.iteritems(datetimes):
                for k2, tpl in six.iteritems(templates):
                    ctx = Context({'dt': dt, 'ICT': ICT})
                    actual = tpl.render(ctx)
                    expected = results[k1][k2]
                    self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected))
    @skipIf(pytz is None, "this test requires pytz")
    def test_localtime_filters_with_pytz(self):
        """
        Test the |localtime, |utc, and |timezone filters with pytz.
        """
        # Use a pytz timezone as local time
        tpl = Template("{% load tz %}{{ dt|localtime }}|{{ dt|utc }}")
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30)})
        with self.settings(TIME_ZONE='Europe/Paris'):
            self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00|2011-09-01T10:20:30+00:00")
        # Use a pytz timezone as argument
        tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30),
                       'tz': pytz.timezone('Europe/Paris')})
        self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
        # Use a pytz timezone name as argument
        tpl = Template("{% load tz %}{{ dt|timezone:'Europe/Paris' }}")
        # NOTE(review): 'tz' is unused here — the template hard-codes the
        # zone name — presumably a copy-paste leftover from the case above.
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30),
                       'tz': pytz.timezone('Europe/Paris')})
        self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
    def test_localtime_templatetag_invalid_argument(self):
        """{% localtime %} rejects arguments other than on/off at parse time."""
        with self.assertRaises(TemplateSyntaxError):
            Template("{% load tz %}{% localtime foo %}{% endlocaltime %}").render()
    def test_localtime_filters_do_not_raise_exceptions(self):
        """
        Test the |localtime, |utc, and |timezone filters on bad inputs.
        """
        tpl = Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:tz }}")
        with self.settings(USE_TZ=True):
            # bad datetime value: filters render as empty strings, not errors.
            ctx = Context({'dt': None, 'tz': ICT})
            self.assertEqual(tpl.render(ctx), "None|||")
            ctx = Context({'dt': 'not a date', 'tz': ICT})
            self.assertEqual(tpl.render(ctx), "not a date|||")
            # bad timezone value
            tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
            ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': None})
            self.assertEqual(tpl.render(ctx), "")
            ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': 'not a tz'})
            self.assertEqual(tpl.render(ctx), "")
    @requires_tz_support
    def test_timezone_templatetag(self):
        """
        Test the {% timezone %} templatetag.
        """
        # Nested tags: tz2=None in the inner block falls back to the default
        # time zone (EAT), not the enclosing tz1.
        tpl = Template(
            "{% load tz %}"
            "{{ dt }}|"
            "{% timezone tz1 %}"
            "{{ dt }}|"
            "{% timezone tz2 %}"
            "{{ dt }}"
            "{% endtimezone %}"
            "{% endtimezone %}"
        )
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
                       'tz1': ICT, 'tz2': None})
        self.assertEqual(tpl.render(ctx), "2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|2011-09-01T13:20:30+03:00")
    @skipIf(pytz is None, "this test requires pytz")
    def test_timezone_templatetag_with_pytz(self):
        """
        Test the {% timezone %} templatetag with pytz.
        """
        tpl = Template("{% load tz %}{% timezone tz %}{{ dt }}{% endtimezone %}")
        # Use a pytz timezone as argument
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
                       'tz': pytz.timezone('Europe/Paris')})
        self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
        # Use a pytz timezone name as argument
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
                       'tz': 'Europe/Paris'})
        self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
    def test_timezone_templatetag_invalid_argument(self):
        """{% timezone %} requires an argument; unknown zone names raise."""
        with self.assertRaises(TemplateSyntaxError):
            Template("{% load tz %}{% timezone %}{% endtimezone %}").render()
        # The exception type depends on whether pytz is available.
        with self.assertRaises(ValueError if pytz is None else pytz.UnknownTimeZoneError):
            Template("{% load tz %}{% timezone tz %}{% endtimezone %}").render(Context({'tz': 'foobar'}))
    @skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names")
    def test_get_current_timezone_templatetag(self):
        """
        Test the {% get_current_timezone %} templatetag.
        """
        tpl = Template("{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}")
        # Without pytz the zone renders by its abbreviation rather than name.
        self.assertEqual(tpl.render(Context()), "Africa/Nairobi" if pytz else "EAT")
        with timezone.override(UTC):
            self.assertEqual(tpl.render(Context()), "UTC")
        tpl = Template("{% load tz %}{% timezone tz %}{% get_current_timezone as time_zone %}{% endtimezone %}{{ time_zone }}")
        self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700")
        with timezone.override(UTC):
            self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700")
    @skipIf(pytz is None, "this test requires pytz")
    def test_get_current_timezone_templatetag_with_pytz(self):
        """
        Test the {% get_current_timezone %} templatetag with pytz.
        """
        tpl = Template("{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}")
        with timezone.override(pytz.timezone('Europe/Paris')):
            self.assertEqual(tpl.render(Context()), "Europe/Paris")
        # The assignment made inside {% timezone %} persists after the block.
        tpl = Template("{% load tz %}{% timezone 'Europe/Paris' %}{% get_current_timezone as time_zone %}{% endtimezone %}{{ time_zone }}")
        self.assertEqual(tpl.render(Context()), "Europe/Paris")
    def test_get_current_timezone_templatetag_invalid_argument(self):
        """{% get_current_timezone %} without 'as var' is a syntax error."""
        with self.assertRaises(TemplateSyntaxError):
            Template("{% load tz %}{% get_current_timezone %}").render()
    @skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names")
    def test_tz_template_context_processor(self):
        """
        Test the django.core.context_processors.tz template context processor.
        """
        tpl = Template("{{ TIME_ZONE }}")
        # Plain Context has no TIME_ZONE; RequestContext gets it from the
        # context processor.
        self.assertEqual(tpl.render(Context()), "")
        self.assertEqual(tpl.render(RequestContext(HttpRequest())), "Africa/Nairobi" if pytz else "EAT")
    @requires_tz_support
    def test_date_and_time_template_filters(self):
        """The date/time filters render in the active time zone."""
        tpl = Template("{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}")
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)})
        self.assertEqual(tpl.render(ctx), "2011-09-01 at 23:20:20")
        # In ICT (+07:00) the same instant falls on the next day.
        with timezone.override(ICT):
            self.assertEqual(tpl.render(ctx), "2011-09-02 at 03:20:20")
    def test_date_and_time_template_filters_honor_localtime(self):
        """Inside {% localtime off %}, date/time filters ignore the active zone."""
        tpl = Template("{% load tz %}{% localtime off %}{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}{% endlocaltime %}")
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)})
        self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20")
        with timezone.override(ICT):
            self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20")
    def test_localtime_with_time_zone_setting_set_to_none(self):
        """Rendering still works when TIME_ZONE is None."""
        # Regression for #17274
        tpl = Template("{% load tz %}{{ dt }}")
        ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)})
        with self.settings(TIME_ZONE=None):
            # the actual value depends on the system time zone of the host
            self.assertTrue(tpl.render(ctx).startswith("2011"))
    @requires_tz_support
    def test_now_template_tag_uses_current_time_zone(self):
        """{% now %} renders the offset of the active time zone."""
        # Regression for #17343
        tpl = Template("{% now \"O\" %}")
        self.assertEqual(tpl.render(Context({})), "+0300")
        with timezone.override(ICT):
            self.assertEqual(tpl.render(Context({})), "+0700")
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=False)
class LegacyFormsTests(TestCase):
    def test_form(self):
        """With USE_TZ off, form input cleans to a naive datetime."""
        form = EventForm({'dt': '2011-09-01 13:20:30'})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30))
    @skipIf(pytz is None, "this test requires pytz")
    def test_form_with_non_existent_time(self):
        """Legacy mode accepts a time skipped by a DST transition."""
        form = EventForm({'dt': '2011-03-27 02:30:00'})
        with timezone.override(pytz.timezone('Europe/Paris')):
            # this is obviously a bug
            self.assertTrue(form.is_valid())
            self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 3, 27, 2, 30, 0))
    @skipIf(pytz is None, "this test requires pytz")
    def test_form_with_ambiguous_time(self):
        """Legacy mode accepts a time repeated by a DST transition."""
        form = EventForm({'dt': '2011-10-30 02:30:00'})
        with timezone.override(pytz.timezone('Europe/Paris')):
            # this is obviously a bug
            self.assertTrue(form.is_valid())
            self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 10, 30, 2, 30, 0))
def test_split_form(self):
form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30))
def test_model_form(self):
EventModelForm({'dt': '2011-09-01 13:20:30'}).save()
e = Event.objects.get()
self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 13, 20, 30))
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True)
class NewFormsTests(TestCase):
    """Form datetime handling with USE_TZ=True: input is interpreted in the
    current time zone and stored as UTC in cleaned_data."""

    @requires_tz_support
    def test_form(self):
        form = EventForm({'dt': '2011-09-01 13:20:30'})
        self.assertTrue(form.is_valid())
        # 13:20 in Africa/Nairobi (UTC+3) == 10:20 UTC.
        self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))

    def test_form_with_other_timezone(self):
        form = EventForm({'dt': '2011-09-01 17:20:30'})
        with timezone.override(ICT):
            self.assertTrue(form.is_valid())
            # 17:20 in ICT (UTC+7) == 10:20 UTC.
            self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))

    def test_form_with_explicit_timezone(self):
        form = EventForm({'dt': '2011-09-01 17:20:30+07:00'})
        # Datetime inputs formats don't allow providing a time zone.
        self.assertFalse(form.is_valid())

    @skipIf(pytz is None, "this test requires pytz")
    def test_form_with_non_existent_time(self):
        with timezone.override(pytz.timezone('Europe/Paris')):
            # A non-existent local time must be rejected with a clear error.
            form = EventForm({'dt': '2011-03-27 02:30:00'})
            self.assertFalse(form.is_valid())
            self.assertEqual(form.errors['dt'],
                ["2011-03-27 02:30:00 couldn't be interpreted in time zone "
                 "Europe/Paris; it may be ambiguous or it may not exist."])

    @skipIf(pytz is None, "this test requires pytz")
    def test_form_with_ambiguous_time(self):
        with timezone.override(pytz.timezone('Europe/Paris')):
            # An ambiguous local time must be rejected with a clear error.
            form = EventForm({'dt': '2011-10-30 02:30:00'})
            self.assertFalse(form.is_valid())
            self.assertEqual(form.errors['dt'],
                ["2011-10-30 02:30:00 couldn't be interpreted in time zone "
                 "Europe/Paris; it may be ambiguous or it may not exist."])

    @requires_tz_support
    def test_split_form(self):
        form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))

    @requires_tz_support
    def test_localized_form(self):
        form = EventLocalizedForm(initial={'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)})
        with timezone.override(ICT):
            # The initial value is re-rendered in the overridden time zone.
            self.assertIn("2011-09-01 17:20:30", str(form))

    @requires_tz_support
    def test_model_form(self):
        EventModelForm({'dt': '2011-09-01 13:20:30'}).save()
        e = Event.objects.get()
        self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))

    @requires_tz_support
    def test_localized_model_form(self):
        form = EventLocalizedModelForm(instance=Event(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)))
        with timezone.override(ICT):
            self.assertIn("2011-09-01 17:20:30", str(form))
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True,
                   PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
                   ROOT_URLCONF='timezones.urls')
class AdminTests(TestCase):
    """The admin renders datetimes in the currently active time zone."""

    # User fixture; setUp() logs in with the 'super' account it provides.
    fixtures = ['tz_users.xml']

    def setUp(self):
        self.client.login(username='super', password='secret')

    @requires_tz_support
    def test_changelist(self):
        e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
        response = self.client.get(reverse('admin:timezones_event_changelist'))
        # Rendered in this test class's default time zone (EAT).
        self.assertContains(response, e.dt.astimezone(EAT).isoformat())

    def test_changelist_in_other_timezone(self):
        e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
        with timezone.override(ICT):
            response = self.client.get(reverse('admin:timezones_event_changelist'))
            self.assertContains(response, e.dt.astimezone(ICT).isoformat())

    @requires_tz_support
    def test_change_editable(self):
        e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
        response = self.client.get(reverse('admin:timezones_event_change', args=(e.pk,)))
        # The editable widget splits the value into date and time fields.
        self.assertContains(response, e.dt.astimezone(EAT).date().isoformat())
        self.assertContains(response, e.dt.astimezone(EAT).time().isoformat())

    def test_change_editable_in_other_timezone(self):
        e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
        with timezone.override(ICT):
            response = self.client.get(reverse('admin:timezones_event_change', args=(e.pk,)))
            self.assertContains(response, e.dt.astimezone(ICT).date().isoformat())
            self.assertContains(response, e.dt.astimezone(ICT).time().isoformat())

    @requires_tz_support
    def test_change_readonly(self):
        Timestamp.objects.create()
        # re-fetch the object for backends that lose microseconds (MySQL)
        t = Timestamp.objects.get()
        response = self.client.get(reverse('admin:timezones_timestamp_change', args=(t.pk,)))
        self.assertContains(response, t.created.astimezone(EAT).isoformat())

    def test_change_readonly_in_other_timezone(self):
        Timestamp.objects.create()
        # re-fetch the object for backends that lose microseconds (MySQL)
        t = Timestamp.objects.get()
        with timezone.override(ICT):
            response = self.client.get(reverse('admin:timezones_timestamp_change', args=(t.pk,)))
            self.assertContains(response, t.created.astimezone(ICT).isoformat())
@override_settings(TIME_ZONE='Africa/Nairobi')
class UtilitiesTests(TestCase):
    """Checks for the low-level make_aware()/make_naive() helpers."""

    def test_make_aware(self):
        # make_aware() attaches the requested tzinfo without shifting the
        # wall-clock value.
        cases = [
            (datetime.datetime(2011, 9, 1, 13, 20, 30), EAT),
            (datetime.datetime(2011, 9, 1, 10, 20, 30), UTC),
        ]
        for naive, tz in cases:
            self.assertEqual(timezone.make_aware(naive, tz),
                             naive.replace(tzinfo=tz))

    def test_make_naive(self):
        # make_naive() converts to the target zone, then drops the tzinfo.
        aware_eat = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        aware_utc = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
        cases = [
            (aware_eat, EAT, datetime.datetime(2011, 9, 1, 13, 20, 30)),
            (aware_eat, UTC, datetime.datetime(2011, 9, 1, 10, 20, 30)),
            (aware_utc, UTC, datetime.datetime(2011, 9, 1, 10, 20, 30)),
        ]
        for value, tz, expected in cases:
            self.assertEqual(timezone.make_naive(value, tz), expected)
|
liavkoren/djangoDev
|
tests/timezones/tests.py
|
Python
|
bsd-3-clause
| 55,082
|
"""
======================================================================
Time-frequency on simulated data (Multitaper vs. Morlet vs. Stockwell)
======================================================================
This example demonstrates the different time-frequency estimation methods
on simulated data. It shows the time-frequency resolution trade-off
and the problem of estimation variance. In addition it highlights
alternative functions for generating TFRs without averaging across
trials, or by operating on numpy arrays.
"""
# Authors: Hari Bharadwaj <hari@nmr.mgh.harvard.edu>
# Denis Engemann <denis.engemann@gmail.com>
# Chris Holdgraf <choldgraf@berkeley.edu>
#
# License: BSD (3-clause)
import numpy as np
from matplotlib import pyplot as plt
from mne import create_info, EpochsArray
from mne.baseline import rescale
from mne.time_frequency import (tfr_multitaper, tfr_stockwell, tfr_morlet,
tfr_array_morlet)
print(__doc__)
###############################################################################
# Simulate data
# -------------
#
# We'll simulate data with a known spectro-temporal structure.
sfreq = 1000.0
ch_names = ['SIM0001', 'SIM0002']
ch_types = ['grad', 'grad']
info = create_info(ch_names=ch_names, sfreq=sfreq, ch_types=ch_types)

n_times = 1024  # Just over 1 second epochs
n_epochs = 40
seed = 42
rng = np.random.RandomState(seed)
noise = rng.randn(n_epochs, len(ch_names), n_times)

# Add a 50 Hz sinusoidal burst to the noise and ramp it.
# FIX: ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
# the builtin ``float`` is the documented replacement.
t = np.arange(n_times, dtype=float) / sfreq
signal = np.sin(np.pi * 2. * 50. * t)  # 50 Hz sinusoid signal
signal[np.logical_or(t < 0.45, t > 0.55)] = 0.  # Hard windowing
on_time = np.logical_and(t >= 0.45, t <= 0.55)
signal[on_time] *= np.hanning(on_time.sum())  # Ramping
data = noise + signal

reject = dict(grad=4000)
events = np.empty((n_epochs, 3), dtype=int)
first_event_sample = 100
event_id = dict(sin50hz=1)
for k in range(n_epochs):
    # events rows: (sample, previous event id, event id)
    events[k, :] = first_event_sample + k * n_times, 0, event_id['sin50hz']

epochs = EpochsArray(data=data, info=info, events=events, event_id=event_id,
                     reject=reject)
###############################################################################
# Calculate a time-frequency representation (TFR)
# -----------------------------------------------
#
# Below we'll demonstrate the output of several TFR functions in MNE:
#
# * :func:`mne.time_frequency.tfr_multitaper`
# * :func:`mne.time_frequency.tfr_stockwell`
# * :func:`mne.time_frequency.tfr_morlet`
#
# Multitaper transform
# ====================
# First we'll use the multitaper method for calculating the TFR.
# This creates several orthogonal tapering windows in the TFR estimation,
# which reduces variance. We'll also show some of the parameters that can be
# tweaked (e.g., ``time_bandwidth``) that will result in different multitaper
# properties, and thus a different TFR. You can trade time resolution or
# frequency resolution or both in order to get a reduction in variance.
freqs = np.arange(5., 100., 3.)  # frequencies of interest: 5-97 Hz, 3 Hz steps
vmin, vmax = -3., 3.  # Define our color limits.

###############################################################################
# **(1) Least smoothing (most variance/background fluctuations).**

n_cycles = freqs / 2.  # window length scales with frequency (half a cycle/Hz)
time_bandwidth = 2.0  # Least possible frequency-smoothing (1 taper)
power = tfr_multitaper(epochs, freqs=freqs, n_cycles=n_cycles,
                       time_bandwidth=time_bandwidth, return_itc=False)
# Plot results. Baseline correct based on first 100 ms.
power.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
           title='Sim: Least smoothing, most variance')

###############################################################################
# **(2) Less frequency smoothing, more time smoothing.**

n_cycles = freqs  # Increase time-window length to 1 second.
time_bandwidth = 4.0  # Same frequency-smoothing as (1) 3 tapers.
power = tfr_multitaper(epochs, freqs=freqs, n_cycles=n_cycles,
                       time_bandwidth=time_bandwidth, return_itc=False)
# Plot results. Baseline correct based on first 100 ms.
power.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
           title='Sim: Less frequency smoothing, more time smoothing')

###############################################################################
# **(3) Less time smoothing, more frequency smoothing.**

n_cycles = freqs / 2.
time_bandwidth = 8.0  # Same time-smoothing as (1), 7 tapers.
power = tfr_multitaper(epochs, freqs=freqs, n_cycles=n_cycles,
                       time_bandwidth=time_bandwidth, return_itc=False)
# Plot results. Baseline correct based on first 100 ms.
power.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
           title='Sim: Less time smoothing, more frequency smoothing')
##############################################################################
# Stockwell (S) transform
# =======================
#
# Stockwell uses a Gaussian window to balance temporal and spectral resolution.
# Importantly, frequency bands are phase-normalized, hence strictly comparable
# with regard to timing, and, the input signal can be recoverd from the
# transform in a lossless way if we disregard numerical errors. In this case,
# we control the spectral / temporal resolution by specifying different widths
# of the gaussian window using the ``width`` parameter.
fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True)
fmin, fmax = freqs[[0, -1]]  # analyze the same frequency range as above
# Sweep the Gaussian window width: smaller widths favor temporal
# resolution, larger widths favor spectral resolution.
for width, ax in zip((0.2, .7, 3.0), axs):
    power = tfr_stockwell(epochs, fmin=fmin, fmax=fmax, width=width)
    power.plot([0], baseline=(0., 0.1), mode='mean', axes=ax, show=False,
               colorbar=False)
    ax.set_title('Sim: Using S transform, width = {:0.1f}'.format(width))
plt.tight_layout()
###############################################################################
# Morlet Wavelets
# ===============
#
# Finally, show the TFR using morlet wavelets, which are a sinusoidal wave
# with a gaussian envelope. We can control the balance between spectral and
# temporal resolution with the ``n_cycles`` parameter, which defines the
# number of cycles to include in the window.
fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True)
# n_cycles sets the wavelet window length; the last entry scales the
# window with frequency instead of using a fixed cycle count.
all_n_cycles = [1, 3, freqs / 2.]
for n_cycles, ax in zip(all_n_cycles, axs):
    power = tfr_morlet(epochs, freqs=freqs,
                       n_cycles=n_cycles, return_itc=False)
    power.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
               axes=ax, show=False, colorbar=False)
    # Label the frequency-scaled case distinctly in the subplot title.
    n_cycles = 'scaled by freqs' if not isinstance(n_cycles, int) else n_cycles
    ax.set_title('Sim: Using Morlet wavelet, n_cycles = %s' % n_cycles)
plt.tight_layout()

###############################################################################
# Calculating a TFR without averaging over epochs
# -----------------------------------------------
#
# It is also possible to calculate a TFR without averaging across trials.
# We can do this by using ``average=False``. In this case, an instance of
# :class:`mne.time_frequency.EpochsTFR` is returned.

n_cycles = freqs / 2.
power = tfr_morlet(epochs, freqs=freqs,
                   n_cycles=n_cycles, return_itc=False, average=False)
print(type(power))
# Averaging afterwards yields the usual AverageTFR for plotting.
avgpower = power.average()
avgpower.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
              title='Using Morlet wavelets and EpochsTFR', show=False)

###############################################################################
# Operating on arrays
# -------------------
#
# MNE also has versions of the functions above which operate on numpy arrays
# instead of MNE objects. They expect inputs of the shape
# ``(n_epochs, n_channels, n_times)``. They will also return a numpy array
# of shape ``(n_epochs, n_channels, n_freqs, n_times)``.

# NOTE(review): with output='avg_power' the epochs axis is averaged out,
# which is why power[0] below indexes a channel -- confirm against the
# tfr_array_morlet docs.
power = tfr_array_morlet(epochs.get_data(), sfreq=epochs.info['sfreq'],
                         freqs=freqs, n_cycles=n_cycles,
                         output='avg_power')
# Baseline the output
rescale(power, epochs.times, (0., 0.1), mode='mean', copy=False)
fig, ax = plt.subplots()
mesh = ax.pcolormesh(epochs.times * 1000, freqs, power[0],
                     cmap='RdBu_r', vmin=vmin, vmax=vmax)
ax.set_title('TFR calculated on a numpy array')
ax.set(ylim=freqs[[0, -1]], xlabel='Time (ms)')
fig.colorbar(mesh)
plt.tight_layout()
plt.show()
|
teonlamont/mne-python
|
examples/time_frequency/plot_time_frequency_simulated.py
|
Python
|
bsd-3-clause
| 8,402
|
"""
===========================================================
A demo of K-Means clustering on the handwritten digits data
===========================================================
In this example we compare the various initialization strategies for K-means in
terms of runtime and quality of the results.
As the ground truth is known here, we also apply different cluster quality
metrics to judge the goodness of fit of the cluster labels to the ground truth.
Cluster quality metrics evaluated (see :ref:`clustering_evaluation` for
definitions and discussions of the metrics):
=========== ========================================================
Shorthand full name
=========== ========================================================
homo homogeneity score
compl completeness score
v-meas V measure
ARI adjusted Rand index
AMI adjusted mutual information
silhouette silhouette coefficient
=========== ========================================================
"""
# %%
# Load the dataset
# ----------------
#
# We will start by loading the `digits` dataset. This dataset contains
# handwritten digits from 0 to 9. In the context of clustering, one would like
# to group images such that the handwritten digits on the image are the same.
import numpy as np
from sklearn.datasets import load_digits
# Load the digits data as an (n_samples, n_features) matrix plus labels.
data, labels = load_digits(return_X_y=True)
(n_samples, n_features), n_digits = data.shape, np.unique(labels).size
print(f"# digits: {n_digits}; # samples: {n_samples}; # features {n_features}")
# %%
# Define our evaluation benchmark
# -------------------------------
#
# We will first our evaluation benchmark. During this benchmark, we intend to
# compare different initialization methods for KMeans. Our benchmark will:
#
# * create a pipeline which will scale the data using a
# :class:`~sklearn.preprocessing.StandardScaler`;
# * train and time the pipeline fitting;
# * measure the performance of the clustering obtained via different metrics.
from time import time
from sklearn import metrics
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
def bench_k_means(kmeans, name, data, labels):
    """Benchmark one KMeans initialization strategy and print a result row.

    Parameters
    ----------
    kmeans : KMeans instance
        A :class:`~sklearn.cluster.KMeans` instance with the initialization
        already set.
    name : str
        Name given to the strategy, shown in the results table.
    data : ndarray of shape (n_samples, n_features)
        The data to cluster.
    labels : ndarray of shape (n_samples,)
        Ground-truth labels used by the supervised clustering metrics.
    """
    start = time()
    estimator = make_pipeline(StandardScaler(), kmeans).fit(data)
    elapsed = time() - start

    fitted = estimator[-1]
    row = [name, elapsed, fitted.inertia_]

    # Metrics that need only the true labels and the predicted labels.
    for metric in (
        metrics.homogeneity_score,
        metrics.completeness_score,
        metrics.v_measure_score,
        metrics.adjusted_rand_score,
        metrics.adjusted_mutual_info_score,
    ):
        row.append(metric(labels, fitted.labels_))

    # The silhouette score additionally needs the data itself.
    row.append(
        metrics.silhouette_score(
            data,
            fitted.labels_,
            metric="euclidean",
            sample_size=300,
        )
    )

    # Show the results
    formatter_result = (
        "{:9s}\t{:.3f}s\t{:.0f}\t{:.3f}\t{:.3f}\t{:.3f}\t{:.3f}\t{:.3f}\t{:.3f}"
    )
    print(formatter_result.format(*row))
# %%
# Run the benchmark
# -----------------
#
# We will compare three approaches:
#
# * an initialization using `kmeans++`. This method is stochastic and we will
# run the initialization 4 times;
# * a random initialization. This method is stochastic as well and we will run
# the initialization 4 times;
# * an initialization based on a :class:`~sklearn.decomposition.PCA`
# projection. Indeed, we will use the components of the
# :class:`~sklearn.decomposition.PCA` to initialize KMeans. This method is
# deterministic and a single initialization suffice.
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA

print(82 * "_")
print("init\t\ttime\tinertia\thomo\tcompl\tv-meas\tARI\tAMI\tsilhouette")

# Stochastic k-means++ initialization: best of 4 seeded runs.
kmeans = KMeans(init="k-means++", n_clusters=n_digits, n_init=4, random_state=0)
bench_k_means(kmeans=kmeans, name="k-means++", data=data, labels=labels)

# Stochastic random initialization: best of 4 seeded runs.
kmeans = KMeans(init="random", n_clusters=n_digits, n_init=4, random_state=0)
bench_k_means(kmeans=kmeans, name="random", data=data, labels=labels)

# Deterministic PCA-based initialization: a single run suffices.
pca = PCA(n_components=n_digits).fit(data)
kmeans = KMeans(init=pca.components_, n_clusters=n_digits, n_init=1)
bench_k_means(kmeans=kmeans, name="PCA-based", data=data, labels=labels)

print(82 * "_")
# %%
# Visualize the results on PCA-reduced data
# -----------------------------------------
#
# :class:`~sklearn.decomposition.PCA` allows to project the data from the
# original 64-dimensional space into a lower dimensional space. Subsequently,
# we can use :class:`~sklearn.decomposition.PCA` to project into a
# 2-dimensional space and plot the data and the clusters in this new space.
import matplotlib.pyplot as plt

# Project to 2D and re-fit KMeans in the reduced space for plotting.
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init="k-means++", n_clusters=n_digits, n_init=4)
kmeans.fit(reduced_data)

# Step size of the mesh. Decrease to increase the quality of the VQ.
h = 0.02  # point in the mesh [x_min, x_max]x[y_min, y_max].

# Plot the decision boundary. For that, we will assign a color to each
# point of a mesh covering the reduced data plus a one-unit margin.
x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1
y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])

# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure(1)
plt.clf()
plt.imshow(
    Z,
    interpolation="nearest",
    extent=(xx.min(), xx.max(), yy.min(), yy.max()),
    cmap=plt.cm.Paired,
    aspect="auto",
    origin="lower",
)
plt.plot(reduced_data[:, 0], reduced_data[:, 1], "k.", markersize=2)

# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
plt.scatter(
    centroids[:, 0],
    centroids[:, 1],
    marker="x",
    s=169,
    linewidths=3,
    color="w",
    zorder=10,
)
plt.title(
    "K-means clustering on the digits dataset (PCA-reduced data)\n"
    "Centroids are marked with white cross"
)
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
|
manhhomienbienthuy/scikit-learn
|
examples/cluster/plot_kmeans_digits.py
|
Python
|
bsd-3-clause
| 6,808
|
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
'''
Tests related to dicts.
'''
import unittest
import common
class KeysTestCase(common.TestCase):
    '''
    Tests related to dicts: runs the checker over the ``test_dict`` input.

    (The previous docstring -- about same-named modules shadowing each
    other -- was copy-pasted from an unrelated test.)
    '''

    def test_dict(self):
        # Delegate to the shared harness's check() for the 'test_dict' case.
        self.check('test_dict')
if __name__ == '__main__':
unittest.main()
|
mitar/pychecker
|
test/test_dict.py
|
Python
|
bsd-3-clause
| 343
|
from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.distributed.DistributedObjectAI import DistributedObjectAI
from direct.task import Task
from otp.distributed import OtpDoGlobals
import random
class NonRepeatableRandomSourceAI(DistributedObjectAI):
    """AI-side client of the NonRepeatableRandomSource service (Python 2).

    Periodically feeds 32-bit random samples to the remote source object
    and lets callers request batches of samples via a callback.
    """
    notify = directNotify.newCategory('NonRepeatableRandomSourceAI')

    def __init__(self, air):
        DistributedObjectAI.__init__(self, air)

    def announceGenerate(self):
        DistributedObjectAI.announceGenerate(self)
        # Wrapping 32-bit context ids used to match replies to requests.
        # NOTE(review): SerialMaskedGen is not imported in this file;
        # presumably injected into builtins by the engine -- verify.
        self._contextGen = SerialMaskedGen((1L << 32) - 1)
        self._requests = {}
        # Push a fresh random sample every 3 minutes, plus one immediately.
        self._sampleTask = self.doMethodLater(3 * 60, self._sampleRandomTask, self.uniqueName('sampleRandom'))
        self._sampleRandom()

    def delete(self):
        self.removeTask(self._sampleTask)
        self._sampleTask = None
        DistributedObjectAI.delete(self)
        return

    def _sampleRandomTask(self, task = None):
        self._sampleRandom()
        # Task.again reschedules this task with the same delay.
        return Task.again

    def _sampleRandom(self):
        # Send one random 32-bit sample to the remote random source object.
        self.air.sendUpdateToDoId('NonRepeatableRandomSource', 'randomSample', OtpDoGlobals.OTP_DO_ID_TOONTOWN_NON_REPEATABLE_RANDOM_SOURCE, [self.doId, int(random.randrange(1L << 32))])

    def randomSampleAck(self):
        # The remote side acknowledged our sample; provide another one.
        self._sampleRandom()

    def getRandomSamples(self, callback, num = None):
        # Request *num* samples (default 1); *callback* receives the list
        # when getRandomSamplesReply() arrives with the matching context.
        if num is None:
            num = 1
        context = self._contextGen.next()
        self._requests[context] = (callback,)
        self.air.sendUpdateToDoId('NonRepeatableRandomSource', 'getRandomSamples', OtpDoGlobals.OTP_DO_ID_TOONTOWN_NON_REPEATABLE_RANDOM_SOURCE, [self.doId,
            'NonRepeatableRandomSource',
            context,
            num])
        return

    def getRandomSamplesReply(self, context, samples):
        # Pop the pending request for this context and deliver the samples.
        callback, = self._requests.pop(context)
        callback(samples)
|
ksmit799/Toontown-Source
|
toontown/distributed/NonRepeatableRandomSourceAI.py
|
Python
|
mit
| 1,827
|
def encode(message, rails):
    """Encode *message* with the rail fence (zig-zag) cipher.

    Characters are written in a zig-zag across ``rails`` rows and read off
    row by row. With fewer than two rails the message is returned unchanged.

    :param message: text to encode.
    :param rails: number of rails (rows) in the fence.
    :returns: the ciphertext string.
    """
    if rails < 2:
        return message
    period = 2 * rails - 2  # length of one full zig-zag (down then up)
    rows = [[] for _ in range(rails)]
    for index, char in enumerate(message):
        offset = index % period
        # Descending part of the zig-zag, then ascending back up.
        rows[offset if offset < rails else period - offset].append(char)
    return "".join("".join(row) for row in rows)
def decode(encoded_message, rails):
    """Decode a rail fence (zig-zag) ciphertext produced with *rails* rails.

    Rebuilds the zig-zag rail pattern, refills the rails row by row from
    the ciphertext, then reads the plaintext back in zig-zag order. With
    fewer than two rails the message is returned unchanged.

    :param encoded_message: ciphertext to decode.
    :param rails: number of rails (rows) used when encoding.
    :returns: the plaintext string.
    """
    if rails < 2:
        return encoded_message
    period = 2 * rails - 2  # length of one full zig-zag (down then up)
    # Rail index occupied by each plaintext position.
    rail_of = []
    for index in range(len(encoded_message)):
        offset = index % period
        rail_of.append(offset if offset < rails else period - offset)
    plain = [""] * len(encoded_message)
    cipher_chars = iter(encoded_message)
    # The ciphertext lists all of rail 0 first, then rail 1, and so on.
    for rail in range(rails):
        for position, r in enumerate(rail_of):
            if r == rail:
                plain[position] = next(cipher_chars)
    return "".join(plain)
|
smalley/python
|
exercises/rail-fence-cipher/rail_fence_cipher.py
|
Python
|
mit
| 84
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from hypothesis import given
import hypothesis.strategies as st
from caffe2.python import core
from caffe2.python import workspace
import caffe2.python.hypothesis_test_util as hu
class TestWeightedSample(hu.HypothesisTestCase):
    """Hypothesis tests for the caffe2 ``WeightedSample`` operator."""

    @given(
        batch=st.integers(min_value=0, max_value=128),
        weights_len=st.integers(min_value=0, max_value=128),
        **hu.gcs
    )
    def test_weighted_sample(self, batch, weights_len, gc, dc):
        # Build one-hot weight rows: each row carries all of its weight on a
        # single random index, so the op's sampled index/value per row is
        # fully determined and can be compared exactly.
        weights = np.zeros((batch, weights_len))
        values = np.zeros((batch, weights_len))
        rand_indices = []
        rand_values = []
        if batch > 0 and weights_len > 0:
            for i in range(batch):
                rand_tmp = np.random.randint(0, weights_len)
                rand_val = np.random.rand()
                rand_indices.append(rand_tmp)
                rand_values.append(rand_val)
                weights[i, rand_tmp] = 1.0
                values[i, rand_tmp] = rand_val
        rand_indices = np.array(rand_indices, dtype=np.float32)
        rand_values = np.array(rand_values, dtype=np.float32)
        workspace.FeedBlob("weights", weights.astype(np.float32))
        workspace.FeedBlob("values", values.astype(np.float32))

        # Case 1: output both sampled indices and sampled values.
        op = core.CreateOperator(
            "WeightedSample", ["weights", "values"],
            ["sample_indices", "sample_values"]
        )
        workspace.RunOperatorOnce(op)
        result_indices = workspace.FetchBlob("sample_indices")
        result_values = workspace.FetchBlob("sample_values")
        if batch > 0 and weights_len > 0:
            for i in range(batch):
                np.testing.assert_allclose(rand_indices[i], result_indices[i])
                np.testing.assert_allclose(rand_values[i], result_values[i])
        else:
            # Degenerate shapes: outputs must be empty too.
            np.testing.assert_allclose(rand_indices, result_indices)
            np.testing.assert_allclose(rand_values, result_values)
        self.assertDeviceChecks(
            dc,
            op,
            [weights.astype(np.float32), values.astype(np.float32)],
            [0, 1]
        )

        # Case 2: output indices only.
        op2 = core.CreateOperator(
            "WeightedSample", ["weights"], ["sample_indices_2"]
        )
        workspace.RunOperatorOnce(op2)
        result = workspace.FetchBlob("sample_indices_2")
        if batch > 0 and weights_len > 0:
            for i in range(batch):
                np.testing.assert_allclose(rand_indices[i], result[i])
        else:
            np.testing.assert_allclose(rand_indices, result)
        self.assertDeviceChecks(dc, op2, [weights.astype(np.float32)], [0])
if __name__ == "__main__":
import unittest
unittest.main()
|
ryfeus/lambda-packs
|
pytorch/source/caffe2/python/operator_test/weighted_sample_test.py
|
Python
|
mit
| 2,884
|
#!/usr/bin/env python
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import signal
import subprocess
import sys

# Paths to the prebuilt gcmole toolchain that lives next to this script.
GCMOLE_PATH = os.path.dirname(os.path.abspath(__file__))
CLANG_BIN = os.path.join(GCMOLE_PATH, 'gcmole-tools', 'bin')
CLANG_PLUGINS = os.path.join(GCMOLE_PATH, 'gcmole-tools')
LUA = os.path.join(GCMOLE_PATH, 'gcmole-tools', 'lua52')
DRIVER = os.path.join(GCMOLE_PATH, 'gcmole.lua')
# Repository root: two directory levels up from this script.
BASE_PATH = os.path.dirname(os.path.dirname(GCMOLE_PATH))

# Exactly one positional argument is forwarded to the Lua driver.
assert len(sys.argv) == 2

# Run the driver from the repo root. NOTE(review): env= REPLACES the whole
# environment (it is not merged with os.environ), so the child sees only
# these two variables; this works because LUA is an absolute path, but
# verify nothing in the driver needs PATH/HOME.
proc = subprocess.Popen(
    [LUA, DRIVER, sys.argv[1]],
    env={'CLANG_BIN': CLANG_BIN, 'CLANG_PLUGINS': CLANG_PLUGINS},
    cwd=BASE_PATH,
)

def handle_sigterm(*args):
    # Forward SIGTERM to the child; ignore the race where it already exited.
    try:
        proc.kill()
    except OSError:
        pass

signal.signal(signal.SIGTERM, handle_sigterm)

# Wait for the child and propagate its exit status.
proc.communicate()
sys.exit(proc.returncode)
|
MTASZTAKI/ApertusVR
|
plugins/languageAPI/jsAPI/3rdParty/nodejs/10.1.0/source/deps/v8/tools/gcmole/run-gcmole.py
|
Python
|
mit
| 924
|
import os.path
import warnings
__version__ = (0, 3, 7)
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read()
finally:
fh.close()
def get_revision():
    """
    :returns: Revision number of this branch/checkout, if available. None if
        no revision number can be determined.
    """
    # The checkout root is the parent of this package's directory.
    git_dir = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..')
    )
    git_dir = os.path.join(git_dir, '.git')
    if not os.path.exists(git_dir):
        return None
    return _get_git_revision(git_dir)
# Revision of the current checkout, resolved once at import time
# (None when not running from a git checkout).
__build__ = get_revision()
def lazy_object(location):
    # Return a callable that resolves the dotted-path *location* on first
    # use, emits a DeprecationWarning pointing callers at the new location,
    # and then calls the resolved object (or returns it if not callable).
    def inner(*args, **kwargs):
        parts = location.rsplit('.', 1)
        warnings.warn('`djangoratings.%s` is deprecated. Please use `%s` instead.' % (parts[1], location), DeprecationWarning)
        try:
            # level=-1 (implicit relative import) is Python 2 only.
            imp = __import__(parts[0], globals(), locals(), [parts[1]], -1)
        except:
            # NOTE(review): bare except acts as the Python 3 fallback
            # (level=-1 raises ValueError there), but it also swallows real
            # import errors -- consider narrowing to (ValueError, TypeError).
            imp = __import__(parts[0], globals(), locals(), [parts[1]])
        func = getattr(imp, parts[1])
        if callable(func):
            return func(*args, **kwargs)
        return func
    return inner
# Deprecated top-level aliases: resolve lazily to djangoratings.fields.*
# and emit a DeprecationWarning when used.
RatingField = lazy_object('djangoratings.fields.RatingField')
AnonymousRatingField = lazy_object('djangoratings.fields.AnonymousRatingField')
Rating = lazy_object('djangoratings.fields.Rating')
|
hzlf/openbroadcast
|
website/djangoratings/__init__.py
|
Python
|
gpl-3.0
| 1,486
|
#
# (c) 2017, Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
import glob
from ansible.module_utils._text import to_text
from ansible.plugins.action.bigip import ActionModule as _ActionModule
from ansible.module_utils.six.moves.urllib.parse import urlsplit
try:
from library.module_utils.network.f5.common import f5_provider_spec
except:
from ansible.module_utils.network.f5.common import f5_provider_spec
from ansible.utils.display import Display
display = Display()
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(_ActionModule):
    """Action plugin for bigip_imish_config: renders the ``src`` template
    before running the module and handles the ``backup`` result option."""

    def run(self, tmp=None, task_vars=None):
        """Template ``src`` (if given), run the module, write the optional
        backup, and strip private ``__...__`` keys from the result."""
        if self._task.args.get('src'):
            try:
                self._handle_template()
            except ValueError as exc:
                return dict(failed=True, msg=to_text(exc))
        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect
        if self._task.args.get('backup') and result.get('__backup__'):
            # User requested backup and no error occurred in module.
            # NOTE: If there is a parameter error, _backup key may not be in results.
            filepath = self._write_backup(task_vars['inventory_hostname'],
                                          result['__backup__'])
            result['backup_path'] = filepath
        # strip out any keys that have two leading and two trailing
        # underscore characters
        for key in list(result.keys()):
            if PRIVATE_KEYS_RE.match(key):
                del result[key]
        return result

    def _get_working_path(self):
        # Prefer the role's own path when running inside a role.
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _write_backup(self, host, contents):
        """Write *contents* to ``<working dir>/backup/<host>_config.<ts>``,
        removing any previous backups for the same host; return the path."""
        backup_path = self._get_working_path() + '/backup'
        if not os.path.exists(backup_path):
            os.mkdir(backup_path)
        for fn in glob.glob('%s/%s*' % (backup_path, host)):
            os.remove(fn)
        tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
        filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
        # Context manager guarantees the file is closed even if write() fails
        # (the original left the handle open on error).
        with open(filename, 'w') as fh:
            fh.write(contents)
        return filename

    def _handle_template(self):
        """Resolve ``src`` to a file, render it with the templar and store
        the result back into the task args.

        :raises ValueError: when the source cannot be found or read
            (run() catches ValueError and fails the task).
        """
        src = self._task.args.get('src')
        working_path = self._get_working_path()

        # BUGFIX: urlsplit() was called on the literal string 'src' instead
        # of the variable, so URL-style sources were never recognized.
        if os.path.isabs(src) or urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)

        if not os.path.exists(source):
            raise ValueError('path specified in src not found')

        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            # BUGFIX: previously returned a dict here, which the caller
            # discarded -- the failure was silently ignored. Raise instead
            # so run() reports it.
            raise ValueError('unable to load src file')

        # Create a template search path in the following order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
            # BUGFIX: hasattr() was checking the misspelled name "_block:"
            # (stray colon), so dependent role paths were never added.
            if hasattr(self._task, "_block"):
                dep_chain = self._task._block.get_dep_chain()
                if dep_chain is not None:
                    for role in dep_chain:
                        searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))
        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
|
veger/ansible
|
lib/ansible/plugins/action/bigip_imish_config.py
|
Python
|
gpl-3.0
| 4,433
|
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def camelCase(s):
    """Return *s* with its first character lower-cased.

    FIX: returns the empty string unchanged instead of raising
    IndexError on s[0].
    """
    if not s:
        return s
    return s[0].lower() + s[1:]
class Fault(Exception):
    """Base class for API faults.

    Carries a human-readable message, optional details, and a fault
    name which defaults to the camel-cased class name.
    """

    def __init__(self, message='', details='', name=''):
        super(Fault, self).__init__(message, details, name)
        self.message = message
        self.details = details
        # Fall back to the camel-cased class name when no explicit
        # fault name was supplied.
        self.name = name if name else camelCase(self.__class__.__name__)
# Concrete API faults.  Each subclass only pins the HTTP status code
# returned to the client; message/details/name behaviour comes from Fault.

class BadRequest(Fault):
    code = 400

class Unauthorized(Fault):
    code = 401

class ResizeNotAllowed(Fault):
    code = 403

class Forbidden(Fault):
    # Same status code as ResizeNotAllowed; the two are distinguished
    # by their fault name, not the code.
    code = 403

class ItemNotFound(Fault):
    code = 404

class BuildInProgress(Fault):
    code = 409

class OverLimit(Fault):
    code = 413

class BadMediaType(Fault):
    code = 415

class NetworkInUse(Fault):
    code = 421

class ServiceUnavailable(Fault):
    code = 503
|
vinilios/synnefo
|
snf-cyclades-app/synnefo/api/faults.py
|
Python
|
gpl-3.0
| 1,435
|
#!/usr/bin/env python
"""
MAVProxy sailing dashboard
"""
from MAVProxy.modules.lib import multiproc
import time
class SailingDashboard(object):
    '''
    A sailing dashboard for MAVProxy.

    The wx GUI runs in a child process; the parent communicates with it
    over a one-way pipe, and a shared event signals when the GUI window
    has been closed.
    '''

    def __init__(self, title="MAVProxy: Sailing Dashboard"):
        self.title = title

        # create a pipe for communication from the module to the GUI
        self.child_pipe_recv, self.parent_pipe_send = multiproc.Pipe(duplex=False)

        # event set by the child when the GUI exits (and by close())
        self.close_event = multiproc.Event()
        self.close_event.clear()

        # create and start the child process
        self.child = multiproc.Process(target=self.child_task)
        self.child.start()

        # prevent the parent from using the child connection
        self.child_pipe_recv.close()

    def child_task(self):
        '''The child process hosts the GUI elements'''

        # prevent the child from using the parent connection
        self.parent_pipe_send.close()

        # GUI imports are done here so they only ever happen inside the
        # child process, never in the parent.
        from MAVProxy.modules.lib import wx_processguard
        from MAVProxy.modules.lib.wx_loader import wx
        from MAVProxy.modules.lib.wxsaildash_ui import SailingDashboardFrame

        # create the wx application and pass self as the state
        app = wx.App()
        app.frame = SailingDashboardFrame(state=self, title=self.title, size=(800, 300))
        app.frame.SetDoubleBuffered(True)
        app.frame.Show()
        # blocks until the GUI main window is closed
        app.MainLoop()

        # trigger a close event when the main app window is closed.
        # the event is monitored by the MAVProxy module which will
        # flag the module for unloading
        self.close_event.set()

    def close(self):
        '''Close the GUI'''

        # trigger a close event which is monitored by the
        # child gui process - it will close allowing the
        # process to be joined
        self.close_event.set()
        if self.is_alive():
            self.child.join(timeout=2.0)

    def is_alive(self):
        '''Check if the GUI process is alive'''
        return self.child.is_alive()
if __name__ == "__main__":
    '''A stand alone test for the sailing dashboard'''
    multiproc.freeze_support()
    dashboard = SailingDashboard()
    # Poll the GUI process until its window is closed.
    while dashboard.is_alive():
        print('sailing dashboard is alive')
        time.sleep(0.5)
|
tridge/MAVProxy
|
MAVProxy/modules/lib/wxsaildash.py
|
Python
|
gpl-3.0
| 2,264
|
########################################################################
#
# (C) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import tarfile
import tempfile
import yaml
from distutils.version import LooseVersion
from shutil import rmtree
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.urls import open_url
from ansible.playbook.role.requirement import RoleRequirement
from ansible.galaxy.api import GalaxyAPI
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyRole(object):
    """A role known to Ansible Galaxy.

    Resolves where a role lives on disk, fetches role archives from
    Galaxy/GitHub/URLs, and installs or removes them under the
    configured roles paths.
    """

    SUPPORTED_SCMS = set(['git', 'hg'])
    META_MAIN = os.path.join('meta', 'main.yml')
    META_INSTALL = os.path.join('meta', '.galaxy_install_info')
    ROLE_DIRS = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')

    def __init__(self, galaxy, name, src=None, version=None, scm=None, path=None):
        """
        :param galaxy: Galaxy context object (options, roles_paths, ...)
        :param name: role name
        :param src: role source (galaxy name, URL or local tarball);
            defaults to *name*
        :param version: requested version (tag/branch); resolved during
            install() when None
        :param scm: 'git' or 'hg' when installing from an SCM URL
        :param path: explicit install path; otherwise the first existing
            (or first configured) roles path is used
        """
        self._metadata = None
        self._install_info = None
        self._validate_certs = not C.GALAXY_IGNORE_CERTS

        # set validate_certs
        if galaxy.options.ignore_certs:
            self._validate_certs = False
        display.vvv('Validate TLS certificates: %s' % self._validate_certs)

        self.options = galaxy.options
        self.galaxy = galaxy
        self.name = name
        self.version = version
        self.src = src or name
        self.scm = scm

        if path is not None:
            if self.name not in path:
                path = os.path.join(path, self.name)
            self.path = path
        else:
            for role_path_dir in galaxy.roles_paths:
                role_path = os.path.join(role_path_dir, self.name)
                if os.path.exists(role_path):
                    self.path = role_path
                    break
            else:
                # use the first path by default
                self.path = os.path.join(galaxy.roles_paths[0], self.name)

    def __eq__(self, other):
        # Roles are identified by name only.
        return self.name == other.name

    @property
    def metadata(self):
        """
        Returns role metadata (parsed meta/main.yml); None when the file
        does not exist, False when it cannot be read or parsed.
        """
        if self._metadata is None:
            meta_path = os.path.join(self.path, self.META_MAIN)
            if os.path.isfile(meta_path):
                try:
                    # FIX: use a context manager — the original closed
                    # the file in a finally block, which raised NameError
                    # when open() itself failed.
                    with open(meta_path, 'r') as f:
                        self._metadata = yaml.safe_load(f)
                except Exception:
                    # FIX: narrowed from a bare except, which also
                    # swallowed SystemExit/KeyboardInterrupt.
                    display.vvvvv("Unable to load metadata for %s" % self.name)
                    return False
        return self._metadata

    @property
    def install_info(self):
        """
        Returns role install info (parsed .galaxy_install_info); None
        when the file does not exist, False when it cannot be read or
        parsed.
        """
        if self._install_info is None:
            info_path = os.path.join(self.path, self.META_INSTALL)
            if os.path.isfile(info_path):
                try:
                    # FIX: context manager instead of finally/close (see
                    # metadata property).
                    with open(info_path, 'r') as f:
                        self._install_info = yaml.safe_load(f)
                except Exception:
                    display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
                    return False
        return self._install_info

    def _write_galaxy_install_info(self):
        """
        Writes a YAML-formatted file to the role's meta/ directory
        (named .galaxy_install_info) which contains some information
        we can use later for commands like 'list' and 'info'.
        """
        info = dict(
            version=self.version,
            install_date=datetime.datetime.utcnow().strftime("%c"),
        )
        info_path = os.path.join(self.path, self.META_INSTALL)
        with open(info_path, 'w+') as f:
            try:
                self._install_info = yaml.safe_dump(info, f)
            except Exception:
                # FIX: narrowed from a bare except.
                return False
        return True

    def remove(self):
        """
        Removes the specified role from the roles path.

        There is a sanity check to make sure there's a meta/main.yml
        file at this path so the user doesn't blow away random
        directories.
        """
        if self.metadata:
            try:
                rmtree(self.path)
                return True
            except Exception:
                # FIX: narrowed from a bare except; removal failure is
                # deliberately best-effort and reported via the return.
                pass
        return False

    def fetch(self, role_data):
        """
        Downloads the archived role from github to a temp location.

        Returns the temp file name, or False on failure.
        """
        if role_data:
            # first grab the file and save it to a temp location
            if "github_user" in role_data and "github_repo" in role_data:
                archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
            else:
                archive_url = self.src
            display.display("- downloading role from %s" % archive_url)

            try:
                url_file = open_url(archive_url, validate_certs=self._validate_certs)
                # delete=False: the caller consumes the file by name and
                # unlinks it after installation.
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                data = url_file.read()
                while data:
                    temp_file.write(data)
                    data = url_file.read()
                temp_file.close()
                return temp_file.name
            except Exception as e:
                display.error("failed to download the file: %s" % str(e))

        return False

    def install(self):
        """Fetch (if needed) and extract the role archive into self.path.

        Returns True on success, False when no archive could be
        obtained; raises AnsibleError on invalid input or archives.
        """
        # the file is a tar, so open it that way and extract it
        # to the specified (or default) roles directory
        if self.scm:
            # create tar file from scm url
            tmp_file = RoleRequirement.scm_archive_role(**self.spec)
        elif self.src:
            if os.path.isfile(self.src):
                # installing a local tar.gz
                tmp_file = self.src
            elif '://' in self.src:
                role_data = self.src
                tmp_file = self.fetch(role_data)
            else:
                api = GalaxyAPI(self.galaxy)
                role_data = api.lookup_role_by_name(self.src)
                if not role_data:
                    raise AnsibleError("- sorry, %s was not found on %s." % (self.src, api.api_server))

                role_versions = api.fetch_role_related('versions', role_data['id'])
                if not self.version:
                    # convert the version names to LooseVersion objects
                    # and sort them to get the latest version. If there
                    # are no versions in the list, we'll grab the head
                    # of the master branch
                    if len(role_versions) > 0:
                        loose_versions = [LooseVersion(a.get('name', None)) for a in role_versions]
                        loose_versions.sort()
                        self.version = str(loose_versions[-1])
                    elif role_data.get('github_branch', None):
                        self.version = role_data['github_branch']
                    else:
                        self.version = 'master'
                elif self.version != 'master':
                    if role_versions and self.version not in [a.get('name', None) for a in role_versions]:
                        raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." % (self.version, self.name, role_versions))
                tmp_file = self.fetch(role_data)
        else:
            raise AnsibleError("No valid role data found")

        if tmp_file:
            display.debug("installing from %s" % tmp_file)
            if not tarfile.is_tarfile(tmp_file):
                raise AnsibleError("the file downloaded was not a tar.gz")
            else:
                if tmp_file.endswith('.gz'):
                    role_tar_file = tarfile.open(tmp_file, "r:gz")
                else:
                    role_tar_file = tarfile.open(tmp_file, "r")
                # verify the role's meta file
                meta_file = None
                members = role_tar_file.getmembers()
                # next find the metadata file
                for member in members:
                    if self.META_MAIN in member.name:
                        meta_file = member
                        break
                if not meta_file:
                    raise AnsibleError("this role does not appear to have a meta/main.yml file.")
                else:
                    try:
                        self._metadata = yaml.safe_load(role_tar_file.extractfile(meta_file))
                    except Exception:
                        # FIX: narrowed from a bare except.
                        raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")

                # we strip off the top-level directory for all of the files contained within
                # the tar file here, since the default is 'github_repo-target', and change it
                # to the specified role's name
                display.display("- extracting %s to %s" % (self.name, self.path))
                try:
                    if os.path.exists(self.path):
                        if not os.path.isdir(self.path):
                            raise AnsibleError("the specified roles path exists and is not a directory.")
                        elif not getattr(self.options, "force", False):
                            raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
                        else:
                            # using --force, remove the old path
                            if not self.remove():
                                raise AnsibleError("%s doesn't appear to contain a role.\n please remove this directory manually if you really want to put the role here." % self.path)
                    else:
                        os.makedirs(self.path)

                    # now we do the actual extraction to the path
                    for member in members:
                        # we only extract files, and remove any relative path
                        # bits that might be in the file for security purposes
                        # and drop the leading directory, as mentioned above
                        if member.isreg() or member.issym():
                            parts = member.name.split(os.sep)[1:]
                            final_parts = []
                            for part in parts:
                                if part != '..' and '~' not in part and '$' not in part:
                                    final_parts.append(part)
                            member.name = os.path.join(*final_parts)
                            role_tar_file.extract(member, self.path)

                    # write out the install info file for later use
                    self._write_galaxy_install_info()
                except OSError as e:
                    raise AnsibleError("Could not update files in %s: %s" % (self.path, str(e)))

                # return the parsed yaml metadata
                display.display("- %s was installed successfully" % self.name)
                try:
                    os.unlink(tmp_file)
                except (OSError, IOError) as e:
                    display.warning("Unable to remove tmp file (%s): %s" % (tmp_file, str(e)))
                return True

        return False

    @property
    def spec(self):
        """
        Returns role spec info
        {
            'scm': 'git',
            'src': 'http://git.example.com/repos/repo.git',
            'version': 'v1.0',
            'name': 'repo'
        }
        """
        return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
|
benjixx/ansible
|
lib/ansible/galaxy/role.py
|
Python
|
gpl-3.0
| 12,494
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Adapted by Nicolas Bessi. Copyright Camptocamp SA
# Based on Florent Xicluna original code. Copyright Wingo SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import locale
import os
import platform
import subprocess
from openerp import release
from openerp.tools.config import config
def _get_output(cmd):
    """Run *cmd* in a shell from the server root path; return stripped stdout."""
    # Commands run from the configured root path so relative tools
    # (e.g. bzr) see the server checkout.
    bindir = config['root_path']
    process = subprocess.Popen(cmd, shell=True, cwd=bindir,
                               stdout=subprocess.PIPE)
    stdout, _stderr = process.communicate()
    return stdout.rstrip()
def get_server_environment():
    """Collect (key, value) pairs describing the server host environment."""
    # inspired by server/bin/service/web_services.py
    try:
        rev_id = _get_output('bzr revision-info')
    except Exception as exc:
        rev_id = 'Exception: %s' % (exc,)

    # Default locale as "lang.encoding", or 'NOT SET' when unknown.
    os_lang = '.'.join(part for part in locale.getdefaultlocale() if part)
    os_lang = os_lang or 'NOT SET'

    is_linux = os.name == 'posix' and platform.system() == 'Linux'
    lsbinfo = _get_output('lsb_release -a') if is_linux else 'not lsb compliant'

    return (
        ('platform', platform.platform()),
        ('os.name', os.name),
        ('lsb_release', lsbinfo),
        ('release', platform.release()),
        ('version', platform.version()),
        ('architecture', platform.architecture()[0]),
        ('locale', os_lang),
        ('python', platform.python_version()),
        ('openerp', release.version),
        ('revision', rev_id),
    )
|
archetipo/server-tools
|
server_environment/system_info.py
|
Python
|
agpl-3.0
| 2,163
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils import today, add_days, getdate
from erpnext.accounts.utils import get_fiscal_year
from erpnext.accounts.report.financial_statements import get_months
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.selling.doctype.sales_order.test_sales_order import make_sales_order
from erpnext.accounts.doctype.subscription.subscription import make_subscription_entry
class TestSubscription(unittest.TestCase):
    """Exercises automatic document creation from Subscription schedules.

    FIX: uses assertEqual throughout — assertEquals is a deprecated
    alias that was removed in Python 3.12.
    """

    def test_daily_subscription(self):
        # Submit a quotation and attach a daily subscription to it.
        qo = frappe.copy_doc(quotation_records[0])
        qo.submit()
        doc = make_subscription(reference_document=qo.name)
        self.assertEqual(doc.next_schedule_date, today())
        make_subscription_entry()
        frappe.db.commit()

        # The source quotation must now be linked to the subscription...
        quotation = frappe.get_doc(doc.reference_doctype, doc.reference_document)
        self.assertEqual(quotation.subscription, doc.name)

        # ...and a new quotation must have been generated from it.
        new_quotation = frappe.db.get_value('Quotation',
            {'subscription': doc.name, 'name': ('!=', quotation.name)}, 'name')
        new_quotation = frappe.get_doc('Quotation', new_quotation)

        # The generated copy carries over header and item fields.
        for fieldname in ['customer', 'company', 'order_type', 'total', 'net_total']:
            self.assertEqual(quotation.get(fieldname), new_quotation.get(fieldname))
        for fieldname in ['item_code', 'qty', 'rate', 'amount']:
            self.assertEqual(quotation.items[0].get(fieldname),
                new_quotation.items[0].get(fieldname))

    def test_monthly_subscription_for_so(self):
        # Run the monthly scenario for both supported doctypes within
        # the current fiscal year.
        current_fiscal_year = get_fiscal_year(today(), as_dict=True)
        start_date = current_fiscal_year.year_start_date
        end_date = current_fiscal_year.year_end_date
        for doctype in ['Sales Order', 'Sales Invoice']:
            if doctype == 'Sales Invoice':
                docname = create_sales_invoice(posting_date=start_date)
            else:
                docname = make_sales_order()
            self.monthly_subscription(doctype, docname.name, start_date, end_date)

    def monthly_subscription(self, doctype, docname, start_date, end_date):
        """Shared assertions for a monthly subscription on *doctype*."""
        doc = make_subscription(reference_doctype=doctype, frequency='Monthly',
            reference_document=docname, start_date=start_date, end_date=end_date)

        # While disabled, the scheduler must not create documents.
        doc.disabled = 1
        doc.save()
        frappe.db.commit()
        make_subscription_entry()
        docnames = frappe.get_all(doc.reference_doctype, {'subscription': doc.name})
        self.assertEqual(len(docnames), 1)

        # Once re-enabled, one document per elapsed month is expected.
        doc = frappe.get_doc('Subscription', doc.name)
        doc.disabled = 0
        doc.save()
        months = get_months(getdate(start_date), getdate(today()))
        make_subscription_entry()
        docnames = frappe.get_all(doc.reference_doctype, {'subscription': doc.name})
        self.assertEqual(len(docnames), months)
# Shared fixture: the standard Quotation test records shipped with the app.
quotation_records = frappe.get_test_records('Quotation')
def make_subscription(**args):
    """Create (and by default submit) a Subscription document.

    Any field may be overridden via keyword arguments; unspecified
    fields fall back to a daily subscription on the first submitted
    Quotation, running from yesterday to tomorrow.
    """
    args = frappe._dict(args)
    reference_document = args.reference_document or frappe.db.get_value(
        'Quotation', {'docstatus': 1}, 'name')
    doc = frappe.get_doc({
        'doctype': 'Subscription',
        'reference_doctype': args.reference_doctype or 'Quotation',
        'reference_document': reference_document,
        'frequency': args.frequency or 'Daily',
        'start_date': args.start_date or add_days(today(), -1),
        'end_date': args.end_date or add_days(today(), 1),
        'submit_on_creation': args.submit_on_creation or 0,
    }).insert(ignore_permissions=True)
    if not args.do_not_submit:
        doc.submit()
    return doc
|
indictranstech/erpnext
|
erpnext/accounts/doctype/subscription/test_subscription.py
|
Python
|
agpl-3.0
| 3,376
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Configure build environment for certain Intel platforms."""
import argparse
import os
import subprocess
# Baseline Bazel options: build against the pre-C++11 libstdc++ ABI and
# compile with -O3.
BASIC_BUILD_OPTS = ["--cxxopt=-D_GLIBCXX_USE_CXX11_ABI=0", "--copt=-O3"]

# Hardening compile/link flags appended when --secure-build is passed.
SECURE_BUILD_OPTS = [
    "--copt=-Wformat", "--copt=-Wformat-security", "--copt=-fstack-protector",
    "--copt=-fPIC", "--copt=-fpic", "--linkopt=-znoexecstack",
    "--linkopt=-zrelro", "--linkopt=-znow", "--linkopt=-fstack-protector"
]
class IntelPlatform(object):
    """Base class for Intel microarchitecture targets.

    Subclasses declare the minimum gcc able to target them and implement
    get_bazel_gcc_flags() to emit the matching -march/-m flags.
    """

    min_gcc_major_version_ = 0
    min_gcc_minor_version_ = 0
    host_gcc_major_version_ = 0
    host_gcc_minor_version_ = 0
    BAZEL_PREFIX_ = "--copt="
    ARCH_PREFIX_ = "-march="
    FLAG_PREFIX_ = "-m"

    def __init__(self, min_gcc_major_version, min_gcc_minor_version):
        self.min_gcc_major_version_ = min_gcc_major_version
        self.min_gcc_minor_version_ = min_gcc_minor_version

    def set_host_gcc_version(self, gcc_major_version, gcc_minor_version):
        """Record the host gcc version; return True when it meets this
        platform's minimum, False (with a diagnostic) otherwise."""
        if gcc_major_version < self.min_gcc_major_version_:
            print("Your MAJOR version of GCC is too old: {}; "
                  "it must be at least {}.{}".format(gcc_major_version,
                                                     self.min_gcc_major_version_,
                                                     self.min_gcc_minor_version_))
            return False
        if (gcc_major_version == self.min_gcc_major_version_ and
                gcc_minor_version < self.min_gcc_minor_version_):
            print("Your MINOR version of GCC is too old: {}; "
                  "it must be at least {}.{}".format(gcc_minor_version,
                                                     self.min_gcc_major_version_,
                                                     self.min_gcc_minor_version_))
            return False
        print("gcc version OK: {}.{}".format(gcc_major_version, gcc_minor_version))
        self.host_gcc_major_version_ = gcc_major_version
        self.host_gcc_minor_version_ = gcc_minor_version
        return True

    def get_bazel_gcc_flags(self):
        """Return the bazel-formatted gcc flags for this platform."""
        raise NotImplementedError(self)

    def use_old_arch_names(self, gcc_new_march_major_version,
                           gcc_new_march_minor_version):
        """True when the host gcc predates the version in which the new
        -march name for this platform was introduced."""
        host = (self.host_gcc_major_version_, self.host_gcc_minor_version_)
        return host < (gcc_new_march_major_version, gcc_new_march_minor_version)
class NehalemPlatform(IntelPlatform):
    """Nehalem target; requires gcc >= 4.8."""

    def __init__(self):
        IntelPlatform.__init__(self, 4, 8)

    def get_bazel_gcc_flags(self):
        # gcc 4.9 renamed -march=corei7 to -march=nehalem.
        arch = "corei7" if self.use_old_arch_names(4, 9) else "nehalem"
        return "{}{}{} ".format(self.BAZEL_PREFIX_, self.ARCH_PREFIX_, arch)
class SandyBridgePlatform(IntelPlatform):
    """Sandy Bridge target; requires gcc >= 4.8."""

    def __init__(self):
        IntelPlatform.__init__(self, 4, 8)

    def get_bazel_gcc_flags(self):
        # gcc 4.9 renamed -march=corei7-avx to -march=sandybridge.
        arch = "corei7-avx" if self.use_old_arch_names(4, 9) else "sandybridge"
        return "{}{}{} ".format(self.BAZEL_PREFIX_, self.ARCH_PREFIX_, arch)
class HaswellPlatform(IntelPlatform):
    """Haswell target; requires gcc >= 4.8."""

    def __init__(self):
        IntelPlatform.__init__(self, 4, 8)

    def get_bazel_gcc_flags(self):
        # Before gcc 4.9 Haswell was spelled core-avx2, which lacks the
        # POPCNT instruction, so -mpopcnt is added explicitly.
        if self.use_old_arch_names(4, 9):
            return (self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "core-avx2 " +
                    self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + "popcnt ")
        return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "haswell "
class SkylakePlatform(IntelPlatform):
    """Skylake-SP target; requires gcc >= 4.9."""

    def __init__(self):
        IntelPlatform.__init__(self, 4, 9)

    def get_bazel_gcc_flags(self):
        # -march=skylake-avx512 only exists from gcc 6.1; older gcc
        # targets broadwell plus individual AVX-512 feature flags.
        # Flags broadwell is missing: pku, clflushopt, clwb, avx512vl,
        # avx512bw, avx512dq. xsavec and xsaves are available in gcc 5.x
        # but for now are deliberately excluded.
        if self.use_old_arch_names(6, 1):
            flags = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "broadwell "
            for feature in ("avx512f", "avx512cd"):
                flags += self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + feature + " "
            return flags
        return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "skylake-avx512 "
class CascadelakePlatform(IntelPlatform):
    """Cascade Lake target; requires gcc >= 8.3."""

    def __init__(self):
        IntelPlatform.__init__(self, 8, 3)

    def get_bazel_gcc_flags(self):
        # -march=cascadelake only exists from gcc 9.1; older gcc targets
        # skylake-avx512 and enables AVX512-VNNI separately.
        if self.use_old_arch_names(9, 1):
            return (self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "skylake-avx512 " +
                    self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + "avx512vnni ")
        return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "cascadelake "
class IcelakeClientPlatform(IntelPlatform):
    """Ice Lake (client) target; requires gcc >= 8.4."""

    def __init__(self):
        IntelPlatform.__init__(self, 8, 4)

    def get_bazel_gcc_flags(self):
        # -march=icelake-client needs gcc >= 8.4; older gcc targets
        # skylake-avx512 plus individual AVX-512 feature flags.
        if self.use_old_arch_names(8, 4):
            flags = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "skylake-avx512 "
            for feature in ("avx512f", "avx512cd"):
                flags += self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + feature + " "
            return flags
        return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "icelake-client "
class IcelakeServerPlatform(IntelPlatform):
    """Ice Lake (server) target; requires gcc >= 8.4."""

    def __init__(self):
        IntelPlatform.__init__(self, 8, 4)

    def get_bazel_gcc_flags(self):
        # -march=icelake-server needs gcc >= 8.4; older gcc targets
        # skylake-avx512 plus individual AVX-512 feature flags.
        if self.use_old_arch_names(8, 4):
            flags = self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "skylake-avx512 "
            for feature in ("avx512f", "avx512cd"):
                flags += self.BAZEL_PREFIX_ + self.FLAG_PREFIX_ + feature + " "
            return flags
        return self.BAZEL_PREFIX_ + self.ARCH_PREFIX_ + "icelake-server "
class BuildEnvSetter(object):
    """Prepares the proper environment settings for various Intel platforms.

    Parses the CLI, validates the host gcc against the requested
    platform, assembles a 'build ...' line of bazel flags and writes it
    to the requested bazelrc file.
    """

    default_platform_ = "haswell"

    # Platform name -> flag generator for that microarchitecture.
    PLATFORMS_ = {
        "nehalem": NehalemPlatform(),
        "sandybridge": SandyBridgePlatform(),
        "haswell": HaswellPlatform(),
        "skylake": SkylakePlatform(),
        "cascadelake": CascadelakePlatform(),
        "icelake-client": IcelakeClientPlatform(),
        "icelake-server": IcelakeServerPlatform(),
    }

    def __init__(self):
        self.args = None
        self.bazel_flags_ = "build "
        self.target_platform_ = None

    # Return a tuple of the current gcc version
    def get_gcc_version(self):
        """Return (major, minor) of the host gcc, or (0, 0) on failure."""
        gcc_major_version = 0
        gcc_minor_version = 0

        # check to see if gcc is present
        gcc_path = ""
        gcc_path_cmd = "command -v gcc"
        try:
            gcc_path = subprocess.check_output(gcc_path_cmd, shell=True,
                                               stderr=subprocess.STDOUT).strip()
            print("gcc located here: {}".format(gcc_path))
            if not os.access(gcc_path, os.F_OK | os.X_OK):
                raise ValueError(
                    "{} does not exist or is not executable.".format(gcc_path))
            gcc_output = subprocess.check_output(
                [gcc_path, "-dumpfullversion", "-dumpversion"],
                stderr=subprocess.STDOUT).strip()
            # handle python2 vs 3 (bytes vs str type)
            if isinstance(gcc_output, bytes):
                gcc_output = gcc_output.decode("utf-8")
            print("gcc version: {}".format(gcc_output))
            gcc_info = gcc_output.split(".")
            gcc_major_version = int(gcc_info[0])
            gcc_minor_version = int(gcc_info[1])
        except subprocess.CalledProcessError as e:
            # BUG FIX: the original caught subprocess.CalledProcessException,
            # which does not exist, so any failing subprocess call raised
            # AttributeError here instead of being reported.
            print("Problem getting gcc info: {}".format(e))
            gcc_major_version = 0
            gcc_minor_version = 0
        return gcc_major_version, gcc_minor_version

    def parse_args(self):
        """Set up argument parser, and parse CLI args."""
        arg_parser = argparse.ArgumentParser(
            description="Parse the arguments for the "
            "TensorFlow build environment "
            " setter")
        arg_parser.add_argument(
            "--disable-mkl",
            dest="disable_mkl",
            help="Turn off MKL. By default the compiler flag "
            "--config=mkl is enabled.",
            action="store_true")
        arg_parser.add_argument(
            "--disable-v2",
            dest="disable_v2",
            help="Build TensorFlow v1 rather than v2. By default the "
            " compiler flag --config=v2 is enabled.",
            action="store_true")
        arg_parser.add_argument(
            "--enable-bfloat16",
            dest="enable_bfloat16",
            help="Enable bfloat16 build. By default it is "
            " disabled if no parameter is passed.",
            action="store_true")
        arg_parser.add_argument(
            "--enable-dnnl1",
            dest="enable_dnnl1",
            help="Enable dnnl1 build. By default it is "
            " disabled if no parameter is passed.",
            action="store_true")
        arg_parser.add_argument(
            "-s",
            "--secure-build",
            dest="secure_build",
            help="Enable secure build flags.",
            action="store_true")
        arg_parser.add_argument(
            "-p",
            "--platform",
            choices=self.PLATFORMS_.keys(),
            help="The target platform.",
            dest="target_platform",
            default=self.default_platform_)
        arg_parser.add_argument(
            "-f",
            "--bazelrc-file",
            dest="bazelrc_file",
            help="The full path to the bazelrc file into which "
            "the build command will be written. The path "
            "will be relative to the container "
            " environment.",
            required=True)
        self.args = arg_parser.parse_args()

    def validate_args(self):
        """Check the bazelrc target and host gcc; return True when usable."""
        # Check the bazelrc file
        if os.path.exists(self.args.bazelrc_file):
            if os.path.isfile(self.args.bazelrc_file):
                self._debug("The file {} exists and will be deleted.".format(
                    self.args.bazelrc_file))
            elif os.path.isdir(self.args.bazelrc_file):
                print("You can't write bazel config to \"{}\" "
                      "because it is a directory".format(self.args.bazelrc_file))
                return False

        # Validate gcc with the requested platform
        gcc_major_version, gcc_minor_version = self.get_gcc_version()
        if gcc_major_version == 0 or \
           not self.target_platform_.set_host_gcc_version(
               gcc_major_version, gcc_minor_version):
            return False
        return True

    def set_build_args(self):
        """Generate Bazel build flags."""
        for flag in BASIC_BUILD_OPTS:
            self.bazel_flags_ += "{} ".format(flag)
        if self.args.secure_build:
            for flag in SECURE_BUILD_OPTS:
                self.bazel_flags_ += "{} ".format(flag)
        if not self.args.disable_mkl:
            self.bazel_flags_ += "--config=mkl "
        if self.args.disable_v2:
            self.bazel_flags_ += "--config=v1 "
        if self.args.enable_dnnl1:
            self.bazel_flags_ += "--define build_with_mkl_dnn_v1_only=true "
        if self.args.enable_bfloat16:
            self.bazel_flags_ += "--copt=-DENABLE_INTEL_MKL_BFLOAT16 "
        self.bazel_flags_ += self.target_platform_.get_bazel_gcc_flags()

    def write_build_args(self):
        """Write the assembled 'build ...' line to the bazelrc file."""
        self._debug("Writing build flags: {}".format(self.bazel_flags_))
        with open(self.args.bazelrc_file, "w") as f:
            f.write(self.bazel_flags_ + "\n")

    def _debug(self, msg):
        print(msg)

    def go(self):
        """Entry point: parse args, validate, then emit the bazelrc line."""
        self.parse_args()
        self.target_platform_ = self.PLATFORMS_.get(self.args.target_platform)
        if self.validate_args():
            self.set_build_args()
            self.write_build_args()
        else:
            print("Error.")
# FIX: guard the script entry point — the original ran (and, with no
# CLI args, crashed in argparse) at import time.
if __name__ == "__main__":
    env_setter = BuildEnvSetter()
    env_setter.go()
|
tensorflow/tensorflow
|
tensorflow/tools/ci_build/linux/mkl/set-build-env.py
|
Python
|
apache-2.0
| 13,493
|
#!/usr/bin/env python
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from zipline.utils import parse_args, run_pipeline
if __name__ == "__main__":
    # Parse CLI options, run the algorithm pipeline (echoing the algo
    # source), then exit cleanly.
    cli_args = parse_args(sys.argv[1:])
    run_pipeline(print_algo=True, **cli_args)
    sys.exit(0)
|
erikness/AlephOne
|
scripts/run_algo.py
|
Python
|
apache-2.0
| 794
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
Simple DOM for both SGML and XML documents.
"""
from __future__ import division
from __future__ import generators
from __future__ import nested_scopes
from __future__ import absolute_import
import sys
# Python 2/3 compatibility: the tuple of types accepted as "string"
# (used by Leaf to validate its data payload).
if sys.version_info[0] == 2:
    STRING_TYPES = (basestring,)
else:
    STRING_TYPES = (str,)
class Container:
    """A DOM node that owns an ordered list of child nodes."""

    def __init__(self):
        self.children = []

    def add(self, child):
        """Append *child* and make this container its parent."""
        child.parent = self
        self.children.append(child)

    def extend(self, children):
        """Append every element of *children*, reparenting each here."""
        for ch in children:
            ch.parent = self
            self.children.append(ch)
class Component:
    """Mixin giving a node a parent pointer, its index within that
    parent, and optional source-position bookkeeping."""

    def __init__(self):
        self.parent = None

    def index(self):
        """Position of this component in its parent; 0 when detached."""
        if not self.parent:
            return 0
        return self.parent.children.index(self)

    def _line(self, file, line, column):
        # Record the source position this component was parsed from.
        self.file = file
        self.line = line
        self.column = column
class DispatchError(Exception):
    """Raised when no handler can be found during dispatch.

    FIX: the original built a message string and then discarded it,
    leaving the exception with empty args; the message is now passed to
    Exception so str(exc) is informative, and the scope/handler are
    kept for callers.
    """

    def __init__(self, scope, f):
        msg = "no such attribute"
        Exception.__init__(self, msg)
        self.scope = scope
        self.f = f
class Dispatcher:
    # Mixin that walks the class-level 'type'/'base' chain declared by
    # Node/Leaf subclasses (e.g. "tag" -> "node").

    def is_type(self, type):
        """Return True if this node's type chain contains *type*."""
        cls = self
        while cls is not None:
            if cls.type == type:
                return True
            cls = cls.base
        return False

    def dispatch(self, f, attrs=""):
        """Invoke the most specific handler on *f* for this node.

        First pass: walk the type chain looking for a method on *f*
        named after each type and call the first match.  Second pass
        (no match): walk the chain again to build a readable, comma
        separated list of the attribute names that were tried — with
        "or" before the last — and raise AttributeError.
        """
        cls = self
        while cls is not None:
            if hasattr(f, cls.type):
                return getattr(f, cls.type)(self)
            else:
                cls = cls.base
        cls = self
        while cls is not None:
            if attrs:
                sep = ", "
                if cls.base is None:
                    sep += "or "
            else:
                sep = ""
            attrs += "%s'%s'" % (sep, cls.type)
            cls = cls.base
        raise AttributeError("'%s' object has no attribute %s" %
                             (f.__class__.__name__, attrs))
class Node(Container, Component, Dispatcher):
    """A container node and the builder API for tags, data and entities."""

    type = "node"
    base = None

    def __init__(self):
        Container.__init__(self)
        Component.__init__(self)
        self.query = Query([self])

    def __getitem__(self, name):
        # First query match for *name* (implicitly None when empty).
        for match in self.query[name]:
            return match

    def text(self):
        """Render this subtree as plain text via the Text transform."""
        from . import transforms
        return self.dispatch(transforms.Text())

    def tag(self, name, *attrs, **kwargs):
        """Create, attach and return a child Tag."""
        child = Tag(name, *attrs, **kwargs)
        self.add(child)
        return child

    def data(self, s):
        """Create, attach and return a child Data node."""
        child = Data(s)
        self.add(child)
        return child

    def entity(self, s):
        """Create, attach and return a child Entity node."""
        child = Entity(s)
        self.add(child)
        return child
class Tree(Node):
    # Document-level node; Flatten views descend through Tree children
    # rather than yielding the Tree itself.
    type = "tree"
    base = Node
class Tag(Node):
    """An element node: a name plus an ordered list of (key, value) attrs."""
    type = "tag"
    base = Node

    def __init__(self, _name, *attrs, **kwargs):
        Node.__init__(self)
        self.name = _name
        # Positional attribute pairs first, keyword pairs after.
        self.attrs = list(attrs) + list(kwargs.items())
        self.singleton = False

    def get_attr(self, name):
        """Value of the first attribute called *name*, or None."""
        for key, value in self.attrs:
            if key == name:
                return value
        return None

    def _idx(self, attr):
        """Index of the first attribute called *attr*, or None."""
        for pos, pair in enumerate(self.attrs):
            if pair[0] == attr:
                return pos
        return None

    def set_attr(self, name, value):
        """Overwrite the first attribute called *name*, else append it."""
        pos = self._idx(name)
        if pos is None:
            self.attrs.append((name, value))
        else:
            self.attrs[pos] = (name, value)

    def dispatch(self, f):
        """Prefer a tag-specific do_<name> handler on *f*; otherwise fall
        back to type-based dispatch (reporting do_<name> as tried)."""
        attr = "do_" + self.name
        try:
            method = getattr(f, attr)
        except AttributeError:
            return Dispatcher.dispatch(self, f, "'%s'" % attr)
        return method(self)
class Leaf(Component, Dispatcher):
    # A childless component carrying a chunk of character data.
    type = "leaf"
    base = None
    def __init__(self, data):
        # Leaves only ever hold strings; catch misuse early.
        assert isinstance(data, STRING_TYPES)
        self.data = data
class Data(Leaf):
    # Plain character data.
    type = "data"
    base = Leaf
class Entity(Leaf):
    # An entity reference.
    type = "entity"
    base = Leaf
class Character(Leaf):
    # A character reference.
    type = "character"
    base = Leaf
class Comment(Leaf):
    # A comment.
    type = "comment"
    base = Leaf
###################
## Query Classes ##
###########################################################################
class Adder:
    # Mixin giving views "+" support: a + b is a lazy Sum of both.
    def __add__(self, other):
        return Sum(self, other)
class Sum(Adder):
    """Lazy concatenation of two iterables: yields all of left, then right."""

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def __iter__(self):
        for part in (self.left, self.right):
            for item in part:
                yield item
class View(Adder):
    # Base for lazy views over an iterable source.
    def __init__(self, source):
        self.source = source
class Filter(View):
    """View keeping only the items its predicate accepts."""

    def __init__(self, predicate, source):
        View.__init__(self, source)
        self.predicate = predicate

    def __iter__(self):
        keep = self.predicate
        for item in self.source:
            if keep(item):
                yield item
class Flatten(View):
    """Depth-first view that descends into Tree nodes, yielding everything
    else as encountered."""

    def __iter__(self):
        # Stack of iterators; the top one is being drained.
        stack = [iter(self.source)]
        while stack:
            try:
                node = next(stack[-1])
            except StopIteration:
                stack.pop()
                continue
            if isinstance(node, Tree):
                # Descend: iterate the tree's children before resuming.
                stack.append(iter(node.children))
            else:
                yield node
class Children(View):
    """View yielding every child of every node in the source."""

    def __iter__(self):
        for node in self.source:
            for kid in node.children:
                yield kid
class Attributes(View):
    """View yielding every (key, value) attribute pair of the source nodes."""

    def __iter__(self):
        for node in self.source:
            for pair in node.attrs:
                yield pair
class Values(View):
    """View reducing (key, value) pairs from the source to their values."""

    def __iter__(self):
        for _key, value in self.source:
            yield value
def flatten_path(path):
    """Yield the atomic steps of a query path.

    A string is split on "/", a callable is yielded as-is, and any other
    iterable is flattened recursively.
    """
    if isinstance(path, STRING_TYPES):
        for segment in path.split("/"):
            yield segment
    elif callable(path):
        yield path
    else:
        for element in path:
            for step in flatten_path(element):
                yield step
class Query(View):
    """A lazy query over nodes; indexing with a path refines it."""
    def __iter__(self):
        # A bare query just yields its source nodes unchanged.
        for nd in self.source:
            yield nd
    def __getitem__(self, path):
        # Build a chain of views, one per step of the path.  Each step
        # filters/expands the result of the previous step.
        query = self.source
        for p in flatten_path(path):
            if callable(p):
                # A callable step is used directly as a node predicate.
                select = Query
                pred = p
                source = query
            elif isinstance(p, STRING_TYPES):
                if p[0] == "@":
                    # "@name": yield values of attributes named "name".
                    # Default args bind p[1:] now, avoiding late binding.
                    select = Values
                    pred = lambda x, n=p[1:]: x[0] == n
                    source = Attributes(query)
                elif p[0] == "#":
                    # "#type": yield children whose dispatch type matches.
                    select = Query
                    pred = lambda x, t=p[1:]: x.is_type(t)
                    source = Children(query)
                else:
                    # Plain name: yield descendant Tags with that name,
                    # flattening through intermediate Tree nodes.
                    select = Query
                    def pred(x, n=p): return isinstance(x, Tag) and x.name == n
                    source = Flatten(Children(query))
            else:
                raise ValueError(p)
            query = select(Filter(pred, source))
        return query
|
gemmellr/qpid-proton
|
tools/python/mllib/dom.py
|
Python
|
apache-2.0
| 7,614
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""parsing_utils python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
# Re-export the canonical implementation that now lives in
# tensorflow_estimator (this module is a deprecated alias for it).
from tensorflow_estimator.python.estimator.canned import parsing_utils
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
# Mirror every name (public and single-underscore) into __all__ so the
# star-import below re-exports the full surface of parsing_utils.
parsing_utils.__all__ = [
    s for s in dir(parsing_utils) if not s.startswith('__')
]
from tensorflow_estimator.python.estimator.canned.parsing_utils import *
|
tensorflow/tensorflow
|
tensorflow/python/estimator/canned/parsing_utils.py
|
Python
|
apache-2.0
| 1,209
|
#!/usr/bin/env python
# scapy.contrib.description = PPI
# scapy.contrib.status = loads
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
####################################################################
# This file holds the GSM UM interface implementation for Scapy #
# author: Laurent Weber <k@0xbadcab1e.lu> #
# #
# Some examples on how to use this script: #
# http://0xbadcab1e.lu/scapy_gsm_um-howto.txt #
# #
# tested on: scapy-version: 2.2.0 (dev) #
####################################################################
import logging
from types import IntType
from types import NoneType
from types import StringType
#from time import sleep
import socket
logging.getLogger("scapy").setLevel(1)
from scapy.all import *
# This method is intended to send gsm air packets. It uses a unix domain
# socket. It opens a socket, sends the parameter to the socket and
# closes the socket.
# typeSock determines the type of the socket, can be:
# 0 for UDP Socket
# 1 for Unix Domain Socket
# 2 for TCP
def sendum(x, typeSock=0):
    """Send a GSM Um packet over a socket and close it (best effort).

    typeSock selects the transport: 0 = UDP (openBTS default port),
    1 = Unix domain stream socket, 2 = TCP.  *x* is serialised with
    str() when needed.  Errors are reported, not raised, matching the
    original behaviour (the original used a bare except; this catches
    Exception so KeyboardInterrupt/SystemExit still propagate).  The
    `is 0/1/2` identity tests were replaced with `==`.
    """
    try:
        if not isinstance(x, str):
            x = str(x)
        if typeSock == 0:
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            host = '127.0.0.1'
            port = 28670  # default for openBTS
            s.connect((host, port))
        elif typeSock == 1:
            s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            s.connect("/tmp/osmoL")
        elif typeSock == 2:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            host = '127.0.0.1'
            port = 43797
            s.connect((host, port))
        s.send(x)
        s.close()
    except Exception:
        # The original message embedded the continuation line's leading
        # whitespace; normalised to a single space here.
        print("[Error]: There was a problem when trying to transmit data."
              " Please make sure you started the socket server.")
# Known Bugs/Problems:
# If a message uses the same IE multiple times, you cannot set the values
# of those IEs through the preconfigured packets. You need to build
# the IEs by hand and then assemble them into entire messages.
# The ErrorLength class is a custom exception that gets raised when a
# packet doesn't have the correct size.
class ErrorLength(Exception):
    """Raised when a packet's unset fields do not form whole 8-bit bytes."""

    def __str__(self):
        message = "ERROR: Please make sure you build entire, 8 bit fields."
        return repr(message)
###
# This method computes the length of the actual IE.
# It computes how many "None" fields have to be removed (if any).
# The method returns an integer containing the number of bytes that have to be
# cut off the packet.
# The parameters min_length and max_length contain the minimum and maximum
# length of the IE, as specified in GSM 04.08.
# The parameter fields contains the values of the fields (not the defaults,
# but the real, actual values).
# The parameter fields2 contains fields_desc.
# Location contains the location of the length field in the IE. Everything
# after the length field has to be counted (04.07 11.2.1.1.2)
def adapt(min_length, max_length, fields, fields2, location=2):
    """Compute how many trailing unset (None) bytes to cut off an IE.

    Walks *fields* (actual field values) backwards counting trailing None
    entries, summing their bit sizes from *fields2* (fields_desc; fields
    without a .size attribute count as 8 bits).  Returns
    ``[length, dyn_length + location]`` where ``length`` is the number of
    bytes to delete.  Raises ErrorLength when the unset fields do not add
    up to whole bytes.

    Fixes vs. the original: ``==`` instead of the CPython-specific ``is``
    for int comparisons (``length is max_length`` silently fails for ints
    outside the small-int cache), ``//`` for explicit integer division,
    and the never-used ``rm`` counter removed.
    """
    # Bytes between the length field and min_length (04.07 11.2.1.1.2).
    location = min_length - location
    bitsum = 0
    # Count trailing None fields from the end, in bits.
    for i in range(len(fields) - 1, -1, -1):
        if fields[i] is not None:
            break
        try:
            bitsum += fields2[i].size
        except AttributeError:  # ByteFields don't have .size
            bitsum += 8
    if bitsum % 8 != 0:
        raise ErrorLength()
    length = bitsum // 8  # number of bytes we have to delete
    dyn_length = max_length - min_length - length
    if dyn_length < 0:
        dyn_length = 0
    if length == max_length:  # fix for packets that have all values set
        length -= min_length  # to None
    return [length, dyn_length + location]
def examples(example=None):
    """Print usage examples for scapy gsm-um.

    Converted the Python-2-only print statements to single-argument
    print() calls (valid on both Python 2 and 3) and ``== None`` to the
    idiomatic ``is None``.  NOTE(review): examples("dissect") is
    advertised but has no branch; it prints nothing, as in the original.
    """
    if example is None:
        print("""This command presents some example to introduce scapy
gsm-um to new users.
The following parameters can be used:
    examples("imsiDetach")
    examples("call")
    examples("dissect")""")
    elif example == "imsiDetach":
        print("""
>>> a=imsiDetachIndication()
... a.typeOfId=1; a.odd=1; a.idDigit1=0xF;
... a.idDigit2_1=2; a.idDigit2=7; a.idDigit3_1=0;
... a.idDigit3=7; a.idDigit4_1=7; a.idDigit4=2;
... a.idDigit5_1=0; a.idDigit5=0; a.idDigit6_1=0;
... a.idDigit6=1; a.idDigit7_1=2; a.idDigit7=7;
... a.idDigit8_1=7; a.idDigit8=5; a.idDigit9_1=1; a.idDigit9=4;
>>> hexdump(a)
0000   05 01 00 08 F0 27 07 72  00 01 27 75 14           .....'.r..'u.
>>> sendum(a)
""")
    elif example == "call":
        print("""
If you use an USRP and the testcall function this sets up a phonecall:
>>> sendum(setupMobileOriginated())
>>> sendum(connectAcknowledge())
""")
# Section 10.2/3
class TpPd(Packet):
    """Skip indicator and transaction identifier and Protocol Discriminator"""
    name = "Skip Indicator And Transaction Identifier and Protocol \
Discriminator"
    fields_desc = [
        BitField("ti", 0x0, 4),  # transaction identifier / skip indicator
        BitField("pd", 0x3, 4)   # protocol discriminator (0x6 = RR in callers)
    ]
class MessageType(Packet):
    """Message Type Section 10.4"""
    name = "Message Type"
    fields_desc = [
        XByteField("mesType", 0x3C)  # message type octet, hex-rendered
    ]
##
# Message for Radio Resources management (RR) Section 9.1
###
# Network to MS
def additionalAssignment(MobileAllocation_presence=0,
                         StartingTime_presence=0):
    """ADDITIONAL ASSIGNMENT Section 9.1.1

    Presence flags select the optional IEs.  ``== 1`` replaces the
    original's CPython-specific ``is 1`` identity tests.
    """
    # Mandatory part; mesType 0x3B = 00111011
    packet = TpPd(pd=0x6) / MessageType(mesType=0x3B) / ChannelDescription()
    # Optional IEs
    if MobileAllocation_presence == 1:
        packet = packet / MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)
    if StartingTime_presence == 1:
        packet = packet / StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)
    return packet
# Network to MS
def assignmentCommand(FrequencyList_presence=0,
                      CellChannelDescription_presence=0,
                      CellChannelDescription_presence1=0,
                      MultislotAllocation_presence=0,
                      ChannelMode_presence=0, ChannelMode_presence1=0,
                      ChannelMode_presence2=0, ChannelMode_presence3=0,
                      ChannelMode_presence4=0, ChannelMode_presence5=0,
                      ChannelMode_presence6=0, ChannelMode_presence7=0,
                      ChannelDescription=0, ChannelMode2_presence=0,
                      MobileAllocation_presence=0, StartingTime_presence=0,
                      FrequencyList_presence1=0,
                      ChannelDescription2_presence=0,
                      ChannelDescription_presence=0,
                      FrequencyChannelSequence_presence=0,
                      MobileAllocation_presence1=0,
                      CipherModeSetting_presence=0,
                      VgcsTargetModeIdentication_presence=0,
                      MultiRateConfiguration_presence=0):
    """ASSIGNMENT COMMAND Section 9.1.2

    ``== 1`` replaces the original's CPython-specific ``is 1`` tests.
    NOTE(review): as in the original, ChannelDescription_presence gates
    BOTH the 0x64 and the 0x1D ChannelDescription IEs, while the bare
    ChannelDescription and CellChannelDescription_presence1 parameters
    are never used -- confirm against GSM 04.08 before changing.
    """
    # Mandatory part; mesType 0x2e = 101110
    packet = (TpPd(pd=0x6) / MessageType(mesType=0x2e) /
              ChannelDescription2() / PowerCommand())
    # Optional IEs in original order: (presence flag, IE builder)
    optional = [
        (FrequencyList_presence,
         lambda: FrequencyListHdr(ieiFL=0x05, eightBitFL=0x0)),
        (CellChannelDescription_presence,
         lambda: CellChannelDescriptionHdr(ieiCCD=0x62, eightBitCCD=0x0)),
        (MultislotAllocation_presence,
         lambda: MultislotAllocationHdr(ieiMSA=0x10, eightBitMSA=0x0)),
        (ChannelMode_presence, lambda: ChannelModeHdr(ieiCM=0x63, eightBitCM=0x0)),
        (ChannelMode_presence1, lambda: ChannelModeHdr(ieiCM=0x11, eightBitCM=0x0)),
        (ChannelMode_presence2, lambda: ChannelModeHdr(ieiCM=0x13, eightBitCM=0x0)),
        (ChannelMode_presence3, lambda: ChannelModeHdr(ieiCM=0x14, eightBitCM=0x0)),
        (ChannelMode_presence4, lambda: ChannelModeHdr(ieiCM=0x15, eightBitCM=0x0)),
        (ChannelMode_presence5, lambda: ChannelModeHdr(ieiCM=0x16, eightBitCM=0x0)),
        (ChannelMode_presence6, lambda: ChannelModeHdr(ieiCM=0x17, eightBitCM=0x0)),
        (ChannelMode_presence7, lambda: ChannelModeHdr(ieiCM=0x18, eightBitCM=0x0)),
        (ChannelDescription_presence,
         lambda: ChannelDescriptionHdr(ieiCD=0x64, eightBitCD=0x0)),
        (ChannelMode2_presence,
         lambda: ChannelMode2Hdr(ieiCM2=0x66, eightBitCM2=0x0)),
        (MobileAllocation_presence,
         lambda: MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)),
        (StartingTime_presence,
         lambda: StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)),
        (FrequencyList_presence1,
         lambda: FrequencyListHdr(ieiFL=0x19, eightBitFL=0x0)),
        (ChannelDescription2_presence,
         lambda: ChannelDescription2Hdr(ieiCD2=0x1C, eightBitCD2=0x0)),
        (ChannelDescription_presence,
         lambda: ChannelDescriptionHdr(ieiCD=0x1D, eightBitCD=0x0)),
        (FrequencyChannelSequence_presence,
         lambda: FrequencyChannelSequenceHdr(ieiFCS=0x1E, eightBitFCS=0x0)),
        (MobileAllocation_presence1,
         lambda: MobileAllocationHdr(ieiMA=0x21, eightBitMA=0x0)),
        (CipherModeSetting_presence,
         lambda: CipherModeSettingHdr(ieiCMS=0x9, eightBitCMS=0x0)),
        (VgcsTargetModeIdentication_presence,
         lambda: VgcsTargetModeIdenticationHdr(ieiVTMI=0x01, eightBitVTMI=0x0)),
        (MultiRateConfiguration_presence,
         lambda: MultiRateConfigurationHdr(ieiMRC=0x03, eightBitMRC=0x0)),
    ]
    for present, build in optional:
        if present == 1:
            packet = packet / build()
    return packet
# MS to Network
def assignmentComplete():
    """ASSIGNMENT COMPLETE Section 9.1.3"""
    # mesType 0x29 = 00101001
    return TpPd(pd=0x6) / MessageType(mesType=0x29) / RrCause()
# MS to Network
def assignmentFailure():
    """ASSIGNMENT FAILURE Section 9.1.4"""
    # mesType 0x2F = 00101111
    return TpPd(pd=0x6) / MessageType(mesType=0x2F) / RrCause()
# Network to MS
def channelModeModify(VgcsTargetModeIdentication_presence=0,
                      MultiRateConfiguration_presence=0):
    """CHANNEL MODE MODIFY Section 9.1.5

    Bug fix: the original tested the undefined names
    ``VgcsTargetModeIdentication`` / ``MultiRateConfiguration`` (missing
    the ``_presence`` suffix), raising NameError whenever it ran; it now
    tests the actual parameters, with ``== 1`` instead of ``is 1``.
    """
    # Mandatory part; mesType 0x8 = 0001000
    packet = (TpPd(pd=0x6) / MessageType(mesType=0x8) /
              ChannelDescription2() / ChannelMode())
    # Optional IEs
    if VgcsTargetModeIdentication_presence == 1:
        packet = packet / VgcsTargetModeIdenticationHdr(ieiVTMI=0x01,
                                                        eightBitVTMI=0x0)
    if MultiRateConfiguration_presence == 1:
        packet = packet / MultiRateConfigurationHdr(ieiMRC=0x03,
                                                    eightBitMRC=0x0)
    return packet
def channelModeModifyAcknowledge():
    """CHANNEL MODE MODIFY ACKNOWLEDGE Section 9.1.6"""
    # mesType 0x17 = 00010111
    return (TpPd(pd=0x6) / MessageType(mesType=0x17) /
            ChannelDescription2() / ChannelMode())
# Network to MS
def channelRelease(BaRange_presence=0, GroupChannelDescription_presence=0,
                   GroupCipherKeyNumber_presence=0, GprsResumption_presence=0,
                   BaListPref_presence=0):
    """CHANNEL RELEASE Section 9.1.7

    ``== 1`` replaces the original's CPython-specific ``is 1`` tests.
    """
    # Mandatory part; mesType 0xD = 00001101
    packet = TpPd(pd=0x6) / MessageType(mesType=0xD) / RrCause()
    # Optional IEs
    if BaRange_presence == 1:
        packet = packet / BaRangeHdr(ieiBR=0x73, eightBitBR=0x0)
    if GroupChannelDescription_presence == 1:
        packet = packet / GroupChannelDescriptionHdr(ieiGCD=0x74,
                                                     eightBitGCD=0x0)
    if GroupCipherKeyNumber_presence == 1:
        packet = packet / GroupCipherKeyNumber(ieiGCKN=0x8)
    if GprsResumption_presence == 1:
        packet = packet / GprsResumptionHdr(ieiGR=0xC, eightBitGR=0x0)
    if BaListPref_presence == 1:
        packet = packet / BaListPrefHdr(ieiBLP=0x75, eightBitBLP=0x0)
    return packet
class ChannelRequest(Packet):
    """Channel request Section 9.1.8"""
    name = "Channel Request"
    fields_desc = [
        ByteField("estCause", 0x0)  # establishment cause octet
    ]
def channelRequest():
    # Convenience builder, mirroring the other message constructors.
    return ChannelRequest()
# Network to MS
def cipheringModeCommand():
    """CIPHERING MODE COMMAND Section 9.1.9"""
    # mesType 0x35 = 00110101.  The cipher mode setting and cipher
    # response half-octets are carried together by the combined
    # CipherModeSettingAndcipherResponse field.
    return (TpPd(pd=0x6) / MessageType(mesType=0x35) / RrCause() /
            CipherModeSettingAndcipherResponse())
def cipheringModeComplete(MobileId_presence=0):
    """CIPHERING MODE COMPLETE Section 9.1.10

    ``== 1`` replaces the original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x32 = 00110010
    packet = TpPd(pd=0x6) / MessageType(mesType=0x32)
    # Optional mobile identity IE
    if MobileId_presence == 1:
        packet = packet / MobileIdHdr(ieiMI=0x17, eightBitMI=0x0)
    return packet
# Network to MS
def classmarkChange(MobileStationClassmark3_presence=0):
    """CLASSMARK CHANGE Section 9.1.11

    ``== 1`` replaces the original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x16 = 00010110
    packet = (TpPd(pd=0x6) / MessageType(mesType=0x16) /
              MobileStationClassmark2())
    # Optional classmark 3 IE
    if MobileStationClassmark3_presence == 1:
        packet = packet / MobileStationClassmark3(ieiMSC3=0x20)
    return packet
# Network to MS
def classmarkEnquiry():
    """CLASSMARK ENQUIRY Section 9.1.12"""
    # mesType 0x13 = 00010011
    return TpPd(pd=0x6) / MessageType(mesType=0x13)
# 9.1.12a Spare
# Network to MS
def configurationChangeCommand(ChannelMode_presence=0,
                               ChannelMode_presence1=0,
                               ChannelMode_presence2=0,
                               ChannelMode_presence3=0,
                               ChannelMode_presence4=0,
                               ChannelMode_presence5=0,
                               ChannelMode_presence6=0,
                               ChannelMode_presence7=0):
    """CONFIGURATION CHANGE COMMAND Section 9.1.12b

    ``== 1`` replaces the original's CPython-specific ``is 1`` tests.
    """
    # Mandatory part; mesType 0x30 = 00110000
    packet = (TpPd(pd=0x6) / MessageType(mesType=0x30) /
              MultislotAllocation())
    # Optional ChannelMode IEs, each with its own IEI, in original order.
    flags = [ChannelMode_presence, ChannelMode_presence1,
             ChannelMode_presence2, ChannelMode_presence3,
             ChannelMode_presence4, ChannelMode_presence5,
             ChannelMode_presence6, ChannelMode_presence7]
    ieis = [0x63, 0x11, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18]
    for flag, iei in zip(flags, ieis):
        if flag == 1:
            packet = packet / ChannelModeHdr(ieiCM=iei, eightBitCM=0x0)
    return packet
def configurationChangeAcknowledge():
    """CONFIGURATION CHANGE ACKNOWLEDGE Section 9.1.12c"""
    # mesType 0x31 = 00110001
    return TpPd(pd=0x6) / MessageType(mesType=0x31) / MobileId()
def configurationChangeReject():
    """CONFIGURATION CHANGE REJECT Section 9.1.12d"""
    # mesType 0x33 = 00110011
    return TpPd(pd=0x6) / MessageType(mesType=0x33) / RrCause()
# Network to MS
def frequencyRedefinition(CellChannelDescription_presence=0):
    """Frequency redefinition Section 9.1.13

    ``== 1`` replaces the original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x14 = 00010100
    packet = (TpPd(pd=0x6) / MessageType(mesType=0x14) /
              ChannelDescription() / MobileAllocation() / StartingTime())
    # Optional cell channel description IE
    if CellChannelDescription_presence == 1:
        packet = packet / CellChannelDescriptionHdr(ieiCCD=0x62,
                                                    eightBitCCD=0x0)
    return packet
# Network to MS
def pdchAssignmentCommand(ChannelDescription_presence=0,
                          CellChannelDescription_presence=0,
                          MobileAllocation_presence=0,
                          StartingTime_presence=0, FrequencyList_presence=0,
                          ChannelDescription_presence1=0,
                          FrequencyChannelSequence_presence=0,
                          MobileAllocation_presence1=0,
                          PacketChannelDescription_presence=0,
                          DedicatedModeOrTBF_presence=0):
    """PDCH ASSIGNMENT COMMAND Section 9.1.13a

    ``== 1`` replaces the original's CPython-specific ``is 1`` tests.
    """
    # Mandatory part; mesType 0x23 = 00100011
    packet = TpPd(pd=0x6) / MessageType(mesType=0x23) / ChannelDescription()
    # Optional IEs in original order: (presence flag, IE builder)
    optional = [
        (ChannelDescription_presence,
         lambda: ChannelDescriptionHdr(ieiCD=0x62, eightBitCD=0x0)),
        (CellChannelDescription_presence,
         lambda: CellChannelDescriptionHdr(ieiCCD=0x05, eightBitCCD=0x0)),
        (MobileAllocation_presence,
         lambda: MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)),
        (StartingTime_presence,
         lambda: StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)),
        (FrequencyList_presence,
         lambda: FrequencyListHdr(ieiFL=0x19, eightBitFL=0x0)),
        (ChannelDescription_presence1,
         lambda: ChannelDescriptionHdr(ieiCD=0x1C, eightBitCD=0x0)),
        (FrequencyChannelSequence_presence,
         lambda: FrequencyChannelSequenceHdr(ieiFCS=0x1E, eightBitFCS=0x0)),
        (MobileAllocation_presence1,
         lambda: MobileAllocationHdr(ieiMA=0x21, eightBitMA=0x0)),
        (PacketChannelDescription_presence,
         lambda: PacketChannelDescription(ieiPCD=0x22)),
        (DedicatedModeOrTBF_presence,
         lambda: DedicatedModeOrTBFHdr(ieiDMOT=0x23, eightBitDMOT=0x0)),
    ]
    for present, build in optional:
        if present == 1:
            packet = packet / build()
    return packet
def gprsSuspensionRequest():
    """GPRS SUSPENSION REQUEST Section 9.1.13b"""
    # MessageType is built with its default mesType here, as in the
    # original.
    return (TpPd(pd=0x6) / MessageType() / Tlli() /
            RoutingAreaIdentification() / SuspensionCause())
class HandoverAccess(Packet):
    """Handover Access, Section 9.1.14."""
    name = "Handover Access"  # Section 9.1.14"
    fields_desc = [
        ByteField("handover", None),
    ]
# Network to MS
def handoverCommand(SynchronizationIndication_presence=0,
                    FrequencyShortList_presence=0, FrequencyList_presence=0,
                    CellChannelDescription_presence=0,
                    MultislotAllocation_presence=0,
                    ChannelMode_presence=0, ChannelMode_presence1=0,
                    ChannelMode_presence2=0,
                    ChannelMode_presence3=0, ChannelMode_presence4=0,
                    ChannelMode_presence5=0,
                    ChannelMode_presence6=0, ChannelMode_presence7=0,
                    ChannelDescription_presence1=0, ChannelMode2_presence=0,
                    FrequencyChannelSequence_presence=0,
                    MobileAllocation_presence=0,
                    StartingTime_presence=0, TimeDifference_presence=0,
                    TimingAdvance_presence=0,
                    FrequencyShortList_presence1=0,
                    FrequencyList_presence1=0,
                    ChannelDescription2_presence=0,
                    ChannelDescription_presence2=0,
                    FrequencyChannelSequence_presence1=0,
                    MobileAllocation_presence1=0,
                    CipherModeSetting_presence=0,
                    VgcsTargetModeIdentication_presence=0,
                    MultiRateConfiguration_presence=0):
    """HANDOVER COMMAND Section 9.1.15

    ``== 1`` replaces the original's CPython-specific ``is 1`` tests; the
    original's unused local ``name`` was dropped.
    """
    # Mandatory part; mesType 0x2b = 00101011
    packet = (TpPd(pd=0x6) / MessageType(mesType=0x2b) / CellDescription() /
              ChannelDescription2() / HandoverReference() /
              PowerCommandAndAccessType())
    # Optional IEs in original order: (presence flag, IE builder)
    optional = [
        (SynchronizationIndication_presence,
         lambda: SynchronizationIndicationHdr(ieiSI=0xD, eightBitSI=0x0)),
        (FrequencyShortList_presence,
         lambda: FrequencyShortListHdr(ieiFSL=0x02)),
        (FrequencyList_presence,
         lambda: FrequencyListHdr(ieiFL=0x05, eightBitFL=0x0)),
        (CellChannelDescription_presence,
         lambda: CellChannelDescriptionHdr(ieiCCD=0x62, eightBitCCD=0x0)),
        (MultislotAllocation_presence,
         lambda: MultislotAllocationHdr(ieiMSA=0x10, eightBitMSA=0x0)),
        (ChannelMode_presence, lambda: ChannelModeHdr(ieiCM=0x63, eightBitCM=0x0)),
        (ChannelMode_presence1, lambda: ChannelModeHdr(ieiCM=0x11, eightBitCM=0x0)),
        (ChannelMode_presence2, lambda: ChannelModeHdr(ieiCM=0x13, eightBitCM=0x0)),
        (ChannelMode_presence3, lambda: ChannelModeHdr(ieiCM=0x14, eightBitCM=0x0)),
        (ChannelMode_presence4, lambda: ChannelModeHdr(ieiCM=0x15, eightBitCM=0x0)),
        (ChannelMode_presence5, lambda: ChannelModeHdr(ieiCM=0x16, eightBitCM=0x0)),
        (ChannelMode_presence6, lambda: ChannelModeHdr(ieiCM=0x17, eightBitCM=0x0)),
        (ChannelMode_presence7, lambda: ChannelModeHdr(ieiCM=0x18, eightBitCM=0x0)),
        (ChannelDescription_presence1,
         lambda: ChannelDescriptionHdr(ieiCD=0x64, eightBitCD=0x0)),
        (ChannelMode2_presence,
         lambda: ChannelMode2Hdr(ieiCM2=0x66, eightBitCM2=0x0)),
        (FrequencyChannelSequence_presence,
         lambda: FrequencyChannelSequenceHdr(ieiFCS=0x69, eightBitFCS=0x0)),
        (MobileAllocation_presence,
         lambda: MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)),
        (StartingTime_presence,
         lambda: StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)),
        (TimeDifference_presence,
         lambda: TimeDifferenceHdr(ieiTD=0x7B, eightBitTD=0x0)),
        (TimingAdvance_presence,
         lambda: TimingAdvanceHdr(ieiTA=0x7D, eightBitTA=0x0)),
        (FrequencyShortList_presence1,
         lambda: FrequencyShortListHdr(ieiFSL=0x12)),
        (FrequencyList_presence1,
         lambda: FrequencyListHdr(ieiFL=0x19, eightBitFL=0x0)),
        (ChannelDescription2_presence,
         lambda: ChannelDescription2Hdr(ieiCD2=0x1C, eightBitCD2=0x0)),
        (ChannelDescription_presence2,
         lambda: ChannelDescriptionHdr(ieiCD=0x1D, eightBitCD=0x0)),
        (FrequencyChannelSequence_presence1,
         lambda: FrequencyChannelSequenceHdr(ieiFCS=0x1E, eightBitFCS=0x0)),
        (MobileAllocation_presence1,
         lambda: MobileAllocationHdr(ieiMA=0x21, eightBitMA=0x0)),
        (CipherModeSetting_presence,
         lambda: CipherModeSettingHdr(ieiCMS=0x9, eightBitCMS=0x0)),
        (VgcsTargetModeIdentication_presence,
         lambda: VgcsTargetModeIdenticationHdr(ieiVTMI=0x01, eightBitVTMI=0x0)),
        (MultiRateConfiguration_presence,
         lambda: MultiRateConfigurationHdr(ieiMRC=0x03, eightBitMRC=0x0)),
    ]
    for present, build in optional:
        if present == 1:
            packet = packet / build()
    return packet
def handoverComplete(MobileTimeDifference_presence=0):
    """HANDOVER COMPLETE Section 9.1.16

    ``== 1`` replaces the original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x2c = 00101100
    packet = TpPd(pd=0x6) / MessageType(mesType=0x2c) / RrCause()
    # Optional mobile time difference IE
    if MobileTimeDifference_presence == 1:
        packet = packet / MobileTimeDifferenceHdr(ieiMTD=0x77,
                                                  eightBitMTD=0x0)
    return packet
def handoverFailure():
    """HANDOVER FAILURE Section 9.1.17"""
    # mesType 0x28 = 00101000
    return TpPd(pd=0x6) / MessageType(mesType=0x28) / RrCause()
#The L2 pseudo length of this message is the sum of lengths of all
#information elements present in the message except
#the IA Rest Octets and L2 Pseudo Length information elements.
# Network to MS
def immediateAssignment(ChannelDescription_presence=0,
                        PacketChannelDescription_presence=0,
                        StartingTime_presence=0):
    """IMMEDIATE ASSIGNMENT Section 9.1.18

    The L2 pseudo length is the sum of all IE lengths except the IA Rest
    Octets and the L2 Pseudo Length IE itself.  ``== 1`` replaces the
    original's CPython-specific ``is 1`` tests.
    """
    # Mandatory head; mesType 0x3F = 00111111
    packet = (L2PseudoLength() / TpPd(pd=0x6) / MessageType(mesType=0x3F) /
              PageModeAndDedicatedModeOrTBF())
    # Optional channel descriptions (inserted before the mandatory tail)
    if ChannelDescription_presence == 1:
        packet = packet / ChannelDescription()
    if PacketChannelDescription_presence == 1:
        packet = packet / PacketChannelDescription()
    # Mandatory tail
    packet = packet / RequestReference() / TimingAdvance() / MobileAllocation()
    if StartingTime_presence == 1:
        packet = packet / StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)
    return packet / IaRestOctets()
#The L2 pseudo length of this message is the sum of lengths of all
#information elements present in the message except
#the IAX Rest Octets and L2 Pseudo Length information elements.
# Network to MS
def immediateAssignmentExtended(StartingTime_presence=0):
    """IMMEDIATE ASSIGNMENT EXTENDED Section 9.1.19

    The L2 pseudo length is the sum of all IE lengths except the IAX Rest
    Octets and the L2 Pseudo Length IE itself.  ``== 1`` replaces the
    original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x39 = 00111001
    packet = (L2PseudoLength() / TpPd(pd=0x6) / MessageType(mesType=0x39) /
              PageModeAndSpareHalfOctets() / ChannelDescription() /
              RequestReference() / TimingAdvance() / MobileAllocation())
    # Optional starting time IE
    if StartingTime_presence == 1:
        packet = packet / StartingTimeHdr(ieiST=0x7C, eightBitST=0x0)
    return packet / IaxRestOctets()
# This message has L2 pseudo length 19
# Network to MS
def immediateAssignmentReject():
    """IMMEDIATE ASSIGNMENT REJECT Section 9.1.20"""
    # Fixed L2 pseudo length of 19 (0x13); mesType 0x3a = 00111010.
    # Carries four (request reference, wait indication) pairs.
    packet = (L2PseudoLength(l2pLength=0x13) / TpPd(pd=0x6) /
              MessageType(mesType=0x3a) / PageModeAndSpareHalfOctets())
    for _ in range(4):
        packet = packet / RequestReference() / WaitIndication()
    return packet / IraRestOctets()
def measurementReport():
    """MEASUREMENT REPORT Section 9.1.21"""
    # mesType 0x15 = 00010101
    return TpPd(pd=0x6) / MessageType(mesType=0x15) / MeasurementResults()
# len max 20
class NotificationFacch(Packet):
    """NOTIFICATION/FACCH Section 9.1.21a (len max 20)

    Fix: the original did not inherit from Packet, so fields_desc was
    never interpreted by scapy and the class could not be built or
    dissected.
    """
    name = "Notification/facch"
    fields_desc = [
        BitField("rr", 0x0, 1),
        # NOTE(review): "msgTyoe" looks like a typo for "msgType", but the
        # field name is part of the public API, so it is left unchanged.
        BitField("msgTyoe", 0x0, 5),
        BitField("layer2Header", 0x0, 2),
        BitField("frChanDes", 0x0, 24)
    ]
# The L2 pseudo length of this message has a value one
# Network to MS
def notificationNch():
    """NOTIFICATION/NCH Section 9.1.21b"""
    # Fixed L2 pseudo length of one; mesType 0x20 = 00100000
    return (L2PseudoLength(l2pLength=0x01) / TpPd(pd=0x6) /
            MessageType(mesType=0x20) / NtNRestOctets())
def notificationResponse():
    """NOTIFICATION RESPONSE Section 9.1.21d"""
    # mesType 0x26 = 00100110
    return (TpPd(pd=0x6) / MessageType(mesType=0x26) /
            MobileStationClassmark2() / MobileId() /
            DescriptiveGroupOrBroadcastCallReference())
# Network to MS
def rrCellChangeOrder():
    """RR-CELL CHANGE ORDER Section 9.1.21e"""
    # mesType 0x8 = 00001000
    return (TpPd(pd=0x6) / MessageType(mesType=0x8) /
            CellDescription() / NcModeAndSpareHalfOctets())
# Network to MS
def pagingRequestType1(MobileId_presence=0):
    """PAGING REQUEST TYPE 1 Section 9.1.22

    The L2 pseudo length is the sum of all IE lengths except the P1 Rest
    Octets and the L2 Pseudo Length IE itself.  ``== 1`` replaces the
    original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x21 = 00100001
    packet = (L2PseudoLength() / TpPd(pd=0x6) / MessageType(mesType=0x21) /
              PageModeAndChannelNeeded() / MobileId())
    # Optional second mobile identity IE
    if MobileId_presence == 1:
        packet = packet / MobileIdHdr(ieiMI=0x17, eightBitMI=0x0)
    return packet / P1RestOctets()
# The L2 pseudo length of this message is the sum of lengths of all
# information elements present in the message except
# Network to MS
def pagingRequestType2(MobileId_presence=0):
    """PAGING REQUEST TYPE 2 Section 9.1.23

    ``== 1`` replaces the original's CPython-specific ``is 1`` test.
    """
    # Mandatory part; mesType 0x22 = 00100010; two mobile identities
    packet = (L2PseudoLength() / TpPd(pd=0x6) / MessageType(mesType=0x22) /
              PageModeAndChannelNeeded() / MobileId() / MobileId())
    # Optional third mobile identity IE
    if MobileId_presence == 1:
        packet = packet / MobileIdHdr(ieiMI=0x17, eightBitMI=0x0)
    return packet / P2RestOctets()
# Network to MS
def pagingRequestType3():
    """PAGING REQUEST TYPE 3 Section 9.1.24"""
    # Fixed L2 pseudo length of 19 (0x13); mesType 0x24 = 00100100.
    # Carries four TMSI/P-TMSI identities.
    packet = (L2PseudoLength(l2pLength=0x13) / TpPd(pd=0x6) /
              MessageType(mesType=0x24) / PageModeAndChannelNeeded())
    for _ in range(4):
        packet = packet / TmsiPTmsi()
    return packet / P3RestOctets()
def pagingResponse():
    """PAGING RESPONSE Section 9.1.25"""
    # mesType 0x27 = 00100111
    return (TpPd(pd=0x6) / MessageType(mesType=0x27) /
            CiphKeySeqNrAndSpareHalfOctets() / MobileStationClassmark2() /
            MobileId())
# Network to MS
def partialRelease():
    """PARTIAL RELEASE Section 9.1.26"""
    # mesType 0xa = 00001010
    return TpPd(pd=0x6) / MessageType(mesType=0xa) / ChannelDescription()
def partialReleaseComplete():
    """PARTIAL RELEASE COMPLETE Section 9.1.27"""
    # mesType 0xf = 00001111
    return TpPd(pd=0x6) / MessageType(mesType=0xf)
# Network to MS
def physicalInformation():
    """PHYSICAL INFORMATION Section 9.1.28"""
    # mesType 0x2d = 00101101
    return TpPd(pd=0x6) / MessageType(mesType=0x2d) / TimingAdvance()
def rrInitialisationRequest():
    """RR Initialisation Request Section 9.1.28.a"""
    # mesType 0x3c = 00111100
    return (TpPd(pd=0x6) / MessageType(mesType=0x3c) /
            CiphKeySeqNrAndMacModeAndChannelCodingRequest() /
            MobileStationClassmark2() / Tlli() /
            ChannelRequestDescription() / GprsMeasurementResults())
def rrStatus():
    """RR STATUS Section 9.1.29"""
    # mesType 0x12 = 00010010
    return TpPd(pd=0x6) / MessageType(mesType=0x12) / RrCause()
# It does not
# follow the basic format. Its length is _25_ bits. The
# order of bit transmission is defined in GSM 04.04.
# Network to MS
# NOTE(review): this class declares a scapy-style fields_desc but does not
# inherit from Packet, so it cannot be dissected/built as a layer as written.
# Presumably it should subclass Packet like the other layers in this file —
# TODO confirm before changing the base class.
class SynchronizationChannelInformation():
    """SYNCHRONIZATION CHANNEL INFORMATION Section 9.1.30"""
    name = "Synchronization Channel Information"
    # 25-bit SCH content (does not follow the basic L3 format); the frame
    # number components T1/T3 are split into Hi/Lo bit fields.
    fields_desc = [
        BitField("bsic", 0x0, 5),
        BitField("t1Hi", 0x0, 3),
        ByteField("t1Mi", 0x0),
        BitField("t1Lo", 0x0, 1),
        BitField("t2", 0x0, 5),
        BitField("t3Hi", 0x0, 2),
        BitField("t3Lo", 0x0, 1)
    ]
# This message has a L2 Pseudo Length of 21.
# Network to MS
def systemInformationType1():
    """SYSTEM INFORMATION TYPE 1 Section 9.1.31"""
    # Fixed L2 pseudo length of 21 octets.
    return (L2PseudoLength(l2pLength=0x15) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x19) /  # 00011001
            CellChannelDescription() /
            RachControlParameters() /
            Si1RestOctets())
# This message has a L2 Pseudo Length of 22.
# Network to MS
def systemInformationType2():
    """SYSTEM INFORMATION TYPE 2 Section 9.1.32"""
    # Fixed L2 pseudo length of 22 octets.
    return (L2PseudoLength(l2pLength=0x16) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x1a) /  # 00011010
            NeighbourCellsDescription() /
            NccPermitted() /
            RachControlParameters())
# This message has a L2 pseudo length of 21
# Network to MS
def systemInformationType2bis():
    """SYSTEM INFORMATION TYPE 2bis Section 9.1.33"""
    # Fixed L2 pseudo length of 21 octets.
    return (L2PseudoLength(l2pLength=0x15) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x2) /  # 00000010
            NeighbourCellsDescription() /
            RachControlParameters() /
            Si2bisRestOctets())
# This message has a L2 pseudo length of 18
# Network to MS
def systemInformationType2ter():
    """SYSTEM INFORMATION TYPE 2ter Section 9.1.34"""
    # Fixed L2 pseudo length of 18 octets.
    return (L2PseudoLength(l2pLength=0x12) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x3) /  # 00000011
            NeighbourCellsDescription2() /
            Si2terRestOctets())
# This message has a L2 Pseudo Length of 18
# Network to MS
def systemInformationType3():
    """SYSTEM INFORMATION TYPE 3 Section 9.1.35"""
    # Fixed L2 pseudo length of 18 octets; broadcasts cell identity,
    # location area and the main control-channel parameters.
    return (L2PseudoLength(l2pLength=0x12) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x1b) /  # 00011011
            CellIdentity() /
            LocalAreaId() /
            ControlChannelDescription() /
            CellOptionsBCCH() /
            CellSelectionParameters() /
            RachControlParameters() /
            Si3RestOctets())
#The L2 pseudo length of this message is the
#sum of lengths of all information elements present in the message except
#the SI 4 Rest Octets and L2 Pseudo Length
# Network to MS
def systemInformationType4(ChannelDescription_presence=0,
                           MobileAllocation_presence=0):
    """SYSTEM INFORMATION TYPE 4 Section 9.1.36

    The L2 pseudo length is the sum of the lengths of all IEs present
    except the SI 4 Rest Octets and the L2 Pseudo Length itself.
    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = L2PseudoLength()
    b = TpPd(pd=0x6)
    c = MessageType(mesType=0x1C)  # 00011100
    d = LocalAreaId()
    e = CellSelectionParameters()
    f = RachControlParameters()
    packet = a / b / c / d / e / f
    # Bug fix: compare with `== 1` — `is 1` tests object identity with an
    # int literal, which is implementation-defined and a SyntaxWarning.
    if ChannelDescription_presence == 1:
        g = ChannelDescriptionHdr(ieiCD=0x64, eightBitCD=0x0)
        packet = packet / g
    if MobileAllocation_presence == 1:
        h = MobileAllocationHdr(ieiMA=0x72, eightBitMA=0x0)
        packet = packet / h
    i = Si4RestOctets()
    packet = packet / i
    return packet
#This message has a L2 Pseudo Length of 18
# Network to MS
def systemInformationType5():
    """SYSTEM INFORMATION TYPE 5 Section 9.1.37"""
    # Fixed L2 pseudo length of 18 octets.
    return (L2PseudoLength(l2pLength=0x12) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x35) /  # 00110101
            NeighbourCellsDescription())
#This message has a L2 Pseudo Length of 18
# Network to MS
def systemInformationType5bis():
    """SYSTEM INFORMATION TYPE 5bis Section 9.1.38"""
    # Fixed L2 pseudo length of 18 octets.
    return (L2PseudoLength(l2pLength=0x12) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x5) /  # 00000101
            NeighbourCellsDescription())
# This message has a L2 Pseudo Length of 18
# Network to MS
def systemInformationType5ter():
    """SYSTEM INFORMATION TYPE 5ter Section 9.1.39"""
    # Fixed L2 pseudo length of 18 octets.
    return (L2PseudoLength(l2pLength=0x12) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x6) /  # 00000110
            NeighbourCellsDescription2())
#This message has a L2 Pseudo Length of 11
# Network to MS
def systemInformationType6():
    """SYSTEM INFORMATION TYPE 6 Section 9.1.40

    Fixed L2 pseudo length of 11 octets.
    """
    a = L2PseudoLength(l2pLength=0x0b)
    b = TpPd(pd=0x6)
    c = MessageType(mesType=0x1e)  # 00011110
    d = CellIdentity()
    e = LocalAreaId()
    f = CellOptionsBCCH()
    g = NccPermitted()
    h = Si6RestOctets()
    # Bug fix: Si6RestOctets (h) was built but never appended to the packet.
    packet = a / b / c / d / e / f / g / h
    return packet
# The L2 pseudo length of this message has the value 1
# Network to MS
def systemInformationType7():
    """SYSTEM INFORMATION TYPE 7 Section 9.1.41"""
    # L2 pseudo length has the fixed value 1.
    return (L2PseudoLength(l2pLength=0x01) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x37) /  # 00110111
            Si7RestOctets())
# The L2 pseudo length of this message has the value 1
# Network to MS
def systemInformationType8():
    """SYSTEM INFORMATION TYPE 8 Section 9.1.42"""
    # L2 pseudo length has the fixed value 1.
    return (L2PseudoLength(l2pLength=0x01) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x18) /  # 00011000
            Si8RestOctets())
# The L2 pseudo length of this message has the value 1
# Network to MS
def systemInformationType9():
    """SYSTEM INFORMATION TYPE 9 Section 9.1.43"""
    # L2 pseudo length has the fixed value 1.
    return (L2PseudoLength(l2pLength=0x01) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x4) /  # 00000100
            Si9RestOctets())
# The L2 pseudo length of this message has the value 0
# Network to MS
def systemInformationType13():
    """SYSTEM INFORMATION TYPE 13 Section 9.1.43a"""
    # L2 pseudo length has the fixed value 0.
    return (L2PseudoLength(l2pLength=0x00) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x0) /  # 00000000
            Si13RestOctets())
#
# 9.1.43b / c spare
#
# The L2 pseudo length of this message has the value 1
# Network to MS
def systemInformationType16():
    """SYSTEM INFORMATION TYPE 16 Section 9.1.43d"""
    # L2 pseudo length has the fixed value 1.
    return (L2PseudoLength(l2pLength=0x01) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x3d) /  # 00111101
            Si16RestOctets())
# The L2 pseudo length of this message has the value 1
# Network to MS
def systemInformationType17():
    """SYSTEM INFORMATION TYPE 17 Section 9.1.43e"""
    # L2 pseudo length has the fixed value 1.
    return (L2PseudoLength(l2pLength=0x01) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x3e) /  # 00111110
            Si17RestOctets())
def talkerIndication():
    """TALKER INDICATION Section 9.1.44"""
    # Identifies the current talker (classmark + mobile identity).
    return (TpPd(pd=0x6) /
            MessageType(mesType=0x11) /  # 00010001
            MobileStationClassmark2() /
            MobileId())
# NOTE(review): declares fields_desc without inheriting from Packet, so it
# is not usable as a scapy layer as written — presumably it should subclass
# Packet like the other layers in this file; TODO confirm.
class UplinkAccess():
    """UPLINK ACCESS Section 9.1.45"""
    name = "Uplink Access"
    # Single octet carrying the establishment cause.
    fields_desc = [
        ByteField("establishment", 0x0)
    ]
# Network to MS
def uplinkBusy():
    """UPLINK BUSY Section 9.1.46

    Two-octet network-to-MS message: header only, no IEs.
    """
    # Cleanup: removed the unused local `name` — it was a leftover from a
    # class-style definition and had no effect inside a function.
    a = TpPd(pd=0x6)
    b = MessageType(mesType=0x2a)  # 00101010
    packet = a / b
    return packet
# Network to MS
# NOTE(review): declares fields_desc without inheriting from Packet, so it
# is not usable as a scapy layer as written — presumably it should subclass
# Packet like the other layers in this file; TODO confirm.
class UplinkFree():
    """UPLINK FREE Section 9.1.47"""
    name = "Uplink Free"
    fields_desc = [
        BitField("pd", 0x0, 1),
        BitField("msgType", 0x0, 5),
        BitField("layer2Header", 0x0, 2),
        BitField("uplinkAccess", 0x0, 1),
        BitField("lOrH", 0x0, 1),  # 0 for L, 1 for H
        BitField("upIdCode", 0x0, 6),
    ]
def uplinkRelease():
    """UPLINK RELEASE Section 9.1.48"""
    # Releases the uplink; carries the RR cause.
    return (TpPd(pd=0x6) /
            MessageType(mesType=0xe) /  # 00001110
            RrCause())
# Network to MS
def vgcsUplinkGrant():
    """VGCS UPLINK GRANT Section 9.1.49"""
    # Network-to-MS grant of the voice group call uplink.
    return (TpPd(pd=0x6) /
            MessageType(mesType=0x9) /  # 00001001
            RrCause() /
            RequestReference() /
            TimingAdvance())
# Network to MS
# NOTE(review): this is written as a function but its body is a class-style
# fields_desc definition; as written it binds two unused locals and returns
# None. It presumably was meant to be a Packet subclass like the other
# SI layers — TODO confirm intent before converting it.
def systemInformationType10():
    """SYSTEM INFORMATION TYPE 10 Section 9.1.50"""
    name = "SyStem Information Type 10"
    fields_desc = [
        BitField("pd", 0x0, 1),
        BitField("msgType", 0x0, 5),
        BitField("layer2Header", 0x0, 2),
        BitField("si10", 0x0, 160)
    ]
# Network to MS
# The L2 pseudo length of this message has the value 18
def extendedMeasurementOrder():
    """EXTENDED MEASUREMENT ORDER Section 9.1.51"""
    # L2 pseudo length has the fixed value 18.
    return (L2PseudoLength(l2pLength=0x12) /
            TpPd(pd=0x6) /
            MessageType(mesType=0x37) /  # 00110111
            ExtendedMeasurementFrequencyList())
def extendedMeasurementReport():
    """EXTENDED MEASUREMENT REPORT Section 9.1.52"""
    # MS-to-network report of extended measurement results.
    return (TpPd(pd=0x6) /
            MessageType(mesType=0x36) /  # 00110110
            ExtendedMeasurementResults())
def applicationInformation():
    """APPLICATION INFORMATION Section 9.1.53"""
    # Carries an APDU (id, flags and data) over the RR connection.
    return (TpPd(pd=0x6) /
            MessageType(mesType=0x38) /  # 00111000
            ApduIDAndApduFlags() /
            ApduData())
#
# 9.2 Messages for mobility management
#
# Network to MS
def authenticationReject():
    """AUTHENTICATION REJECT Section 9.2.1"""
    # Two-octet MM message: header only.
    return TpPd(pd=0x5) / MessageType(mesType=0x11)  # 00010001
# Network to MS
def authenticationRequest():
    """AUTHENTICATION REQUEST Section 9.2.2"""
    # Network-to-MS challenge carrying the RAND parameter.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x12) /  # 00010010
            CiphKeySeqNrAndSpareHalfOctets() /
            AuthenticationParameterRAND())
def authenticationResponse():
    """AUTHENTICATION RESPONSE Section 9.2.3"""
    # MS answer to the challenge carrying the SRES parameter.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x14) /  # 00010100
            AuthenticationParameterSRES())
def cmReestablishmentRequest(LocalAreaId_presence=0):
    """CM RE-ESTABLISHMENT REQUEST Section 9.2.4

    The optional Location Area Identification IE is appended when
    LocalAreaId_presence == 1.
    """
    a = TpPd(pd=0x5)
    b = MessageType(mesType=0x28)  # 00101000
    c = CiphKeySeqNrAndSpareHalfOctets()
    e = MobileStationClassmark2()
    f = MobileId()
    # Bug fix: the original referenced `packet` inside the presence branch
    # before it was assigned (UnboundLocalError when the flag was set).
    # Build the mandatory part first, then append the optional IE.
    packet = a / b / c / e / f
    if LocalAreaId_presence == 1:  # was `is 1` (identity check on literal)
        g = LocalAreaId(iei=0x13, eightbit=0x0)
        packet = packet / g
    return packet
# Network to MS
def cmServiceAccept():
    """CM SERVICE ACCEPT Section 9.2.5"""
    # Two-octet MM message: header only.
    return TpPd(pd=0x5) / MessageType(mesType=0x21)  # 00100001
# Network to MS
def cmServicePrompt():
    """CM SERVICE PROMPT Section 9.2.5a"""
    # Network-to-MS prompt identifying the requested PD and SAPI.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x25) /  # 00100101
            PdAndSapi())
# Network to MS
def cmServiceReject():
    """CM SERVICE REJECT Section 9.2.6"""
    # Network rejects the service request; carries the reject cause.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x22) /  # 00100010
            RejectCause())
def cmServiceAbort():
    """CM SERVICE ABORT Section 9.2.7"""
    # Two-octet MM message: header only.
    return TpPd(pd=0x5) / MessageType(mesType=0x23)  # 00100011
# Network to MS
def abort():
    """ABORT Section 9.2.8"""
    # Network-to-MS abort of MM connections; carries the reject cause.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x29) /  # 00101001
            RejectCause())
def cmServiceRequest(PriorityLevel_presence=0):
    """CM SERVICE REQUEST Section 9.2.9

    The optional Priority Level IE is appended when
    PriorityLevel_presence == 1.
    """
    a = TpPd(pd=0x5)
    b = MessageType(mesType=0x24)  # 00100100
    c = CmServiceTypeAndCiphKeySeqNr()
    e = MobileStationClassmark2()
    f = MobileId()
    packet = a / b / c / e / f
    # Bug fix: `is 1` replaced with `== 1` (identity comparison with an int
    # literal is implementation-defined and raises a SyntaxWarning).
    if PriorityLevel_presence == 1:
        g = PriorityLevelHdr(ieiPL=0x8, eightBitPL=0x0)
        packet = packet / g
    return packet
# Network to MS
def identityRequest():
    """IDENTITY REQUEST Section 9.2.10"""
    # Network asks the MS for a specific identity type.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x8) /  # 00001000
            IdentityTypeAndSpareHalfOctets())
def identityResponse():
    """IDENTITY RESPONSE Section 9.2.11"""
    # MS returns the requested mobile identity.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x9) /  # 00001001
            MobileId())
def imsiDetachIndication():
    """IMSI DETACH INDICATION Section 9.2.12"""
    # MS signals power-off/detach with classmark and identity.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x1) /  # 00000001
            MobileStationClassmark1() /
            MobileId())
# Network to MS
def locationUpdatingAccept(MobileId_presence=0,
                           FollowOnProceed_presence=0,
                           CtsPermission_presence=0):
    """LOCATION UPDATING ACCEPT Section 9.2.13

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x5)
    b = MessageType(mesType=0x02)  # 00000010
    c = LocalAreaId()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` throughout (identity comparison
    # with an int literal is implementation-defined).
    if MobileId_presence == 1:
        d = MobileIdHdr(ieiMI=0x17, eightBitMI=0x0)
        packet = packet / d
    if FollowOnProceed_presence == 1:
        e = FollowOnProceed(ieiFOP=0xA1)
        packet = packet / e
    if CtsPermission_presence == 1:
        f = CtsPermissionHdr(ieiCP=0xA2, eightBitCP=0x0)
        packet = packet / f
    return packet
# Network to MS
def locationUpdatingReject():
    """LOCATION UPDATING REJECT Section 9.2.14"""
    # Network rejects the location update; carries the reject cause.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x4) /  # 00000100
            RejectCause())
def locationUpdatingRequest():
    """LOCATION UPDATING REQUEST Section 9.2.15"""
    # MS-originated request with old LAI, classmark and identity.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x8) /  # 00001000
            LocationUpdatingTypeAndCiphKeySeqNr() /
            LocalAreaId() /
            MobileStationClassmark1() /
            MobileId())
# Network to MS
def mmInformation(NetworkName_presence=0, NetworkName_presence1=0,
                  TimeZone_presence=0, TimeZoneAndTime_presence=0,
                  LsaIdentifier_presence=0):
    """MM INFORMATION Section 9.2.15a

    All IEs are optional; each is appended when its *_presence flag is 1.
    The two NetworkName flags select the full (0x43) and short (0x45)
    network-name IEs respectively.
    """
    a = TpPd(pd=0x5)
    b = MessageType(mesType=0x32)  # 00110010
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout (identity comparison
    # with an int literal is implementation-defined).
    if NetworkName_presence == 1:
        c = NetworkNameHdr(ieiNN=0x43, eightBitNN=0x0)
        packet = packet / c
    if NetworkName_presence1 == 1:
        d = NetworkNameHdr(ieiNN=0x45, eightBitNN=0x0)
        packet = packet / d
    if TimeZone_presence == 1:
        e = TimeZoneHdr(ieiTZ=0x46, eightBitTZ=0x0)
        packet = packet / e
    if TimeZoneAndTime_presence == 1:
        f = TimeZoneAndTimeHdr(ieiTZAT=0x47, eightBitTZAT=0x0)
        packet = packet / f
    if LsaIdentifier_presence == 1:
        g = LsaIdentifierHdr(ieiLI=0x48, eightBitLI=0x0)
        packet = packet / g
    return packet
def mmStatus():
    """MM STATUS Section 9.2.16"""
    # Reports an MM protocol error condition via the reject cause.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x31) /  # 00110001
            RejectCause())
# Network to MS
def tmsiReallocationCommand():
    """TMSI REALLOCATION COMMAND Section 9.2.17"""
    # Network assigns a new TMSI together with the LAI.
    return (TpPd(pd=0x5) /
            MessageType(mesType=0x1a) /  # 00011010
            LocalAreaId() /
            MobileId())
def tmsiReallocationComplete():
    """TMSI REALLOCATION COMPLETE Section 9.2.18"""
    # Two-octet MM message: header only.
    return TpPd(pd=0x5) / MessageType(mesType=0x1b)  # 00011011
def mmNull():
    """MM NULL Section 9.2.19"""
    # Two-octet MM message: header only.
    return TpPd(pd=0x5) / MessageType(mesType=0x30)  # 00110000
#
# 9.3 Messages for circuit-switched call control
#
# Network to MS
def alertingNetToMs(Facility_presence=0, ProgressIndicator_presence=0,
                    UserUser_presence=0):
    """ALERTING (network to MS) Section 9.3.1.1

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x1)  # 00000001
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout (identity comparison
    # with an int literal is implementation-defined).
    if Facility_presence == 1:
        c = FacilityHdr(ieiF=0x1C)
        packet = packet / c
    if ProgressIndicator_presence == 1:
        d = ProgressIndicatorHdr(ieiPI=0x1E)
        packet = packet / d
    if UserUser_presence == 1:
        e = UserUserHdr(ieiUU=0x7E)
        packet = packet / e
    return packet
def alertingMsToNet(Facility_presence=0, UserUser_presence=0,
                    SsVersionIndicator_presence=0):
    """ALERTING (MS to network) Section 9.3.1.2

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x1)  # 00000001
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Facility_presence == 1:
        c = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / c
    if UserUser_presence == 1:
        d = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / d
    if SsVersionIndicator_presence == 1:
        e = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / e
    return packet
def callConfirmed(RepeatIndicator_presence=0,
                  BearerCapability_presence=0, BearerCapability_presence1=0,
                  Cause_presence=0, CallControlCapabilities_presence=0):
    """CALL CONFIRMED Section 9.3.2

    Optional IEs are appended when the matching *_presence flag is 1; the
    two BearerCapability flags allow bearer capability 1 and 2.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x8)  # 00001000
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if RepeatIndicator_presence == 1:
        c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / c
    if BearerCapability_presence == 1:
        d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / d
    if BearerCapability_presence1 == 1:
        e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / e
    if Cause_presence == 1:
        f = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / f
    if CallControlCapabilities_presence == 1:
        g = CallControlCapabilitiesHdr(ieiCCC=0x15, eightBitCCC=0x0)
        packet = packet / g
    return packet
# Network to MS
def callProceeding(RepeatIndicator_presence=0,
                   BearerCapability_presence=0,
                   BearerCapability_presence1=0,
                   Facility_presence=0, ProgressIndicator_presence=0,
                   PriorityLevel_presence=0):
    """CALL PROCEEDING Section 9.3.3

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x2)  # 00000010
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if RepeatIndicator_presence == 1:
        c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / c
    if BearerCapability_presence == 1:
        d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / d
    if BearerCapability_presence1 == 1:
        e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / e
    if Facility_presence == 1:
        f = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / f
    if ProgressIndicator_presence == 1:
        g = ProgressIndicatorHdr(ieiPI=0x1E, eightBitPI=0x0)
        packet = packet / g
    if PriorityLevel_presence == 1:
        h = PriorityLevelHdr(ieiPL=0x80, eightBitPL=0x0)
        packet = packet / h
    return packet
# Network to MS
def congestionControl(Cause_presence=0):
    """CONGESTION CONTROL Section 9.3.4

    The optional Cause IE is appended when Cause_presence == 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x39)  # 00111001
    c = CongestionLevelAndSpareHalfOctets()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` (identity check on int literal).
    if Cause_presence == 1:
        e = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / e
    return packet
# Network to MS
def connectNetToMs(Facility_presence=0, ProgressIndicator_presence=0,
                   ConnectedNumber_presence=0, ConnectedSubaddress_presence=0,
                   UserUser_presence=0):
    """CONNECT (network to MS) Section 9.3.5.1

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x7)  # 00000111
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Facility_presence == 1:
        c = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / c
    if ProgressIndicator_presence == 1:
        d = ProgressIndicatorHdr(ieiPI=0x1E, eightBitPI=0x0)
        packet = packet / d
    if ConnectedNumber_presence == 1:
        e = ConnectedNumberHdr(ieiCN=0x4C, eightBitCN=0x0)
        packet = packet / e
    if ConnectedSubaddress_presence == 1:
        f = ConnectedSubaddressHdr(ieiCS=0x4D, eightBitCS=0x0)
        packet = packet / f
    if UserUser_presence == 1:
        g = UserUserHdr(ieiUU=0x7F, eightBitUU=0x0)
        packet = packet / g
    return packet
def connectMsToNet(Facility_presence=0, ConnectedSubaddress_presence=0,
                   UserUser_presence=0, SsVersionIndicator_presence=0):
    """CONNECT (MS to network) Section 9.3.5.2

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x7)  # 00000111
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Facility_presence == 1:
        c = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / c
    if ConnectedSubaddress_presence == 1:
        d = ConnectedSubaddressHdr(ieiCS=0x4D, eightBitCS=0x0)
        packet = packet / d
    if UserUser_presence == 1:
        e = UserUserHdr(ieiUU=0x7F, eightBitUU=0x0)
        packet = packet / e
    if SsVersionIndicator_presence == 1:
        f = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / f
    return packet
def connectAcknowledge():
    """CONNECT ACKNOWLEDGE Section 9.3.6"""
    # Two-octet CC message: header only.
    return TpPd(pd=0x3) / MessageType(mesType=0xf)  # 00001111
# Network to MS
def disconnectNetToMs(Facility_presence=0, ProgressIndicator_presence=0,
                      UserUser_presence=0, AllowedActions_presence=0):
    """DISCONNECT (network to MS) Section 9.3.7.1

    The Cause IE is mandatory; other IEs are appended when the matching
    *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x25)  # 00100101
    c = Cause()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Facility_presence == 1:
        d = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / d
    if ProgressIndicator_presence == 1:
        e = ProgressIndicatorHdr(ieiPI=0x1E, eightBitPI=0x0)
        packet = packet / e
    if UserUser_presence == 1:
        f = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / f
    if AllowedActions_presence == 1:
        g = AllowedActionsHdr(ieiAA=0x7B, eightBitAA=0x0)
        packet = packet / g
    return packet
def disconnectMsToNet(Facility_presence=0, UserUser_presence=0,
                      SsVersionIndicator_presence=0):
    """DISCONNECT (MS to network) Section 9.3.7.2

    The Cause IE is mandatory; other IEs are appended when the matching
    *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x25)  # 00100101
    c = Cause()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Facility_presence == 1:
        d = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / d
    if UserUser_presence == 1:
        e = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / e
    if SsVersionIndicator_presence == 1:
        f = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / f
    return packet
def emergencySetup(BearerCapability_presence=0):
    """EMERGENCY SETUP Section 9.3.8

    The optional Bearer Capability IE is appended when
    BearerCapability_presence == 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0xe)  # 00001110
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` (identity check on int literal).
    if BearerCapability_presence == 1:
        c = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / c
    return packet
# Network to MS
def facilityNetToMs():
    """FACILITY (network to MS) Section 9.3.9.1"""
    # Mandatory Facility IE only.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x3a) /  # 00111010
            Facility())
def facilityMsToNet(SsVersionIndicator_presence=0):
    """FACILITY (MS to network) Section 9.3.9.2

    The optional SS Version Indicator IE is appended when
    SsVersionIndicator_presence == 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x3a)  # 00111010
    c = Facility()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` (identity check on int literal).
    if SsVersionIndicator_presence == 1:
        d = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / d
    return packet
def hold():
    """HOLD Section 9.3.10"""
    # Two-octet CC message: header only.
    return TpPd(pd=0x3) / MessageType(mesType=0x18)  # 00011000
# Network to MS
def holdAcknowledge():
    """HOLD ACKNOWLEDGE Section 9.3.11"""
    # Two-octet CC message: header only.
    return TpPd(pd=0x3) / MessageType(mesType=0x19)  # 00011001
# Network to MS
def holdReject():
    """HOLD REJECT Section 9.3.12"""
    # Network rejects the hold request; carries the cause.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x1a) /  # 00011010
            Cause())
def modify(LowLayerCompatibility_presence=0,
           HighLayerCompatibility_presence=0,
           ReverseCallSetupDirection_presence=0):
    """MODIFY Section 9.3.13

    The Bearer Capability IE is mandatory; other IEs are appended when the
    matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x17)  # 00010111
    c = BearerCapability()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if LowLayerCompatibility_presence == 1:
        d = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / d
    if HighLayerCompatibility_presence == 1:
        e = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / e
    if ReverseCallSetupDirection_presence == 1:
        f = ReverseCallSetupDirectionHdr(ieiRCSD=0xA3)
        packet = packet / f
    return packet
def modifyComplete(LowLayerCompatibility_presence=0,
                   HighLayerCompatibility_presence=0,
                   ReverseCallSetupDirection_presence=0):
    """MODIFY COMPLETE Section 9.3.14

    The Bearer Capability IE is mandatory; other IEs are appended when the
    matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x1f)  # 00011111
    c = BearerCapability()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if LowLayerCompatibility_presence == 1:
        d = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / d
    if HighLayerCompatibility_presence == 1:
        e = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / e
    if ReverseCallSetupDirection_presence == 1:
        # Consistency fix: use the Hdr variant as in modify(); the original
        # called ReverseCallSetupDirection(ieiRCSD=...), inconsistent with
        # the sibling builder.
        f = ReverseCallSetupDirectionHdr(ieiRCSD=0xA3)
        packet = packet / f
    return packet
def modifyReject(LowLayerCompatibility_presence=0,
                 HighLayerCompatibility_presence=0):
    """MODIFY REJECT Section 9.3.15

    Bearer Capability and Cause IEs are mandatory; other IEs are appended
    when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x13)  # 00010011
    c = BearerCapability()
    d = Cause()
    packet = a / b / c / d
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if LowLayerCompatibility_presence == 1:
        e = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / e
    if HighLayerCompatibility_presence == 1:
        f = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / f
    return packet
def notify():
    """NOTIFY Section 9.3.16"""
    # Carries the notification indicator IE.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x3e) /  # 00111110
            NotificationIndicator())
# Network to MS
def progress(UserUser_presence=0):
    """PROGRESS Section 9.3.17

    The Progress Indicator IE is mandatory; the User-User IE is appended
    when UserUser_presence == 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x3)  # 00000011
    c = ProgressIndicator()
    packet = a / b / c
    # Bug fix: `is 1` replaced with `== 1` (identity check on int literal).
    if UserUser_presence == 1:
        d = UserUserHdr()
        packet = packet / d
    return packet
# Network to MS
def ccEstablishment():
    """CC-ESTABLISHMENT Section 9.3.17a"""
    # Carries the setup container IE.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x4) /  # 00000100
            SetupContainer())
def ccEstablishmentConfirmed(RepeatIndicator_presence=0,
                             BearerCapability_presence=0,
                             BearerCapability_presence1=0,
                             Cause_presence=0):
    """CC-ESTABLISHMENT CONFIRMED Section 9.3.17b

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x6)  # 00000110
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if RepeatIndicator_presence == 1:
        c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / c
    if BearerCapability_presence == 1:
        d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / d
    if BearerCapability_presence1 == 1:
        e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / e
    if Cause_presence == 1:
        f = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / f
    return packet
# Network to MS
def releaseNetToMs():
    """RELEASE (network to MS) Section 9.3.18.1"""
    # Builds the message with both Cause IEs plus Facility and User-User
    # headers always present, exactly as the original did.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x2d) /  # 00101101
            CauseHdr(ieiC=0x08, eightBitC=0x0) /
            CauseHdr(ieiC=0x08, eightBitC=0x0) /
            FacilityHdr(ieiF=0x1C, eightBitF=0x0) /
            UserUserHdr(ieiUU=0x7E, eightBitUU=0x0))
def releaseMsToNet(Cause_presence=0, Cause_presence1=0,
                   Facility_presence=0, UserUser_presence=0,
                   SsVersionIndicator_presence=0):
    """RELEASE (MS to network) Section 9.3.18.2

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x2d)  # 00101101
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Cause_presence == 1:
        c = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / c
    if Cause_presence1 == 1:
        d = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / d
    if Facility_presence == 1:
        e = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / e
    if UserUser_presence == 1:
        f = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / f
    if SsVersionIndicator_presence == 1:
        g = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / g
    return packet
# Network to MS
def recall():
    """RECALL Section 9.3.18a"""
    # Network-to-MS: recall type followed by the facility IE.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0xb) /  # 00001011
            RecallType() /
            Facility())
# Network to MS
def releaseCompleteNetToMs(Cause_presence=0, Facility_presence=0,
                           UserUser_presence=0):
    """RELEASE COMPLETE (network to MS) Section 9.3.19.1

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x2a)  # 00101010
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Cause_presence == 1:
        c = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / c
    if Facility_presence == 1:
        d = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / d
    if UserUser_presence == 1:
        e = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / e
    return packet
def releaseCompleteMsToNet(Cause_presence=0, Facility_presence=0,
                           UserUser_presence=0, SsVersionIndicator_presence=0):
    """RELEASE COMPLETE (MS to network) Section 9.3.19.2

    Optional IEs are appended when the matching *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x2a)  # 00101010
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if Cause_presence == 1:
        c = CauseHdr(ieiC=0x08, eightBitC=0x0)
        packet = packet / c
    if Facility_presence == 1:
        d = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / d
    if UserUser_presence == 1:
        e = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / e
    if SsVersionIndicator_presence == 1:
        f = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / f
    return packet
def retrieve():
    """RETRIEVE Section 9.3.20"""
    # Two-octet CC message: header only.
    return TpPd(pd=0x3) / MessageType(mesType=0x1c)  # 00011100
# Network to MS
def retrieveAcknowledge():
    """RETRIEVE ACKNOWLEDGE Section 9.3.21"""
    # Two-octet CC message: header only.
    return TpPd(pd=0x3) / MessageType(mesType=0x1d)  # 00011101
# Network to MS
def retrieveReject():
    """RETRIEVE REJECT Section 9.3.22"""
    # Network rejects the retrieve request; carries the cause.
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x1e) /  # 00011110
            Cause())
# Network to MS
def setupMobileTerminated(RepeatIndicator_presence=0,
                          BearerCapability_presence=0,
                          BearerCapability_presence1=0,
                          Facility_presence=0, ProgressIndicator_presence=0,
                          Signal_presence=0,
                          CallingPartyBcdNumber_presence=0,
                          CallingPartySubaddress_presence=0,
                          CalledPartyBcdNumber_presence=0,
                          CalledPartySubaddress_presence=0,
                          # RecallType_presence=0,
                          RedirectingPartyBcdNumber_presence=0,
                          RedirectingPartySubaddress_presence=0,
                          RepeatIndicator_presence1=0,
                          LowLayerCompatibility_presence=0,
                          LowLayerCompatibility_presence1=0,
                          RepeatIndicator_presence2=0,
                          HighLayerCompatibility_presence=0,
                          HighLayerCompatibility_presence1=0,
                          UserUser_presence=0, PriorityLevel_presence=0,
                          AlertingPattern_presence=0):
    """SETUP (mobile terminated) Section 9.3.23.1

    Optional IEs are appended in specification order when the matching
    *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x5)  # 00000101
    packet = a / b
    # Bug fixes: `is 1` replaced with `== 1` throughout, and the
    # redirecting-party-subaddress branch previously *called the presence
    # flag* (an int) instead of the IE class — a guaranteed TypeError.
    if RepeatIndicator_presence == 1:
        c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / c
    if BearerCapability_presence == 1:
        d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / d
    if BearerCapability_presence1 == 1:
        e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / e
    if Facility_presence == 1:
        f = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / f
    if ProgressIndicator_presence == 1:
        g = ProgressIndicatorHdr(ieiPI=0x1E, eightBitPI=0x0)
        packet = packet / g
    if Signal_presence == 1:
        h = SignalHdr(ieiS=0x34, eightBitS=0x0)
        packet = packet / h
    if CallingPartyBcdNumber_presence == 1:
        i = CallingPartyBcdNumberHdr(ieiCPBN=0x5C, eightBitCPBN=0x0)
        packet = packet / i
    if CallingPartySubaddress_presence == 1:
        j = CallingPartySubaddressHdr(ieiCPS=0x5D, eightBitCPS=0x0)
        packet = packet / j
    if CalledPartyBcdNumber_presence == 1:
        k = CalledPartyBcdNumberHdr(ieiCPBN=0x5E, eightBitCPBN=0x0)
        packet = packet / k
    if CalledPartySubaddress_presence == 1:
        l = CalledPartySubaddressHdr(ieiCPS=0x6D, eightBitCPS=0x0)
        packet = packet / l
    if RedirectingPartyBcdNumber_presence == 1:
        n = RedirectingPartyBcdNumberHdr(ieiRPBN=0x74, eightBitRPBN=0x0)
        packet = packet / n
    if RedirectingPartySubaddress_presence == 1:
        # NOTE(review): IE class and keyword names inferred from the sibling
        # RedirectingPartyBcdNumberHdr usage — confirm against the class
        # definition elsewhere in this file.
        m = RedirectingPartySubaddressHdr(ieiRPS=0x75, eightBitRPS=0x0)
        packet = packet / m
    if RepeatIndicator_presence1 == 1:
        o = RepeatIndicatorHdr(ieiRI=0xD0, eightBitRI=0x0)
        packet = packet / o
    if LowLayerCompatibility_presence == 1:
        p = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / p
    if LowLayerCompatibility_presence1 == 1:
        q = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / q
    if RepeatIndicator_presence2 == 1:
        r = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / r
    if HighLayerCompatibility_presence == 1:
        s = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / s
    if HighLayerCompatibility_presence1 == 1:
        t = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / t
    if UserUser_presence == 1:
        u = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / u
    if PriorityLevel_presence == 1:
        v = PriorityLevelHdr(ieiPL=0x8, eightBitPL=0x0)
        packet = packet / v
    if AlertingPattern_presence == 1:
        w = AlertingPatternHdr(ieiAP=0x19, eightBitAP=0x0)
        packet = packet / w
    return packet
def setupMobileOriginated(RepeatIndicator_presence=0,
                          BearerCapability_presence=0,
                          BearerCapability_presence1=0,
                          Facility_presence=0,
                          CallingPartySubaddress_presence=0,
                          CalledPartyBcdNumber_presence=0,
                          CalledPartySubaddress_presence=0,
                          RepeatIndicator_presence1=0,
                          LowLayerCompatibility_presence=0,
                          LowLayerCompatibility_presence1=0,
                          RepeatIndicator_presence2=0,
                          HighLayerCompatibility_presence=0,
                          HighLayerCompatibility_presence1=0,
                          UserUser_presence=0, SsVersionIndicator_presence=0,
                          ClirSuppression_presence=0,
                          ClirInvocation_presence=0,
                          CallControlCapabilities_presence=0,
                          Facility_presence1=0,
                          Facility_presence2=0):
    """SETUP (mobile originated) Section 9.3.23.2

    Optional IEs are appended in specification order when the matching
    *_presence flag is 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x5)  # 00000101
    packet = a / b
    # Bug fix: `is 1` replaced with `== 1` throughout.
    if RepeatIndicator_presence == 1:
        c = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / c
    if BearerCapability_presence == 1:
        d = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / d
    if BearerCapability_presence1 == 1:
        e = BearerCapabilityHdr(ieiBC=0x04, eightBitBC=0x0)
        packet = packet / e
    if Facility_presence == 1:
        f = FacilityHdr(ieiF=0x1C, eightBitF=0x0)
        packet = packet / f
    if CallingPartySubaddress_presence == 1:
        g = CallingPartySubaddressHdr(ieiCPS=0x5D, eightBitCPS=0x0)
        packet = packet / g
    if CalledPartyBcdNumber_presence == 1:
        h = CalledPartyBcdNumberHdr(ieiCPBN=0x5E, eightBitCPBN=0x0)
        packet = packet / h
    if CalledPartySubaddress_presence == 1:
        i = CalledPartySubaddressHdr(ieiCPS=0x6D, eightBitCPS=0x0)
        packet = packet / i
    if RepeatIndicator_presence1 == 1:
        j = RepeatIndicatorHdr(ieiRI=0xD0, eightBitRI=0x0)
        packet = packet / j
    if LowLayerCompatibility_presence == 1:
        k = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / k
    if LowLayerCompatibility_presence1 == 1:
        l = LowLayerCompatibilityHdr(ieiLLC=0x7C, eightBitLLC=0x0)
        packet = packet / l
    if RepeatIndicator_presence2 == 1:
        m = RepeatIndicatorHdr(ieiRI=0xD, eightBitRI=0x0)
        packet = packet / m
    if HighLayerCompatibility_presence == 1:
        n = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / n
    if HighLayerCompatibility_presence1 == 1:
        o = HighLayerCompatibilityHdr(ieiHLC=0x7D, eightBitHLC=0x0)
        packet = packet / o
    if UserUser_presence == 1:
        p = UserUserHdr(ieiUU=0x7E, eightBitUU=0x0)
        packet = packet / p
    if SsVersionIndicator_presence == 1:
        q = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / q
    if ClirSuppression_presence == 1:
        r = ClirSuppressionHdr(ieiCS=0xA1, eightBitCS=0x0)
        packet = packet / r
    if ClirInvocation_presence == 1:
        s = ClirInvocationHdr(ieiCI=0xA2, eightBitCI=0x0)
        packet = packet / s
    if CallControlCapabilities_presence == 1:
        t = CallControlCapabilitiesHdr(ieiCCC=0x15, eightBitCCC=0x0)
        packet = packet / t
    if Facility_presence1 == 1:
        u = FacilityHdr(ieiF=0x1D, eightBitF=0x0)
        packet = packet / u
    if Facility_presence2 == 1:
        v = FacilityHdr(ieiF=0x1B, eightBitF=0x0)
        packet = packet / v
    return packet
def startCc(CallControlCapabilities_presence=0):
    """START CC Section 9.3.23a

    :param CallControlCapabilities_presence: 1 to append the optional
        call control capabilities IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x9)  # 00001001
    packet = a / b
    # == instead of 'is 1' (identity on ints is a CPython artifact);
    # also fixes the 'paclet' typo that raised NameError when the
    # optional IE was requested.
    if CallControlCapabilities_presence == 1:
        c = CallControlCapabilitiesHdr(ieiCCC=0x15, eightBitCCC=0x0)
        packet = packet / c
    return packet
def startDtmf():
    """START DTMF Section 9.3.24

    Fixed-format message: protocol discriminator, message type and the
    mandatory keypad facility IE.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x35) /  # 00110101
            KeypadFacilityHdr(ieiKF=0x2C, eightBitKF=0x0))
# Network to MS
# Network to MS
def startDtmfAcknowledge():
    """START DTMF ACKNOWLEDGE Section 9.3.25

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    # 0x36 per 3GPP TS 24.008 table 10.3; the previous 0x32 duplicated
    # STOP DTMF ACKNOWLEDGE.
    b = MessageType(mesType=0x36)  # 00110110
    c = KeypadFacilityHdr(ieiKF=0x2C, eightBitKF=0x0)
    packet = a / b / c
    return packet
# Network to MS
# Network to MS
def startDtmfReject():
    """ START DTMF REJECT Section 9.3.26

    Fixed-format message carrying the mandatory cause IE.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x37) /  # 00110111
            Cause())
def status(AuxiliaryStates_presence=0):
    """STATUS Section 9.3.27

    :param AuxiliaryStates_presence: 1 to append the optional
        auxiliary states IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x3d)  # 00111101
    c = Cause()
    d = CallState()
    packet = a / b / c / d
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if AuxiliaryStates_presence == 1:
        e = AuxiliaryStatesHdr(ieiAS=0x24, eightBitAS=0x0)
        packet = packet / e
    return packet
def statusEnquiry():
    """STATUS ENQUIRY Section 9.3.28

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x34)  # 00110100
def stopDtmf():
    """STOP DTMF Section 9.3.29

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x31)  # 00110001
# Network to MS
# Network to MS
def stopDtmfAcknowledge():
    """STOP DTMF ACKNOWLEDGE Section 9.3.30

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x32)  # 00110010
def userInformation(MoreData_presence=0):
    """USER INFORMATION Section 9.3.31

    :param MoreData_presence: 1 to append the optional more-data IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    # 0x10 per 3GPP TS 24.008 table 10.3 (USER INFORMATION); the
    # previous 0x20 contradicted its own 9-bit comment "000100000".
    b = MessageType(mesType=0x10)  # 00010000
    c = UserUser()
    packet = a / b / c
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if MoreData_presence == 1:
        d = MoreDataHdr(ieiMD=0xA0, eightBitMD=0x0)
        packet = packet / d
    return packet
#
# 9.4 GPRS Mobility Management Messages
#
#
# 9.4 GPRS Mobility Management Messages
#
def attachRequest(PTmsiSignature_presence=0, GprsTimer_presence=0,
                  TmsiStatus_presence=0):
    """ATTACH REQUEST Section 9.4.1

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x1)  # 00000001
    c = MsNetworkCapability()
    d = AttachTypeAndCiphKeySeqNr()
    f = DrxParameter()
    g = MobileId()
    h = RoutingAreaIdentification()
    i = MsRadioAccessCapability()
    packet = a / b / c / d / f / g / h / i
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if PTmsiSignature_presence == 1:
        j = PTmsiSignature(ieiPTS=0x19)
        packet = packet / j
    if GprsTimer_presence == 1:
        k = GprsTimer(ieiGT=0x17)
        packet = packet / k
    if TmsiStatus_presence == 1:
        l = TmsiStatus(ieiTS=0x9)
        packet = packet / l
    return packet
def attachAccept(PTmsiSignature_presence=0, GprsTimer_presence=0,
                 MobileId_presence=0, MobileId_presence1=0,
                 GmmCause_presence=0):
    """ATTACH ACCEPT Section 9.4.2

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x2)  # 00000010
    c = AttachResult()
    d = ForceToStandby()
    e = GprsTimer()
    f = RadioPriorityAndSpareHalfOctets()
    h = RoutingAreaIdentification()
    packet = a / b / c / d / e / f / h
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if PTmsiSignature_presence == 1:
        i = PTmsiSignature(ieiPTS=0x19)
        packet = packet / i
    if GprsTimer_presence == 1:
        j = GprsTimer(ieiGT=0x17)
        packet = packet / j
    if MobileId_presence == 1:
        k = MobileIdHdr(ieiMI=0x18, eightBitMI=0x0)
        packet = packet / k
    if MobileId_presence1 == 1:
        l = MobileIdHdr(ieiMI=0x23, eightBitMI=0x0)
        packet = packet / l
    if GmmCause_presence == 1:
        m = GmmCause(ieiGC=0x25)
        packet = packet / m
    return packet
def attachComplete():
    """ATTACH COMPLETE Section 9.4.3

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x3)  # 00000011
def attachReject():
    """ATTACH REJECT Section 9.4.4

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    # 0x04 per 3GPP TS 24.008 table 10.4 (ATTACH REJECT); the previous
    # 0x1 duplicated ATTACH REQUEST.
    b = MessageType(mesType=0x4)  # 00000100
    c = GmmCause()
    packet = a / b / c
    return packet
def detachRequest(GmmCause_presence=0):
    """DETACH REQUEST Section 9.4.5

    :param GmmCause_presence: 1 to append the optional GMM cause IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x5)  # 00000101
    c = DetachTypeAndForceToStandby()
    packet = a / b / c
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if GmmCause_presence == 1:
        e = GmmCause(ieiGC=0x25)
        packet = packet / e
    return packet
def detachRequestMsOriginating():
    """DETACH REQUEST Section 9.4.5.2

    MS-originating variant; carries detach type plus spare half octets.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x5) /  # 00000101
            DetachTypeAndSpareHalfOctets())
def detachAcceptMsTerminated():
    """DETACH ACCEPT Section 9.4.6.1

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x6)  # 00000110
def detachAcceptMsOriginating():
    """DETACH ACCEPT Section 9.4.6.2

    MS-originating variant; adds force-to-standby plus spare half octets.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x6) /  # 00000110
            ForceToStandbyAndSpareHalfOctets())
def ptmsiReallocationCommand(PTmsiSignature_presence=0):
    """P-TMSI REALLOCATION COMMAND Section 9.4.7

    :param PTmsiSignature_presence: 1 to append the optional P-TMSI
        signature IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x10)  # 00010000
    c = MobileId()
    d = RoutingAreaIdentification()
    e = ForceToStandbyAndSpareHalfOctets()
    packet = a / b / c / d / e
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if PTmsiSignature_presence == 1:
        g = PTmsiSignature(ieiPTS=0x19)
        packet = packet / g
    return packet
def ptmsiReallocationComplete():
    """P-TMSI REALLOCATION COMPLETE Section 9.4.8

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x11)  # 00010001
def authenticationAndCipheringRequest(
        AuthenticationParameterRAND_presence=0,
        CiphKeySeqNr_presence=0):
    """AUTHENTICATION AND CIPHERING REQUEST Section 9.4.9

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x12)  # 00010010
    d = CipheringAlgorithmAndImeisvRequest()
    e = ForceToStandbyAndAcReferenceNumber()
    packet = a / b / d / e
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if AuthenticationParameterRAND_presence == 1:
        g = AuthenticationParameterRAND(ieiAPR=0x21)
        packet = packet / g
    if CiphKeySeqNr_presence == 1:
        h = CiphKeySeqNrHdr(ieiCKSN=0x08, eightBitCKSN=0x0)
        packet = packet / h
    return packet
def authenticationAndCipheringResponse(
        AuthenticationParameterSRES_presence=0,
        MobileId_presence=0):
    """AUTHENTICATION AND CIPHERING RESPONSE Section 9.4.10

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x13)  # 00010011
    c = AcReferenceNumberAndSpareHalfOctets()
    packet = a / b / c
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if AuthenticationParameterSRES_presence == 1:
        e = AuthenticationParameterSRES(ieiAPS=0x22)
        packet = packet / e
    if MobileId_presence == 1:
        f = MobileIdHdr(ieiMI=0x23, eightBitMI=0x0)
        packet = packet / f
    return packet
def authenticationAndCipheringReject():
    """AUTHENTICATION AND CIPHERING REJECT Section 9.4.11

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x3) / MessageType(mesType=0x14)  # 00010100
def identityRequest():
    """IDENTITY REQUEST Section 9.4.12

    Fixed-format message carrying the identity-type/force-to-standby IE.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x15) /  # 00010101
            IdentityType2AndforceToStandby())
def identityResponse():
    """IDENTITY RESPONSE Section 9.4.13

    Fixed-format message carrying the mobile identity IE.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x16) /  # 00010110
            MobileId())
def routingAreaUpdateRequest(PTmsiSignature_presence=0,
                             GprsTimer_presence=0,
                             DrxParameter_presence=0,
                             TmsiStatus_presence=0):
    """ROUTING AREA UPDATE REQUEST Section 9.4.14

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x8)  # 00001000
    c = UpdateTypeAndCiphKeySeqNr()
    e = RoutingAreaIdentification()
    f = MsNetworkCapability()
    packet = a / b / c / e / f
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if PTmsiSignature_presence == 1:
        g = PTmsiSignature(ieiPTS=0x19)
        packet = packet / g
    if GprsTimer_presence == 1:
        h = GprsTimer(ieiGT=0x17)
        packet = packet / h
    if DrxParameter_presence == 1:
        i = DrxParameter(ieiDP=0x27)
        packet = packet / i
    if TmsiStatus_presence == 1:
        j = TmsiStatus(ieiTS=0x9)
        packet = packet / j
    return packet
def routingAreaUpdateAccept(PTmsiSignature_presence=0,
                            MobileId_presence=0, MobileId_presence1=0,
                            ReceiveNpduNumbersList_presence=0,
                            GprsTimer_presence=0, GmmCause_presence=0):
    """ROUTING AREA UPDATE ACCEPT Section 9.4.15

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x9)  # 00001001
    c = ForceToStandbyAndUpdateResult()
    e = GprsTimer()
    f = RoutingAreaIdentification()
    packet = a / b / c / e / f
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if PTmsiSignature_presence == 1:
        g = PTmsiSignature(ieiPTS=0x19)
        packet = packet / g
    if MobileId_presence == 1:
        h = MobileIdHdr(ieiMI=0x18, eightBitMI=0x0)
        packet = packet / h
    if MobileId_presence1 == 1:
        i = MobileIdHdr(ieiMI=0x23, eightBitMI=0x0)
        packet = packet / i
    if ReceiveNpduNumbersList_presence == 1:
        j = ReceiveNpduNumbersList(ieiRNNL=0x26)
        packet = packet / j
    if GprsTimer_presence == 1:
        k = GprsTimer(ieiGT=0x17)
        packet = packet / k
    if GmmCause_presence == 1:
        l = GmmCause(ieiGC=0x25)
        packet = packet / l
    return packet
def routingAreaUpdateComplete(ReceiveNpduNumbersList_presence=0):
    """ROUTING AREA UPDATE COMPLETE Section 9.4.16

    :param ReceiveNpduNumbersList_presence: 1 to append the optional
        receive N-PDU numbers list IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0xa)  # 00001010
    packet = a / b
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if ReceiveNpduNumbersList_presence == 1:
        c = ReceiveNpduNumbersList(ieiRNNL=0x26)
        packet = packet / c
    return packet
def routingAreaUpdateReject():
    """ROUTING AREA UPDATE REJECT Section 9.4.17

    Fixed-format message: GMM cause followed by force-to-standby plus
    spare half octets.
    """
    header = TpPd(pd=0x3) / MessageType(mesType=0xb)  # 00001011
    return header / GmmCause() / ForceToStandbyAndSpareHalfOctets()
def gmmStatus():
    """GMM STATUS Section 9.4.18

    Fixed-format message carrying only the GMM cause IE.
    """
    return (TpPd(pd=0x3) /
            MessageType(mesType=0x20) /  # 00100000
            GmmCause())
def gmmInformation(NetworkName_presence=0, NetworkName_presence1=0,
                   TimeZone_presence=0, TimeZoneAndTime_presence=0,
                   LsaIdentifier_presence=0):
    """GMM INFORMATION Section 9.4.19

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x21)  # 00100001
    packet = a / b
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if NetworkName_presence == 1:
        c = NetworkNameHdr(ieiNN=0x43, eightBitNN=0x0)
        packet = packet / c
    if NetworkName_presence1 == 1:
        d = NetworkNameHdr(ieiNN=0x45, eightBitNN=0x0)
        packet = packet / d
    if TimeZone_presence == 1:
        e = TimeZoneHdr(ieiTZ=0x46, eightBitTZ=0x0)
        packet = packet / e
    if TimeZoneAndTime_presence == 1:
        f = TimeZoneAndTimeHdr(ieiTZAT=0x47, eightBitTZAT=0x0)
        packet = packet / f
    if LsaIdentifier_presence == 1:
        g = LsaIdentifierHdr(ieiLI=0x48, eightBitLI=0x0)
        packet = packet / g
    return packet
#
# 9.5 GPRS Session Management Messages
#
#
# 9.5 GPRS Session Management Messages
#
def activatePdpContextRequest(AccessPointName_presence=0,
                              ProtocolConfigurationOptions_presence=0):
    """ACTIVATE PDP CONTEXT REQUEST Section 9.5.1

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x41)  # 01000001
    c = NetworkServiceAccessPointIdentifier()
    d = LlcServiceAccessPointIdentifier()
    e = QualityOfService()
    f = PacketDataProtocolAddress()
    packet = a / b / c / d / e / f
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if AccessPointName_presence == 1:
        g = AccessPointName(ieiAPN=0x28)
        packet = packet / g
    if ProtocolConfigurationOptions_presence == 1:
        h = ProtocolConfigurationOptions(ieiPCO=0x27)
        packet = packet / h
    return packet
def activatePdpContextAccept(PacketDataProtocolAddress_presence=0,
                             ProtocolConfigurationOptions_presence=0):
    """ACTIVATE PDP CONTEXT ACCEPT Section 9.5.2

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x42)  # 01000010
    c = LlcServiceAccessPointIdentifier()
    d = QualityOfService()
    e = RadioPriorityAndSpareHalfOctets()
    packet = a / b / c / d / e
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if PacketDataProtocolAddress_presence == 1:
        f = PacketDataProtocolAddress(ieiPDPA=0x2B)
        packet = packet / f
    if ProtocolConfigurationOptions_presence == 1:
        g = ProtocolConfigurationOptions(ieiPCO=0x27)
        packet = packet / g
    return packet
def activatePdpContextReject(ProtocolConfigurationOptions_presence=0):
    """ACTIVATE PDP CONTEXT REJECT Section 9.5.3

    :param ProtocolConfigurationOptions_presence: 1 to append the
        optional protocol configuration options IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x43)  # 01000011
    c = SmCause()
    packet = a / b / c
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if ProtocolConfigurationOptions_presence == 1:
        d = ProtocolConfigurationOptions(ieiPCO=0x27)
        packet = packet / d
    return packet
def requestPdpContextActivation(AccessPointName_presence=0):
    """REQUEST PDP CONTEXT ACTIVATION Section 9.5.4

    :param AccessPointName_presence: 1 to append the optional access
        point name IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x44)  # 01000100
    c = PacketDataProtocolAddress()
    packet = a / b / c
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if AccessPointName_presence == 1:
        d = AccessPointName(ieiAPN=0x28)
        packet = packet / d
    return packet
def requestPdpContextActivationReject():
    """REQUEST PDP CONTEXT ACTIVATION REJECT Section 9.5.5

    Fixed-format message carrying the SM cause IE.
    """
    return (TpPd(pd=0x8) /
            MessageType(mesType=0x45) /  # 01000101
            SmCause())
def modifyPdpContextRequest():
    """MODIFY PDP CONTEXT REQUEST Section 9.5.6

    Fixed-format message: radio priority, LLC SAPI and QoS.
    """
    header = TpPd(pd=0x8) / MessageType(mesType=0x48)  # 01001000
    return (header /
            RadioPriorityAndSpareHalfOctets() /
            LlcServiceAccessPointIdentifier() /
            QualityOfService())
def modifyPdpContextAccept():
    """MODIFY PDP CONTEXT ACCEPT Section 9.5.7

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    # 0x49 per 3GPP TS 24.008 table 10.4 (MODIFY PDP CONTEXT ACCEPT);
    # the previous 0x45 duplicated REQUEST PDP CONTEXT ACTIVATION REJECT.
    b = MessageType(mesType=0x49)  # 01001001
    packet = a / b
    return packet
def deactivatePdpContextRequest():
    """DEACTIVATE PDP CONTEXT REQUEST Section 9.5.8

    Fixed-format message carrying the SM cause IE.
    """
    return (TpPd(pd=0x8) /
            MessageType(mesType=0x46) /  # 01000110
            SmCause())
def deactivatePdpContextAccept():
    """DEACTIVATE PDP CONTEXT ACCEPT Section 9.5.9

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x8) / MessageType(mesType=0x47)  # 01000111
def activateAaPdpContextRequest(AccessPointName_presence=0,
                                ProtocolConfigurationOptions_presence=0,
                                GprsTimer_presence=0):
    """ACTIVATE AA PDP CONTEXT REQUEST Section 9.5.10

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x50)  # 01010000
    c = NetworkServiceAccessPointIdentifier()
    d = LlcServiceAccessPointIdentifier()
    e = QualityOfService()
    f = PacketDataProtocolAddress()
    packet = a / b / c / d / e / f
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if AccessPointName_presence == 1:
        g = AccessPointName(ieiAPN=0x28)
        packet = packet / g
    if ProtocolConfigurationOptions_presence == 1:
        h = ProtocolConfigurationOptions(ieiPCO=0x27)
        packet = packet / h
    if GprsTimer_presence == 1:
        i = GprsTimer(ieiGT=0x29)
        packet = packet / i
    return packet
def activateAaPdpContextAccept(ProtocolConfigurationOptions_presence=0,
                               GprsTimer_presence=0):
    """ACTIVATE AA PDP CONTEXT ACCEPT Section 9.5.11

    Each ``*_presence`` flag set to 1 appends the corresponding
    optional IE.

    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x51)  # 01010001
    c = LlcServiceAccessPointIdentifier()
    d = QualityOfService()
    e = MobileId()
    f = PacketDataProtocolAddress()
    g = RadioPriorityAndSpareHalfOctets()
    packet = a / b / c / d / e / f / g
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if ProtocolConfigurationOptions_presence == 1:
        i = ProtocolConfigurationOptions(ieiPCO=0x27)
        packet = packet / i
    if GprsTimer_presence == 1:
        j = GprsTimer(ieiGT=0x29)
        packet = packet / j
    return packet
def activateAaPdpContextReject(ProtocolConfigurationOptions_presence=0):
    """ACTIVATE AA PDP CONTEXT REJECT Section 9.5.12

    :param ProtocolConfigurationOptions_presence: 1 to append the
        optional protocol configuration options IE
    :return: assembled scapy packet
    """
    a = TpPd(pd=0x8)
    b = MessageType(mesType=0x52)  # 01010010
    c = SmCause()
    packet = a / b / c
    # == instead of 'is 1': identity on small ints is a CPython artifact.
    if ProtocolConfigurationOptions_presence == 1:
        d = ProtocolConfigurationOptions(ieiPCO=0x27)
        packet = packet / d
    return packet
def deactivateAaPdpContextRequest():
    """DEACTIVATE AA PDP CONTEXT REQUEST Section 9.5.13

    Fixed-format message carrying the AA deactivation cause.
    """
    return (TpPd(pd=0x8) /
            MessageType(mesType=0x53) /  # 01010011
            AaDeactivationCauseAndSpareHalfOctets())
def deactivateAaPdpContextAccept():
    """DEACTIVATE AA PDP CONTEXT ACCEPT Section 9.5.14

    Header-only message: protocol discriminator plus message type.
    """
    return TpPd(pd=0x8) / MessageType(mesType=0x54)  # 01010100
def smStatus():
    """SM STATUS Section 9.5.15

    Fixed-format message carrying the SM cause IE.
    """
    return (TpPd(pd=0x8) /
            MessageType(mesType=0x55) /  # 01010101
            SmCause())
# ============================================#
# Information Elements contents (Section 10) #
# =========================================== #
####
# This section contains the elements we need to build the messages
####
#
# Common information elements:
#
class CellIdentityHdr(Packet):
    """ Cell identity Section 10.5.1.1 """
    name = "Cell Identity"
    fields_desc = [
        BitField("eightBitCI", None, 1),   # top bit of the IEI octet
        XBitField("ieiCI", None, 7),       # remaining 7 IEI bits
        ByteField("ciValue1", 0x0),        # cell identity value, first octet
        ByteField("ciValue2", 0x0)         # cell identity value, second octet
    ]
class CiphKeySeqNrHdr(Packet):
    """ Ciphering Key Sequence Number Section 10.5.1.2 """
    name = "Cipher Key Sequence Number"
    # Half-octet IE: 4-bit IEI followed by the value half-octet.
    fields_desc = [
        XBitField("ieiCKSN", None, 4),  # information element identifier
        BitField("spare", 0x0, 1),
        BitField("keySeq", 0x0, 3)      # ciphering key sequence number
    ]
# Fix 1/2 len problem
# Fix 1/2 len problem
class CiphKeySeqNrAndSpareHalfOctets(Packet):
    """Ciphering key sequence number packed with a spare half octet.

    Workaround class: scapy fields must fill whole octets, so two
    half-octet IEs are combined into one 8-bit packet.
    """
    name = "Cipher Key Sequence Number and Spare Half Octets"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("keySeq", 0x0, 3),           # ciphering key sequence number
        BitField("spareHalfOctets", 0x0, 4)   # padding half octet
    ]
# Fix 1/2 len problem
# Fix 1/2 len problem
class CiphKeySeqNrAndMacModeAndChannelCodingRequest(Packet):
    """Ciphering key sequence number combined with MAC mode / channel
    coding request.

    Workaround class: two half-octet IEs merged into one octet so the
    scapy field list stays byte aligned.
    """
    name = "Cipher Key Sequence Number and Mac Mode And Channel Coding Request"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("keySeq", 0x0, 3),   # ciphering key sequence number
        BitField("macMode", 0x0, 2),  # MAC mode
        BitField("cs", 0x0, 2)        # channel coding scheme
    ]
class LocalAreaIdHdr(Packet):
    """ Local Area Identification Section 10.5.1.3 """
    name = "Location Area Identification"
    fields_desc = [
        BitField("eightBitLAI", None, 1),  # top bit of the IEI octet
        XBitField("ieiLAI", None, 7),      # remaining 7 IEI bits
        # MCC/MNC digits are BCD-coded, nibble-swapped within each octet.
        BitField("mccDigit2", 0x0, 4),
        BitField("mccDigit1", 0x0, 4),
        BitField("mncDigit3", 0x0, 4),
        BitField("mccDigit3", 0x0, 4),
        BitField("mncDigit2", 0x0, 4),
        BitField("mncDigit1", 0x0, 4),
        ByteField("lac1", 0x0),  # location area code, first octet
        ByteField("lac2", 0x0)   # location area code, second octet
    ]
#
# The Mobile Identity is a type 4 information element with a minimum
# length of 3 octet and 11 octets length maximal.
#
# len 3 - 11
# len 3 - 11
class MobileIdHdr(Packet):
    """ Mobile Identity Section 10.5.1.4

    Type 4 IE, 3 to 11 octets.  Digits 2..9 are optional (``None``)
    and are trimmed from the wire format by :meth:`post_build`.
    """
    name = "Mobile Identity"
    fields_desc = [
        BitField("eightBitMI", 0x0, 1),  # top bit of the IEI octet
        XBitField("ieiMI", 0x0, 7),      # remaining 7 IEI bits
        XByteField("lengthMI", None),    # computed in post_build if unset
        BitField("idDigit1", 0x0, 4),
        BitField("oddEven", 0x0, 1),     # odd/even number of digits
        BitField("typeOfId", 0x0, 3),    # IMSI/IMEI/IMEISV/TMSI selector
        BitField("idDigit2_1", None, 4),  # optional from here on
        BitField("idDigit2", None, 4),
        BitField("idDigit3_1", None, 4),
        BitField("idDigit3", None, 4),
        BitField("idDigit4_1", None, 4),
        BitField("idDigit4", None, 4),
        BitField("idDigit5_1", None, 4),
        BitField("idDigit5", None, 4),
        BitField("idDigit6_1", None, 4),
        BitField("idDigit6", None, 4),
        BitField("idDigit7_1", None, 4),
        BitField("idDigit7", None, 4),
        BitField("idDigit8_1", None, 4),
        BitField("idDigit8", None, 4),
        BitField("idDigit9_1", None, 4),
        BitField("idDigit9", None, 4),
    ]

    def post_build(self, p, pay):
        """Fill in the length octet and strip unset optional digits."""
        # Current field values; adapt() (defined earlier in this file)
        # returns (trailing octets to strip, value for the length field).
        a = [getattr(self, fld.name, None) for fld in self.fields_desc]
        res = adapt(3, 11, a, self.fields_desc)
        if self.lengthMI is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        # Leftover debug 'print repr(p)' removed: it spammed stdout on
        # every build and was a syntax error under Python 3.  The sibling
        # BaRangeHdr.post_build has no such print.
        return p + pay
class MobileStationClassmark1Hdr(Packet):
    """ Mobile Station Classmark 1 Section 10.5.1.5 """
    name = "Mobile Station Classmark 1"
    fields_desc = [
        BitField("eightBitiMSC1", None, 1),  # top bit of the IEI octet
        XBitField("ieiMSC1", None, 7),       # remaining 7 IEI bits
        BitField("spare", 0x0, 1),
        BitField("revisionLvl", 0x0, 2),  # revision level
        BitField("esInd", 0x0, 1),        # early classmark sending indicator
        BitField("a51", 0x0, 1),          # A5/1 algorithm availability
        BitField("rfPowerCap", 0x0, 3)    # RF power capability
    ]
class MobileStationClassmark2Hdr(Packet):
    """ Mobile Station Classmark 2 Section 10.5.1.6 """
    name = "Mobile Station Classmark 2"
    fields_desc = [
        BitField("eightBitMSC2", None, 1),  # top bit of the IEI octet
        XBitField("ieiMSC2", None, 7),      # remaining 7 IEI bits
        XByteField("lengthMSC2", 0x3),      # fixed content length: 3 octets
        # octet 3
        BitField("spare", 0x0, 1),
        BitField("revisionLvl", 0x0, 2),  # revision level
        BitField("esInd", 0x0, 1),        # early classmark sending indicator
        BitField("a51", 0x0, 1),          # A5/1 availability
        BitField("rfPowerCap", 0x0, 3),   # RF power capability
        # octet 4
        BitField("spare1", 0x0, 1),
        BitField("psCap", 0x0, 1),        # pseudo-synchronization capability
        BitField("ssScreenInd", 0x0, 2),  # SS screening indicator
        BitField("smCaPabi", 0x0, 1),     # SM (short message) capability
        BitField("vbs", 0x0, 1),          # voice broadcast service
        BitField("vgcs", 0x0, 1),         # voice group call service
        BitField("fc", 0x0, 1),           # frequency capability
        # octet 5
        BitField("cm3", 0x0, 1),          # classmark 3 available
        BitField("spare2", 0x0, 1),
        BitField("lcsvaCap", 0x0, 1),     # LCS VA capability
        BitField("spare3", 0x0, 1),
        BitField("soLsa", 0x0, 1),        # SoLSA support
        BitField("cmsp", 0x0, 1),         # CM service prompt
        BitField("a53", 0x0, 1),          # A5/3 availability
        BitField("a52", 0x0, 1)           # A5/2 availability
    ]
# len max 14
# len max 14
class MobileStationClassmark3(Packet):
    """ Mobile Station Classmark 3 Section 10.5.1.7 """
    name = "Mobile Station Classmark 3"
    fields_desc = [
        # FIXME: the spec defines a variable-length bit-level structure;
        # this placeholder just exposes 14 opaque octets.
        ByteField("ieiMSC3", 0x0),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0)
    ]
class SpareHalfOctets(Packet):
    """ Spare Half Octet Section 10.5.1.8 """
    name = "Spare Half Octet"
    fields_desc = [
        BitField("filler", None, 4),          # pads the octet alongside the spare nibble
        BitField("spareHalfOctets", 0x0, 4)   # the spare half octet itself
    ]
class DescriptiveGroupOrBroadcastCallReferenceHdr(Packet):
    """ Descriptive group or broadcast call reference Section 10.5.1.9 """
    name = "Descriptive Group or Broadcast Call Reference"
    fields_desc = [
        BitField("eightBitDGOBCR", None, 1),  # top bit of the IEI octet
        XBitField("ieiDGOBCR", None, 7),      # remaining 7 IEI bits
        BitField("binCallRef", 0x0, 27),      # binary coding of the call reference
        BitField("sf", 0x0, 1),               # SF flag
        BitField("fa", 0x0, 1),               # FA flag
        BitField("callPrio", 0x0, 3),         # call priority
        BitField("cipherInfo", 0x0, 4),       # ciphering information
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("spare3", 0x0, 1),
        BitField("spare4", 0x0, 1)
    ]
class GroupCipherKeyNumber(Packet):
    """ Group Cipher Key Number reference Section 10.5.1.10 """
    name = "Group Cipher Key Number"
    # Half-octet IE: 4-bit IEI plus 4-bit value.
    fields_desc = [
        XBitField("ieiGCKN", None, 4),   # information element identifier
        BitField("groupCipher", 0x0, 4)  # group cipher key number
    ]
class PdAndSapiHdr(Packet):
    """ PD and SAPI $(CCBS)$ Section 10.5.1.10a """
    name = "PD and SAPI $(CCBS)$"
    fields_desc = [
        BitField("eightBitPAS", None, 1),  # top bit of the IEI octet
        XBitField("ieiPAS", None, 7),      # remaining 7 IEI bits
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("sapi", 0x0, 2),  # service access point identifier
        BitField("pd", 0x0, 4)     # protocol discriminator
    ]
class PriorityLevelHdr(Packet):
    """ Priority Level Section 10.5.1.11 """
    name = "Priority Level"
    # Half-octet IE: 4-bit IEI plus spare bit and 3-bit priority.
    fields_desc = [
        XBitField("ieiPL", None, 4),  # information element identifier
        BitField("spare", 0x0, 1),
        BitField("callPrio", 0x0, 3)  # call priority level
    ]
#
# Radio Resource management information elements
#
# len 6 to max for L3 message (251)
# len 6 to max for L3 message (251)
class BaRangeHdr(Packet):
    """ BA Range Section 10.5.2.1a

    Variable-length IE: up to 84 frequency ranges.  Everything after
    the range-2 low part is optional (``None``) and gets trimmed by
    :meth:`post_build`.
    """
    name = "BA Range"
    # Fixed leading part.  Range 1 has a bit layout different from the
    # following ranges (its boundary straddles the octet differently).
    fields_desc = [
        BitField("eightBitBR", None, 1),  # top bit of the IEI octet
        XBitField("ieiBR", None, 7),      # remaining 7 IEI bits
        XByteField("lengthBR", None),     # computed in post_build if unset
        ByteField("nrOfRanges", 0x0),
        # rX = range X; Lo = lower, Hi = higher; Hp = high part, Lp = low part
        ByteField("r1LoHp", 0x0),
        BitField("r1LoLp", 0x0, 3),
        BitField("r1HiHp", 0x0, 5),
        BitField("r1HiLp", 0x0, 4),
        BitField("r2LoHp", 0x0, 4),
        # optional from here on
        BitField("r2LoLp", None, 5),
        BitField("r2HiHp", None, 3),
        ByteField("r2HiLp", None),
    ]
    # Ranges 3..84 all share the same 4-field layout; generate them
    # instead of spelling out ~330 identical lines.  Field names, types,
    # defaults and order are exactly those of the hand-written list.
    for _n in range(3, 85):
        fields_desc.append(ByteField("r%dLoHp" % _n, None))
        fields_desc.append(BitField("r%dLoLp" % _n, None, 5))
        fields_desc.append(BitField("r%dHiHp" % _n, None, 3))
        fields_desc.append(ByteField("r%dHiLp" % _n, None))
    del _n  # keep the loop variable out of the class namespace

    def post_build(self, p, pay):
        """Fill in the length octet and strip unset optional ranges."""
        # adapt() (defined earlier in this file) returns
        # (trailing octets to strip, value for the length field).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(6, 251, a, self.fields_desc)
        if self.lengthBR is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 3 to max for L3 message (251)
# len 3 to max for L3 message (251)
class BaListPrefHdr(Packet):
    """ BA List Pref Section 10.5.2.1c """
    name = "BA List Pref"
    fields_desc = [
        # FIXME dynamic: the spec allows a repeated range/frequency list;
        # only one fixed-size instance is modelled here.
        BitField("eightBitBLP", None, 1),  # top bit of the IEI octet
        XBitField("ieiBLP", None, 7),      # remaining 7 IEI bits
        XByteField("lengthBLP", None),
        BitField("fixBit", 0x0, 1),
        BitField("rangeLower", 0x0, 10),   # lower bound of the range
        BitField("fixBit2", 0x0, 1),
        BitField("rangeUpper", 0x0, 10),   # upper bound of the range
        BitField("baFreq", 0x0, 10),       # BA frequency
        BitField("sparePad", 0x0, 8)       # padding to octet boundary
    ]
# len 17 || Have a look at the specs for the field format
# Bit map 0 format
# Range 1024 format
# Range 512 format
# Range 256 format
# Range 128 format
# Variable bit map format
class CellChannelDescriptionHdr(Packet):
    """ Cell Channel Description Section 10.5.2.1b """
    name = "Cell Channel Description "
    fields_desc = [
        # 1-bit "eightBit" marker + 7-bit IEI header.
        BitField("eightBitCCD", None, 1),
        XBitField("ieiCCD", None, 7),
        # 128-bit map, most significant bits first; bits 126/125 are spare.
        BitField("bit128", 0x0, 1),
        BitField("bit127", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("bit124", 0x0, 1),
        BitField("bit123", 0x0, 1),
        BitField("bit122", 0x0, 1),
        BitField("bit121", 0x0, 1),
        # Each ByteField below covers the 8 bits starting at the named bit.
        ByteField("bit120", 0x0),
        ByteField("bit112", 0x0),
        ByteField("bit104", 0x0),
        ByteField("bit96", 0x0),
        ByteField("bit88", 0x0),
        ByteField("bit80", 0x0),
        ByteField("bit72", 0x0),
        ByteField("bit64", 0x0),
        ByteField("bit56", 0x0),
        ByteField("bit48", 0x0),
        ByteField("bit40", 0x0),
        ByteField("bit32", 0x0),
        ByteField("bit24", 0x0),
        ByteField("bit16", 0x0),
        ByteField("bit8", 0x0)
    ]
class CellDescriptionHdr(Packet):
    """ Cell Description Section 10.5.2.2 """
    name = "Cell Description"
    fields_desc = [
        BitField("eightBitCD", None, 1),
        XBitField("ieiCD", None, 7),
        # BCCH ARFCN is split: 2 high bits here, low byte in "bcchLow".
        BitField("bcchHigh", 0x0, 2),
        BitField("ncc", 0x0, 3),
        BitField("bcc", 0x0, 3),
        ByteField("bcchLow", 0x0)
    ]
class CellOptionsBCCHHdr(Packet):
    """ Cell Options (BCCH) Section 10.5.2.3 """
    name = "Cell Options (BCCH)"
    fields_desc = [
        BitField("eightBitCOB", None, 1),
        XBitField("ieiCOB", None, 7),
        # Single value octet: spare | PWRC | DTX(2) | radio-link timeout(4).
        BitField("spare", 0x0, 1),
        BitField("pwrc", 0x0, 1),
        BitField("dtx", 0x0, 2),
        BitField("rLinkTout", 0x0, 4)
    ]
class CellOptionsSACCHHdr(Packet):
    """ Cell Options (SACCH) Section 10.5.2.3a """
    name = "Cell Options (SACCH)"
    fields_desc = [
        BitField("eightBitCOS", None, 1),
        XBitField("ieiCOS", None, 7),
        # In this IE the DTX indicator occupies two non-adjacent bits
        # (presumably hi/lo halves — confirm against the spec section).
        # Both were originally named "dtx", which is ambiguous for scapy's
        # name-based field access; the second bit is renamed "dtxLo" so each
        # bit is addressable. Wire layout is unchanged.
        BitField("dtx", 0x0, 1),
        BitField("pwrc", 0x0, 1),
        BitField("dtxLo", 0x0, 1),
        BitField("rLinkTout", 0x0, 4)
    ]
class CellSelectionParametersHdr(Packet):
    """ Cell Selection Parameters Section 10.5.2.4 """
    name = "Cell Selection Parameters"
    fields_desc = [
        BitField("eightBitCSP", None, 1),
        XBitField("ieiCSP", None, 7),
        BitField("cellReselect", 0x0, 3),
        BitField("msTxPwrMax", 0x0, 5),
        BitField("acs", None, 1),
        BitField("neci", None, 1),
        BitField("rxlenAccMin", None, 6)
    ]
class MacModeAndChannelCodingRequestHdr(Packet):
    """ MAC Mode and Channel Coding Requested Section 10.5.2.4a """
    name = "MAC Mode and Channel Coding Requested"
    # Half-octet IE: 4-bit IEI plus two 2-bit values, 1 byte total.
    fields_desc = [
        XBitField("ieiMMACCR", None, 4),
        BitField("macMode", 0x0, 2),
        BitField("cs", 0x0, 2)
    ]
class ChannelDescriptionHdr(Packet):
    """ Channel Description Section 10.5.2.5 """
    name = "Channel Description"
    fields_desc = [
        BitField("eightBitCD", None, 1),
        XBitField("ieiCD", None, 7),
        BitField("channelTyp", 0x0, 5),
        BitField("tn", 0x0, 3),
        BitField("tsc", 0x0, 3),
        # Only the hopping (h=1) layout is modelled here: MAIO + HSN.
        # The commented-out fields below are the h=0 (single ARFCN) layout.
        BitField("h", 0x1, 1),
        # if h=1 maybe we find a better solution here...
        BitField("maioHi", 0x0, 4),
        BitField("maioLo", 0x0, 2),
        BitField("hsn", 0x0, 6)
        #BitField("spare", 0x0, 2),
        #BitField("arfcnHigh", 0x0, 2),
        #ByteField("arfcnLow", 0x0)
    ]
class ChannelDescription2Hdr(Packet):
    """ Channel Description 2 Section 10.5.2.5a """
    name = "Channel Description 2"
    fields_desc = [
        BitField("eightBitCD2", None, 1),
        XBitField("ieiCD2", None, 7),
        BitField("channelTyp", 0x0, 5),
        BitField("tn", 0x0, 3),
        BitField("tsc", 0x0, 3),
        # Only the non-hopping (h=0) layout is modelled here: single ARFCN.
        # The commented-out fields below are the h=1 (hopping) layout.
        BitField("h", 0x0, 1),
        # if h=1
        # BitField("maioHi", 0x0, 4),
        # BitField("maioLo", 0x0, 2),
        # BitField("hsn", 0x0, 6)
        BitField("spare", 0x0, 2),
        BitField("arfcnHigh", 0x0, 2),
        ByteField("arfcnLow", 0x0)
    ]
class ChannelModeHdr(Packet):
    """ Channel Mode Section 10.5.2.6 """
    name = "Channel Mode"
    fields_desc = [
        BitField("eightBitCM", None, 1),
        XBitField("ieiCM", None, 7),
        ByteField("mode", 0x0)
    ]
class ChannelMode2Hdr(Packet):
    """ Channel Mode 2 Section 10.5.2.7 """
    name = "Channel Mode 2"
    fields_desc = [
        BitField("eightBitCM2", None, 1),
        XBitField("ieiCM2", None, 7),
        ByteField("mode", 0x0)
    ]
class ChannelNeededHdr(Packet):
    """ Channel Needed Section 10.5.2.8 """
    name = "Channel Needed"
    # Half-octet IE: 4-bit IEI plus two 2-bit channel indicators.
    fields_desc = [
        XBitField("ieiCN", None, 4),
        BitField("channel2", 0x0, 2),
        BitField("channel1", 0x0, 2),
    ]
class ChannelRequestDescriptionHdr(Packet):
    """Channel Request Description Section 10.5.2.8a """
    name = "Channel Request Description"
    # Two mutually exclusive layouts selected by the 1-bit "mt" field:
    # mt == 0 -> 39 spare bits; mt == 1 -> priority/RLC/LLC parameters.
    # NOTE(review): "spare" is used in both branches; although only one
    # branch is active at a time, duplicate names are ambiguous for scapy's
    # name-based field access.
    fields_desc = [
        BitField("eightBitCRD", None, 1),
        XBitField("ieiCRD", None, 7),
        BitField("mt", 0x0, 1),
        ConditionalField(BitField("spare", 0x0, 39),
                         lambda pkt: pkt.mt == 0),
        ConditionalField(BitField("spare", 0x0, 3),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(BitField("priority", 0x0, 2),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(BitField("rlcMode", 0x0, 1),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(BitField("llcFrame", 0x1, 1),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("reqBandMsb", 0x0),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("reqBandLsb", 0x0),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("rlcMsb", 0x0),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("rlcLsb", 0x0),
                         lambda pkt: pkt.mt == 1)
    ]
class CipherModeSettingHdr(Packet):
    """Cipher Mode Setting Section 10.5.2.9 """
    name = "Cipher Mode Setting"
    # Half-octet IE: 4-bit IEI, algorithm id, start-ciphering flag.
    fields_desc = [
        XBitField("ieiCMS", None, 4),
        BitField("algoId", 0x0, 3),
        BitField("sc", 0x0, 1),
    ]
class CipherResponseHdr(Packet):
    """Cipher Response Section 10.5.2.10 """
    name = "Cipher Response"
    # Half-octet IE: 4-bit IEI, 3 spare bits, cipher-response flag.
    fields_desc = [
        XBitField("ieiCR", None, 4),
        BitField("spare", 0x0, 3),
        BitField("cr", 0x0, 1),
    ]
# This packet fixes the problem with the 1/2 Byte length. Concatenation
# of cipherModeSetting and cipherResponse
class CipherModeSettingAndcipherResponse(Packet):
    # Workaround for the half-octet IE problem: the two 4-bit IEs above are
    # merged into one byte-aligned packet (no IEI fields).
    name = "Cipher Mode Setting And Cipher Response"
    fields_desc = [
        BitField("algoId", 0x0, 3),
        BitField("sc", 0x0, 1),
        BitField("spare", 0x0, 3),
        BitField("cr", 0x0, 1)
    ]
class ControlChannelDescriptionHdr(Packet):
    """Control Channel Description Section 10.5.2.11 """
    name = "Control Channel Description"
    fields_desc = [
        BitField("eightBitCCD", None, 1),
        XBitField("ieiCCD", None, 7),
        BitField("spare", 0x0, 1),
        BitField("att", 0x0, 1),
        BitField("bsAgBlksRes", 0x0, 3),
        BitField("ccchConf", 0x0, 3),
        # This bit was originally also named "spare", colliding with the
        # field above — scapy's name-based access then only ever reached the
        # first one. Renamed to "spare0" (joins spare1..spare4 below); the
        # wire layout is unchanged.
        BitField("spare0", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("spare3", 0x0, 1),
        BitField("spare4", 0x0, 1),
        BitField("bsPaMfrms", 0x0, 3),
        ByteField("t3212", 0x0)
    ]
class FrequencyChannelSequenceHdr(Packet):
    """Frequency Channel Sequence Section 10.5.2.12"""
    name = "Frequency Channel Sequence"
    fields_desc = [
        BitField("eightBitFCS", None, 1),
        XBitField("ieiFCS", None, 7),
        BitField("spare", 0x0, 1),
        BitField("lowestArfcn", 0x0, 7),
        # 16 consecutive 4-bit increments relative to the lowest ARFCN.
        BitField("skipArfcn01", 0x0, 4),
        BitField("skipArfcn02", 0x0, 4),
        BitField("skipArfcn03", 0x0, 4),
        BitField("skipArfcn04", 0x0, 4),
        BitField("skipArfcn05", 0x0, 4),
        BitField("skipArfcn06", 0x0, 4),
        BitField("skipArfcn07", 0x0, 4),
        BitField("skipArfcn08", 0x0, 4),
        BitField("skipArfcn09", 0x0, 4),
        BitField("skipArfcn10", 0x0, 4),
        BitField("skipArfcn11", 0x0, 4),
        BitField("skipArfcn12", 0x0, 4),
        BitField("skipArfcn13", 0x0, 4),
        BitField("skipArfcn14", 0x0, 4),
        BitField("skipArfcn15", 0x0, 4),
        BitField("skipArfcn16", 0x0, 4)
    ]
class FrequencyListHdr(Packet):
    """Frequency List Section 10.5.2.13"""
    name = "Frequency List"
    # Problem:
    # There are several formats for the Frequency List information
    # element, distinguished by the "format indicator" subfield.
    # Some formats are frequency bit maps, the others use a special encoding
    # scheme. Only the bit-map-0 layout is modelled below.
    fields_desc = [
        BitField("eightBitFL", None, 1),
        XBitField("ieiFL", None, 7),
        XByteField("lengthFL", None),
        BitField("formatID", 0x0, 2),
        BitField("spare", 0x0, 2),
        # Bit map 0: one flag bit per ARFCN, 124 down to 1.
        BitField("arfcn124", 0x0, 1),
        BitField("arfcn123", 0x0, 1),
        BitField("arfcn122", 0x0, 1),
        BitField("arfcn121", 0x0, 1),
        # Each ByteField covers the 8 ARFCN bits starting at the named one.
        ByteField("arfcn120", 0x0),
        ByteField("arfcn112", 0x0),
        ByteField("arfcn104", 0x0),
        ByteField("arfcn96", 0x0),
        ByteField("arfcn88", 0x0),
        ByteField("arfcn80", 0x0),
        ByteField("arfcn72", 0x0),
        ByteField("arfcn64", 0x0),
        ByteField("arfcn56", 0x0),
        ByteField("arfcn48", 0x0),
        ByteField("arfcn40", 0x0),
        ByteField("arfcn32", 0x0),
        ByteField("arfcn24", 0x0),
        ByteField("arfcn16", 0x0),
        ByteField("arfcn8", 0x0)
    ]
class FrequencyShortListHdr(Packet):
    """Frequency Short List Section 10.5.2.14"""
    name = "Frequency Short List"
    # len is 10
    # This element is encoded exactly as the Frequency List information
    # element, except that it has a fixed length instead of a
    # variable length and does not contain a length indicator and that it
    # shall not be encoded in bitmap 0 format. Modelled as opaque bytes.
    fields_desc = [
        ByteField("ieiFSL", 0x0),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0)
    ]
class FrequencyShortListHdr2(Packet):
    """Frequency Short List2 Section 10.5.2.14a"""
    name = "Frequency Short List 2"
    # Fixed 8-octet element, modelled as opaque bytes (no IEI, no length).
    fields_desc = [
        ByteField("byte1", 0x0),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0)
    ]
# len 4 to 13
class GroupChannelDescriptionHdr(Packet):
    """Group Channel Description Section 10.5.2.14b"""
    name = "Group Channel Description"
    fields_desc = [
        BitField("eightBitGCD", None, 1),
        XBitField("ieiGCD", None, 7),
        XByteField("lengthGCD", None),
        BitField("channelType", 0x0, 5),
        BitField("tn", 0x0, 3),
        BitField("tsc", 0x0, 3),
        BitField("h", 0x0, 1),
        # if h == 0 the packet looks the following way:
        ConditionalField(BitField("spare", 0x0, 2),
                         lambda pkt: pkt. h == 0x0),
        ConditionalField(BitField("arfcnHi", 0x0, 2),
                         lambda pkt: pkt. h == 0x0),
        ConditionalField(ByteField("arfcnLo", None),
                         lambda pkt: pkt. h == 0x0),
        # if h == 1 the packet looks the following way:
        ConditionalField(BitField("maioHi", 0x0, 4),
                         lambda pkt: pkt. h == 0x1),
        ConditionalField(BitField("maioLo", None, 2),
                         lambda pkt: pkt. h == 0x1),
        ConditionalField(BitField("hsn", None, 6),
                         lambda pkt: pkt. h == 0x1),
        # finished with conditional fields
        # Optional mobile-allocation octets; trailing None fields are
        # stripped in post_build.
        ByteField("maC6", None),
        ByteField("maC7", None),
        ByteField("maC8", None),
        ByteField("maC9", None),
        ByteField("maC10", None),
        ByteField("maC11", None),
        ByteField("maC12", None),
        ByteField("maC13", None),
        ByteField("maC14", None)
    ]

    def post_build(self, p, pay):
        # Clamp the encoded IE to its legal size (4..13 octets): adapt()
        # returns (number of trailing unset octets, resulting length value).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(4, 13, a, self.fields_desc)
        # Fill in the length octet only if the user did not set one.
        if self.lengthGCD is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class GprsResumptionHdr(Packet):
    """GPRS Resumption Section 10.5.2.14c"""
    name = "GPRS Resumption"
    # Half-octet IE: 4-bit IEI, 3 spare bits, ACK flag.
    fields_desc = [
        XBitField("ieiGR", None, 4),
        BitField("spare", 0x0, 3),
        BitField("ack", 0x0, 1)
    ]
class HandoverReferenceHdr(Packet):
    """Handover Reference Section 10.5.2.15"""
    name = "Handover Reference"
    fields_desc = [
        BitField("eightBitHR", None, 1),
        XBitField("ieiHR", None, 7),
        ByteField("handoverRef", 0x0)
    ]
# len 1-12
class IaRestOctets(Packet):
    """IA Rest Octets Section 10.5.2.16"""
    name = "IA Rest Octets"
    # Variable 1..12 octets, modelled as opaque bytes; trailing None
    # fields are stripped in post_build.
    fields_desc = [
        ByteField("ieiIRO", 0x0),
        # FIXME brainfuck packet
        XByteField("lengthIRO", None),
        ByteField("byte2", None),
        ByteField("byte3", None),
        ByteField("byte4", None),
        ByteField("byte5", None),
        ByteField("byte6", None),
        ByteField("byte7", None),
        ByteField("byte8", None),
        ByteField("byte9", None),
        ByteField("byte10", None),
        ByteField("byte11", None)
    ]

    def post_build(self, p, pay):
        # Clamp to the legal size (1..12 octets) and fill in the length
        # octet if the user did not set one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(1, 12, a, self.fields_desc)
        if self.lengthIRO is None:
            # adapt() can report a negative length for the minimal packet;
            # floor it at zero.
            if res[1] < 0:  # FIXME better fix
                res[1] = 0
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class IraRestOctetsHdr(Packet):
    """IAR Rest Octets Section 10.5.2.17"""
    name = "IAR Rest Octets"
    # Three fixed padding octets encoded bit-by-bit (defaults spell the
    # standard 0x2B padding pattern).
    fields_desc = [
        BitField("eightBitIRO", None, 1),
        XBitField("ieiIRO", None, 7),
        BitField("spare01", 0x0, 1),
        BitField("spare02", 0x0, 1),
        BitField("spare03", 0x1, 1),
        BitField("spare04", 0x0, 1),
        BitField("spare05", 0x1, 1),
        BitField("spare06", 0x0, 1),
        BitField("spare07", 0x1, 1),
        BitField("spare08", 0x1, 1),
        BitField("spare09", 0x0, 1),
        BitField("spare10", 0x0, 1),
        BitField("spare11", 0x1, 1),
        BitField("spare12", 0x0, 1),
        BitField("spare13", 0x1, 1),
        BitField("spare14", 0x0, 1),
        BitField("spare15", 0x1, 1),
        BitField("spare16", 0x1, 1),
        BitField("spare17", 0x0, 1),
        BitField("spare18", 0x0, 1),
        BitField("spare19", 0x1, 1),
        BitField("spare20", 0x0, 1),
        BitField("spare21", 0x1, 1),
        BitField("spare22", 0x0, 1),
        BitField("spare23", 0x1, 1),
        BitField("spare24", 0x1, 1)
    ]
# len is 1 to 5; the element is variable-size but carries no length
# field, so only the fixed first octet plus optional spares are modelled
class IaxRestOctetsHdr(Packet):
    """IAX Rest Octets Section 10.5.2.18"""
    name = "IAX Rest Octets"
    # One padding octet encoded bit-by-bit (defaults spell 0x2B) plus up to
    # three optional spare octets.
    fields_desc = [
        BitField("eightBitIRO", None, 1),
        XBitField("ieiIRO", None, 7),
        BitField("spare01", 0x0, 1),
        BitField("spare02", 0x0, 1),
        BitField("spare03", 0x1, 1),
        BitField("spare04", 0x0, 1),
        BitField("spare05", 0x1, 1),
        BitField("spare06", 0x0, 1),
        BitField("spare07", 0x1, 1),
        BitField("spare08", 0x1, 1),
        ByteField("spareB1", None),
        ByteField("spareB2", None),
        ByteField("spareB3", None)
    ]
class L2PseudoLengthHdr(Packet):
    """L2 Pseudo Length Section 10.5.2.19"""
    name = "L2 Pseudo Length"
    fields_desc = [
        BitField("eightBitPL", None, 1),
        XBitField("ieiPL", None, 7),
        # 6-bit pseudo length followed by the fixed trailer bits 0,1.
        BitField("l2pLength", None, 6),
        BitField("bit2", 0x0, 1),
        BitField("bit1", 0x1, 1)
    ]
class MeasurementResultsHdr(Packet):
    """Measurement Results Section 10.5.2.20"""
    name = "Measurement Results"
    # Serving-cell measurements followed by up to six neighbour-cell
    # entries (C1..C6), many values split across octet boundaries into
    # Hi/Lo parts.
    # NOTE(review): "bsicC1Hi" appears twice (3 bits and 2 bits) — the
    # second occurrence looks like a copy-paste slip and shadows the first
    # in scapy's name-based access; "bscicC2Lo"/"bscicC2Hi" also look
    # misspelled. Renaming would change the public field API, so they are
    # only flagged here — verify against the spec before fixing.
    fields_desc = [
        BitField("eightBitMR", None, 1),
        XBitField("ieiMR", None, 7),
        BitField("baUsed", 0x0, 1),
        BitField("dtxUsed", 0x0, 1),
        BitField("rxLevFull", 0x0, 6),
        BitField("spare", 0x0, 1),
        BitField("measValid", 0x0, 1),
        BitField("rxLevSub", 0x0, 6),
        BitField("spare0", 0x0, 1),
        BitField("rxqualFull", 0x0, 3),
        BitField("rxqualSub", 0x0, 3),
        BitField("noNcellHi", 0x0, 1),
        BitField("noNcellLo", 0x0, 2),
        BitField("rxlevC1", 0x0, 6),
        BitField("bcchC1", 0x0, 5),
        BitField("bsicC1Hi", 0x0, 3),
        BitField("bsicC1Lo", 0x0, 3),
        BitField("rxlevC2", 0x0, 5),
        BitField("rxlevC2Lo", 0x0, 1),
        BitField("bcchC2", 0x0, 5),
        BitField("bsicC1Hi", 0x0, 2),
        BitField("bscicC2Lo", 0x0, 4),
        BitField("bscicC2Hi", 0x0, 4),
        BitField("rxlevC3Lo", 0x0, 2),
        BitField("bcchC3", 0x0, 5),
        BitField("rxlevC3Hi", 0x0, 1),
        BitField("bsicC3Lo", 0x0, 5),
        BitField("bsicC3Hi", 0x0, 3),
        BitField("rxlevC4Lo", 0x0, 3),
        BitField("bcchC4", 0x0, 5),
        BitField("bsicC4", 0x0, 6),
        BitField("rxlevC5Hi", 0x0, 2),
        BitField("rxlevC5Lo", 0x0, 4),
        BitField("bcchC5Hi", 0x0, 4),
        BitField("bcchC5Lo", 0x0, 1),
        BitField("bsicC5", 0x0, 6),
        BitField("rxlevC6", 0x0, 1),
        BitField("rxlevC6Lo", 0x0, 5),
        BitField("bcchC6Hi", 0x0, 3),
        BitField("bcchC6Lo", 0x0, 3),
        BitField("bsicC6", 0x0, 5)
    ]
class GprsMeasurementResultsHdr(Packet):
    """GPRS Measurement Results Section 10.5.2.20a"""
    name = "GPRS Measurement Results"
    fields_desc = [
        BitField("eightBitGMR", None, 1),
        XBitField("ieiGMR", None, 7),
        BitField("cValue", 0x0, 6),
        BitField("rxqualHi", 0x0, 2),
        BitField("rxqL", 0x0, 1),
        BitField("spare", 0x0, 1),
        BitField("signVar", 0x0, 6)
    ]
# len 3 to 10
class MobileAllocationHdr(Packet):
    """Mobile Allocation Section 10.5.2.21"""
    name = "Mobile Allocation"
    # Variable 3..10 octets; trailing None fields are stripped in
    # post_build.
    fields_desc = [
        BitField("eightBitMA", None, 1),
        XBitField("ieiMA", None, 7),
        XByteField("lengthMA", None),
        # NOTE(review): default 0x12 for the first (mandatory) octet is
        # unexplained — presumably an arbitrary example value.
        ByteField("maC64", 0x12),
        ByteField("maC56", None),  # optional fields start here
        ByteField("maC48", None),
        ByteField("maC40", None),
        ByteField("maC32", None),
        ByteField("maC24", None),
        ByteField("maC16", None),
        ByteField("maC8", None)
    ]

    def post_build(self, p, pay):
        # Clamp to the legal size (3..10 octets) and fill in the length
        # octet if the user did not set one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 10, a, self.fields_desc)
        if self.lengthMA is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class MobileTimeDifferenceHdr(Packet):
    """Mobile Time Difference Section 10.5.2.21a"""
    name = "Mobile Time Difference"
    fields_desc = [
        BitField("eightBitMTD", None, 1),
        XBitField("ieiMTD", None, 7),
        XByteField("lengthMTD", 0x5),
        # 21-bit value split across three octets, padded with spare bits.
        ByteField("valueHi", 0x0),
        ByteField("valueCnt", 0x0),
        BitField("valueLow", 0x0, 5),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1)
    ]
# min 4 octets max 8
class MultiRateConfigurationHdr(Packet):
    """ MultiRate configuration Section 10.5.2.21aa"""
    name = "MultiRate Configuration"
    fields_desc = [
        BitField("eightBitMRC", None, 1),
        XBitField("ieiMRC", None, 7),
        XByteField("lengthMRC", None),
        BitField("mrVersion", 0x0, 3),
        BitField("spare", 0x0, 1),
        BitField("icmi", 0x0, 1),
        # BUGFIX: three fields were all named "spare".  post_build reads
        # every field back via getattr(self, fld.name), which always
        # resolves a duplicated name to the FIRST field — so the optional
        # 2-bit spare (default None) below was seen as 0x0 and the
        # trailing-octet trimming / length computation came out wrong.
        # Renamed to "spare1"/"spare2"; the wire layout is unchanged.
        BitField("spare1", 0x0, 1),
        BitField("startMode", 0x0, 2),
        ByteField("amrCodec", 0x0),
        BitField("spare2", None, 2),
        BitField("threshold1", None, 6),
        BitField("hysteresis1", None, 4),
        BitField("threshold2", None, 4),
        BitField("threshold2cnt", None, 2),
        BitField("hysteresis2", None, 4),
        BitField("threshold3", None, 2),
        BitField("threshold3cnt", None, 4),
        BitField("hysteresis3", None, 4)
    ]

    def post_build(self, p, pay):
        # Clamp to the legal size (4..8 octets) and fill in the length
        # octet if the user did not set one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(4, 8, a, self.fields_desc)
        if self.lengthMRC is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 3 to 12
class MultislotAllocationHdr(Packet):
    """Multislot Allocation Section 10.5.2.21b"""
    name = "Multislot Allocation"
    fields_desc = [
        BitField("eightBitMSA", None, 1),
        XBitField("ieiMSA", None, 7),
        XByteField("lengthMSA", None),
        # ext0 == 0 signals that the UA octet follows (extension-bit
        # convention); default 1 means only the DA octet is present.
        BitField("ext0", 0x1, 1),
        BitField("da", 0x0, 7),
        ConditionalField(BitField("ext1", 0x1, 1),  # optional
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("ua", 0x0, 7),
                         lambda pkt: pkt.ext0 == 0),
        ByteField("chan1", None),
        ByteField("chan2", None),
        ByteField("chan3", None),
        ByteField("chan4", None),
        ByteField("chan5", None),
        ByteField("chan6", None),
        ByteField("chan7", None),
        ByteField("chan8", None)
    ]

    def post_build(self, p, pay):
        # Trim trailing unset channel octets (legal size 3..12), then — in
        # contrast to the sibling classes — derive the length from the
        # truncated buffer itself (payload after IEI + length octets).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 12, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthMSA is None:
            p = p[:1] + struct.pack(">B", len(p)-2) + p[2:]
        return p + pay
class NcModeHdr(Packet):
    """NC mode Section 10.5.2.21c"""
    name = "NC Mode"
    # Half-octet IE: 4-bit IEI, 2 spare bits, 2-bit NC mode.
    fields_desc = [
        XBitField("ieiNM", None, 4),
        BitField("spare", 0x0, 2),
        BitField("ncMode", 0x0, 2)
    ]
# Fix for len problem
# concatenation NC Mode And Spare Half Octets
class NcModeAndSpareHalfOctets(Packet):
    # Workaround for the half-octet IE problem: NC Mode merged with a spare
    # half octet into one byte-aligned packet (no IEI field).
    name = "NC Mode And Spare Half Octets"
    fields_desc = [
        BitField("spare", 0x0, 2),
        BitField("ncMode", 0x0, 2),
        BitField("spareHalfOctets", 0x0, 4)
    ]
class NeighbourCellsDescriptionHdr(Packet):
    """Neighbour Cells Description Section 10.5.2.22"""
    name = "Neighbour Cells Description"
    fields_desc = [
        BitField("eightBitNCD", None, 1),
        XBitField("ieiNCD", None, 7),
        # 128-bit map; bits 126/125 carry the EXT-IND and BA-IND flags.
        BitField("bit128", 0x0, 1),
        BitField("bit127", 0x0, 1),
        BitField("extInd", 0x0, 1),
        BitField("baInd", 0x0, 1),
        BitField("bit124", 0x0, 1),
        BitField("bit123", 0x0, 1),
        BitField("bit122", 0x0, 1),
        BitField("bit121", 0x0, 1),
        BitField("120bits", 0x0, 120)
    ]
class NeighbourCellsDescription2Hdr(Packet):
    """Neighbour Cells Description 2 Section 10.5.2.22a"""
    name = "Neighbour Cells Description 2"
    fields_desc = [
        BitField("eightBitNCD2", None, 1),
        XBitField("ieiNCD2", None, 7),
        # 128-bit map; bits 127/126 carry the multiband indicator and bit
        # 125 the BA-IND flag.
        BitField("bit128", 0x0, 1),
        BitField("multiband", 0x0, 2),
        BitField("baInd", 0x0, 1),
        BitField("bit124", 0x0, 1),
        BitField("bit123", 0x0, 1),
        BitField("bit122", 0x0, 1),
        BitField("bit121", 0x0, 1),
        BitField("120bits", 0x0, 120)
    ]
class NtNRestOctets(Packet):
    """NT/N Rest Octets Section 10.5.2.22c"""
    name = "NT/N Rest Octets"
    # Single octet, no IEI/length header.
    fields_desc = [
        BitField("nln", 0x0, 2),
        BitField("ncnInfo", 0x0, 4),
        BitField("spare", 0x0, 2)
    ]
#
# The following packet has no length info!
#
# len 1-18
class P1RestOctets(Packet):
    """P1 Rest Octets Section 10.5.2.23"""
    name = "P1 Rest Octets"
    # Variable 1..18 octets; no IEI or length header (rest-octets IE).
    fields_desc = [
        BitField("nln", 0x0, 2),
        BitField("nlnStatus", 0x0, 1),
        BitField("prio1", 0x0, 3),
        BitField("prio2", 0x0, 3),
        # optional
        BitField("pageIndication1", 0x0, 1),
        BitField("pageIndication2", 0x0, 1),
        BitField("spare", 0x0, 5),
        ByteField("spareB1", None),
        ByteField("spareB2", None),
        ByteField("spareB3", None),
        ByteField("spareB4", None),
        ByteField("spareB5", None),
        ByteField("spareB6", None),
        ByteField("spareB7", None),
        ByteField("spareB8", None),
        ByteField("spareB9", None),
        ByteField("spareB10", None),
        ByteField("spareB11", None),
        ByteField("spareB12", None),
        ByteField("spareB13", None),
        ByteField("spareB14", None),
        ByteField("spareB15", None),
        ByteField("spareB16", None),
    ]
# len 2-12
class P2RestOctets(Packet):
    """P2 Rest Octets Section 10.5.2.24"""
    name = "P2 Rest Octets"
    # Variable 2..12 octets; no IEI or length header (rest-octets IE).
    fields_desc = [
        BitField("cn3", 0x0, 2),
        BitField("nln", 0x0, 2),
        BitField("nlnStatus", 0x0, 1),
        BitField("prio1", 0x0, 3),
        BitField("prio2", 0x0, 3),
        BitField("prio3", 0x0, 3),
        BitField("pageIndication3", 0x0, 1),
        BitField("spare", 0x0, 1),
        # optional (no length field!)
        ByteField("spareB1", None),
        ByteField("spareB2", None),
        ByteField("spareB3", None),
        ByteField("spareB4", None),
        ByteField("spareB5", None),
        ByteField("spareB6", None),
        ByteField("spareB7", None),
        ByteField("spareB8", None),
        ByteField("spareB9", None),
        ByteField("spareB10", None)
    ]
# len 4
class P3RestOctets(Packet):
    """P3 Rest Octets Section 10.5.2.25"""
    name = "P3 Rest Octets"
    # Fixed 4 octets; no IEI or length header (rest-octets IE).
    fields_desc = [
        BitField("cn3", 0x0, 2),
        BitField("cn4", 0x0, 2),
        BitField("nln", 0x0, 2),
        BitField("nlnStatus", 0x0, 1),
        BitField("prio1", 0x0, 3),
        BitField("prio2", 0x0, 3),
        BitField("prio3", 0x0, 3),
        BitField("prio4", 0x0, 3),
        BitField("spare", 0x0, 5)
    ]
# len 4
# strange packet, lots of valid formats
# ideas for the dynamic packets:
# 1] for user interaction: Create an interactive "builder" based on a
# Q/A process (not very scapy like)
# 2] for usage in scripts, create an alternative packet for every
# possible packet layout
#
class PacketChannelDescription(Packet):
    """Packet Channel Description Section 10.5.2.25a"""
    name = "Packet Channel Description"
    fields_desc = [
        ByteField("ieiPCD", None),
        BitField("chanType", 0x0, 5),  # This packet has multiple
                                       # possible layouts. I moddeled the first one
        BitField("tn", 0x0, 3),        # maybe build an
                                       # "interactive" builder. Like
                                       # a Q/A then propose a
                                       # packet?
        BitField("tsc", 0x0, 3),
        BitField("chooser1", 0x0, 1),
        BitField("chooser2", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("arfcn", 0x0, 10),
    ]
class DedicatedModeOrTBFHdr(Packet):
    """Dedicated mode or TBF Section 10.5.2.25b"""
    name = "Dedicated Mode or TBF"
    # Half-octet IE: 4-bit IEI plus four flags.
    fields_desc = [
        XBitField("ieiDMOT", None, 4),
        BitField("spare", 0x0, 1),
        BitField("tma", 0x0, 1),
        BitField("downlink", 0x0, 1),
        BitField("td", 0x0, 1)
    ]
# FIXME add implementation
class RrPacketUplinkAssignment(Packet):
    """RR Packet Uplink Assignment Section 10.5.2.25c"""
    name = "RR Packet Uplink Assignment"
    # Placeholder — field layout not implemented yet (see FIXME above).
    fields_desc = [
        # Fill me
    ]
class PageModeHdr(Packet):
    """Page Mode Section 10.5.2.26"""
    name = "Page Mode"
    # Half-octet IE: 4-bit IEI, 2 spare bits, 2-bit page mode.
    fields_desc = [
        XBitField("ieiPM", None, 4),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("pm", 0x0, 2)
    ]
# Fix for 1/2 len problem
# concatenation: pageMode and dedicatedModeOrTBF
class PageModeAndDedicatedModeOrTBF(Packet):
    # Workaround for the half-octet IE problem: Page Mode merged with
    # Dedicated Mode or TBF into one byte-aligned packet (no IEI fields).
    name = "Page Mode and Dedicated Mode Or TBF"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("pm", 0x0, 2),
        # This bit was originally also named "spare", colliding with the
        # first field above in scapy's name-based access; renamed to
        # "spare2". Wire layout is unchanged.
        BitField("spare2", 0x0, 1),
        BitField("tma", 0x0, 1),
        BitField("downlink", 0x0, 1),
        BitField("td", 0x0, 1)
    ]
# Fix for 1/2 len problem
# concatenation: pageMode and spareHalfOctets
class PageModeAndSpareHalfOctets(Packet):
    # Workaround for the half-octet IE problem: Page Mode merged with a
    # spare half octet into one byte-aligned packet (no IEI field).
    name = "Page Mode and Spare Half Octets"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("pm", 0x0, 2),
        BitField("spareHalfOctets", 0x0, 4)
    ]
# Fix for 1/2 len problem
# concatenation: pageMode and Channel Needed
class PageModeAndChannelNeeded(Packet):
    # Workaround for the half-octet IE problem: Page Mode merged with
    # Channel Needed into one byte-aligned packet (no IEI fields).
    name = "Page Mode and Channel Needed"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("pm", 0x0, 2),
        BitField("channel2", 0x0, 2),
        BitField("channel1", 0x0, 2)
    ]
class NccPermittedHdr(Packet):
    """NCC Permitted Section 10.5.2.27"""
    # Fixed typo in the display name ("Permited" -> "Permitted").
    name = "NCC Permitted"
    fields_desc = [
        BitField("eightBitNP", None, 1),
        XBitField("ieiNP", None, 7),
        ByteField("nccPerm", 0x0)
    ]
class PowerCommandHdr(Packet):
    """Power Command Section 10.5.2.28"""
    name = "Power Command"
    fields_desc = [
        BitField("eightBitPC", None, 1),
        XBitField("ieiPC", None, 7),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("powerLvl", 0x0, 5)
    ]
class PowerCommandAndAccessTypeHdr(Packet):
    """Power Command and access type Section 10.5.2.28a"""
    name = "Power Command and Access Type"
    fields_desc = [
        BitField("eightBitPCAAT", None, 1),
        XBitField("ieiPCAAT", None, 7),
        BitField("atc", 0x0, 1),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("powerLvl", 0x0, 5)
    ]
class RachControlParametersHdr(Packet):
    """RACH Control Parameters Section 10.5.2.29"""
    name = "RACH Control Parameters"
    fields_desc = [
        BitField("eightBitRCP", None, 1),
        XBitField("ieiRCP", None, 7),
        BitField("maxRetrans", 0x0, 2),
        BitField("txInteger", 0x0, 4),
        BitField("cellBarrAccess", 0x0, 1),
        BitField("re", 0x0, 1),
        # Access-control bit per access class, 15 down to 0.
        BitField("ACC15", 0x0, 1),
        BitField("ACC14", 0x0, 1),
        BitField("ACC13", 0x0, 1),
        BitField("ACC12", 0x0, 1),
        BitField("ACC11", 0x0, 1),
        BitField("ACC10", 0x0, 1),
        BitField("ACC09", 0x0, 1),
        BitField("ACC08", 0x0, 1),
        BitField("ACC07", 0x0, 1),
        BitField("ACC06", 0x0, 1),
        BitField("ACC05", 0x0, 1),
        BitField("ACC04", 0x0, 1),
        BitField("ACC03", 0x0, 1),
        BitField("ACC02", 0x0, 1),
        BitField("ACC01", 0x0, 1),
        BitField("ACC00", 0x0, 1),
    ]
class RequestReferenceHdr(Packet):
    """Request Reference Section 10.5.2.30"""
    name = "Request Reference"
    fields_desc = [
        BitField("eightBitRR", None, 1),
        XBitField("ieiRR", None, 7),
        ByteField("ra", 0x0),
        # Frame-number components; T3 is split across an octet boundary.
        BitField("t1", 0x0, 5),
        BitField("t3Hi", 0x0, 3),
        BitField("t3Lo", 0x0, 3),
        BitField("t2", 0x0, 5)
    ]
class RrCauseHdr(Packet):
    """RR Cause Section 10.5.2.31"""
    name = "RR Cause"
    fields_desc = [
        BitField("eightBitRC", None, 1),
        XBitField("ieiRC", None, 7),
        ByteField("rrCause", 0x0)
    ]
class Si1RestOctets(Packet):
    """SI 1 Rest Octets Section 10.5.2.32"""
    name = "SI 1 Rest Octets"
    fields_desc = [
        ByteField("nchPos", 0x0)
    ]
class Si2bisRestOctets(Packet):
    """SI 2bis Rest Octets Section 10.5.2.33"""
    name = "SI 2bis Rest Octets"
    fields_desc = [
        ByteField("spare", 0x0)
    ]
class Si2terRestOctets(Packet):
    """SI 2ter Rest Octets Section 10.5.2.33a"""
    name = "SI 2ter Rest Octets"
    # Fixed 4 spare octets.
    fields_desc = [
        ByteField("spare1", 0x0),
        ByteField("spare2", 0x0),
        ByteField("spare3", 0x0),
        ByteField("spare4", 0x0)
    ]
# len 5
class Si3RestOctets(Packet):
    """SI 3 Rest Octets Section 10.5.2.34"""
    name = "SI 3 Rest Octets"
    # Fixed 5 octets, modelled as opaque bytes.
    fields_desc = [
        ByteField("byte1", 0x0),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0)
    ]
# len 1 to 11
class Si4RestOctets(Packet):
    """SI 4 Rest Octets Section 10.5.2.35"""
    name = "SI 4 Rest Octets"
    # Variable 1..11 octets, modelled as opaque bytes; trailing None
    # fields are stripped in post_build.
    fields_desc = [
        XByteField("lengthSI4", None),
        ByteField("byte2", None),
        ByteField("byte3", None),
        ByteField("byte4", None),
        ByteField("byte5", None),
        ByteField("byte6", None),
        ByteField("byte7", None),
        ByteField("byte8", None),
        ByteField("byte9", None),
        ByteField("byte10", None),
        ByteField("byte11", None)
    ]

    def post_build(self, p, pay):
        # Clamp to the legal size (1..11 octets) and fill in the length
        # octet (first byte here — no IEI octet) if the user did not set it.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(1, 11, a, self.fields_desc, 1)
        if self.lengthSI4 is None:
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        # BUGFIX: was "len(p) is 1" — identity comparison between ints only
        # worked via CPython's small-integer caching; use equality instead.
        if len(p) == 1:  # length of this packet can be 0,
            p = ''       # but the IE is mandatory 0_o
        return p + pay
class Si6RestOctets(Packet):
    """SI 6 Rest Octets Section 10.5.2.35a"""
    # BUGFIX: the display name was copy-pasted as "SI 4 Rest Octets".
    name = "SI 6 Rest Octets"
    # Placeholder — field layout not implemented yet.
    fields_desc = [
        # FIXME
    ]
# len 21
class Si7RestOctets(Packet):
    """SI 7 Rest Octets Section 10.5.2.36"""
    name = "SI 7 Rest Octets"
    # Fixed 21 octets (0x15), modelled as opaque bytes.
    fields_desc = [
        # FIXME
        XByteField("lengthSI7", 0x15),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0),
        ByteField("byte15", 0x0),
        ByteField("byte16", 0x0),
        ByteField("byte17", 0x0),
        ByteField("byte18", 0x0),
        ByteField("byte19", 0x0),
        ByteField("byte20", 0x0),
        ByteField("byte21", 0x0)
    ]
# len 21
class Si8RestOctets(Packet):
    """SI 8 Rest Octets Section 10.5.2.37"""
    name = "SI 8 Rest Octets"
    # Fixed 21 octets (0x15), modelled as opaque bytes.
    fields_desc = [
        # FIXME
        XByteField("lengthSI8", 0x15),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0),
        ByteField("byte15", 0x0),
        ByteField("byte16", 0x0),
        ByteField("byte17", 0x0),
        ByteField("byte18", 0x0),
        ByteField("byte19", 0x0),
        ByteField("byte20", 0x0),
        ByteField("byte21", 0x0)
    ]
#len 17
class Si9RestOctets(Packet):
    """SI 9 Rest Octets Section 10.5.2.37a"""
    name = "SI 9 Rest Octets"
    # Fixed 17 octets (0x11), modelled as opaque bytes.
    fields_desc = [
        # FIXME
        XByteField("lengthSI9", 0x11),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0),
        ByteField("byte15", 0x0),
        ByteField("byte16", 0x0),
        ByteField("byte17", 0x0)
    ]
# len 21
class Si13RestOctets(Packet):
    """SI 13 Rest Octets Section 10.5.2.37b"""
    name = "SI 13 Rest Octets"
    # Fixed 21 octets (0x15), modelled as opaque bytes.
    # NOTE(review): the length field is misnamed "lengthSI3" (should read
    # "lengthSI13") — left unchanged because field names are public API.
    fields_desc = [
        # FIXME
        XByteField("lengthSI3", 0x15),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0),
        ByteField("byte15", 0x0),
        ByteField("byte16", 0x0),
        ByteField("byte17", 0x0),
        ByteField("byte18", 0x0),
        ByteField("byte19", 0x0),
        ByteField("byte20", 0x0),
        ByteField("byte21", 0x0)
    ]
# 10.5.2.37c [spare]
# 10.5.2.37d [spare]
# len 21
class Si16RestOctets(Packet):
    """SI 16 Rest Octets Section 10.5.2.37e"""
    name = "SI 16 Rest Octets"
    # Fixed 21 octets (0x15), modelled as opaque bytes.
    fields_desc = [
        # FIXME
        XByteField("lengthSI16", 0x15),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0),
        ByteField("byte15", 0x0),
        ByteField("byte16", 0x0),
        ByteField("byte17", 0x0),
        ByteField("byte18", 0x0),
        ByteField("byte19", 0x0),
        ByteField("byte20", 0x0),
        ByteField("byte21", 0x0)
    ]
# len 21
class Si17RestOctets(Packet):
    """SI 17 Rest Octets Section 10.5.2.37f"""
    name = "SI 17 Rest Octets"
    # Fixed 21 octets (0x15), modelled as opaque bytes.
    fields_desc = [
        # FIXME
        XByteField("lengthSI17", 0x15),
        ByteField("byte2", 0x0),
        ByteField("byte3", 0x0),
        ByteField("byte4", 0x0),
        ByteField("byte5", 0x0),
        ByteField("byte6", 0x0),
        ByteField("byte7", 0x0),
        ByteField("byte8", 0x0),
        ByteField("byte9", 0x0),
        ByteField("byte10", 0x0),
        ByteField("byte11", 0x0),
        ByteField("byte12", 0x0),
        ByteField("byte13", 0x0),
        ByteField("byte14", 0x0),
        ByteField("byte15", 0x0),
        ByteField("byte16", 0x0),
        ByteField("byte17", 0x0),
        ByteField("byte18", 0x0),
        ByteField("byte19", 0x0),
        ByteField("byte20", 0x0),
        ByteField("byte21", 0x0)
    ]
class StartingTimeHdr(Packet):
    """Starting Time Section 10.5.2.38"""
    name = "Starting Time"
    # Same layout as Request Reference (10.5.2.30).
    fields_desc = [
        BitField("eightBitST", None, 1),
        XBitField("ieiST", None, 7),
        ByteField("ra", 0x0),
        BitField("t1", 0x0, 5),
        BitField("t3Hi", 0x0, 3),
        BitField("t3Lo", 0x0, 3),
        BitField("t2", 0x0, 5)
    ]
class SynchronizationIndicationHdr(Packet):
    """Synchronization Indication Section 10.5.2.39"""
    name = "Synchronization Indication"
    # Half-octet IE: 4-bit IEI, NCI and ROT flags, 2-bit SI value.
    fields_desc = [
        XBitField("ieiSI", None, 4),
        BitField("nci", 0x0, 1),
        BitField("rot", 0x0, 1),
        BitField("si", 0x0, 2)
    ]
class TimingAdvanceHdr(Packet):
    """Timing Advance Section 10.5.2.40"""
    name = "Timing Advance"
    fields_desc = [
        BitField("eightBitTA", None, 1),
        XBitField("ieiTA", None, 7),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("timingVal", 0x0, 6)
    ]
class TimeDifferenceHdr(Packet):
    """ Time Difference Section 10.5.2.41"""
    name = "Time Difference"
    fields_desc = [
        BitField("eightBitTD", None, 1),  # top bit of the IEI octet
        XBitField("ieiTD", None, 7),      # IE identifier (low 7 bits)
        # NOTE(review): default length 0x3 implies two content octets, but
        # only one timeValue octet follows — confirm against the spec.
        XByteField("lengthTD", 0x3),
        ByteField("timeValue", 0x0)
    ]
class TlliHdr(Packet):
    """ TLLI Section Section 10.5.2.41a"""
    name = "TLLI"
    fields_desc = [
        BitField("eightBitT", None, 1),  # top bit of the IEI octet
        XBitField("ieiT", None, 7),      # IE identifier (low 7 bits)
        # 4 opaque value octets carrying the TLLI
        ByteField("value", 0x0),
        ByteField("value1", 0x0),
        ByteField("value2", 0x0),
        ByteField("value3", 0x0)
    ]
class TmsiPTmsiHdr(Packet):
    """ TMSI/P-TMSI Section 10.5.2.42"""
    name = "TMSI/P-TMSI"
    fields_desc = [
        BitField("eightBitTPT", None, 1),  # top bit of the IEI octet
        XBitField("ieiTPT", None, 7),      # IE identifier (low 7 bits)
        # 4 opaque value octets carrying the TMSI / P-TMSI
        ByteField("value", 0x0),
        ByteField("value1", 0x0),
        ByteField("value2", 0x0),
        ByteField("value3", 0x0)
    ]
class VgcsTargetModeIdenticationHdr(Packet):
    """ VGCS target Mode Indication 10.5.2.42a"""
    name = "VGCS Target Mode Indication"
    fields_desc = [
        BitField("eightBitVTMI", None, 1),  # top bit of the IEI octet
        XBitField("ieiVTMI", None, 7),      # IE identifier (low 7 bits)
        XByteField("lengthVTMI", 0x2),
        # NOTE(review): "targerMode" looks like a typo for "targetMode";
        # kept as-is because the attribute name is part of the public API.
        BitField("targerMode", 0x0, 2),
        BitField("cipherKeyNb", 0x0, 4),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1)
    ]
class WaitIndicationHdr(Packet):
    """ Wait Indication Section 10.5.2.43"""
    name = "Wait Indication"
    fields_desc = [  # IEI octet followed by a single timeout-value octet
        BitField("eightBitWI", None, 1),
        XBitField("ieiWI", None, 7),
        ByteField("timeoutVal", 0x0)
    ]
# len 17
class ExtendedMeasurementResultsHdr(Packet):
    """EXTENDED MEASUREMENT RESULTS Section 10.5.2.45"""
    name = "Extended Measurement Results"
    # 21 six-bit RXLEV values for carriers 0..20; values that straddle an
    # octet boundary are split into Hi/Lo parts.
    fields_desc = [
        BitField("eightBitEMR", None, 1),  # top bit of the IEI octet
        XBitField("ieiEMR", None, 7),      # IE identifier (low 7 bits)
        BitField("scUsed", None, 1),
        BitField("dtxUsed", None, 1),
        BitField("rxLevC0", None, 6),
        BitField("rxLevC1", None, 6),
        BitField("rxLevC2Hi", None, 2),
        BitField("rxLevC2Lo", None, 4),
        BitField("rxLevC3Hi", None, 4),
        BitField("rxLevC3Lo", None, 3),
        BitField("rxLevC4", None, 5),
        BitField("rxLevC5", None, 6),
        BitField("rxLevC6Hi", None, 2),
        BitField("rxLevC6Lo", None, 4),
        BitField("rxLevC7Hi", None, 4),
        BitField("rxLevC7Lo", None, 2),
        BitField("rxLevC8", None, 6),
        BitField("rxLevC9", None, 6),
        BitField("rxLevC10Hi", None, 2),
        BitField("rxLevC10Lo", None, 4),
        BitField("rxLevC11Hi", None, 4),
        # NOTE(review): name is almost certainly a typo for "rxLevC11Lo"
        # (it pairs with rxLevC11Hi above); kept as-is to preserve the
        # public attribute name.
        BitField("rxLevC13Lo", None, 2),
        BitField("rxLevC12", None, 6),
        BitField("rxLevC13", None, 6),
        BitField("rxLevC14Hi", None, 2),
        BitField("rxLevC14Lo", None, 4),
        BitField("rxLevC15Hi", None, 4),
        BitField("rxLevC15Lo", None, 2),
        BitField("rxLevC16", None, 6),
        BitField("rxLevC17", None, 6),
        BitField("rxLevC18Hi", None, 2),
        BitField("rxLevC18Lo", None, 4),
        BitField("rxLevC19Hi", None, 4),
        BitField("rxLevC19Lo", None, 2),
        BitField("rxLevC20", None, 6)
    ]
# len 17
class ExtendedMeasurementFrequencyListHdr(Packet):
    """Extended Measurement Frequency List Section 10.5.2.46"""
    name = "Extended Measurement Frequency List"
    fields_desc = [
        BitField("eightBitEMFL", None, 1),  # top bit of the IEI octet
        XBitField("ieiEMFL", None, 7),      # IE identifier (low 7 bits)
        BitField("bit128", 0x0, 1),
        BitField("bit127", 0x0, 1),
        BitField("spare", 0x0, 1),
        BitField("seqCode", 0x0, 1),
        BitField("bit124", 0x0, 1),
        BitField("bit123", 0x0, 1),
        BitField("bit122", 0x0, 1),
        BitField("bit121", 0x0, 1),
        # NOTE(review): 8 named bits + 128 bits here = 136 bits of content;
        # a 16-octet (128-bit) bitmap would need only 120 more — confirm
        # the intended width of bitsRest against the spec.
        BitField("bitsRest", 0x0, 128)
    ]
class SuspensionCauseHdr(Packet):
    """Suspension Cause Section 10.5.2.47"""
    name = "Suspension Cause"
    fields_desc = [
        BitField("eightBitSC", None, 1),  # top bit of the IEI octet
        XBitField("ieiSC", None, 7),      # IE identifier (low 7 bits)
        ByteField("suspVal", 0x0)         # suspension cause value octet
    ]
class ApduIDHdr(Packet):
    """APDU ID Section 10.5.2.48"""
    # (docstring fixed: previously said "APDU Flags", a copy-paste slip)
    name = "Apdu Id"
    # Half-octet IE: 4-bit IEI + 4-bit APDU identifier.
    fields_desc = [
        XBitField("ieiAI", None, 4),
        BitField("id", None, 4)
    ]
class ApduFlagsHdr(Packet):
    """APDU Flags Section 10.5.2.49"""
    name = "Apdu Flags"
    # Half-octet IE: 4-bit IEI + 4 flag bits (cr, first/last segment).
    fields_desc = [
        XBitField("iei", None, 4),
        BitField("spare", 0x0, 1),
        BitField("cr", 0x0, 1),
        BitField("firstSeg", 0x0, 1),
        BitField("lastSeg", 0x0, 1)
    ]
# Fix 1/2 len problem
class ApduIDAndApduFlags(Packet):
    """Combined APDU ID (10.5.2.48) and APDU Flags (10.5.2.49).

    Packs the two half-octet IEs into one octet to avoid the 1/2-octet
    length problem noted in the comment above.
    """
    # NOTE(review): "Apu" in the display name looks like a typo for "Apdu";
    # left untouched because it is a runtime string.
    name = "Apu Id and Apdu Flags"
    fields_desc = [
        BitField("id", None, 4),
        BitField("spare", 0x0, 1),
        BitField("cr", 0x0, 1),
        BitField("firstSeg", 0x0, 1),
        BitField("lastSeg", 0x0, 1)
    ]
# len 2 to max L3 (251) (done)
class ApduDataHdr(Packet):
    """APDU Data Section 10.5.2.50

    IEI octet + length octet followed by up to 249 optional information
    octets (apuInfo1..apuInfo249), generated below instead of being
    written out one per line. Unset trailing octets are stripped and the
    length octet patched in post_build.
    """
    name = "Apdu Data"
    fields_desc = [
        BitField("eightBitAD", None, 1),
        XBitField("ieiAD", None, 7),
        XByteField("lengthAD", None),
        # optional payload octets
    ] + [ByteField("apuInfo%d" % octet_nr, None)
         for octet_nr in range(1, 250)]

    def post_build(self, p, pay):
        """Drop unset optional octets and fill in lengthAD when unset."""
        values = [getattr(self, fld.name) for fld in self.fields_desc]
        # adapt() clamps the IE to [2, 251] octets; res[0] is the number of
        # trailing bytes to cut, res[1] the value for the length octet.
        res = adapt(2, 251, values, self.fields_desc)
        if self.lengthAD is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
#
# 10.5.3 Mobility management information elements
#
class AuthenticationParameterRAND(Packet):
    """Authentication parameter RAND Section 10.5.3.1"""
    name = "Authentication Parameter Rand"
    fields_desc = [
        ByteField("ieiAPR", None),        # IE identifier octet
        BitField("randValue", 0x0, 128)   # 128-bit RAND challenge
    ]
class AuthenticationParameterSRES(Packet):
    """Authentication parameter SRES Section 10.5.3.2"""
    name = "Authentication Parameter Sres"
    fields_desc = [
        ByteField("ieiAPS", None),       # IE identifier octet
        # NOTE(review): GSM SRES is normally 32 bits; 40 here — confirm.
        BitField("sresValue", 0x0, 40)
    ]
class CmServiceType(Packet):
    """CM service type Section 10.5.3.3"""
    name = "CM Service Type"
    # Half-octet IE: 4-bit IEI + 4-bit service type.
    fields_desc = [
        XBitField("ieiCST", 0x0, 4),
        BitField("serviceType", 0x0, 4)
    ]
class CmServiceTypeAndCiphKeySeqNr(Packet):
    """Combined half-octet IEs: cipher key sequence number + CM service
    type packed into a single octet."""
    name = "CM Service Type and Cipher Key Sequence Number"
    fields_desc = [
        BitField("keySeq", 0x0, 3),
        BitField("spare", 0x0, 1),
        BitField("serviceType", 0x0, 4)
    ]
class IdentityType(Packet):
    """Identity type Section 10.5.3.4"""
    name = "Identity Type"
    # Half-octet IE: 4-bit IEI, spare bit, 3-bit identity type.
    fields_desc = [
        XBitField("ieiIT", 0x0, 4),
        BitField("spare", 0x0, 1),
        BitField("idType", 0x1, 3)
    ]
# Fix 1/2 len problem
class IdentityTypeAndSpareHalfOctet(Packet):
    """Identity type half-octet padded with a spare half-octet so the
    combined IE occupies a whole octet (works around the 1/2-len problem
    noted in the comment above)."""
    name = "Identity Type and Spare Half Octet"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("idType", 0x1, 3),
        BitField("spareHalfOctets", 0x0, 4)
    ]
class LocationUpdatingType(Packet):
    """Location updating type Section 10.5.3.5"""
    name = "Location Updating Type"
    fields_desc = [
        XBitField("ieiLUT", 0x0, 4),
        # "for" is a Python keyword: access this field via getattr(pkt, "for")
        BitField("for", 0x0, 1),
        BitField("spare", 0x0, 1),
        BitField("lut", 0x0, 2)
    ]
class LocationUpdatingTypeAndCiphKeySeqNr(Packet):
    """Combined IE: location updating type (10.5.3.5) and cipher key
    sequence number packed into one octet (avoids the 1/2-len problem).
    """
    name = "Location Updating Type and Cipher Key Sequence Number"
    fields_desc = [
        # "for" is a Python keyword: access via getattr(pkt, "for")
        BitField("for", 0x0, 1),
        BitField("spare", 0x0, 1),
        BitField("lut", 0x0, 2),
        # Renamed from a duplicate "spare": two fields with the same name
        # in fields_desc shadow each other for attribute access in scapy.
        # Wire format is unchanged; "spare1" matches sibling classes.
        BitField("spare1", 0x0, 1),
        BitField("keySeq", 0x0, 3)
    ]
# len 3 to L3 max (251) (done)
class NetworkNameHdr(Packet):
    """Network Name Section 10.5.3.5a

    IEI octet, length octet and a coding octet, followed by up to 248
    optional text-string octets (txtString1..txtString248), generated
    below instead of being written out one per line. Unset trailing
    octets are stripped and the length octet patched in post_build.
    """
    name = "Network Name"
    fields_desc = [
        BitField("eightBitNN", None, 1),
        XBitField("ieiNN", None, 7),
        XByteField("lengthNN", None),
        BitField("ext1", 0x1, 1),
        BitField("codingScheme", 0x0, 3),
        BitField("addCi", 0x0, 1),
        BitField("nbSpare", 0x0, 3),
        # optional text octets
    ] + [ByteField("txtString%d" % octet_nr, None)
         for octet_nr in range(1, 249)]

    def post_build(self, p, pay):
        """Drop unset optional octets and fill in lengthNN when unset."""
        values = [getattr(self, fld.name) for fld in self.fields_desc]
        # adapt() clamps the IE to [3, 251] octets; res[0] is the number of
        # trailing bytes to cut, res[1] the value for the length octet.
        res = adapt(3, 251, values, self.fields_desc)
        if self.lengthNN is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class RejectCause(Packet):
    """Reject cause Section 10.5.3.6"""
    name = "Reject Cause"
    fields_desc = [
        ByteField("ieiRC", 0x0),     # IE identifier octet
        ByteField("rejCause", 0x0)   # reject cause value octet
    ]
class FollowOnProceed(Packet):
    """Follow-on Proceed Section 10.5.3.7"""
    name = "Follow-on Proceed"
    # Type-2 IE: presence of the single IEI octet is the whole information.
    fields_desc = [
        ByteField("ieiFOP", 0x0),
    ]
class TimeZoneHdr(Packet):
    """Time Zone Section 10.5.3.8"""
    name = "Time Zone"
    fields_desc = [
        BitField("eightBitTZ", None, 1),  # top bit of the IEI octet
        XBitField("ieiTZ", None, 7),      # IE identifier (low 7 bits)
        ByteField("timeZone", 0x0),
    ]
class TimeZoneAndTimeHdr(Packet):
    """Time Zone and Time Section 10.5.3.9"""
    name = "Time Zone and Time"
    # One octet each for year..second plus the time zone; presumably
    # BCD-coded per the spec — confirm before constructing by hand.
    fields_desc = [
        BitField("eightBitTZAT", None, 1),  # top bit of the IEI octet
        XBitField("ieiTZAT", None, 7),      # IE identifier (low 7 bits)
        ByteField("year", 0x0),
        ByteField("month", 0x0),
        ByteField("day", 0x0),
        ByteField("hour", 0x0),
        ByteField("minute", 0x0),
        ByteField("second", 0x0),
        ByteField("timeZone", 0x0)
    ]
class CtsPermissionHdr(Packet):
    """CTS permission Section 10.5.3.10"""
    name = "Cts Permission"
    # Type-2 IE: only the IEI octet, no value part.
    fields_desc = [
        BitField("eightBitCP", None, 1),
        XBitField("ieiCP", None, 7),
    ]
class LsaIdentifierHdr(Packet):
    """LSA Identifier Section 10.5.3.11"""
    name = "Lsa Identifier"
    fields_desc = [
        BitField("eightBitLI", None, 1),  # top bit of the IEI octet
        XBitField("ieiLI", None, 7),      # IE identifier (low 7 bits)
        # 3 octets of LSA identifier value
        ByteField("lsaID", 0x0),
        ByteField("lsaID1", 0x0),
        ByteField("lsaID2", 0x0)
    ]
#
# 10.5.4 Call control information elements
#
#10.5.4.1 Extensions of codesets
# This is only text and no packet
class LockingShiftProcedureHdr(Packet):
    """Locking shift procedure Section 10.5.4.2"""
    name = "Locking Shift Procedure"
    # Half-octet IE: lockShift = 0 marks the locking variant.
    fields_desc = [
        XBitField("ieiLSP", None, 4),
        BitField("lockShift", 0x0, 1),
        BitField("codesetId", 0x0, 3)
    ]
class NonLockingShiftProcedureHdr(Packet):
    """Non-locking shift procedure Section 10.5.4.3"""
    name = "Non-locking Shift Procedure"
    # Half-octet IE: nonLockShift = 1 marks the non-locking variant.
    fields_desc = [
        XBitField("ieiNLSP", None, 4),
        BitField("nonLockShift", 0x1, 1),
        BitField("codesetId", 0x0, 3)
    ]
class AuxiliaryStatesHdr(Packet):
    """Auxiliary states Section 10.5.4.4"""
    name = "Auxiliary States"
    fields_desc = [
        BitField("eightBitAS", None, 1),  # top bit of the IEI octet
        XBitField("ieiAS", None, 7),      # IE identifier (low 7 bits)
        XByteField("lengthAS", 0x3),
        BitField("ext", 0x1, 1),
        BitField("spare", 0x0, 3),
        BitField("holdState", 0x0, 2),
        BitField("mptyState", 0x0, 2)     # multi-party auxiliary state
    ]
# len 3 to 15
class BearerCapabilityHdr(Packet):
    """Bearer capability Section 10.5.4.5

    Variable-length IE (3 to 15 octets). Octets 4..7 are extension
    groups: each extN bit set to 0 means the following group of
    ConditionalFields is present.
    """
    name = "Bearer Capability"
    fields_desc = [
        BitField("eightBitBC", None, 1),
        XBitField("ieiBC", None, 7),
        XByteField("lengthBC", None),
        BitField("ext0", 0x1, 1),
        BitField("radioChReq", 0x1, 2),
        BitField("codingStd", 0x0, 1),
        BitField("transMode", 0x0, 1),
        BitField("infoTransCa", 0x0, 3),
        # optional
        ConditionalField(BitField("ext1", 0x1, 1),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("coding", None, 1),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("spare", None, 2),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("speechVers", 0x0, 4),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("ext2", 0x1, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("compress", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("structure", None, 2),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("dupMode", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("config", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("nirr", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("establi", 0x0, 1),
                         lambda pkt: pkt.ext1 == 0),
        BitField("ext3", None, 1),
        BitField("accessId", None, 2),
        BitField("rateAda", None, 2),
        BitField("signaling", None, 3),
        ConditionalField(BitField("ext4", None, 1),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("otherITC", None, 2),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("otherRate", None, 2),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("spare1", 0x0, 3),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("ext5", 0x1, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("hdr", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("multiFr", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("mode", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("lli", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("assig", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("inbNeg", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("spare2", 0x0, 1),
                         lambda pkt: pkt.ext4 == 0),
        BitField("ext6", None, 1),
        BitField("layer1Id", None, 2),
        BitField("userInf", None, 4),
        BitField("sync", None, 1),
        ConditionalField(BitField("ext7", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("stopBit", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("negoc", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("nbDataBit", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("userRate", None, 4),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("ext8", None, 1),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("interRate", None, 2),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("nicTX", None, 1),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("nicRX", None, 1),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("parity", None, 3),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("ext9", None, 1),
                         lambda pkt: pkt.ext8 == 0),
        ConditionalField(BitField("connEle", None, 2),
                         lambda pkt: pkt.ext8 == 0),
        ConditionalField(BitField("modemType", None, 5),
                         lambda pkt: pkt.ext8 == 0),
        ConditionalField(BitField("ext10", None, 1),
                         lambda pkt: pkt.ext9 == 0),
        ConditionalField(BitField("otherModemType", None, 2),
                         lambda pkt: pkt.ext9 == 0),
        ConditionalField(BitField("netUserRate", None, 5),
                         lambda pkt: pkt.ext9 == 0),
        ConditionalField(BitField("ext11", None, 1),
                         lambda pkt: pkt.ext10 == 0),
        ConditionalField(BitField("chanCoding", None, 4),
                         lambda pkt: pkt.ext10 == 0),
        ConditionalField(BitField("maxTrafficChan", None, 3),
                         lambda pkt: pkt.ext10 == 0),
        ConditionalField(BitField("ext12", None, 1),
                         lambda pkt: pkt.ext11 == 0),
        ConditionalField(BitField("uimi", None, 3),
                         lambda pkt: pkt.ext11 == 0),
        ConditionalField(BitField("airInterfaceUserRate", None, 4),
                         lambda pkt: pkt.ext11 == 0),
        ConditionalField(BitField("ext13", 0x1, 1),
                         lambda pkt: pkt.ext12 == 0),
        ConditionalField(BitField("layer2Ch", None, 2),
                         lambda pkt: pkt.ext12 == 0),
        ConditionalField(BitField("userInfoL2", 0x0, 5),
                         lambda pkt: pkt.ext12 == 0)
    ]
    # We have a bug here. packet is not working if used in message

    def post_build(self, p, pay):
        """Trim unset optional octets and fill in lengthBC when unset.

        Fixes vs. the previous version: ``len(p) is 5`` compared object
        identity instead of value (it only worked via CPython's small-int
        cache) and a leftover debug ``print`` ran on every build; both
        are corrected/removed.
        """
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 15, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        # Workaround: a 5-byte build carries 2 spurious trailing octets.
        # FIXME: find the root cause instead of special-casing the length.
        if len(p) == 5:
            p = p[:-2]
        if self.lengthBC is None:
            p = p[:1] + struct.pack(">B", len(p) - 3) + p[2:]
        return p + pay
class CallControlCapabilitiesHdr(Packet):
    """Call Control Capabilities Section 10.5.4.5a"""
    name = "Call Control Capabilities"
    fields_desc = [
        BitField("eightBitCCC", None, 1),  # top bit of the IEI octet
        XBitField("ieiCCC", None, 7),      # IE identifier (low 7 bits)
        XByteField("lengthCCC", 0x3),
        BitField("spare", 0x0, 6),
        BitField("pcp", 0x0, 1),
        BitField("dtmf", 0x0, 1)
    ]
class CallStateHdr(Packet):
    """Call State Section 10.5.4.6"""
    name = "Call State"
    fields_desc = [
        BitField("eightBitCS", None, 1),  # top bit of the IEI octet
        XBitField("ieiCS", None, 7),      # IE identifier (low 7 bits)
        BitField("codingStd", 0x0, 2),
        BitField("stateValue", 0x0, 6)
    ]
# len 3 to 43
class CalledPartyBcdNumberHdr(Packet):
    """Called party BCD number Section 10.5.4.7

    Variable-length IE (3 to 43 octets). Digits are stored two per octet
    with the even-numbered digit in the high nibble, so the generated
    field list interleaves each pair as (nbDigit2, nbDigit1),
    (nbDigit4, nbDigit3), ... up to (nbDigit80, nbDigit79) — identical
    to the hand-written list it replaces.
    """
    name = "Called Party BCD Number"
    fields_desc = [
        BitField("eightBitCPBN", None, 1),
        XBitField("ieiCPBN", None, 7),
        XByteField("lengthCPBN", None),
        BitField("ext", 0x1, 1),
        BitField("typeNb", 0x0, 3),
        BitField("nbPlanId", 0x0, 4),
        # optional digit nibbles
    ] + [BitField("nbDigit%d" % (nr + 1 if nr % 2 else nr - 1), None, 4)
         for nr in range(1, 81)]

    def post_build(self, p, pay):
        """Drop unset digit nibbles and fill in lengthCPBN when unset."""
        values = [getattr(self, fld.name) for fld in self.fields_desc]
        # adapt() clamps the IE to [3, 43] octets (two 4-bit fields per
        # octet, hence the trailing argument 2).
        res = adapt(3, 43, values, self.fields_desc, 2)
        if self.lengthCPBN is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 2 to 23
class CalledPartySubaddressHdr(Packet):
    """Called party subaddress Section 10.5.4.8"""
    name = "Called Party Subaddress"
    # TLV IE: 1-octet IEI, 1-octet length, then an optional subaddress
    # header octet followed by up to 20 subaddress information octets.
    fields_desc = [
        BitField("eightBitCPS", None, 1),
        XBitField("ieiCPS", None, 7),
        XByteField("lengthCPS", None),
        # optional
        BitField("ext", None, 1),
        BitField("subAddr", None, 3),
        BitField("oddEven", None, 1),
        BitField("spare", None, 3),
        ByteField("subInfo0", None),
        ByteField("subInfo1", None),
        ByteField("subInfo2", None),
        ByteField("subInfo3", None),
        ByteField("subInfo4", None),
        ByteField("subInfo5", None),
        ByteField("subInfo6", None),
        ByteField("subInfo7", None),
        ByteField("subInfo8", None),
        ByteField("subInfo9", None),
        ByteField("subInfo10", None),
        ByteField("subInfo11", None),
        ByteField("subInfo12", None),
        ByteField("subInfo13", None),
        ByteField("subInfo14", None),
        ByteField("subInfo15", None),
        ByteField("subInfo16", None),
        ByteField("subInfo17", None),
        ByteField("subInfo18", None),
        ByteField("subInfo19", None)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields that were left unset and patch in
        # the IE length octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 23, a, self.fields_desc)
        if self.lengthCPS is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 3 to 14
class CallingPartyBcdNumberHdr(Packet):
    """Calling party BCD number Section 10.5.4.9"""
    # Fixed copy-paste error: docstring/name previously said
    # "Called Party Subaddress", which is IE 10.5.4.8, not this one.
    name = "Calling Party BCD Number"
    # TLV IE: IEI, length, type-of-number octet (octet 3), optional
    # presentation/screening octet (3a, present when ext == 0), then up to
    # 10 octets of BCD digits.  Digit pairs are declared swapped
    # (nbDigit2 before nbDigit1) to match the BCD nibble layout on the wire.
    fields_desc = [
        BitField("eightBitCPBN", None, 1),
        XBitField("ieiCPBN", None, 7),
        XByteField("lengthCPBN", None),
        BitField("ext", 0x1, 1),
        BitField("typeNb", 0x0, 3),
        BitField("nbPlanId", 0x0, 4),
        # optional octet 3a, only present when octet 3 has ext == 0
        ConditionalField(BitField("ext1", 0x1, 1),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("presId", None, 2),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("spare", None, 3),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("screenId", 0x0, 2),
                         lambda pkt: pkt.ext == 0),
        BitField("nbDigit2", None, 4),
        BitField("nbDigit1", None, 4),
        BitField("nbDigit4", None, 4),
        BitField("nbDigit3", None, 4),
        BitField("nbDigit6", None, 4),
        BitField("nbDigit5", None, 4),
        BitField("nbDigit8", None, 4),
        BitField("nbDigit7", None, 4),
        BitField("nbDigit10", None, 4),
        BitField("nbDigit9", None, 4),
        BitField("nbDigit12", None, 4),
        BitField("nbDigit11", None, 4),
        BitField("nbDigit14", None, 4),
        BitField("nbDigit13", None, 4),
        BitField("nbDigit16", None, 4),
        BitField("nbDigit15", None, 4),
        BitField("nbDigit18", None, 4),
        BitField("nbDigit17", None, 4),
        BitField("nbDigit20", None, 4),
        BitField("nbDigit19", None, 4),
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields that were left unset, then derive
        # the length octet from the remaining size (total minus IEI+length).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # Minimum IE length is 3 ("# len 3 to 14"); was adapt(4, 14, ...),
        # inconsistent with the identical ConnectedNumberHdr which uses 3.
        res = adapt(3, 14, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthCPBN is None:
            p = p[:1] + struct.pack(">B", len(p)-2) + p[2:]
        return p + pay
# len 2 to 23
class CallingPartySubaddressHdr(Packet):
    """Calling party subaddress Section 10.5.4.10"""
    name = "Calling Party Subaddress"
    # TLV IE: IEI, length, then an optional subaddress header octet and
    # up to 20 subaddress information octets.
    fields_desc = [
        BitField("eightBitCPS", None, 1),
        XBitField("ieiCPS", None, 7),
        XByteField("lengthCPS", None),
        # optional
        BitField("ext1", None, 1),
        BitField("typeAddr", None, 3),
        BitField("oddEven", None, 1),
        BitField("spare", None, 3),
        ByteField("subInfo0", None),
        ByteField("subInfo1", None),
        ByteField("subInfo2", None),
        ByteField("subInfo3", None),
        ByteField("subInfo4", None),
        ByteField("subInfo5", None),
        ByteField("subInfo6", None),
        ByteField("subInfo7", None),
        ByteField("subInfo8", None),
        ByteField("subInfo9", None),
        ByteField("subInfo10", None),
        ByteField("subInfo11", None),
        ByteField("subInfo12", None),
        ByteField("subInfo13", None),
        ByteField("subInfo14", None),
        ByteField("subInfo15", None),
        ByteField("subInfo16", None),
        ByteField("subInfo17", None),
        ByteField("subInfo18", None),
        ByteField("subInfo19", None)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset and patch the length
        # octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 23, a, self.fields_desc)
        if self.lengthCPS is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 4 to 32
class CauseHdr(Packet):
    """Cause Section 10.5.4.11"""
    name = "Cause"
    # TLV IE: IEI, length, coding/location octet, optional recommendation
    # octet (present when ext == 0), cause-value octet, then up to 27
    # diagnostic octets.
    # NOTE(review): the "diagnositc*" field names are misspelled but are
    # part of the public attribute interface — renaming would break callers.
    fields_desc = [
        BitField("eightBitC", None, 1),
        XBitField("ieiC", None, 7),
        XByteField("lengthC", None),
        BitField("ext", 0x1, 1),
        BitField("codingStd", 0x0, 2),
        BitField("spare", 0x0, 1),
        BitField("location", 0x0, 4),
        ConditionalField(BitField("ext1", 0x1, 1),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("recommendation", 0x0, 7),
                         lambda pkt: pkt.ext == 0),
        # optional
        BitField("ext2", None, 1),
        BitField("causeValue", None, 7),
        ByteField("diagnositc0", None),
        ByteField("diagnositc1", None),
        ByteField("diagnositc2", None),
        ByteField("diagnositc3", None),
        ByteField("diagnositc4", None),
        ByteField("diagnositc5", None),
        ByteField("diagnositc6", None),
        ByteField("diagnositc7", None),
        ByteField("diagnositc8", None),
        ByteField("diagnositc9", None),
        ByteField("diagnositc10", None),
        ByteField("diagnositc11", None),
        ByteField("diagnositc12", None),
        ByteField("diagnositc13", None),
        ByteField("diagnositc14", None),
        ByteField("diagnositc15", None),
        ByteField("diagnositc16", None),
        ByteField("diagnositc17", None),
        ByteField("diagnositc18", None),
        ByteField("diagnositc19", None),
        ByteField("diagnositc20", None),
        ByteField("diagnositc21", None),
        ByteField("diagnositc22", None),
        ByteField("diagnositc23", None),
        ByteField("diagnositc24", None),
        ByteField("diagnositc25", None),
        ByteField("diagnositc26", None),
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset, then derive the length
        # octet from the remaining size (total minus IEI+length).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(4, 32, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthC is None:
            p = p[:1] + struct.pack(">B", len(p)-2) + p[2:]
        return p + pay
class ClirSuppressionHdr(Packet):
    """CLIR suppression Section 10.5.4.11a"""
    name = "Clir Suppression"
    # Type-only IE: a single IEI octet, no value part.
    fields_desc = [
        BitField("eightBitCS", None, 1),
        XBitField("ieiCS", None, 7),
    ]
class ClirInvocationHdr(Packet):
    """CLIR invocation Section 10.5.4.11b"""
    name = "Clir Invocation"
    # Type-only IE: a single IEI octet, no value part.
    fields_desc = [
        BitField("eightBitCI", None, 1),
        XBitField("ieiCI", None, 7),
    ]
class CongestionLevelHdr(Packet):
    """Congestion level Section 10.5.4.12"""
    name = "Congestion Level"
    # Half-octet IE: 4-bit IEI in the high nibble, value in the low nibble.
    fields_desc = [
        XBitField("ieiCL", None, 4),
        BitField("notDef", 0x0, 4)
    ]
# Fix 1/2 len problem
class CongestionLevelAndSpareHalfOctets(Packet):
    name = "Congestion Level and Spare Half Octets"
    # Helper packing the half-octet congestion level together with spare
    # bits so the pair occupies one whole octet (the "1/2 len problem").
    fields_desc = [
        BitField("ieiCL", 0x0, 4),
        BitField("spareHalfOctets", 0x0, 4)
    ]
# len 3 to 14
class ConnectedNumberHdr(Packet):
    """Connected number Section 10.5.4.13"""
    name = "Connected Number"
    # TLV IE: IEI, length, type-of-number octet, optional presentation/
    # screening octet (present when ext == 0), then up to 10 octets of BCD
    # digits.  Digit pairs are declared swapped (nbDigit2 before nbDigit1)
    # to match the BCD nibble layout on the wire.
    fields_desc = [
        BitField("eightBitCN", None, 1),
        XBitField("ieiCN", None, 7),
        XByteField("lengthCN", None),
        BitField("ext", 0x1, 1),
        BitField("typeNb", 0x0, 3),
        BitField("typePlanId", 0x0, 4),
        # optional
        ConditionalField(BitField("ext1", 0x1, 1),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("presId", None, 2),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("spare", None, 3),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("screenId", None, 2),
                         lambda pkt: pkt.ext == 0),
        BitField("nbDigit2", None, 4),
        BitField("nbDigit1", None, 4),
        BitField("nbDigit4", None, 4),
        BitField("nbDigit3", None, 4),
        BitField("nbDigit6", None, 4),
        BitField("nbDigit5", None, 4),
        BitField("nbDigit8", None, 4),
        BitField("nbDigit7", None, 4),
        BitField("nbDigit10", None, 4),
        BitField("nbDigit9", None, 4),
        BitField("nbDigit12", None, 4),
        BitField("nbDigit11", None, 4),
        BitField("nbDigit14", None, 4),
        BitField("nbDigit13", None, 4),
        BitField("nbDigit16", None, 4),
        BitField("nbDigit15", None, 4),
        BitField("nbDigit18", None, 4),
        BitField("nbDigit17", None, 4),
        BitField("nbDigit20", None, 4),
        BitField("nbDigit19", None, 4)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset, then derive the length
        # octet from the remaining size (total minus IEI+length).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 14, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthCN is None:
            p = p[:1] + struct.pack(">B", len(p)-2) + p[2:]
        return p + pay
# len 2 to 23
class ConnectedSubaddressHdr(Packet):
    """Connected subaddress Section 10.5.4.14"""
    name = "Connected Subaddress"
    # TLV IE: IEI, length, then an optional subaddress header octet and
    # up to 20 subaddress information octets.
    fields_desc = [
        BitField("eightBitCS", None, 1),
        XBitField("ieiCS", None, 7),
        XByteField("lengthCS", None),
        # optional
        BitField("ext", None, 1),
        BitField("typeOfSub", None, 3),
        BitField("oddEven", None, 1),
        BitField("spare", None, 3),
        ByteField("subInfo0", None),
        ByteField("subInfo1", None),
        ByteField("subInfo2", None),
        ByteField("subInfo3", None),
        ByteField("subInfo4", None),
        ByteField("subInfo5", None),
        ByteField("subInfo6", None),
        ByteField("subInfo7", None),
        ByteField("subInfo8", None),
        ByteField("subInfo9", None),
        ByteField("subInfo10", None),
        ByteField("subInfo11", None),
        ByteField("subInfo12", None),
        ByteField("subInfo13", None),
        ByteField("subInfo14", None),
        ByteField("subInfo15", None),
        ByteField("subInfo16", None),
        ByteField("subInfo17", None),
        ByteField("subInfo18", None),
        ByteField("subInfo19", None)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset and patch the length
        # octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 23, a, self.fields_desc)
        if self.lengthCS is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 2 to L3 (251) (done)
class FacilityHdr(Packet):
    """Facility Section 10.5.4.15"""
    name = "Facility"
    # TLV IE: IEI, length, then up to 249 facility information octets
    # (variable length up to the L3 maximum; trailing unset octets are
    # trimmed in post_build).
    fields_desc = [
        BitField("eightBitF", None, 1),
        XBitField("ieiF", None, 7),
        XByteField("lengthF", None),
        # optional
        ByteField("facilityInfo1", None),
        ByteField("facilityInfo2", None),
        ByteField("facilityInfo3", None),
        ByteField("facilityInfo4", None),
        ByteField("facilityInfo5", None),
        ByteField("facilityInfo6", None),
        ByteField("facilityInfo7", None),
        ByteField("facilityInfo8", None),
        ByteField("facilityInfo9", None),
        ByteField("facilityInfo10", None),
        ByteField("facilityInfo11", None),
        ByteField("facilityInfo12", None),
        ByteField("facilityInfo13", None),
        ByteField("facilityInfo14", None),
        ByteField("facilityInfo15", None),
        ByteField("facilityInfo16", None),
        ByteField("facilityInfo17", None),
        ByteField("facilityInfo18", None),
        ByteField("facilityInfo19", None),
        ByteField("facilityInfo20", None),
        ByteField("facilityInfo21", None),
        ByteField("facilityInfo22", None),
        ByteField("facilityInfo23", None),
        ByteField("facilityInfo24", None),
        ByteField("facilityInfo25", None),
        ByteField("facilityInfo26", None),
        ByteField("facilityInfo27", None),
        ByteField("facilityInfo28", None),
        ByteField("facilityInfo29", None),
        ByteField("facilityInfo30", None),
        ByteField("facilityInfo31", None),
        ByteField("facilityInfo32", None),
        ByteField("facilityInfo33", None),
        ByteField("facilityInfo34", None),
        ByteField("facilityInfo35", None),
        ByteField("facilityInfo36", None),
        ByteField("facilityInfo37", None),
        ByteField("facilityInfo38", None),
        ByteField("facilityInfo39", None),
        ByteField("facilityInfo40", None),
        ByteField("facilityInfo41", None),
        ByteField("facilityInfo42", None),
        ByteField("facilityInfo43", None),
        ByteField("facilityInfo44", None),
        ByteField("facilityInfo45", None),
        ByteField("facilityInfo46", None),
        ByteField("facilityInfo47", None),
        ByteField("facilityInfo48", None),
        ByteField("facilityInfo49", None),
        ByteField("facilityInfo50", None),
        ByteField("facilityInfo51", None),
        ByteField("facilityInfo52", None),
        ByteField("facilityInfo53", None),
        ByteField("facilityInfo54", None),
        ByteField("facilityInfo55", None),
        ByteField("facilityInfo56", None),
        ByteField("facilityInfo57", None),
        ByteField("facilityInfo58", None),
        ByteField("facilityInfo59", None),
        ByteField("facilityInfo60", None),
        ByteField("facilityInfo61", None),
        ByteField("facilityInfo62", None),
        ByteField("facilityInfo63", None),
        ByteField("facilityInfo64", None),
        ByteField("facilityInfo65", None),
        ByteField("facilityInfo66", None),
        ByteField("facilityInfo67", None),
        ByteField("facilityInfo68", None),
        ByteField("facilityInfo69", None),
        ByteField("facilityInfo70", None),
        ByteField("facilityInfo71", None),
        ByteField("facilityInfo72", None),
        ByteField("facilityInfo73", None),
        ByteField("facilityInfo74", None),
        ByteField("facilityInfo75", None),
        ByteField("facilityInfo76", None),
        ByteField("facilityInfo77", None),
        ByteField("facilityInfo78", None),
        ByteField("facilityInfo79", None),
        ByteField("facilityInfo80", None),
        ByteField("facilityInfo81", None),
        ByteField("facilityInfo82", None),
        ByteField("facilityInfo83", None),
        ByteField("facilityInfo84", None),
        ByteField("facilityInfo85", None),
        ByteField("facilityInfo86", None),
        ByteField("facilityInfo87", None),
        ByteField("facilityInfo88", None),
        ByteField("facilityInfo89", None),
        ByteField("facilityInfo90", None),
        ByteField("facilityInfo91", None),
        ByteField("facilityInfo92", None),
        ByteField("facilityInfo93", None),
        ByteField("facilityInfo94", None),
        ByteField("facilityInfo95", None),
        ByteField("facilityInfo96", None),
        ByteField("facilityInfo97", None),
        ByteField("facilityInfo98", None),
        ByteField("facilityInfo99", None),
        ByteField("facilityInfo100", None),
        ByteField("facilityInfo101", None),
        ByteField("facilityInfo102", None),
        ByteField("facilityInfo103", None),
        ByteField("facilityInfo104", None),
        ByteField("facilityInfo105", None),
        ByteField("facilityInfo106", None),
        ByteField("facilityInfo107", None),
        ByteField("facilityInfo108", None),
        ByteField("facilityInfo109", None),
        ByteField("facilityInfo110", None),
        ByteField("facilityInfo111", None),
        ByteField("facilityInfo112", None),
        ByteField("facilityInfo113", None),
        ByteField("facilityInfo114", None),
        ByteField("facilityInfo115", None),
        ByteField("facilityInfo116", None),
        ByteField("facilityInfo117", None),
        ByteField("facilityInfo118", None),
        ByteField("facilityInfo119", None),
        ByteField("facilityInfo120", None),
        ByteField("facilityInfo121", None),
        ByteField("facilityInfo122", None),
        ByteField("facilityInfo123", None),
        ByteField("facilityInfo124", None),
        ByteField("facilityInfo125", None),
        ByteField("facilityInfo126", None),
        ByteField("facilityInfo127", None),
        ByteField("facilityInfo128", None),
        ByteField("facilityInfo129", None),
        ByteField("facilityInfo130", None),
        ByteField("facilityInfo131", None),
        ByteField("facilityInfo132", None),
        ByteField("facilityInfo133", None),
        ByteField("facilityInfo134", None),
        ByteField("facilityInfo135", None),
        ByteField("facilityInfo136", None),
        ByteField("facilityInfo137", None),
        ByteField("facilityInfo138", None),
        ByteField("facilityInfo139", None),
        ByteField("facilityInfo140", None),
        ByteField("facilityInfo141", None),
        ByteField("facilityInfo142", None),
        ByteField("facilityInfo143", None),
        ByteField("facilityInfo144", None),
        ByteField("facilityInfo145", None),
        ByteField("facilityInfo146", None),
        ByteField("facilityInfo147", None),
        ByteField("facilityInfo148", None),
        ByteField("facilityInfo149", None),
        ByteField("facilityInfo150", None),
        ByteField("facilityInfo151", None),
        ByteField("facilityInfo152", None),
        ByteField("facilityInfo153", None),
        ByteField("facilityInfo154", None),
        ByteField("facilityInfo155", None),
        ByteField("facilityInfo156", None),
        ByteField("facilityInfo157", None),
        ByteField("facilityInfo158", None),
        ByteField("facilityInfo159", None),
        ByteField("facilityInfo160", None),
        ByteField("facilityInfo161", None),
        ByteField("facilityInfo162", None),
        ByteField("facilityInfo163", None),
        ByteField("facilityInfo164", None),
        ByteField("facilityInfo165", None),
        ByteField("facilityInfo166", None),
        ByteField("facilityInfo167", None),
        ByteField("facilityInfo168", None),
        ByteField("facilityInfo169", None),
        ByteField("facilityInfo170", None),
        ByteField("facilityInfo171", None),
        ByteField("facilityInfo172", None),
        ByteField("facilityInfo173", None),
        ByteField("facilityInfo174", None),
        ByteField("facilityInfo175", None),
        ByteField("facilityInfo176", None),
        ByteField("facilityInfo177", None),
        ByteField("facilityInfo178", None),
        ByteField("facilityInfo179", None),
        ByteField("facilityInfo180", None),
        ByteField("facilityInfo181", None),
        ByteField("facilityInfo182", None),
        ByteField("facilityInfo183", None),
        ByteField("facilityInfo184", None),
        ByteField("facilityInfo185", None),
        ByteField("facilityInfo186", None),
        ByteField("facilityInfo187", None),
        ByteField("facilityInfo188", None),
        ByteField("facilityInfo189", None),
        ByteField("facilityInfo190", None),
        ByteField("facilityInfo191", None),
        ByteField("facilityInfo192", None),
        ByteField("facilityInfo193", None),
        ByteField("facilityInfo194", None),
        ByteField("facilityInfo195", None),
        ByteField("facilityInfo196", None),
        ByteField("facilityInfo197", None),
        ByteField("facilityInfo198", None),
        ByteField("facilityInfo199", None),
        ByteField("facilityInfo200", None),
        ByteField("facilityInfo201", None),
        ByteField("facilityInfo202", None),
        ByteField("facilityInfo203", None),
        ByteField("facilityInfo204", None),
        ByteField("facilityInfo205", None),
        ByteField("facilityInfo206", None),
        ByteField("facilityInfo207", None),
        ByteField("facilityInfo208", None),
        ByteField("facilityInfo209", None),
        ByteField("facilityInfo210", None),
        ByteField("facilityInfo211", None),
        ByteField("facilityInfo212", None),
        ByteField("facilityInfo213", None),
        ByteField("facilityInfo214", None),
        ByteField("facilityInfo215", None),
        ByteField("facilityInfo216", None),
        ByteField("facilityInfo217", None),
        ByteField("facilityInfo218", None),
        ByteField("facilityInfo219", None),
        ByteField("facilityInfo220", None),
        ByteField("facilityInfo221", None),
        ByteField("facilityInfo222", None),
        ByteField("facilityInfo223", None),
        ByteField("facilityInfo224", None),
        ByteField("facilityInfo225", None),
        ByteField("facilityInfo226", None),
        ByteField("facilityInfo227", None),
        ByteField("facilityInfo228", None),
        ByteField("facilityInfo229", None),
        ByteField("facilityInfo230", None),
        ByteField("facilityInfo231", None),
        ByteField("facilityInfo232", None),
        ByteField("facilityInfo233", None),
        ByteField("facilityInfo234", None),
        ByteField("facilityInfo235", None),
        ByteField("facilityInfo236", None),
        ByteField("facilityInfo237", None),
        ByteField("facilityInfo238", None),
        ByteField("facilityInfo239", None),
        ByteField("facilityInfo240", None),
        ByteField("facilityInfo241", None),
        ByteField("facilityInfo242", None),
        ByteField("facilityInfo243", None),
        ByteField("facilityInfo244", None),
        ByteField("facilityInfo245", None),
        ByteField("facilityInfo246", None),
        ByteField("facilityInfo247", None),
        ByteField("facilityInfo248", None),
        ByteField("facilityInfo249", None)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset and patch the length
        # octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 251, a, self.fields_desc)
        if self.lengthF is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
#len 2 to 5
class HighLayerCompatibilityHdr(Packet):
    """High layer compatibility Section 10.5.4.16"""
    name = "High Layer Compatibility"
    # TLV IE: IEI, length, then optional coding/interpretation and
    # high-layer-id octets; the extended high-layer-id octet is only
    # present when the previous octet has ext1 == 0.
    fields_desc = [
        BitField("eightBitHLC", None, 1),
        XBitField("ieiHLC", None, 7),
        XByteField("lengthHLC", None),
        # optional
        BitField("ext", None, 1),
        BitField("codingStd", None, 2),
        BitField("interpret", None, 3),
        BitField("presMeth", None, 2),
        BitField("ext1", None, 1),
        BitField("highLayerId", None, 7),
        ConditionalField(BitField("ext2", 0x1, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("exHiLayerId", 0x0, 7),
                         lambda pkt: pkt.ext1 == 0)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset, then derive the length
        # octet from the remaining size (total minus IEI+length).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 5, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthHLC is None:
            p = p[:1] + struct.pack(">B", len(p)-2) + p[2:]
        return p + pay
#
# 10.5.4.16.1 Static conditions for the high layer
# compatibility IE contents
#
class KeypadFacilityHdr(Packet):
    """Keypad facility Section 10.5.4.17"""
    name = "Keypad Facility"
    # TV IE: IEI octet followed by one keypad information octet.
    fields_desc = [
        BitField("eightBitKF", None, 1),
        XBitField("ieiKF", None, 7),
        BitField("spare", 0x0, 1),
        BitField("keyPadInfo", 0x0, 7)
    ]
# len 2 to 15
class LowLayerCompatibilityHdr(Packet):
    """Low layer compatibility Section 10.5.4.18"""
    name = "Low Layer Compatibility"
    # TLV IE: IEI, length, then up to 13 opaque compatibility octets.
    fields_desc = [
        BitField("eightBitLLC", None, 1),
        XBitField("ieiLLC", None, 7),
        XByteField("lengthLLC", None),
        # optional
        ByteField("rest0", None),
        ByteField("rest1", None),
        ByteField("rest2", None),
        ByteField("rest3", None),
        ByteField("rest4", None),
        ByteField("rest5", None),
        ByteField("rest6", None),
        ByteField("rest7", None),
        ByteField("rest8", None),
        ByteField("rest9", None),
        ByteField("rest10", None),
        ByteField("rest11", None),
        ByteField("rest12", None)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset and patch the length
        # octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 15, a, self.fields_desc)
        if self.lengthLLC is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class MoreDataHdr(Packet):
    """More data Section 10.5.4.19"""
    name = "More Data"
    # Type-only IE: a single IEI octet, no value part.
    fields_desc = [
        BitField("eightBitMD", None, 1),
        XBitField("ieiMD", None, 7),
    ]
class NotificationIndicatorHdr(Packet):
    """Notification indicator Section 10.5.4.20"""
    name = "Notification Indicator"
    # TV IE: IEI octet followed by one notification description octet.
    fields_desc = [
        BitField("eightBitNI", None, 1),
        XBitField("ieiNI", None, 7),
        BitField("ext", 0x1, 1),
        BitField("notifDesc", 0x0, 7)
    ]
class ProgressIndicatorHdr(Packet):
    """Progress indicator Section 10.5.4.21"""
    name = "Progress Indicator"
    # Fixed-size TLV IE: the value part is always 2 octets
    # (coding/location octet plus progress description octet).
    fields_desc = [
        BitField("eightBitPI", None, 1),
        XBitField("ieiPI", None, 7),
        XByteField("lengthPI", 0x2),
        BitField("ext", 0x1, 1),
        BitField("codingStd", 0x0, 2),
        BitField("spare", 0x0, 1),
        BitField("location", 0x0, 4),
        BitField("ext1", 0x1, 1),
        BitField("progressDesc", 0x0, 7)
    ]
class RecallTypeHdr(Packet):
    """Recall type $(CCBS)$ Section 10.5.4.21a"""
    name = "Recall Type $(CCBS)$"
    # TV IE: IEI octet followed by one octet carrying the recall type.
    fields_desc = [
        BitField("eightBitRT", None, 1),
        XBitField("ieiRT", None, 7),
        BitField("spare", 0x0, 5),
        BitField("recallType", 0x0, 3)
    ]
# len 3 to 19
class RedirectingPartyBcdNumberHdr(Packet):
    """Redirecting party BCD number Section 10.5.4.21b"""
    name = "Redirecting Party BCD Number"
    # TLV IE: IEI, length, type-of-number octet, optional presentation/
    # screening octet (present when ext == 0), then up to 15 octets of BCD
    # digits.  Digit pairs are declared swapped (nbDigit2 before nbDigit1)
    # to match the BCD nibble layout on the wire.
    fields_desc = [
        BitField("eightBitRPBN", None, 1),
        XBitField("ieiRPBN", None, 7),
        XByteField("lengthRPBN", None),
        BitField("ext", 0x1, 1),
        BitField("typeNb", 0x0, 3),
        BitField("numberingPlan", 0x0, 4),
        # optional
        ConditionalField(BitField("ext1", 0x1, 1),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("presId", None, 2),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("spare", None, 3),
                         lambda pkt: pkt.ext == 0),
        ConditionalField(BitField("screenId", None, 2),
                         lambda pkt: pkt.ext == 0),
        BitField("nbDigit2", None, 4),
        BitField("nbDigit1", None, 4),
        BitField("nbDigit4", None, 4),
        BitField("nbDigit3", None, 4),
        BitField("nbDigit6", None, 4),
        BitField("nbDigit5", None, 4),
        BitField("nbDigit8", None, 4),
        BitField("nbDigit7", None, 4),
        BitField("nbDigit10", None, 4),
        BitField("nbDigit9", None, 4),
        BitField("nbDigit12", None, 4),
        BitField("nbDigit11", None, 4),
        BitField("nbDigit14", None, 4),
        BitField("nbDigit13", None, 4),
        BitField("nbDigit16", None, 4),
        BitField("nbDigit15", None, 4),
        BitField("nbDigit18", None, 4),
        BitField("nbDigit17", None, 4),
        BitField("nbDigit20", None, 4),
        BitField("nbDigit19", None, 4),
        BitField("nbDigit22", None, 4),
        BitField("nbDigit21", None, 4),
        BitField("nbDigit24", None, 4),
        BitField("nbDigit23", None, 4),
        BitField("nbDigit26", None, 4),
        BitField("nbDigit25", None, 4),
        BitField("nbDigit28", None, 4),
        BitField("nbDigit27", None, 4),
        BitField("nbDigit30", None, 4),
        BitField("nbDigit29", None, 4),
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset, then derive the length
        # octet from the remaining size (total minus IEI+length).
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 19, a, self.fields_desc)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthRPBN is None:
            p = p[:1] + struct.pack(">B", len(p)-2) + p[2:]
        return p + pay
# length 2 to 23
class RedirectingPartySubaddressHdr(Packet):
    """Redirecting party subaddress Section 10.5.4.21c"""
    # Fixed copy-paste error: name previously said "Redirecting Party BCD
    # Number", which is the neighbouring IE 10.5.4.21b, not this one.
    name = "Redirecting Party Subaddress"
    # TLV IE: IEI, length, then an optional subaddress header octet and
    # up to 20 subaddress information octets.
    fields_desc = [
        BitField("eightBitRPS", None, 1),
        XBitField("ieiRPS", None, 7),
        XByteField("lengthRPS", None),
        # optional
        BitField("ext", None, 1),
        BitField("typeSub", None, 3),
        BitField("oddEven", None, 1),
        BitField("spare", None, 3),
        ByteField("subInfo0", None),
        ByteField("subInfo1", None),
        ByteField("subInfo2", None),
        ByteField("subInfo3", None),
        ByteField("subInfo4", None),
        ByteField("subInfo5", None),
        ByteField("subInfo6", None),
        ByteField("subInfo7", None),
        ByteField("subInfo8", None),
        ByteField("subInfo9", None),
        ByteField("subInfo10", None),
        ByteField("subInfo11", None),
        ByteField("subInfo12", None),
        ByteField("subInfo13", None),
        ByteField("subInfo14", None),
        ByteField("subInfo15", None),
        ByteField("subInfo16", None),
        ByteField("subInfo17", None),
        ByteField("subInfo18", None),
        ByteField("subInfo19", None)
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset and patch the length
        # octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 23, a, self.fields_desc)
        if self.lengthRPS is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class RepeatIndicatorHdr(Packet):
    """Repeat indicator Section 10.5.4.22"""
    name = "Repeat Indicator"
    # Half-octet IE: 4-bit IEI in the high nibble, indicator in the low.
    fields_desc = [
        XBitField("ieiRI", None, 4),
        BitField("repeatIndic", 0x0, 4)
    ]
class ReverseCallSetupDirectionHdr(Packet):
    """Reverse call setup direction Section 10.5.4.22a"""
    name = "Reverse Call Setup Direction"
    # Type-only IE: a single IEI octet, no value part.
    fields_desc = [
        ByteField("ieiRCSD", 0x0)
    ]
# no upper length min 2(max for L3) (251)
class SetupContainerHdr(Packet):
    """SETUP Container $(CCBS)$ Section 10.5.4.22b"""
    name = "Setup Container $(CCBS)$"
    # TLV IE: IEI, length, then up to 249 octets holding an embedded SETUP
    # message (variable length up to the L3 maximum; trailing unset octets
    # are trimmed in post_build).
    fields_desc = [
        BitField("eightBitSC", None, 1),
        XBitField("ieiSC", None, 7),
        XByteField("lengthSC", None),
        # optional
        ByteField("mess1", None),
        ByteField("mess2", None),
        ByteField("mess3", None),
        ByteField("mess4", None),
        ByteField("mess5", None),
        ByteField("mess6", None),
        ByteField("mess7", None),
        ByteField("mess8", None),
        ByteField("mess9", None),
        ByteField("mess10", None),
        ByteField("mess11", None),
        ByteField("mess12", None),
        ByteField("mess13", None),
        ByteField("mess14", None),
        ByteField("mess15", None),
        ByteField("mess16", None),
        ByteField("mess17", None),
        ByteField("mess18", None),
        ByteField("mess19", None),
        ByteField("mess20", None),
        ByteField("mess21", None),
        ByteField("mess22", None),
        ByteField("mess23", None),
        ByteField("mess24", None),
        ByteField("mess25", None),
        ByteField("mess26", None),
        ByteField("mess27", None),
        ByteField("mess28", None),
        ByteField("mess29", None),
        ByteField("mess30", None),
        ByteField("mess31", None),
        ByteField("mess32", None),
        ByteField("mess33", None),
        ByteField("mess34", None),
        ByteField("mess35", None),
        ByteField("mess36", None),
        ByteField("mess37", None),
        ByteField("mess38", None),
        ByteField("mess39", None),
        ByteField("mess40", None),
        ByteField("mess41", None),
        ByteField("mess42", None),
        ByteField("mess43", None),
        ByteField("mess44", None),
        ByteField("mess45", None),
        ByteField("mess46", None),
        ByteField("mess47", None),
        ByteField("mess48", None),
        ByteField("mess49", None),
        ByteField("mess50", None),
        ByteField("mess51", None),
        ByteField("mess52", None),
        ByteField("mess53", None),
        ByteField("mess54", None),
        ByteField("mess55", None),
        ByteField("mess56", None),
        ByteField("mess57", None),
        ByteField("mess58", None),
        ByteField("mess59", None),
        ByteField("mess60", None),
        ByteField("mess61", None),
        ByteField("mess62", None),
        ByteField("mess63", None),
        ByteField("mess64", None),
        ByteField("mess65", None),
        ByteField("mess66", None),
        ByteField("mess67", None),
        ByteField("mess68", None),
        ByteField("mess69", None),
        ByteField("mess70", None),
        ByteField("mess71", None),
        ByteField("mess72", None),
        ByteField("mess73", None),
        ByteField("mess74", None),
        ByteField("mess75", None),
        ByteField("mess76", None),
        ByteField("mess77", None),
        ByteField("mess78", None),
        ByteField("mess79", None),
        ByteField("mess80", None),
        ByteField("mess81", None),
        ByteField("mess82", None),
        ByteField("mess83", None),
        ByteField("mess84", None),
        ByteField("mess85", None),
        ByteField("mess86", None),
        ByteField("mess87", None),
        ByteField("mess88", None),
        ByteField("mess89", None),
        ByteField("mess90", None),
        ByteField("mess91", None),
        ByteField("mess92", None),
        ByteField("mess93", None),
        ByteField("mess94", None),
        ByteField("mess95", None),
        ByteField("mess96", None),
        ByteField("mess97", None),
        ByteField("mess98", None),
        ByteField("mess99", None),
        ByteField("mess100", None),
        ByteField("mess101", None),
        ByteField("mess102", None),
        ByteField("mess103", None),
        ByteField("mess104", None),
        ByteField("mess105", None),
        ByteField("mess106", None),
        ByteField("mess107", None),
        ByteField("mess108", None),
        ByteField("mess109", None),
        ByteField("mess110", None),
        ByteField("mess111", None),
        ByteField("mess112", None),
        ByteField("mess113", None),
        ByteField("mess114", None),
        ByteField("mess115", None),
        ByteField("mess116", None),
        ByteField("mess117", None),
        ByteField("mess118", None),
        ByteField("mess119", None),
        ByteField("mess120", None),
        ByteField("mess121", None),
        ByteField("mess122", None),
        ByteField("mess123", None),
        ByteField("mess124", None),
        ByteField("mess125", None),
        ByteField("mess126", None),
        ByteField("mess127", None),
        ByteField("mess128", None),
        ByteField("mess129", None),
        ByteField("mess130", None),
        ByteField("mess131", None),
        ByteField("mess132", None),
        ByteField("mess133", None),
        ByteField("mess134", None),
        ByteField("mess135", None),
        ByteField("mess136", None),
        ByteField("mess137", None),
        ByteField("mess138", None),
        ByteField("mess139", None),
        ByteField("mess140", None),
        ByteField("mess141", None),
        ByteField("mess142", None),
        ByteField("mess143", None),
        ByteField("mess144", None),
        ByteField("mess145", None),
        ByteField("mess146", None),
        ByteField("mess147", None),
        ByteField("mess148", None),
        ByteField("mess149", None),
        ByteField("mess150", None),
        ByteField("mess151", None),
        ByteField("mess152", None),
        ByteField("mess153", None),
        ByteField("mess154", None),
        ByteField("mess155", None),
        ByteField("mess156", None),
        ByteField("mess157", None),
        ByteField("mess158", None),
        ByteField("mess159", None),
        ByteField("mess160", None),
        ByteField("mess161", None),
        ByteField("mess162", None),
        ByteField("mess163", None),
        ByteField("mess164", None),
        ByteField("mess165", None),
        ByteField("mess166", None),
        ByteField("mess167", None),
        ByteField("mess168", None),
        ByteField("mess169", None),
        ByteField("mess170", None),
        ByteField("mess171", None),
        ByteField("mess172", None),
        ByteField("mess173", None),
        ByteField("mess174", None),
        ByteField("mess175", None),
        ByteField("mess176", None),
        ByteField("mess177", None),
        ByteField("mess178", None),
        ByteField("mess179", None),
        ByteField("mess180", None),
        ByteField("mess181", None),
        ByteField("mess182", None),
        ByteField("mess183", None),
        ByteField("mess184", None),
        ByteField("mess185", None),
        ByteField("mess186", None),
        ByteField("mess187", None),
        ByteField("mess188", None),
        ByteField("mess189", None),
        ByteField("mess190", None),
        ByteField("mess191", None),
        ByteField("mess192", None),
        ByteField("mess193", None),
        ByteField("mess194", None),
        ByteField("mess195", None),
        ByteField("mess196", None),
        ByteField("mess197", None),
        ByteField("mess198", None),
        ByteField("mess199", None),
        ByteField("mess200", None),
        ByteField("mess201", None),
        ByteField("mess202", None),
        ByteField("mess203", None),
        ByteField("mess204", None),
        ByteField("mess205", None),
        ByteField("mess206", None),
        ByteField("mess207", None),
        ByteField("mess208", None),
        ByteField("mess209", None),
        ByteField("mess210", None),
        ByteField("mess211", None),
        ByteField("mess212", None),
        ByteField("mess213", None),
        ByteField("mess214", None),
        ByteField("mess215", None),
        ByteField("mess216", None),
        ByteField("mess217", None),
        ByteField("mess218", None),
        ByteField("mess219", None),
        ByteField("mess220", None),
        ByteField("mess221", None),
        ByteField("mess222", None),
        ByteField("mess223", None),
        ByteField("mess224", None),
        ByteField("mess225", None),
        ByteField("mess226", None),
        ByteField("mess227", None),
        ByteField("mess228", None),
        ByteField("mess229", None),
        ByteField("mess230", None),
        ByteField("mess231", None),
        ByteField("mess232", None),
        ByteField("mess233", None),
        ByteField("mess234", None),
        ByteField("mess235", None),
        ByteField("mess236", None),
        ByteField("mess237", None),
        ByteField("mess238", None),
        ByteField("mess239", None),
        ByteField("mess240", None),
        ByteField("mess241", None),
        ByteField("mess242", None),
        ByteField("mess243", None),
        ByteField("mess244", None),
        ByteField("mess245", None),
        ByteField("mess246", None),
        ByteField("mess247", None),
        ByteField("mess248", None),
        ByteField("mess249", None),
    ]
    def post_build(self, p, pay):
        # Drop trailing optional fields left unset and patch the length
        # octet when the caller did not supply one.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        # res[0]: trailing bytes to strip; res[1]: length-octet value
        # (semantics inferred from usage of the module-level adapt() helper).
        res = adapt(2, 251, a, self.fields_desc)
        if self.lengthSC is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class SignalHdr(Packet):
    """Signal information element (TS 24.008, section 10.5.4.23)."""
    name = "Signal"
    fields_desc = [
        BitField("eightBitS", None, 1),  # extension bit
        XBitField("ieiS", None, 7),      # information element identifier
        ByteField("sigValue", 0x0),      # signal value octet
    ]
# length 2 to max for L3 message (251)
class SsVersionIndicatorHdr(Packet):
    """SS Version Indicator information element (TS 24.008, 10.5.4.24).

    The optional payload octets info1 .. info249 are generated
    programmatically; the resulting field list is identical to a
    hand-written one.
    """
    name = "SS Version Indicator"
    fields_desc = [
        BitField("eightBitSVI", None, 1),  # extension bit
        XBitField("ieiSVI", None, 7),      # information element identifier
        XByteField("lengthSVI", None),     # patched in post_build when unset
    ] + [
        # optional information octets
        ByteField("info%d" % i, None) for i in range(1, 250)
    ]

    def post_build(self, p, pay):
        """Drop unset optional octets and fix up the length byte."""
        vals = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 251, vals, self.fields_desc)
        if self.lengthSVI is None:
            # overwrite the length octet with the computed element length
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            # strip the trailing bytes belonging to unset optional fields
            p = p[:-res[0]]
        return p + pay
# length 3 to 35 or 131
class UserUserHdr(Packet):
    """User-user information element (TS 24.008, 10.5.4.25).

    The optional octets userUserInfo1 .. userUserInfo131 are generated
    programmatically; field names match a hand-written list exactly.
    """
    name = "User-User"
    fields_desc = [
        BitField("eightBitUU", None, 1),  # extension bit
        XBitField("ieiUU", None, 7),      # information element identifier
        # dynamic length of field depending of the type of message;
        # let the user decide which length to take => more fuzzing options
        XByteField("lengthUU", None),
        ByteField("userUserPD", 0x0),     # user-user protocol discriminator
    ] + [
        # optional user-user information octets
        # (1-32: short form; 33-131: long packet)
        ByteField("userUserInfo%d" % i, None) for i in range(1, 132)
    ]

    def post_build(self, p, pay):
        """Drop unset optional octets and fix up the length byte."""
        vals = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 131, vals, self.fields_desc)
        if self.lengthUU is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class AlertingPatternHdr(Packet):
    """Alerting Pattern information element (TS 24.008, 10.5.4.26)."""
    name = "Alerting Pattern"
    fields_desc = [
        BitField("eightBitAP", None, 1),  # extension bit
        XBitField("ieiAP", None, 7),      # information element identifier
        XByteField("lengthAP", 0x3),      # fixed element length
        BitField("spare", 0x0, 4),
        BitField("alertingValue", 0x0, 4),
    ]
class AllowedActionsHdr(Packet):
    """Allowed actions $(CCBS)$ information element (TS 24.008, 10.5.4.26)."""
    name = "Allowed Actions $(CCBS)$"
    fields_desc = [
        BitField("eightBitAA", None, 1),  # extension bit
        XBitField("ieiAA", None, 7),      # information element identifier
        # NOTE(review): the length field is named "lengthAP" (looks like a
        # copy-paste from AlertingPatternHdr, "lengthAA" was probably
        # intended); renaming would break callers, so it is kept -- confirm.
        XByteField("lengthAP", 0x3),
        BitField("CCBS", 0x0, 1),         # CCBS possible flag
        BitField("spare", 0x0, 7),
    ]
#
# 10.5.5 GPRS mobility management information elements
#
class AttachResult(Packet):
    """Attach result information element (TS 24.008, 10.5.5.1)."""
    name = "Attach Result"
    fields_desc = [
        XBitField("ieiAR", 0x0, 4),  # information element identifier
        BitField("spare", 0x0, 1),
        BitField("result", 0x1, 3),  # attach result value
    ]
class AttachTypeHdr(Packet):
    """Attach type information element (TS 24.008, 10.5.5.2)."""
    name = "Attach Type"
    fields_desc = [
        XBitField("ieiAT", None, 4),  # information element identifier
        BitField("spare", 0x0, 1),
        BitField("type", 0x1, 3),     # attach type value
    ]
# Fix 1/2 len problem
class AttachTypeAndCiphKeySeqNr(Packet):
    """Combined half-octet IE: attach type + spare half octet."""
    name = "Attach Type and Cipher Key Sequence Number"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("type", 0x1, 3),             # attach type value
        BitField("spareHalfOctets", 0x0, 4),  # padding to a full octet
    ]
class CipheringAlgorithm(Packet):
    """Ciphering algorithm information element (TS 24.008, 10.5.5.3)."""
    name = "Ciphering Algorithm"
    fields_desc = [
        XBitField("ieiCA", 0x0, 4),  # information element identifier
        BitField("spare", 0x0, 1),
        BitField("type", 0x1, 3),    # ciphering algorithm type
    ]
# Fix 1/2 len problem
class CipheringAlgorithmAndImeisvRequest(Packet):
    """Combined half-octet IE: ciphering algorithm + IMEISV request.

    Packs two 4-bit information elements (TS 24.008 sections 10.5.5.3 and
    10.5.5.10) into one octet.
    """
    name = "Ciphering Algorithm and Imeisv Request"
    fields_desc = [
        BitField("spare", 0x0, 1),      # ciphering-algorithm spare bit
        BitField("type", 0x1, 3),       # ciphering algorithm type
        # BUG FIX: this field was also named "spare", shadowing the first
        # one -- scapy field names must be unique within a packet.
        BitField("spare2", 0x0, 1),     # IMEISV-request spare bit
        BitField("imeisvVal", 0x0, 3)   # IMEISV request value
    ]
# [Spare]
class TmsiStatus(Packet):
    """[Spare] TMSI status information element (TS 24.008, 10.5.5.4)."""
    name = "[Spare] TMSI Status"
    fields_desc = [
        XBitField("ieiTS", None, 4),  # information element identifier
        BitField("spare", 0x0, 3),
        BitField("flag", 0x1, 1),     # TMSI availability flag
    ]
class DetachType(Packet):
    """Detach type information element (TS 24.008, 10.5.5.5)."""
    name = "Detach Type"
    fields_desc = [
        XBitField("ieiDT", 0x0, 4),    # information element identifier
        BitField("poweroff", 0x0, 1),  # power-off indicator
        BitField("type", 0x1, 3),      # detach type value
    ]
# Fix 1/2 len problem
class DetachTypeAndForceToStandby(Packet):
    """Combined half-octet IE: detach type + force-to-standby."""
    name = "Detach Type and Force To Standby"
    fields_desc = [
        BitField("poweroff", 0x0, 1),      # power-off indicator
        BitField("type", 0x1, 3),          # detach type value
        BitField("spare", 0x0, 1),
        BitField("forceStandby", 0x0, 3),  # force-to-standby value
    ]
# Fix 1/2 len problem
class DetachTypeAndSpareHalfOctets(Packet):
    """Combined half-octet IE: detach type + spare half octet."""
    name = "Detach Type and Spare Half Octets"
    fields_desc = [
        BitField("poweroff", 0x0, 1),         # power-off indicator
        BitField("type", 0x1, 3),             # detach type value
        BitField("spareHalfOctets", 0x0, 4),  # padding to a full octet
    ]
class DrxParameter(Packet):
    """DRX parameter information element (TS 24.008, 10.5.5.6)."""
    name = "DRX Parameter"
    fields_desc = [
        ByteField("ieiDP", 0x0),           # information element identifier
        ByteField("splitPG", 0x0),         # split PG cycle code
        BitField("spare", 0x0, 4),
        BitField("splitCCCH", 0x0, 1),     # split on CCCH flag
        BitField("NonDrxTimer", 0x1, 3),   # non-DRX timer value
    ]
class ForceToStandby(Packet):
    """Force to standby information element (TS 24.008, 10.5.5.7)."""
    name = "Force To Standby"
    fields_desc = [
        XBitField("ieiFTS", 0x0, 4),       # information element identifier
        BitField("spare", 0x0, 1),
        BitField("forceStandby", 0x0, 3),  # force-to-standby value
    ]
# Fix 1/2 len problem
class ForceToStandbyAndAcReferenceNumber(Packet):
    """Combined half-octet IE: force-to-standby + A&C reference number."""
    name = "Force To Standby And Ac Reference Number"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("forceStandby", 0x0, 3),  # force-to-standby value
        BitField("acRefVal", 0x0, 4),      # A&C reference number value
    ]
# Fix 1/2 len problem
class ForceToStandbyAndUpdateResult(Packet):
    """Combined half-octet IE: force-to-standby + update result.

    Packs two 4-bit information elements (TS 24.008 sections 10.5.5.7 and
    10.5.5.17) into one octet.
    """
    name = "Force To Standby And Update Result"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("forceStandby", 0x0, 3),  # force-to-standby value
        # BUG FIX: this field was also named "spare", shadowing the first
        # one -- scapy field names must be unique within a packet.
        BitField("spare2", 0x0, 1),
        BitField("updateResVal", 0x0, 3)   # update result value
    ]
# Fix 1/2 len problem
class ForceToStandbyAndSpareHalfOctets(Packet):
    """Combined half-octet IE: force-to-standby + spare half octet."""
    name = "Force To Standby And Spare Half Octets"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("forceStandby", 0x0, 3),     # force-to-standby value
        BitField("spareHalfOctets", 0x0, 4),  # padding to a full octet
    ]
class PTmsiSignature(Packet):
    """P-TMSI signature Section 10.5.5.8"""
    name = "P-TMSI Signature"
    fields_desc = [
        ByteField("ieiPTS", 0x0),  # information element identifier
        # NOTE(review): "sgnature" looks like a typo for "signature", but
        # the field name is part of the public attribute interface --
        # renaming would break existing callers, so it is left as-is.
        BitField("sgnature", 0x0, 24)  # 3-octet P-TMSI signature value
    ]
class IdentityType2(Packet):
    """Identity type 2 information element (TS 24.008, 10.5.5.9)."""
    name = "Identity Type 2"
    fields_desc = [
        XBitField("ieiIT2", 0x0, 4),         # information element identifier
        BitField("spare", 0x0, 1),
        BitField("typeOfIdentity", 0x0, 3),  # requested identity type
    ]
# Fix 1/2 len problem
class IdentityType2AndforceToStandby(Packet):
name = "Identity Type 2 and Force to Standby"
fields_desc = [
BitField("spare", 0x0, 1),
BitField("typeOfIdentity", 0x0, 3),
BitField("spare", 0x0, 1),
BitField("forceStandby", 0x0, 3)
]
class ImeisvRequest(Packet):
    """IMEISV request information element (TS 24.008, 10.5.5.10)."""
    name = "IMEISV Request"
    fields_desc = [
        XBitField("ieiIR", 0x0, 4),     # information element identifier
        BitField("spare", 0x0, 1),
        BitField("imeisvVal", 0x0, 3),  # IMEISV request value
    ]
# Fix 1/2 len problem
class ImeisvRequestAndForceToStandby(Packet):
    name = "IMEISV Request and Force To Standby"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("imeisvVal", 0x0, 3),  # IMEISV request value
        # NOTE(review): the class name mentions "Force To Standby", but the
        # second half octet is modeled as spare bits with no forceStandby
        # field -- looks inconsistent; confirm against the message layouts
        # that use this element before relying on it.
        BitField("spareHalfOctets", 0x0, 4)
    ]
# length 4 to 19
class ReceiveNpduNumbersList(Packet):
    """Receive N-PDU Numbers list information element (TS 24.008, 10.5.5.11).

    The optional octets nbList1 .. nbList15 are generated
    programmatically; field names match a hand-written list exactly.
    """
    name = "Receive N-PDU Numbers list"
    fields_desc = [
        ByteField("ieiRNNL", 0x0),      # information element identifier
        XByteField("lengthRNNL", None), # patched in post_build when unset
        BitField("nbList0", 0x0, 16),   # first mandatory list entry pair
    ] + [
        # optional list octets
        ByteField("nbList%d" % i, None) for i in range(1, 16)
    ]

    def post_build(self, p, pay):
        """Drop unset optional octets and fix up the length byte."""
        vals = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(4, 19, vals, self.fields_desc)
        if self.lengthRNNL is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class MsNetworkCapability(Packet):
    """MS network capability information element (TS 24.008, 10.5.5.12)."""
    name = "MS Network Capability"
    fields_desc = [
        ByteField("ieiMNC", 0x0),      # information element identifier
        XByteField("lengthMNC", 0x3),  # fixed element length
        ByteField("msNetValue", 0x0),  # capability value octet
    ]
# length 6 to 14
class MsRadioAccessCapability(Packet):
    """MS Radio Access capability information element (TS 24.008, 10.5.5.12a).

    Models one access-technology capability record (access type, RF power
    capability, A5 algorithm support, multislot classes, SMS/SM values).
    """
    name = "MS Radio Access Capability"
    fields_desc = [
        ByteField("ieiMRAC", 0x24),       # information element identifier
        XByteField("lengthMRAC", None),   # element length
        BitField("spare1", 0x0, 1),  # ...
        BitField("accessCap", 0x0, 4),
        BitField("accessTechType", 0x0, 4),
        # access capability
        BitField("bool", 0x0, 1),
        BitField("lengthContent", 0x0, 7),
        # BUG FIX: this field was also named "spare1", shadowing the one
        # above -- scapy field names must be unique within a packet.
        BitField("spare2", 0x0, 1),  # ...
        # content
        BitField("pwrCap", 0x0, 3),       # RF power capability
        BitField("bool1", 0x0, 1),
        BitField("a51", 0x0, 1),          # A5/1..A5/7 algorithm support bits
        BitField("a52", 0x0, 1),
        BitField("a53", 0x0, 1),
        BitField("a54", 0x0, 1),
        BitField("a55", 0x0, 1),
        BitField("a56", 0x0, 1),
        BitField("a57", 0x0, 1),
        BitField("esInd", 0x0, 1),        # early classmark sending
        BitField("ps", 0x0, 1),
        BitField("vgcs", 0x0, 1),
        BitField("vbs", 0x0, 1),
        BitField("bool2", 0x0, 1),
        # multislot
        BitField("bool3", 0x0, 1),
        BitField("hscsd", 0x0, 5),        # HSCSD multislot class
        BitField("bool4", 0x0, 1),
        BitField("gprs", 0x0, 5),         # GPRS multislot class
        BitField("gprsExt", 0x0, 1),
        BitField("bool5", 0x0, 1),
        BitField("smsVal", 0x0, 4),
        BitField("smVal", 0x0, 4)
    ]
# 10.5.5.13 Spare
# This is intentionally left spare.
class GmmCause(Packet):
    """GMM cause information element (TS 24.008, 10.5.5.14)."""
    name = "GMM Cause"
    fields_desc = [
        ByteField("ieiGC", 0x0),       # information element identifier
        ByteField("causeValue", 0x0),  # GMM cause value
    ]
class RoutingAreaIdentification(Packet):
    """Routing area identification information element (TS 24.008, 10.5.5.15).

    Layout per the spec: MCC (3 BCD digits), MNC (3 BCD digits), a 2-octet
    LAC and a 1-octet RAC.
    """
    name = "Routing Area Identification"
    fields_desc = [
        ByteField("ieiRAI", 0x0),       # information element identifier
        BitField("mccDigit2", 0x0, 4),
        BitField("mccDigit1", 0x0, 4),
        BitField("mncDigit3", 0x0, 4),
        BitField("mccDigit3", 0x0, 4),
        # BUG FIX: these two were duplicated as mccDigit2/mccDigit1,
        # shadowing the fields above; per TS 24.008 this octet carries
        # MNC digits 2 and 1.
        BitField("mncDigit2", 0x0, 4),
        BitField("mncDigit1", 0x0, 4),
        ByteField("LAC", 0x0),          # location area code, high octet
        ByteField("LAC1", 0x0),         # location area code, low octet
        # BUG FIX: was a third duplicate named "LAC"; the final octet of
        # the RAI is the routing area code.
        ByteField("RAC", 0x0)
    ]
# 10.5.5.16 Spare
# This is intentionally left spare.
class UpdateResult(Packet):
    """Update result information element (TS 24.008, 10.5.5.17)."""
    name = "Update Result"
    fields_desc = [
        XBitField("ieiUR", 0x0, 4),        # information element identifier
        BitField("spare", 0x0, 1),
        BitField("updateResVal", 0x0, 3),  # update result value
    ]
class UpdateType(Packet):
    """Update type information element (TS 24.008, 10.5.5.18)."""
    name = "Update Type"
    fields_desc = [
        XBitField("ieiUT", 0x0, 4),         # information element identifier
        BitField("spare", 0x0, 1),
        BitField("updateTypeVal", 0x0, 3),  # update type value
    ]
# Fix 1/2 len problem
class UpdateTypeAndCiphKeySeqNr(Packet):
    """Combined half-octet IE: update type + cipher key sequence number.

    Packs two 4-bit information elements into one octet.
    """
    name = "Update Type and Cipher Key Sequence Number"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("updateTypeVal", 0x0, 3),  # update type value
        # BUG FIX: this field was also named "spare", shadowing the first
        # one -- scapy field names must be unique within a packet.
        BitField("spare2", 0x0, 1),
        BitField("keySeq", 0x0, 3)          # cipher key sequence number
    ]
class AcReferenceNumber(Packet):
    """A&C reference number information element (TS 24.008, 10.5.5.19)."""
    name = "A&C Reference Number"
    fields_desc = [
        XBitField("ieiARN", 0x0, 4),   # information element identifier
        BitField("acRefVal", 0x0, 4),  # A&C reference number value
    ]
# Fix 1/2 len problem
class AcReferenceNumberAndSpareHalfOctets(Packet):
    """Combined half-octet IE: A&C reference number + spare half octet."""
    name = "A&C Reference Number and Spare Half Octets"
    fields_desc = [
        BitField("acRefVal", 0x0, 4),         # A&C reference number value
        BitField("spareHalfOctets", 0x0, 4),  # padding to a full octet
    ]
#
# 10.5.6 Session management information elements
#
# length 3 to 102
class AccessPointName(Packet):
    """Access Point Name information element (TS 24.008, 10.5.6.1).

    The optional octets apName1 .. apName99 are generated
    programmatically; field names match a hand-written list exactly.
    """
    name = "Access Point Name"
    fields_desc = [
        ByteField("ieiAPN", 0x0),      # information element identifier
        XByteField("lengthAPN", None), # patched in post_build when unset
        ByteField("apName", 0x0),      # first mandatory APN octet
    ] + [
        # optional APN octets
        ByteField("apName%d" % i, None) for i in range(1, 100)
    ]

    def post_build(self, p, pay):
        """Drop unset optional octets and fix up the length byte."""
        vals = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 102, vals, self.fields_desc)
        if self.lengthAPN is None:
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class NetworkServiceAccessPointIdentifier(Packet):
    """Network service access point identifier (TS 24.008, 10.5.6.2)."""
    name = "Network Service Access Point Identifier"
    fields_desc = [
        ByteField("ieiNSAPI", 0x0),    # information element identifier
        BitField("spare", 0x0, 4),
        BitField("nsapiVal", 0x0, 4),  # NSAPI value
    ]
# length 2 to 253
class ProtocolConfigurationOptions(Packet):
"""Protocol configuration options Section 10.5.6.3"""
name = "Protocol Configuration Options"
fields_desc = [
ByteField("ieiPCO", 0x0),
XByteField("lengthPCO", None),
# optional
BitField("ext", None, 1),
BitField("spare", None, 4),
BitField("configProto", None, 3),
ByteField("protoId1", None),
ByteField("lenProto1", None),
ByteField("proto1Content", None),
ByteField("protoId2", None),
ByteField("lenProto2", None),
ByteField("proto2Content", None),
ByteField("protoId3", None),
ByteField("lenProto3", None),
ByteField("proto3Content", None),
ByteField("protoId4", None),
ByteField("lenProto4", None),
ByteField("proto4Content", None),
ByteField("protoId5", None),
ByteField("lenProto5", None),
ByteField("proto5Content", None),
ByteField("protoId6", None),
ByteField("lenProto6", None),
ByteField("proto6Content", None),
ByteField("protoId7", None),
ByteField("lenProto7", None),
ByteField("proto7Content", None),
ByteField("protoId8", None),
ByteField("lenProto8", None),
ByteField("proto8Content", None),
ByteField("protoId9", None),
ByteField("lenProto9", None),
ByteField("proto9Content", None),
ByteField("protoId10", None),
ByteField("lenProto10", None),
ByteField("proto10Content", None),
ByteField("protoId11", None),
ByteField("lenProto11", None),
ByteField("proto11Content", None),
ByteField("protoId12", None),
ByteField("lenProto12", None),
ByteField("proto12Content", None),
ByteField("protoId13", None),
ByteField("lenProto13", None),
ByteField("proto13Content", None),
ByteField("protoId14", None),
ByteField("lenProto14", None),
ByteField("proto14Content", None),
ByteField("protoId15", None),
ByteField("lenProto15", None),
ByteField("proto15Content", None),
ByteField("protoId16", None),
ByteField("lenProto16", None),
ByteField("proto16Content", None),
ByteField("protoId17", None),
ByteField("lenProto17", None),
ByteField("proto17Content", None),
ByteField("protoId18", None),
ByteField("lenProto18", None),
ByteField("proto18Content", None),
ByteField("protoId19", None),
ByteField("lenProto19", None),
ByteField("proto19Content", None),
ByteField("protoId20", None),
ByteField("lenProto20", None),
ByteField("proto20Content", None),
ByteField("protoId21", None),
ByteField("lenProto21", None),
ByteField("proto21Content", None),
ByteField("protoId22", None),
ByteField("lenProto22", None),
ByteField("proto22Content", None),
ByteField("protoId23", None),
ByteField("lenProto23", None),
ByteField("proto23Content", None),
ByteField("protoId24", None),
ByteField("lenProto24", None),
ByteField("proto24Content", None),
ByteField("protoId25", None),
ByteField("lenProto25", None),
ByteField("proto25Content", None),
ByteField("protoId26", None),
ByteField("lenProto26", None),
ByteField("proto26Content", None),
ByteField("protoId27", None),
ByteField("lenProto27", None),
ByteField("proto27Content", None),
ByteField("protoId28", None),
ByteField("lenProto28", None),
ByteField("proto28Content", None),
ByteField("protoId29", None),
ByteField("lenProto29", None),
ByteField("proto29Content", None),
ByteField("protoId30", None),
ByteField("lenProto30", None),
ByteField("proto30Content", None),
ByteField("protoId31", None),
ByteField("lenProto31", None),
ByteField("proto31Content", None),
ByteField("protoId32", None),
ByteField("lenProto32", None),
ByteField("proto32Content", None),
ByteField("protoId33", None),
ByteField("lenProto33", None),
ByteField("proto33Content", None),
ByteField("protoId34", None),
ByteField("lenProto34", None),
ByteField("proto34Content", None),
ByteField("protoId35", None),
ByteField("lenProto35", None),
ByteField("proto35Content", None),
ByteField("protoId36", None),
ByteField("lenProto36", None),
ByteField("proto36Content", None),
ByteField("protoId37", None),
ByteField("lenProto37", None),
ByteField("proto37Content", None),
ByteField("protoId38", None),
ByteField("lenProto38", None),
ByteField("proto38Content", None),
ByteField("protoId39", None),
ByteField("lenProto39", None),
ByteField("proto39Content", None),
ByteField("protoId40", None),
ByteField("lenProto40", None),
ByteField("proto40Content", None),
ByteField("protoId41", None),
ByteField("lenProto41", None),
ByteField("proto41Content", None),
ByteField("protoId42", None),
ByteField("lenProto42", None),
ByteField("proto42Content", None),
ByteField("protoId43", None),
ByteField("lenProto43", None),
ByteField("proto43Content", None),
ByteField("protoId44", None),
ByteField("lenProto44", None),
ByteField("proto44Content", None),
ByteField("protoId45", None),
ByteField("lenProto45", None),
ByteField("proto45Content", None),
ByteField("protoId46", None),
ByteField("lenProto46", None),
ByteField("proto46Content", None),
ByteField("protoId47", None),
ByteField("lenProto47", None),
ByteField("proto47Content", None),
ByteField("protoId48", None),
ByteField("lenProto48", None),
ByteField("proto48Content", None),
ByteField("protoId49", None),
ByteField("lenProto49", None),
ByteField("proto49Content", None),
ByteField("protoId50", None),
ByteField("lenProto50", None),
ByteField("proto50Content", None),
ByteField("protoId51", None),
ByteField("lenProto51", None),
ByteField("proto51Content", None),
ByteField("protoId52", None),
ByteField("lenProto52", None),
ByteField("proto52Content", None),
ByteField("protoId53", None),
ByteField("lenProto53", None),
ByteField("proto53Content", None),
ByteField("protoId54", None),
ByteField("lenProto54", None),
ByteField("proto54Content", None),
ByteField("protoId55", None),
ByteField("lenProto55", None),
ByteField("proto55Content", None),
ByteField("protoId56", None),
ByteField("lenProto56", None),
ByteField("proto56Content", None),
ByteField("protoId57", None),
ByteField("lenProto57", None),
ByteField("proto57Content", None),
ByteField("protoId58", None),
ByteField("lenProto58", None),
ByteField("proto58Content", None),
ByteField("protoId59", None),
ByteField("lenProto59", None),
ByteField("proto59Content", None),
ByteField("protoId60", None),
ByteField("lenProto60", None),
ByteField("proto60Content", None),
ByteField("protoId61", None),
ByteField("lenProto61", None),
ByteField("proto61Content", None),
ByteField("protoId62", None),
ByteField("lenProto62", None),
ByteField("proto62Content", None),
ByteField("protoId63", None),
ByteField("lenProto63", None),
ByteField("proto63Content", None),
ByteField("protoId64", None),
ByteField("lenProto64", None),
ByteField("proto64Content", None),
ByteField("protoId65", None),
ByteField("lenProto65", None),
ByteField("proto65Content", None),
ByteField("protoId66", None),
ByteField("lenProto66", None),
ByteField("proto66Content", None),
ByteField("protoId67", None),
ByteField("lenProto67", None),
ByteField("proto67Content", None),
ByteField("protoId68", None),
ByteField("lenProto68", None),
ByteField("proto68Content", None),
ByteField("protoId69", None),
ByteField("lenProto69", None),
ByteField("proto69Content", None),
ByteField("protoId70", None),
ByteField("lenProto70", None),
ByteField("proto70Content", None),
ByteField("protoId71", None),
ByteField("lenProto71", None),
ByteField("proto71Content", None),
ByteField("protoId72", None),
ByteField("lenProto72", None),
ByteField("proto72Content", None),
ByteField("protoId73", None),
ByteField("lenProto73", None),
ByteField("proto73Content", None),
ByteField("protoId74", None),
ByteField("lenProto74", None),
ByteField("proto74Content", None),
ByteField("protoId75", None),
ByteField("lenProto75", None),
ByteField("proto75Content", None),
ByteField("protoId76", None),
ByteField("lenProto76", None),
ByteField("proto76Content", None),
ByteField("protoId77", None),
ByteField("lenProto77", None),
ByteField("proto77Content", None),
ByteField("protoId78", None),
ByteField("lenProto78", None),
ByteField("proto78Content", None),
ByteField("protoId79", None),
ByteField("lenProto79", None),
ByteField("proto79Content", None),
ByteField("protoId80", None),
ByteField("lenProto80", None),
ByteField("proto80Content", None),
ByteField("protoId81", None),
ByteField("lenProto81", None),
ByteField("proto81Content", None),
ByteField("protoId82", None),
ByteField("lenProto82", None),
ByteField("proto82Content", None),
ByteField("protoId83", None),
ByteField("lenProto83", None),
ByteField("proto83Content", None),
]
    def post_build(self, p, pay):
        """Fill in the length octet and trim trailing unset optional fields.

        The enclosing IE allows 2 to 253 octets.  From its use here,
        ``adapt`` returns a pair: res[0] = number of trailing bytes to
        strip (unset optional fields), res[1] = the length value to
        encode — TODO confirm against the helper's definition.
        """
        # collect current field values in declaration order
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 253, a, self.fields_desc)
        if self.lengthPCO is None:
            # length is the second octet, after the IEI octet
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            # drop bytes belonging to unset optional fields
            p = p[:-res[0]]
        return p + pay
# len 4 to 20
class PacketDataProtocolAddress(Packet):
    """Packet data protocol address Section 10.5.6.4"""
    name = "Packet Data Protocol Address"
    fields_desc = [
        ByteField("ieiPDPA", 0x0),
        XByteField("lengthPDPA", None),
        BitField("spare", 0x0, 4),
        BitField("pdpTypeOrga", 0x0, 4),
        ByteField("pdpTypeNb", 0x0),
        # optional: up to 16 address octets
        ByteField("addressInfo1", None),
        ByteField("addressInfo2", None),
        ByteField("addressInfo3", None),
        ByteField("addressInfo4", None),
        ByteField("addressInfo5", None),
        ByteField("addressInfo6", None),
        ByteField("addressInfo7", None),
        ByteField("addressInfo8", None),
        ByteField("addressInfo9", None),
        ByteField("addressInfo10", None),
        ByteField("addressInfo11", None),
        ByteField("addressInfo12", None),
        ByteField("addressInfo13", None),
        ByteField("addressInfo14", None),
        ByteField("addressInfo15", None),
        ByteField("addressInfo16", None)
    ]

    def post_build(self, p, pay):
        """Fill in lengthPDPA and strip unset optional address octets."""
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(4, 20, a, self.fields_desc)
        if self.lengthPDPA is None:
            # length is the second octet, after the IEI octet
            p = p[:1] + struct.pack(">B", res[1]) + p[2:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class QualityOfService(Packet):
    """Quality of service Section 10.5.6.5"""
    name = "Quality of Service"
    # NOTE(review): the original declared three fields all named "spare";
    # with duplicate names only the first is reachable through
    # getattr/setattr, so the later ones are renamed "spare1"/"spare2"
    # following the numbering convention used elsewhere in this file.
    fields_desc = [
        ByteField("ieiQOS", 0x0),
        XByteField("lengthQOS", 0x5),
        BitField("spare", 0x0, 2),
        BitField("delayClass", 0x0, 3),
        BitField("reliaClass", 0x0, 3),
        BitField("peak", 0x0, 4),
        BitField("spare1", 0x0, 1),
        BitField("precedenceCl", 0x0, 3),
        BitField("spare2", 0x0, 3),
        BitField("mean", 0x0, 5)
    ]
class SmCause(Packet):
    """SM cause Section 10.5.6.6"""
    name = "SM Cause"
    # IEI octet followed by a single cause-value octet
    fields_desc = [
        ByteField("ieiSC", 0x0),
        ByteField("causeVal", 0x0)
    ]


# 10.5.6.7 Spare
# This is intentionally left spare.


class AaDeactivationCause(Packet):
    """AA deactivation cause Section 10.5.6.8"""
    name = "AA Deactivation Cause"
    # type 1 IE: IEI in the high nibble, value in the low nibble
    fields_desc = [
        XBitField("ieiADC", 0x0, 4),
        BitField("spare", 0x0, 1),
        BitField("aaVal", 0x0, 3)
    ]


# Fix 1/2 len problem
class AaDeactivationCauseAndSpareHalfOctets(Packet):
    # variant where the half-octet value is padded with spare half octets
    name = "AA Deactivation Cause and Spare Half Octets"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("aaVal", 0x0, 3),
        BitField("spareHalfOctets", 0x0, 4)
    ]


class LlcServiceAccessPointIdentifier(Packet):
    """LLC service access point identifier Section 10.5.6.9"""
    name = "LLC Service Access Point Identifier"
    fields_desc = [
        ByteField("ieiLSAPI", None),
        BitField("spare", 0x0, 4),
        BitField("llcVal", 0x0, 4)
    ]
#
# 10.5.7 GPRS Common information elements
#
# 10.5.7.1 [Spare]
class RadioPriority(Packet):
    """Radio priority Section 10.5.7.2"""
    name = "Radio Priority"
    # type 1 IE: IEI in the high nibble, value in the low nibble
    fields_desc = [
        XBitField("ieiRP", 0x0, 4),
        BitField("spare", 0x1, 1),
        BitField("rplv", 0x0, 3)
    ]


# Fix 1/2 len problem
class RadioPriorityAndSpareHalfOctets(Packet):
    # variant where the half-octet value is padded with spare half octets
    name = "Radio Priority and Spare Half Octets"
    fields_desc = [
        BitField("spare", 0x1, 1),
        BitField("rplv", 0x0, 3),
        BitField("spareHalfOctets", 0x0, 4)
    ]


class GprsTimer(Packet):
    """GPRS Timer Section 10.5.7.3"""
    name = "GPRS Timer"
    # timer value is scaled by the 3-bit unit field
    fields_desc = [
        ByteField("ieiGT", 0x0),
        BitField("unit", 0x0, 3),
        BitField("timerVal", 0x0, 5)
    ]


class CellIdentity(Packet):
    """ Cell identity Section 10.5.1.1 """
    name = "Cell Identity"
    # 16-bit cell identity, split into two octets
    fields_desc = [
        ByteField("ciValue1", 0x0),
        ByteField("ciValue2", 0x0)
    ]


class CiphKeySeqNr(Packet):
    """ Ciphering Key Sequence Number Section 10.5.1.2 """
    name = "Cipher Key Sequence Number"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("keySeq", 0x0, 3)
    ]


class LocalAreaId(Packet):
    """ Local Area Identification Section 10.5.1.3 """
    name = "Location Area Identification"
    # BCD-swapped MCC/MNC digit nibbles followed by the 16-bit LAC
    fields_desc = [
        BitField("mccDigit2", 0x0, 4),
        BitField("mccDigit1", 0x0, 4),
        BitField("mncDigit3", 0x0, 4),
        BitField("mccDigit3", 0x0, 4),
        BitField("mncDigit2", 0x0, 4),
        BitField("mncDigit1", 0x0, 4),
        ByteField("lac1", 0x0),
        ByteField("lac2", 0x0)
    ]
#
# The Mobile Identity is a type 4 information element with a minimum
# length of 3 octet and 11 octets length maximal.
#
# len 3 - 11
class MobileId(Packet):
    """ Mobile Identity Section 10.5.1.4 """
    name = "Mobile Identity"
    fields_desc = [
        XByteField("lengthMI", None),
        BitField("idDigit1", 0x0, 4),
        BitField("oddEven", 0x0, 1),
        BitField("typeOfId", 0x0, 3),
        BitField("idDigit2_1", None, 4),  # optional digit nibbles start here
        BitField("idDigit2", None, 4),
        BitField("idDigit3_1", None, 4),
        BitField("idDigit3", None, 4),
        BitField("idDigit4_1", None, 4),
        BitField("idDigit4", None, 4),
        BitField("idDigit5_1", None, 4),
        BitField("idDigit5", None, 4),
        BitField("idDigit6_1", None, 4),
        BitField("idDigit6", None, 4),
        BitField("idDigit7_1", None, 4),
        BitField("idDigit7", None, 4),
        BitField("idDigit8_1", None, 4),
        BitField("idDigit8", None, 4),
        BitField("idDigit9_1", None, 4),
        BitField("idDigit9", None, 4),
    ]

    def post_build(self, p, pay):
        """Fill in lengthMI and strip unset optional digit fields."""
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 10, a, self.fields_desc, 1)
        if self.lengthMI is None:
            # length is the first octet (this IE carries no IEI octet)
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class MobileStationClassmark1(Packet):
    """ Mobile Station Classmark 1 Section 10.5.1.5 """
    name = "Mobile Station Classmark 1"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("revisionLvl", 0x0, 2),
        BitField("esInd", 0x0, 1),
        BitField("a51", 0x0, 1),
        BitField("rfPowerCap", 0x0, 3)
    ]


class MobileStationClassmark2(Packet):
    """ Mobile Station Classmark 2 Section 10.5.1.6 """
    name = "Mobile Station Classmark 2"
    # first octet repeats the Classmark 1 layout, then two capability octets
    fields_desc = [
        XByteField("lengthMSC2", 0x3),
        BitField("spare", 0x0, 1),
        BitField("revisionLvl", 0x0, 2),
        BitField("esInd", 0x0, 1),
        BitField("a51", 0x0, 1),
        BitField("rfPowerCap", 0x0, 3),
        BitField("spare1", 0x0, 1),
        BitField("psCap", 0x0, 1),
        BitField("ssScreenInd", 0x0, 2),
        BitField("smCaPabi", 0x0, 1),
        BitField("vbs", 0x0, 1),
        BitField("vgcs", 0x0, 1),
        BitField("fc", 0x0, 1),
        BitField("cm3", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("lcsvaCap", 0x0, 1),
        BitField("spare3", 0x0, 1),
        BitField("soLsa", 0x0, 1),
        BitField("cmsp", 0x0, 1),
        BitField("a53", 0x0, 1),
        BitField("a52", 0x0, 1)
    ]


class DescriptiveGroupOrBroadcastCallReference(Packet):
    """ Descriptive group or broadcast call reference Section 10.5.1.9 """
    name = "Descriptive Group or Broadcast Call Reference"
    fields_desc = [
        BitField("binCallRef", 0x0, 27),
        BitField("sf", 0x0, 1),
        BitField("fa", 0x0, 1),
        BitField("callPrio", 0x0, 3),
        BitField("cipherInfo", 0x0, 4),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("spare3", 0x0, 1),
        BitField("spare4", 0x0, 1)
    ]


class PdAndSapi(Packet):
    """ PD and SAPI $(CCBS)$ Section 10.5.1.10a """
    name = "PD and SAPI $(CCBS)$"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("sapi", 0x0, 2),
        BitField("pd", 0x0, 4)
    ]


class PriorityLevel(Packet):
    """ Priority Level Section 10.5.1.11 """
    name = "Priority Level"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("callPrio", 0x0, 3)
    ]
#
# Radio Resource management information elements
#
# len 6 to max for L3 message (251)
class BaRange(Packet):
    """ BA Range Section 10.5.2.1a """
    name = "BA Range"
    # Naming: rX = range X; Lo = lower bound, Hi = higher bound;
    # Hp = high part, Lp = low part of the split value.
    # Ranges 1 and 2 straddle octet boundaries irregularly and are spelled
    # out; ranges 3..84 all share the same 4-field layout and are generated
    # in a loop instead of 328 hand-written declarations.
    fields_desc = [
        XByteField("lengthBR", None),
        # historical note: byte format requires -128 <= number <= 127
        ByteField("nrOfRanges", 0x0),
        ByteField("r1LoHp", 0x0),
        BitField("r1LoLp", 0x0, 3),
        BitField("r1HiHp", 0x0, 5),
        BitField("r1HiLp", 0x0, 4),
        BitField("r2LoHp", 0x0, 4),
        # optional part starts here
        BitField("r2LoLp", None, 5),
        BitField("r2HiHp", None, 3),
        ByteField("r2HiLp", None),
    ] + [
        fld
        for nr in range(3, 85)
        for fld in (ByteField("r%dLoHp" % nr, None),
                    BitField("r%dLoLp" % nr, None, 5),
                    BitField("r%dHiHp" % nr, None, 3),
                    ByteField("r%dHiLp" % nr, None))
    ]

    def post_build(self, p, pay):
        """Fill in lengthBR and strip unset optional range fields."""
        vals = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(5, 253, vals, self.fields_desc, 1)
        if self.lengthBR is None:
            # length is the first octet (no IEI octet in this IE)
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 3 to max for L3 message (251)
class BaListPref(Packet):
    """ BA List Pref Section 10.5.2.1c """
    name = "BA List Pref"
    fields_desc = [
        XByteField("lengthBLP", None),
        BitField("fixBit", 0x0, 1),
        BitField("rangeLower", 0x0, 10),
        BitField("fixBit2", 0x0, 1),
        BitField("rangeUpper", 0x0, 10),
        BitField("baFreq", 0x0, 10),
        BitField("sparePad", 0x0, 8)
    ]


# len 17 || Have a look at the specs for the field format
# Bit map 0 format
# Range 1024 format
# Range 512 format
# Range 256 format
# Range 128 format
# Variable bit map format
class CellChannelDescription(Packet):
    """ Cell Channel Description Section 10.5.2.1b """
    name = "Cell Channel Description "
    # bit-map-0 layout: one bit per ARFCN, 128 down to 1
    fields_desc = [
        BitField("bit128", 0x0, 1),
        BitField("bit127", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("bit124", 0x0, 1),
        BitField("bit123", 0x0, 1),
        BitField("bit122", 0x0, 1),
        BitField("bit121", 0x0, 1),
        ByteField("bit120", 0x0),
        ByteField("bit112", 0x0),
        ByteField("bit104", 0x0),
        ByteField("bit96", 0x0),
        ByteField("bit88", 0x0),
        ByteField("bit80", 0x0),
        ByteField("bit72", 0x0),
        ByteField("bit64", 0x0),
        ByteField("bit56", 0x0),
        ByteField("bit48", 0x0),
        ByteField("bit40", 0x0),
        ByteField("bit32", 0x0),
        ByteField("bit24", 0x0),
        ByteField("bit16", 0x0),
        ByteField("bit8", 0x0)
    ]
class CellDescription(Packet):
    """ Cell Description Section 10.5.2.2 """
    name = "Cell Description"
    # 10-bit BCCH ARFCN split into high (2 bits) and low (8 bits) parts
    fields_desc = [
        BitField("bcchHigh", 0x0, 2),
        BitField("ncc", 0x0, 3),
        BitField("bcc", 0x0, 3),
        ByteField("bcchLow", 0x0)
    ]


class CellOptionsBCCH(Packet):
    """ Cell Options (BCCH) Section 10.5.2.3 """
    name = "Cell Options (BCCH)"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("pwrc", 0x0, 1),
        BitField("dtx", 0x0, 2),
        BitField("rLinkTout", 0x0, 4)
    ]
class CellOptionsSACCH(Packet):
    """ Cell Options (SACCH) Section 10.5.2.3a """
    name = "Cell Options (SACCH)"
    # NOTE(review): the original declared two fields both named "dtx";
    # with duplicate names only the first is reachable through
    # getattr/setattr, so the second fragment is renamed "dtx1".  In
    # 10.5.2.3a the DTX indicator is split across non-adjacent bits —
    # which fragment is the MSB should be confirmed against 3GPP TS
    # 44.018.  Also note the declared bits sum to 7, not 8, which looks
    # like a pre-existing width bug; widths are left unchanged here.
    fields_desc = [
        BitField("dtx", 0x0, 1),
        BitField("pwrc", 0x0, 1),
        BitField("dtx1", 0x0, 1),
        BitField("rLinkTout", 0x0, 4)
    ]
class CellSelectionParameters(Packet):
    """ Cell Selection Parameters Section 10.5.2.4 """
    name = "Cell Selection Parameters"
    fields_desc = [
        BitField("cellReselect", 0x0, 3),
        BitField("msTxPwrMax", 0x0, 5),
        BitField("acs", None, 1),
        BitField("neci", None, 1),
        BitField("rxlenAccMin", None, 6)
    ]


class MacModeAndChannelCodingRequest(Packet):
    """ MAC Mode and Channel Coding Requested Section 10.5.2.4a """
    name = "MAC Mode and Channel Coding Requested"
    fields_desc = [
        BitField("macMode", 0x0, 2),
        BitField("cs", 0x0, 2)
    ]
class ChannelDescription(Packet):
    """ Channel Description Section 10.5.2.5 """
    name = "Channel Description"
    # hopping layout (h defaults to 1): MAIO + HSN follow
    fields_desc = [
        BitField("channelTyp", 0x0, 5),
        BitField("tn", 0x0, 3),
        BitField("tsc", 0x0, 3),
        BitField("h", 0x1, 1),
        BitField("maioHi", 0x0, 4),
        BitField("maioLo", 0x0, 2),
        BitField("hsn", 0x0, 6)
    ]


class ChannelDescription2(Packet):
    """ Channel Description 2 Section 10.5.2.5a """
    name = "Channel Description 2"
    # non-hopping layout (h defaults to 0): spare + ARFCN follow
    fields_desc = [
        BitField("channelTyp", 0x0, 5),
        BitField("tn", 0x0, 3),
        BitField("tsc", 0x0, 3),
        BitField("h", 0x0, 1),
        # if h=1
        # BitField("maioHi", 0x0, 4),
        # BitField("maioLo", 0x0, 2),
        # BitField("hsn", 0x0, 6)
        BitField("spare", 0x0, 2),
        BitField("arfcnHigh", 0x0, 2),
        ByteField("arfcnLow", 0x0)
    ]


class ChannelMode(Packet):
    """ Channel Mode Section 10.5.2.6 """
    name = "Channel Mode"
    fields_desc = [
        ByteField("mode", 0x0)
    ]


class ChannelMode2(Packet):
    """ Channel Mode 2 Section 10.5.2.7 """
    name = "Channel Mode 2"
    fields_desc = [
        ByteField("mode", 0x0)
    ]


class ChannelNeeded(Packet):
    """ Channel Needed Section 10.5.2.8 """
    name = "Channel Needed"
    fields_desc = [
        BitField("channel2", 0x0, 2),
        BitField("channel1", 0x0, 2),
    ]


class ChannelRequestDescription(Packet):
    """Channel Request Description Section 10.5.2.8a """
    name = "Channel Request Description"
    # Two layouts selected by the mt bit: mt == 0 is a single 39-bit spare
    # block, mt == 1 carries the full request description.
    # NOTE(review): two ConditionalFields share the name "spare"; the
    # mt == 0 / mt == 1 conditions are mutually exclusive, but the
    # duplicate name still shadows attribute access — confirm intended.
    fields_desc = [
        BitField("mt", 0x0, 1),
        ConditionalField(BitField("spare", 0x0, 39),
                         lambda pkt: pkt.mt == 0),
        ConditionalField(BitField("spare", 0x0, 3),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(BitField("priority", 0x0, 2),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(BitField("rlcMode", 0x0, 1),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(BitField("llcFrame", 0x1, 1),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("reqBandMsb", 0x0),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("reqBandLsb", 0x0),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("rlcMsb", 0x0),
                         lambda pkt: pkt.mt == 1),
        ConditionalField(ByteField("rlcLsb", 0x0),
                         lambda pkt: pkt.mt == 1)
    ]


class CipherModeSetting(Packet):
    """Cipher Mode Setting Section 10.5.2.9 """
    name = "Cipher Mode Setting"
    fields_desc = [
        BitField("algoId", 0x0, 3),
        BitField("sc", 0x0, 1),
    ]


class CipherResponse(Packet):
    """Cipher Response Section 10.5.2.10 """
    name = "Cipher Response"
    fields_desc = [
        BitField("spare", 0x0, 3),
        BitField("cr", 0x0, 1),
    ]
class ControlChannelDescription(Packet):
    """Control Channel Description Section 10.5.2.11 """
    name = "Control Channel Description"
    # NOTE(review): the original had two fields named "spare"; with
    # duplicate names only the first is reachable through
    # getattr/setattr, so the second is renamed "spare0" following the
    # naming used elsewhere in this file (cf. MeasurementResults).
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("att", 0x0, 1),
        BitField("bsAgBlksRes", 0x0, 3),
        BitField("ccchConf", 0x0, 3),
        BitField("spare0", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1),
        BitField("spare3", 0x0, 1),
        BitField("spare4", 0x0, 1),
        BitField("bsPaMfrms", 0x0, 3),
        ByteField("t3212", 0x0)
    ]
class FrequencyChannelSequence(Packet):
    """Frequency Channel Sequence Section 10.5.2.12"""
    name = "Frequency Channel Sequence"
    # lowest ARFCN followed by sixteen 4-bit increments
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("lowestArfcn", 0x0, 7),
        BitField("skipArfcn01", 0x0, 4),
        BitField("skipArfcn02", 0x0, 4),
        BitField("skipArfcn03", 0x0, 4),
        BitField("skipArfcn04", 0x0, 4),
        BitField("skipArfcn05", 0x0, 4),
        BitField("skipArfcn06", 0x0, 4),
        BitField("skipArfcn07", 0x0, 4),
        BitField("skipArfcn08", 0x0, 4),
        BitField("skipArfcn09", 0x0, 4),
        BitField("skipArfcn10", 0x0, 4),
        BitField("skipArfcn11", 0x0, 4),
        BitField("skipArfcn12", 0x0, 4),
        BitField("skipArfcn13", 0x0, 4),
        BitField("skipArfcn14", 0x0, 4),
        BitField("skipArfcn15", 0x0, 4),
        BitField("skipArfcn16", 0x0, 4)
    ]


class FrequencyList(Packet):
    """Frequency List Section 10.5.2.13"""
    name = "Frequency List"
    # Problem:
    # There are several formats for the Frequency List information
    # element, distinguished by the "format indicator" subfield.
    # Some formats are frequency bit maps, the others use a special encoding
    # scheme.  Only the bit-map layout is modelled here.
    fields_desc = [
        XByteField("lengthFL", None),
        BitField("formatID", 0x0, 2),
        BitField("spare", 0x0, 2),
        BitField("arfcn124", 0x0, 1),
        BitField("arfcn123", 0x0, 1),
        BitField("arfcn122", 0x0, 1),
        BitField("arfcn121", 0x0, 1),
        ByteField("arfcn120", 0x0),
        ByteField("arfcn112", 0x0),
        ByteField("arfcn104", 0x0),
        ByteField("arfcn96", 0x0),
        ByteField("arfcn88", 0x0),
        ByteField("arfcn80", 0x0),
        ByteField("arfcn72", 0x0),
        ByteField("arfcn64", 0x0),
        ByteField("arfcn56", 0x0),
        ByteField("arfcn48", 0x0),
        ByteField("arfcn40", 0x0),
        ByteField("arfcn32", 0x0),
        ByteField("arfcn24", 0x0),
        ByteField("arfcn16", 0x0),
        ByteField("arfcn8", 0x0)
    ]
# len 4 to 13
class GroupChannelDescription(Packet):
    """Group Channel Description Section 10.5.2.14b"""
    name = "Group Channel Description"
    fields_desc = [
        XByteField("lengthGCD", None),
        BitField("channelType", 0x0, 5),
        BitField("tn", 0x0, 3),
        BitField("tsc", 0x0, 3),
        BitField("h", 0x0, 1),
        # if h == 0 the packet looks the following way:
        ConditionalField(BitField("spare", 0x0, 2),
                         lambda pkt: pkt.h == 0x0),
        ConditionalField(BitField("arfcnHi", 0x0, 2),
                         lambda pkt: pkt.h == 0x0),
        ConditionalField(ByteField("arfcnLo", None),
                         lambda pkt: pkt.h == 0x0),
        # if h == 1 the packet looks the following way:
        ConditionalField(BitField("maioHi", 0x0, 4),
                         lambda pkt: pkt.h == 0x1),
        ConditionalField(BitField("maioLo", None, 2),
                         lambda pkt: pkt.h == 0x1),
        ConditionalField(BitField("hsn", None, 6),
                         lambda pkt: pkt.h == 0x1),
        # finished with conditional fields; optional MA octets follow
        ByteField("maC6", None),
        ByteField("maC7", None),
        ByteField("maC8", None),
        ByteField("maC9", None),
        ByteField("maC10", None),
        ByteField("maC11", None),
        ByteField("maC12", None),
        ByteField("maC13", None),
        ByteField("maC14", None)
    ]

    def post_build(self, p, pay):
        """Fill in lengthGCD and strip unset optional MA octets."""
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(4, 13, a, self.fields_desc, 1)
        if self.lengthGCD is None:
            # length is the first octet (no IEI octet in this IE)
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class GprsResumption(Packet):
    """GPRS Resumption Section 10.5.2.14c"""
    name = "GPRS Resumption"
    fields_desc = [
        BitField("spare", 0x0, 3),
        BitField("ack", 0x0, 1)
    ]


class HandoverReference(Packet):
    """Handover Reference Section 10.5.2.15"""
    name = "Handover Reference"
    fields_desc = [
        ByteField("handoverRef", 0x0)
    ]


class IraRestOctets(Packet):
    """IAR Rest Octets Section 10.5.2.17"""
    name = "IAR Rest Octets"
    # fixed 3-octet padding pattern, modelled bit by bit
    fields_desc = [
        BitField("spare01", 0x0, 1),
        BitField("spare02", 0x0, 1),
        BitField("spare03", 0x1, 1),
        BitField("spare04", 0x0, 1),
        BitField("spare05", 0x1, 1),
        BitField("spare06", 0x0, 1),
        BitField("spare07", 0x1, 1),
        BitField("spare08", 0x1, 1),
        BitField("spare09", 0x0, 1),
        BitField("spare10", 0x0, 1),
        BitField("spare11", 0x1, 1),
        BitField("spare12", 0x0, 1),
        BitField("spare13", 0x1, 1),
        BitField("spare14", 0x0, 1),
        BitField("spare15", 0x1, 1),
        BitField("spare16", 0x1, 1),
        BitField("spare17", 0x0, 1),
        BitField("spare18", 0x0, 1),
        BitField("spare19", 0x1, 1),
        BitField("spare20", 0x0, 1),
        BitField("spare21", 0x1, 1),
        BitField("spare22", 0x0, 1),
        BitField("spare23", 0x1, 1),
        BitField("spare24", 0x1, 1)
    ]
# len is 1 to 5; the IE is variable-sized but carries no length field,
# so the number of spare octets cannot be derived from the IE itself.
class IaxRestOctets(Packet):
    """IAX Rest Octets Section 10.5.2.18"""
    name = "IAX Rest Octets"
    fields_desc = [
        BitField("spare01", 0x0, 1),
        BitField("spare02", 0x0, 1),
        BitField("spare03", 0x1, 1),
        BitField("spare04", 0x0, 1),
        BitField("spare05", 0x1, 1),
        BitField("spare06", 0x0, 1),
        BitField("spare07", 0x1, 1),
        BitField("spare08", 0x1, 1),
        # optional additional spare octets
        ByteField("spareB1", None),
        ByteField("spareB2", None),
        ByteField("spareB3", None)
    ]


class L2PseudoLength(Packet):
    """L2 Pseudo Length Section 10.5.2.19"""
    name = "L2 Pseudo Length"
    fields_desc = [
        BitField("l2pLength", None, 6),
        BitField("bit2", 0x0, 1),
        BitField("bit1", 0x1, 1)
    ]
class MeasurementResults(Packet):
    """Measurement Results Section 10.5.2.20"""
    name = "Measurement Results"
    # serving-cell measurements followed by up to six neighbour cells (C1..C6)
    fields_desc = [
        BitField("baUsed", 0x0, 1),
        BitField("dtxUsed", 0x0, 1),
        BitField("rxLevFull", 0x0, 6),
        BitField("spare", 0x0, 1),
        BitField("measValid", 0x0, 1),
        BitField("rxLevSub", 0x0, 6),
        BitField("spare0", 0x0, 1),
        BitField("rxqualFull", 0x0, 3),
        BitField("rxqualSub", 0x0, 3),
        BitField("noNcellHi", 0x0, 1),
        BitField("noNcellLo", 0x0, 2),
        BitField("rxlevC1", 0x0, 6),
        BitField("bcchC1", 0x0, 5),
        BitField("bsicC1Hi", 0x0, 3),
        BitField("bsicC1Lo", 0x0, 3),
        BitField("rxlevC2", 0x0, 5),
        BitField("rxlevC2Lo", 0x0, 1),
        BitField("bcchC2", 0x0, 5),
        BitField("bsicC2Hi", 0x0, 2),
        BitField("bscicC2Lo", 0x0, 4),
        BitField("bscicC2Hi", 0x0, 4),
        BitField("rxlevC3Lo", 0x0, 2),
        BitField("bcchC3", 0x0, 5),
        BitField("rxlevC3Hi", 0x0, 1),
        BitField("bsicC3Lo", 0x0, 5),
        BitField("bsicC3Hi", 0x0, 3),
        BitField("rxlevC4Lo", 0x0, 3),
        BitField("bcchC4", 0x0, 5),
        BitField("bsicC4", 0x0, 6),
        BitField("rxlevC5Hi", 0x0, 2),
        BitField("rxlevC5Lo", 0x0, 4),
        BitField("bcchC5Hi", 0x0, 4),
        BitField("bcchC5Lo", 0x0, 1),
        BitField("bsicC5", 0x0, 6),
        BitField("rxlevC6", 0x0, 1),
        BitField("rxlevC6Lo", 0x0, 5),
        BitField("bcchC6Hi", 0x0, 3),
        BitField("bcchC6Lo", 0x0, 3),
        BitField("bsicC6", 0x0, 5)
    ]


class GprsMeasurementResults(Packet):
    """GPRS Measurement Results Section 10.5.2.20a"""
    name = "GPRS Measurement Results"
    fields_desc = [
        BitField("cValue", 0x0, 6),
        BitField("rxqualHi", 0x0, 2),
        BitField("rxqL", 0x0, 1),
        BitField("spare", 0x0, 1),
        BitField("signVar", 0x0, 6)
    ]
# len 3 to 10
class MobileAllocation(Packet):
    """Mobile Allocation Section 10.5.2.21"""
    name = "Mobile Allocation"
    fields_desc = [
        XByteField("lengthMA", None),
        ByteField("maC64", 0x12),
        ByteField("maC56", None),  # optional fields start here
        ByteField("maC48", None),
        ByteField("maC40", None),
        ByteField("maC32", None),
        ByteField("maC24", None),
        ByteField("maC16", None),
        ByteField("maC8", None)
    ]

    def post_build(self, p, pay):
        """Fill in lengthMA and strip unset optional MA octets."""
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 9, a, self.fields_desc, 1)
        if self.lengthMA is None:
            # length is the first octet (no IEI octet in this IE)
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay


class MobileTimeDifference(Packet):
    """Mobile Time Difference Section 10.5.2.21a"""
    name = "Mobile Time Difference"
    fields_desc = [
        XByteField("lengthMTD", 0x5),
        ByteField("valueHi", 0x0),
        ByteField("valueCnt", 0x0),
        BitField("valueLow", 0x0, 5),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("spare2", 0x0, 1)
    ]
# min 4 octets max 8
class MultiRateConfiguration(Packet):
    """ MultiRate configuration Section 10.5.2.21aa"""
    name = "MultiRate Configuration"
    # This packet has a variable length and hence structure. This packet
    # implements the longest possible packet. If you build a shorter
    # packet, for example having only 6 bytes, the last 4 bytes are named
    # "Spare" in the specs. Here they are named "threshold2".
    # NOTE(review): the original declared three fields all named "spare".
    # Because post_build uses getattr(self, fld.name), all three resolved
    # to the FIRST spare's value (0x0) — so the third spare, whose default
    # is None to mark the optional part, was never seen as unset and the
    # optional-field trimming in adapt() was broken.  The later duplicates
    # are renamed "spare1"/"spare2" following the file's convention.
    fields_desc = [
        XByteField("lengthMRC", None),
        BitField("mrVersion", 0x0, 3),
        BitField("spare", 0x0, 1),
        BitField("icmi", 0x0, 1),
        BitField("spare1", 0x0, 1),
        BitField("startMode", 0x0, 2),
        ByteField("amrCodec", None),
        BitField("spare2", None, 2),
        BitField("threshold1", None, 6),
        BitField("hysteresis1", None, 4),
        BitField("threshold2", None, 4),
        BitField("threshold2cnt", None, 2),
        BitField("hysteresis2", None, 4),
        BitField("threshold3", None, 2),
        BitField("threshold3cnt", None, 4),
        BitField("hysteresis3", None, 4)
    ]

    def post_build(self, p, pay):
        """Fill in lengthMRC and strip unset optional fields."""
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(3, 7, a, self.fields_desc, 1)
        if self.lengthMRC is None:
            # length is the first octet (no IEI octet in this IE)
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 2 to 11
class MultislotAllocation(Packet):
    """Multislot Allocation Section 10.5.2.21b"""
    name = "Multislot Allocation"
    fields_desc = [
        XByteField("lengthMSA", None),
        BitField("ext0", 0x1, 1),
        BitField("da", 0x0, 7),
        ConditionalField(BitField("ext1", 0x1, 1),  # optional: present when
                         lambda pkt: pkt.ext0 == 0),  # ext0 flags extension
        ConditionalField(BitField("ua", 0x0, 7),
                         lambda pkt: pkt.ext0 == 0),
        ByteField("chan1", None),
        ByteField("chan2", None),
        ByteField("chan3", None),
        ByteField("chan4", None),
        ByteField("chan5", None),
        ByteField("chan6", None),
        ByteField("chan7", None),
        ByteField("chan8", None)
    ]

    def post_build(self, p, pay):
        """Strip unset optional channel octets, then fill in lengthMSA."""
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(1, 11, a, self.fields_desc, 1)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthMSA is None:
            # unlike the other IEs, the length is derived from the final
            # trimmed payload size rather than from adapt()'s result
            p = struct.pack(">B", len(p) - 1) + p[1:]
        return p + pay
class NcMode(Packet):
    """NC mode information element (section 10.5.2.21c)."""
    name = "NC Mode"
    fields_desc = [
        BitField("spare", 0, 2),   # filler bits
        BitField("ncMode", 0, 2),  # 2-bit NC mode value
    ]
class NeighbourCellsDescription(Packet):
    """Neighbour Cells Description information element (section 10.5.2.22)."""
    name = "Neighbour Cells Description"
    fields_desc = [
        BitField("bit128", 0, 1),
        BitField("bit127", 0, 1),
        BitField("extInd", 0, 1),     # extension indication
        BitField("baInd", 0, 1),      # BA indication
        BitField("bit124", 0, 1),
        BitField("bit123", 0, 1),
        BitField("bit122", 0, 1),
        BitField("bit121", 0, 1),
        BitField("120bits", 0, 120),  # remaining bits of the frequency bitmap
    ]
class NeighbourCellsDescription2(Packet):
    """Neighbour Cells Description 2 information element (section 10.5.2.22a)."""
    name = "Neighbour Cells Description 2"
    fields_desc = [
        BitField("bit128", 0, 1),
        BitField("multiband", 0, 2),  # multiband reporting field
        BitField("baInd", 0, 1),      # BA indication
        BitField("bit124", 0, 1),
        BitField("bit123", 0, 1),
        BitField("bit122", 0, 1),
        BitField("bit121", 0, 1),
        BitField("120bits", 0, 120),  # remaining bits of the frequency bitmap
    ]
# len 4
# strange packet, lots of valid formats
# ideas for the dynamic packets:
# 1] for user interaction: Create an interactive "builder" based on a
# Q/A process (not very scapy like)
# 2] for usage in scripts, create an alternative packet for every
# possible packet layout
#
class DedicatedModeOrTBF(Packet):
    """Dedicated mode or TBF information element (section 10.5.2.25b)."""
    name = "Dedicated Mode or TBF"
    fields_desc = [
        BitField("spare", 0, 1),
        BitField("tma", 0, 1),
        BitField("downlink", 0, 1),
        BitField("td", 0, 1),
    ]
class PageMode(Packet):
    """Page Mode information element (section 10.5.2.26)."""
    name = "Page Mode"
    fields_desc = [
        BitField("spare", 0, 1),
        BitField("spare1", 0, 1),
        BitField("pm", 0, 2),   # 2-bit page mode value
    ]
class NccPermitted(Packet):
    """NCC Permitted Section 10.5.2.27"""
    # Display name previously read "NCC Permited" (typo).
    name = "NCC Permitted"
    fields_desc = [
        ByteField("nccPerm", 0x0)   # NCC-permitted value octet
    ]
class PowerCommand(Packet):
    """Power Command information element (section 10.5.2.28)."""
    name = "Power Command"
    fields_desc = [
        BitField("spare", 0, 1),
        BitField("spare1", 0, 1),
        BitField("spare2", 0, 1),
        BitField("powerLvl", 0, 5),   # 5-bit power level
    ]
class PowerCommandAndAccessType(Packet):
    """Power Command and access type information element (section 10.5.2.28a)."""
    name = "Power Command and Access Type"
    fields_desc = [
        BitField("atc", 0, 1),        # access type control bit
        BitField("spare", 0, 1),
        BitField("spare1", 0, 1),
        BitField("powerLvl", 0, 5),   # 5-bit power level
    ]
class RachControlParameters(Packet):
    """RACH Control Parameters Section 10.5.2.29"""
    name = "RACH Control Parameters"
    # The sixteen single-bit access-class fields ACC15 .. ACC00 are
    # generated instead of being spelled out one per line; the field names
    # and their order are identical to the original hand-written list.
    fields_desc = [
        BitField("maxRetrans", 0x0, 2),
        BitField("txInteger", 0x0, 4),
        BitField("cellBarrAccess", 0x0, 1),
        BitField("re", 0x0, 1),
    ] + [
        BitField("ACC%02d" % nb, 0x0, 1) for nb in range(15, -1, -1)
    ]
class RequestReference(Packet):
    """Request Reference information element (section 10.5.2.30)."""
    name = "Request Reference"
    fields_desc = [
        ByteField("ra", 0),       # "ra" value octet
        BitField("t1", 0, 5),
        BitField("t3Hi", 0, 3),   # T3 is split across the octet boundary
        BitField("t3Lo", 0, 3),
        BitField("t2", 0, 5),
    ]
class RrCause(Packet):
    """RR Cause information element (section 10.5.2.31)."""
    name = "RR Cause"
    fields_desc = [
        ByteField("rrCause", 0),   # radio-resource cause value octet
    ]
class StartingTime(Packet):
    """Starting Time Section 10.5.2.38"""
    # NOTE(review): this layout is byte-for-byte identical to
    # RequestReference (10.5.2.30), including the leading "ra" octet. The
    # Starting Time IE in the spec appears to be only the two-octet
    # T1'/T3/T2 value — the "ra" octet looks copied over from
    # RequestReference; confirm against TS 04.08 before relying on it.
    name = "Starting Time"
    fields_desc = [
        ByteField("ra", 0x0),
        BitField("t1", 0x0, 5),
        BitField("t3Hi", 0x0, 3),   # T3 split across the octet boundary
        BitField("t3Lo", 0x0, 3),
        BitField("t2", 0x0, 5)
    ]
class SynchronizationIndication(Packet):
    """Synchronization Indication information element (section 10.5.2.39)."""
    name = "Synchronization Indication"
    fields_desc = [
        BitField("nci", 0, 1),
        BitField("rot", 0, 1),
        BitField("si", 0, 2),   # 2-bit synchronization indication
    ]
class TimingAdvance(Packet):
    """Timing Advance information element (section 10.5.2.40)."""
    name = "Timing Advance"
    fields_desc = [
        BitField("spare", 0, 1),
        BitField("spare1", 0, 1),
        BitField("timingVal", 0, 6),   # 6-bit timing advance value
    ]
class TimeDifference(Packet):
    """Time Difference information element (section 10.5.2.41)."""
    name = "Time Difference"
    fields_desc = [
        XByteField("lengthTD", 3),     # fixed IE length octet
        ByteField("timeValue", 0),     # time difference value octet
    ]
class Tlli(Packet):
    """TLLI information element (section 10.5.2.41a)."""
    name = "TLLI"
    # four raw value octets
    fields_desc = [
        ByteField("value", 0),
        ByteField("value1", 0),
        ByteField("value2", 0),
        ByteField("value3", 0),
    ]
class TmsiPTmsi(Packet):
    """TMSI/P-TMSI information element (section 10.5.2.42)."""
    name = "TMSI/P-TMSI"
    # four raw value octets
    fields_desc = [
        ByteField("value", 0),
        ByteField("value1", 0),
        ByteField("value2", 0),
        ByteField("value3", 0),
    ]
class VgcsTargetModeIdentication(Packet):
    """ VGCS target Mode Indication 10.5.2.42a"""
    name = "VGCS Target Mode Indication"
    fields_desc = [
        XByteField("lengthVTMI", 0x2),   # fixed IE length octet
        # NOTE(review): "targerMode" looks like a typo for "targetMode";
        # kept unchanged because the field name is part of the public API.
        BitField("targerMode", 0x0, 2),
        BitField("cipherKeyNb", 0x0, 4),
        BitField("spare", 0x0, 1),
        BitField("spare1", 0x0, 1)
    ]
class WaitIndication(Packet):
    """Wait Indication information element (section 10.5.2.43)."""
    name = "Wait Indication"
    fields_desc = [
        ByteField("timeoutVal", 0),   # timeout value octet
    ]
#class Si10RestOctets(Packet):
# """SI10 rest octets 10.5.2.44"""
# name = "SI10 rest octets"
# fields_desc = [
# len 17
class ExtendedMeasurementResults(Packet):
    """EXTENDED MEASUREMENT RESULTS Section 10.5.2.45"""
    name = "Extended Measurement Results"
    # 16-octet bitmap of received-level values; carriers that straddle an
    # octet boundary are split into a Hi and a Lo part.
    fields_desc = [
        BitField("scUsed", None, 1),
        BitField("dtxUsed", None, 1),
        BitField("rxLevC0", None, 6),
        BitField("rxLevC1", None, 6),
        BitField("rxLevC2Hi", None, 2),
        BitField("rxLevC2Lo", None, 4),
        BitField("rxLevC3Hi", None, 4),
        # NOTE(review): rxLevC3Lo(3)+rxLevC4(5) and rxLevC7Hi(4)+rxLevC7Lo(2)
        # give some carriers 7 or 6+? bits instead of the 6 used elsewhere;
        # the octet sums still work out — verify split widths against the
        # spec before changing anything.
        BitField("rxLevC3Lo", None, 3),
        BitField("rxLevC4", None, 5),
        BitField("rxLevC5", None, 6),
        BitField("rxLevC6Hi", None, 2),
        BitField("rxLevC6Lo", None, 4),
        BitField("rxLevC7Hi", None, 4),
        BitField("rxLevC7Lo", None, 2),
        BitField("rxLevC8", None, 6),
        BitField("rxLevC9", None, 6),
        BitField("rxLevC10Hi", None, 2),
        BitField("rxLevC10Lo", None, 4),
        BitField("rxLevC11Hi", None, 4),
        # NOTE(review): "rxLevC13Lo" is almost certainly a typo for
        # "rxLevC11Lo" — it directly follows rxLevC11Hi and C13 already has
        # its own full field below. Kept unchanged because field names are
        # part of the public API.
        BitField("rxLevC13Lo", None, 2),
        BitField("rxLevC12", None, 6),
        BitField("rxLevC13", None, 6),
        BitField("rxLevC14Hi", None, 2),
        BitField("rxLevC14Lo", None, 4),
        BitField("rxLevC15Hi", None, 4),
        BitField("rxLevC15Lo", None, 2),
        BitField("rxLevC16", None, 6),
        BitField("rxLevC17", None, 6),
        BitField("rxLevC18Hi", None, 2),
        BitField("rxLevC18Lo", None, 4),
        BitField("rxLevC19Hi", None, 4),
        BitField("rxLevC19Lo", None, 2),
        BitField("rxLevC20", None, 6)
    ]
# len 17
class ExtendedMeasurementFrequencyList(Packet):
    """Extended Measurement Frequency List information element (section 10.5.2.46)."""
    name = "Extended Measurement Frequency List"
    fields_desc = [
        BitField("bit128", 0, 1),
        BitField("bit127", 0, 1),
        BitField("spare", 0, 1),
        BitField("seqCode", 0, 1),
        BitField("bit124", 0, 1),
        BitField("bit123", 0, 1),
        BitField("bit122", 0, 1),
        BitField("bit121", 0, 1),
        BitField("bitsRest", 0, 128),   # remainder of the frequency bitmap
    ]
class SuspensionCause(Packet):
    """Suspension Cause information element (section 10.5.2.47)."""
    name = "Suspension Cause"
    fields_desc = [
        ByteField("suspVal", 0),   # suspension cause value octet
    ]
class ApduID(Packet):
    """APDU ID information element (section 10.5.2.48)."""
    name = "Apdu Id"
    fields_desc = [
        BitField("id", None, 4),   # half-octet identifier
    ]
class ApduFlags(Packet):
    """APDU Flags information element (section 10.5.2.49)."""
    name = "Apdu Flags"
    fields_desc = [
        BitField("spare", 0, 1),
        BitField("cr", 0, 1),
        BitField("firstSeg", 0, 1),   # first-segment flag
        BitField("lastSeg", 0, 1),    # last-segment flag
    ]
# len 1 to max L3 (251) (done)
class ApduData(Packet):
    """APDU Data Section 10.5.2.50"""
    name = "Apdu Data"
    # One length octet followed by up to 249 optional information octets.
    # The info octets are generated instead of being written out one per
    # line; the field names (apuInfo1 .. apuInfo249) and their order are
    # identical to the original hand-written list.
    fields_desc = [XByteField("lengthAD", None)] + [
        ByteField("apuInfo%d" % nb, None) for nb in range(1, 250)
    ]

    def post_build(self, p, pay):
        """Strip unset trailing info octets and fill in lengthAD.

        res[0] is the number of trailing octets to drop and res[1] the
        length octet value, as used below.
        """
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(1, 250, a, self.fields_desc, 1)
        if self.lengthAD is None:
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
#
# 10.5.3 Mobility management information elements
#
# len 3 to L3 max (251) (done)
class NetworkName(Packet):
    """Network Name Section 10.5.3.5a"""
    name = "Network Name"
    # Header octets followed by up to 248 optional text-string octets.
    # The text octets are generated instead of being written out one per
    # line; the field names (txtString1 .. txtString248) and their order
    # are identical to the original hand-written list.
    fields_desc = [
        XByteField("lengthNN", None),
        BitField("ext", 0x1, 1),
        BitField("codingScheme", 0x0, 3),
        BitField("addCi", 0x0, 1),
        BitField("nbSpare", 0x0, 3),
    ] + [
        ByteField("txtString%d" % nb, None) for nb in range(1, 249)
    ]

    def post_build(self, p, pay):
        """Strip unset trailing text octets and fill in lengthNN.

        res[0] is the number of trailing octets to drop and res[1] the
        length octet value, as used below.
        """
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 250, a, self.fields_desc, 1)
        if self.lengthNN is None:
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class TimeZone(Packet):
    """Time Zone information element (section 10.5.3.8)."""
    name = "Time Zone"
    fields_desc = [
        ByteField("timeZone", 0),   # time-zone value octet
    ]
class TimeZoneAndTime(Packet):
    """Time Zone and Time information element (section 10.5.3.9)."""
    name = "Time Zone and Time"
    # six timestamp octets followed by the time-zone octet
    fields_desc = [
        ByteField("year", 0),
        ByteField("month", 0),
        ByteField("day", 0),
        ByteField("hour", 0),
        ByteField("minute", 0),
        ByteField("second", 0),
        ByteField("timeZone", 0),
    ]
class CtsPermission(Packet):
    """CTS permission information element (section 10.5.3.10)."""
    name = "Cts Permission"
    # type-only IE: carries no value octets
    fields_desc = []
class LsaIdentifier(Packet):
    """LSA Identifier information element (section 10.5.3.11)."""
    name = "Lsa Identifier"
    # three raw identifier octets
    fields_desc = [
        ByteField("lsaID", 0),
        ByteField("lsaID1", 0),
        ByteField("lsaID2", 0),
    ]
#
# 10.5.4 Call control information elements
#
#10.5.4.1 Extensions of codesets
# This is only text and no packet
class LockingShiftProcedure(Packet):
    """Locking shift procedure information element (section 10.5.4.2)."""
    name = "Locking Shift Procedure"
    fields_desc = [
        BitField("lockShift", 0, 1),
        BitField("codesetId", 0, 3),   # codeset identification
    ]
class NonLockingShiftProcedure(Packet):
    """Non-locking shift procedure information element (section 10.5.4.3)."""
    name = "Non-locking Shift Procedure"
    fields_desc = [
        BitField("nonLockShift", 1, 1),
        BitField("codesetId", 0, 3),   # codeset identification
    ]
class AuxiliaryStates(Packet):
    """Auxiliary states information element (section 10.5.4.4)."""
    name = "Auxiliary States"
    fields_desc = [
        XByteField("lengthAS", 3),     # fixed IE length octet
        BitField("ext", 1, 1),
        BitField("spare", 0, 3),
        BitField("holdState", 0, 2),
        BitField("mptyState", 0, 2),   # multi-party state
    ]
# len 3 to 15
class BearerCapability(Packet):
    """Bearer capability Section 10.5.4.5"""
    # Cascade of extension octets: each extN bit, when 0, makes the next
    # group of ConditionalFields (the "Na/Nb/..." extension octet) present.
    name = "Bearer Capability"
    fields_desc = [
        XByteField("lengthBC", None),
        # first (mandatory) value octet
        BitField("ext0", 0x1, 1),
        BitField("radioChReq", 0x1, 2),
        BitField("codingStd", 0x0, 1),
        BitField("transMode", 0x0, 1),
        BitField("infoTransCa", 0x0, 3),
        # optional
        # extension octet gated by ext0
        ConditionalField(BitField("ext1", 0x1, 1),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("coding", None, 1),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("spare", None, 2),
                         lambda pkt: pkt.ext0 == 0),
        ConditionalField(BitField("speechVers", 0x0, 4),
                         lambda pkt: pkt.ext0 == 0),
        # extension octet gated by ext1
        ConditionalField(BitField("ext2", 0x1, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("compress", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("structure", None, 2),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("dupMode", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("config", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("nirr", None, 1),
                         lambda pkt: pkt.ext1 == 0),
        ConditionalField(BitField("establi", 0x0, 1),
                         lambda pkt: pkt.ext1 == 0),
        # next unconditional octet (access id / rate adaption / signalling)
        BitField("ext3", None, 1),
        BitField("accessId", None, 2),
        BitField("rateAda", None, 2),
        BitField("signaling", None, 3),
        # extension octet gated by ext3
        ConditionalField(BitField("ext4", None, 1),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("otherITC", None, 2),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("otherRate", None, 2),
                         lambda pkt: pkt.ext3 == 0),
        ConditionalField(BitField("spare1", 0x0, 3),
                         lambda pkt: pkt.ext3 == 0),
        # extension octet gated by ext4
        ConditionalField(BitField("ext5", 0x1, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("hdr", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("multiFr", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("mode", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("lli", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("assig", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("inbNeg", None, 1),
                         lambda pkt: pkt.ext4 == 0),
        ConditionalField(BitField("spare2", 0x0, 1),
                         lambda pkt: pkt.ext4 == 0),
        # next unconditional octet (layer 1 / user information)
        BitField("ext6", None, 1),
        BitField("layer1Id", None, 2),
        BitField("userInf", None, 4),
        BitField("sync", None, 1),
        # extension octet gated by ext6
        ConditionalField(BitField("ext7", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("stopBit", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("negoc", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("nbDataBit", None, 1),
                         lambda pkt: pkt.ext6 == 0),
        ConditionalField(BitField("userRate", None, 4),
                         lambda pkt: pkt.ext6 == 0),
        # extension octet gated by ext7
        ConditionalField(BitField("ext8", None, 1),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("interRate", None, 2),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("nicTX", None, 1),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("nicRX", None, 1),
                         lambda pkt: pkt.ext7 == 0),
        ConditionalField(BitField("parity", None, 3),
                         lambda pkt: pkt.ext7 == 0),
        # extension octet gated by ext8
        ConditionalField(BitField("ext9", None, 1),
                         lambda pkt: pkt.ext8 == 0),
        ConditionalField(BitField("connEle", None, 2),
                         lambda pkt: pkt.ext8 == 0),
        ConditionalField(BitField("modemType", None, 5),
                         lambda pkt: pkt.ext8 == 0),
        # extension octet gated by ext9
        ConditionalField(BitField("ext10", None, 1),
                         lambda pkt: pkt.ext9 == 0),
        ConditionalField(BitField("otherModemType", None, 2),
                         lambda pkt: pkt.ext9 == 0),
        ConditionalField(BitField("netUserRate", None, 5),
                         lambda pkt: pkt.ext9 == 0),
        # extension octet gated by ext10
        ConditionalField(BitField("ext11", None, 1),
                         lambda pkt: pkt.ext10 == 0),
        ConditionalField(BitField("chanCoding", None, 4),
                         lambda pkt: pkt.ext10 == 0),
        ConditionalField(BitField("maxTrafficChan", None, 3),
                         lambda pkt: pkt.ext10 == 0),
        # extension octet gated by ext11
        ConditionalField(BitField("ext12", None, 1),
                         lambda pkt: pkt.ext11 == 0),
        ConditionalField(BitField("uimi", None, 3),
                         lambda pkt: pkt.ext11 == 0),
        ConditionalField(BitField("airInterfaceUserRate", None, 4),
                         lambda pkt: pkt.ext11 == 0),
        # final extension octet gated by ext12
        ConditionalField(BitField("ext13", 0x1, 1),
                         lambda pkt: pkt.ext12 == 0),
        ConditionalField(BitField("layer2Ch", None, 2),
                         lambda pkt: pkt.ext12 == 0),
        ConditionalField(BitField("userInfoL2", 0x0, 5),
                         lambda pkt: pkt.ext12 == 0)
    ]
    def post_build(self, p, pay):
        # Strip unset trailing octets, then fill in lengthBC from the
        # actual built size.
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 15, a, self.fields_desc, 1)
        if res[0] != 0:
            p = p[:-res[0]]
        if self.lengthBC is None:
            p = struct.pack(">B", len(p)-1) + p[1:]
        return p + pay
class CallControlCapabilities(Packet):
    """Call Control Capabilities information element (section 10.5.4.5a)."""
    name = "Call Control Capabilities"
    fields_desc = [
        XByteField("lengthCCC", 3),   # fixed IE length octet
        BitField("spare", 0, 6),
        BitField("pcp", 0, 1),
        BitField("dtmf", 0, 1),
    ]
class CallState(Packet):
    """Call State information element (section 10.5.4.6)."""
    name = "Call State"
    fields_desc = [
        BitField("codingStd", 0, 2),    # coding standard
        BitField("stateValue", 0, 6),   # 6-bit call state value
    ]
# len 3 to 43
class CalledPartyBcdNumber(Packet):
    """Called party BCD number Section 10.5.4.7"""
    name = "Called Party BCD Number"
    # Within each octet the two BCD digits are swapped (digit 2 is placed
    # before digit 1), hence the (even, odd) ordering of the generated
    # fields. The field names nbDigit1 .. nbDigit80 and their order are
    # identical to the original hand-written list.
    fields_desc = [
        XByteField("lengthCPBN", None),
        BitField("ext", 0x1, 1),
        BitField("typeNb", 0x0, 3),
        BitField("nbPlanId", 0x0, 4),
        # optional digit octets follow
    ] + [
        BitField("nbDigit%d" % nb, None, 4)
        for odd in range(1, 80, 2)
        for nb in (odd + 1, odd)
    ]

    def post_build(self, p, pay):
        """Strip unset trailing digit octets and fill in lengthCPBN.

        res[0] is the number of trailing octets to drop and res[1] the
        length octet value, as used below.
        """
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 42, a, self.fields_desc, 1)
        if self.lengthCPBN is None:
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# len 2 to 23
class CalledPartySubaddress(Packet):
"""Called party subaddress Section 10.5.4.8"""
name = "Called Party Subaddress"
fields_desc = [
XByteField("lengthCPS", None),
# optional
BitField("ext", None, 1),
BitField("subAddr", None, 3),
BitField("oddEven", None, 1),
BitField("spare", None, 3),
ByteField("subInfo0", None),
ByteField("subInfo1", None),
ByteField("subInfo2", None),
ByteField("subInfo3", None),
ByteField("subInfo4", None),
ByteField("subInfo5", None),
ByteField("subInfo6", None),
ByteField("subInfo7", None),
ByteField("subInfo8", None),
ByteField("subInfo9", None),
ByteField("subInfo10", None),
ByteField("subInfo11", None),
ByteField("subInfo12", None),
ByteField("subInfo13", None),
ByteField("subInfo14", None),
ByteField("subInfo15", None),
ByteField("subInfo16", None),
ByteField("subInfo17", None),
ByteField("subInfo18", None),
ByteField("subInfo19", None)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(2, 23, a, self.fields_desc, 1)
if self.lengthCPS is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
# len 3 to 14
class CallingPartyBcdNumber(Packet):
"""Called party subaddress Section 10.5.4.9"""
name = "Called Party Subaddress"
fields_desc = [
XByteField("lengthCPBN", None),
BitField("ext", 0x1, 1),
BitField("typeNb", 0x0, 3),
BitField("nbPlanId", 0x0, 4),
# optional
ConditionalField(BitField("ext1", 0x1, 1),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("presId", None, 2),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("spare", None, 3),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("screenId", 0x0, 2),
lambda pkt: pkt.ext == 0),
BitField("nbDigit2", None, 4),
BitField("nbDigit1", None, 4),
BitField("nbDigit4", None, 4),
BitField("nbDigit3", None, 4),
BitField("nbDigit6", None, 4),
BitField("nbDigit5", None, 4),
BitField("nbDigit8", None, 4),
BitField("nbDigit7", None, 4),
BitField("nbDigit10", None, 4),
BitField("nbDigit9", None, 4),
BitField("nbDigit12", None, 4),
BitField("nbDigit11", None, 4),
BitField("nbDigit14", None, 4),
BitField("nbDigit13", None, 4),
BitField("nbDigit16", None, 4),
BitField("nbDigit15", None, 4),
BitField("nbDigit18", None, 4),
BitField("nbDigit17", None, 4),
BitField("nbDigit20", None, 4),
BitField("nbDigit19", None, 4),
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(2, 13, a, self.fields_desc, 1)
if res[0] != 0:
p = p[:-res[0]]
if self.lengthCPBN is None:
p = struct.pack(">B", len(p)-1) + p[1:]
return p + pay
# len 2 to 23
class CallingPartySubaddress(Packet):
"""Calling party subaddress Section 10.5.4.10"""
name = "Calling Party Subaddress"
fields_desc = [
XByteField("lengthCPS", None),
# optional
BitField("ext1", None, 1),
BitField("typeAddr", None, 3),
BitField("oddEven", None, 1),
BitField("spare", None, 3),
ByteField("subInfo0", None),
ByteField("subInfo1", None),
ByteField("subInfo2", None),
ByteField("subInfo3", None),
ByteField("subInfo4", None),
ByteField("subInfo5", None),
ByteField("subInfo6", None),
ByteField("subInfo7", None),
ByteField("subInfo8", None),
ByteField("subInfo9", None),
ByteField("subInfo10", None),
ByteField("subInfo11", None),
ByteField("subInfo12", None),
ByteField("subInfo13", None),
ByteField("subInfo14", None),
ByteField("subInfo15", None),
ByteField("subInfo16", None),
ByteField("subInfo17", None),
ByteField("subInfo18", None),
ByteField("subInfo19", None)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(1, 22, a, self.fields_desc, 1)
if self.lengthCPS is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
# len 4 to 32
class Cause(Packet):
"""Cause Section 10.5.4.11"""
name = "Cause"
fields_desc = [
XByteField("lengthC", None),
BitField("ext", 0x1, 1),
BitField("codingStd", 0x0, 2),
BitField("spare", 0x0, 1),
BitField("location", 0x0, 4),
ConditionalField(BitField("ext1", 0x1, 1),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("recommendation", 0x1, 7),
lambda pkt: pkt.ext == 0),
# optional
BitField("ext2", None, 1),
BitField("causeValue", None, 7),
ByteField("diagnositc0", None),
ByteField("diagnositc1", None),
ByteField("diagnositc2", None),
ByteField("diagnositc3", None),
ByteField("diagnositc4", None),
ByteField("diagnositc5", None),
ByteField("diagnositc6", None),
ByteField("diagnositc7", None),
ByteField("diagnositc8", None),
ByteField("diagnositc9", None),
ByteField("diagnositc10", None),
ByteField("diagnositc11", None),
ByteField("diagnositc12", None),
ByteField("diagnositc13", None),
ByteField("diagnositc14", None),
ByteField("diagnositc15", None),
ByteField("diagnositc16", None),
ByteField("diagnositc17", None),
ByteField("diagnositc18", None),
ByteField("diagnositc19", None),
ByteField("diagnositc20", None),
ByteField("diagnositc21", None),
ByteField("diagnositc22", None),
ByteField("diagnositc23", None),
ByteField("diagnositc24", None),
ByteField("diagnositc25", None),
ByteField("diagnositc26", None),
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(3, 31, a, self.fields_desc, 1)
if res[0] != 0:
p = p[:-res[0]]
if self.lengthC is None:
p = struct.pack(">B", len(p)-1) + p[1:]
return p + pay
class ClirSuppression(Packet):
"""CLIR suppression Section 10.5.4.11a"""
name = "Clir Suppression"
fields_desc = [
]
class ClirInvocation(Packet):
"""CLIR invocation Section 10.5.4.11b"""
name = "Clir Invocation"
fields_desc = [
]
class CongestionLevel(Packet):
"""Congestion level Section 10.5.4.12"""
name = "Congestion Level"
fields_desc = [
BitField("notDef", 0x0, 4) # not defined by the std
]
# len 3 to 14
class ConnectedNumber(Packet):
"""Connected number Section 10.5.4.13"""
name = "Connected Number"
fields_desc = [
XByteField("lengthCN", None),
BitField("ext", 0x1, 1),
BitField("typeNb", 0x0, 3),
BitField("typePlanId", 0x0, 4),
# optional
ConditionalField(BitField("ext1", 0x1, 1),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("presId", None, 2),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("spare", None, 3),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("screenId", None, 2),
lambda pkt: pkt.ext == 0),
BitField("nbDigit2", None, 4),
BitField("nbDigit1", None, 4),
BitField("nbDigit4", None, 4),
BitField("nbDigit3", None, 4),
BitField("nbDigit6", None, 4),
BitField("nbDigit5", None, 4),
BitField("nbDigit8", None, 4),
BitField("nbDigit7", None, 4),
BitField("nbDigit10", None, 4),
BitField("nbDigit9", None, 4),
BitField("nbDigit12", None, 4),
BitField("nbDigit11", None, 4),
BitField("nbDigit14", None, 4),
BitField("nbDigit13", None, 4),
BitField("nbDigit16", None, 4),
BitField("nbDigit15", None, 4),
BitField("nbDigit18", None, 4),
BitField("nbDigit17", None, 4),
BitField("nbDigit20", None, 4),
BitField("nbDigit19", None, 4)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(2, 13, a, self.fields_desc, 1)
if res[0] != 0:
p = p[:-res[0]]
if self.lengthCN is None:
p = struct.pack(">B", len(p)-1) + p[1:]
return p + pay
# len 2 to 23
class ConnectedSubaddress(Packet):
"""Connected subaddress Section 10.5.4.14"""
name = "Connected Subaddress"
fields_desc = [
XByteField("lengthCS", None),
# optional
BitField("ext", None, 1),
BitField("typeOfSub", None, 3),
BitField("oddEven", None, 1),
BitField("spare", None, 3),
ByteField("subInfo0", None),
ByteField("subInfo1", None),
ByteField("subInfo2", None),
ByteField("subInfo3", None),
ByteField("subInfo4", None),
ByteField("subInfo5", None),
ByteField("subInfo6", None),
ByteField("subInfo7", None),
ByteField("subInfo8", None),
ByteField("subInfo9", None),
ByteField("subInfo10", None),
ByteField("subInfo11", None),
ByteField("subInfo12", None),
ByteField("subInfo13", None),
ByteField("subInfo14", None),
ByteField("subInfo15", None),
ByteField("subInfo16", None),
ByteField("subInfo17", None),
ByteField("subInfo18", None),
ByteField("subInfo19", None)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(1, 22, a, self.fields_desc, 1)
if self.lengthCS is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
# len 2 to L3 (251) (done)
class Facility(Packet):
"""Facility Section 10.5.4.15"""
name = "Facility"
fields_desc = [
XByteField("lengthF", None),
# optional
ByteField("facilityInfo1", None),
ByteField("facilityInfo2", None),
ByteField("facilityInfo3", None),
ByteField("facilityInfo4", None),
ByteField("facilityInfo5", None),
ByteField("facilityInfo6", None),
ByteField("facilityInfo7", None),
ByteField("facilityInfo8", None),
ByteField("facilityInfo9", None),
ByteField("facilityInfo10", None),
ByteField("facilityInfo11", None),
ByteField("facilityInfo12", None),
ByteField("facilityInfo13", None),
ByteField("facilityInfo14", None),
ByteField("facilityInfo15", None),
ByteField("facilityInfo16", None),
ByteField("facilityInfo17", None),
ByteField("facilityInfo18", None),
ByteField("facilityInfo19", None),
ByteField("facilityInfo20", None),
ByteField("facilityInfo21", None),
ByteField("facilityInfo22", None),
ByteField("facilityInfo23", None),
ByteField("facilityInfo24", None),
ByteField("facilityInfo25", None),
ByteField("facilityInfo26", None),
ByteField("facilityInfo27", None),
ByteField("facilityInfo28", None),
ByteField("facilityInfo29", None),
ByteField("facilityInfo30", None),
ByteField("facilityInfo31", None),
ByteField("facilityInfo32", None),
ByteField("facilityInfo33", None),
ByteField("facilityInfo34", None),
ByteField("facilityInfo35", None),
ByteField("facilityInfo36", None),
ByteField("facilityInfo37", None),
ByteField("facilityInfo38", None),
ByteField("facilityInfo39", None),
ByteField("facilityInfo40", None),
ByteField("facilityInfo41", None),
ByteField("facilityInfo42", None),
ByteField("facilityInfo43", None),
ByteField("facilityInfo44", None),
ByteField("facilityInfo45", None),
ByteField("facilityInfo46", None),
ByteField("facilityInfo47", None),
ByteField("facilityInfo48", None),
ByteField("facilityInfo49", None),
ByteField("facilityInfo50", None),
ByteField("facilityInfo51", None),
ByteField("facilityInfo52", None),
ByteField("facilityInfo53", None),
ByteField("facilityInfo54", None),
ByteField("facilityInfo55", None),
ByteField("facilityInfo56", None),
ByteField("facilityInfo57", None),
ByteField("facilityInfo58", None),
ByteField("facilityInfo59", None),
ByteField("facilityInfo60", None),
ByteField("facilityInfo61", None),
ByteField("facilityInfo62", None),
ByteField("facilityInfo63", None),
ByteField("facilityInfo64", None),
ByteField("facilityInfo65", None),
ByteField("facilityInfo66", None),
ByteField("facilityInfo67", None),
ByteField("facilityInfo68", None),
ByteField("facilityInfo69", None),
ByteField("facilityInfo70", None),
ByteField("facilityInfo71", None),
ByteField("facilityInfo72", None),
ByteField("facilityInfo73", None),
ByteField("facilityInfo74", None),
ByteField("facilityInfo75", None),
ByteField("facilityInfo76", None),
ByteField("facilityInfo77", None),
ByteField("facilityInfo78", None),
ByteField("facilityInfo79", None),
ByteField("facilityInfo80", None),
ByteField("facilityInfo81", None),
ByteField("facilityInfo82", None),
ByteField("facilityInfo83", None),
ByteField("facilityInfo84", None),
ByteField("facilityInfo85", None),
ByteField("facilityInfo86", None),
ByteField("facilityInfo87", None),
ByteField("facilityInfo88", None),
ByteField("facilityInfo89", None),
ByteField("facilityInfo90", None),
ByteField("facilityInfo91", None),
ByteField("facilityInfo92", None),
ByteField("facilityInfo93", None),
ByteField("facilityInfo94", None),
ByteField("facilityInfo95", None),
ByteField("facilityInfo96", None),
ByteField("facilityInfo97", None),
ByteField("facilityInfo98", None),
ByteField("facilityInfo99", None),
ByteField("facilityInfo100", None),
ByteField("facilityInfo101", None),
ByteField("facilityInfo102", None),
ByteField("facilityInfo103", None),
ByteField("facilityInfo104", None),
ByteField("facilityInfo105", None),
ByteField("facilityInfo106", None),
ByteField("facilityInfo107", None),
ByteField("facilityInfo108", None),
ByteField("facilityInfo109", None),
ByteField("facilityInfo110", None),
ByteField("facilityInfo111", None),
ByteField("facilityInfo112", None),
ByteField("facilityInfo113", None),
ByteField("facilityInfo114", None),
ByteField("facilityInfo115", None),
ByteField("facilityInfo116", None),
ByteField("facilityInfo117", None),
ByteField("facilityInfo118", None),
ByteField("facilityInfo119", None),
ByteField("facilityInfo120", None),
ByteField("facilityInfo121", None),
ByteField("facilityInfo122", None),
ByteField("facilityInfo123", None),
ByteField("facilityInfo124", None),
ByteField("facilityInfo125", None),
ByteField("facilityInfo126", None),
ByteField("facilityInfo127", None),
ByteField("facilityInfo128", None),
ByteField("facilityInfo129", None),
ByteField("facilityInfo130", None),
ByteField("facilityInfo131", None),
ByteField("facilityInfo132", None),
ByteField("facilityInfo133", None),
ByteField("facilityInfo134", None),
ByteField("facilityInfo135", None),
ByteField("facilityInfo136", None),
ByteField("facilityInfo137", None),
ByteField("facilityInfo138", None),
ByteField("facilityInfo139", None),
ByteField("facilityInfo140", None),
ByteField("facilityInfo141", None),
ByteField("facilityInfo142", None),
ByteField("facilityInfo143", None),
ByteField("facilityInfo144", None),
ByteField("facilityInfo145", None),
ByteField("facilityInfo146", None),
ByteField("facilityInfo147", None),
ByteField("facilityInfo148", None),
ByteField("facilityInfo149", None),
ByteField("facilityInfo150", None),
ByteField("facilityInfo151", None),
ByteField("facilityInfo152", None),
ByteField("facilityInfo153", None),
ByteField("facilityInfo154", None),
ByteField("facilityInfo155", None),
ByteField("facilityInfo156", None),
ByteField("facilityInfo157", None),
ByteField("facilityInfo158", None),
ByteField("facilityInfo159", None),
ByteField("facilityInfo160", None),
ByteField("facilityInfo161", None),
ByteField("facilityInfo162", None),
ByteField("facilityInfo163", None),
ByteField("facilityInfo164", None),
ByteField("facilityInfo165", None),
ByteField("facilityInfo166", None),
ByteField("facilityInfo167", None),
ByteField("facilityInfo168", None),
ByteField("facilityInfo169", None),
ByteField("facilityInfo170", None),
ByteField("facilityInfo171", None),
ByteField("facilityInfo172", None),
ByteField("facilityInfo173", None),
ByteField("facilityInfo174", None),
ByteField("facilityInfo175", None),
ByteField("facilityInfo176", None),
ByteField("facilityInfo177", None),
ByteField("facilityInfo178", None),
ByteField("facilityInfo179", None),
ByteField("facilityInfo180", None),
ByteField("facilityInfo181", None),
ByteField("facilityInfo182", None),
ByteField("facilityInfo183", None),
ByteField("facilityInfo184", None),
ByteField("facilityInfo185", None),
ByteField("facilityInfo186", None),
ByteField("facilityInfo187", None),
ByteField("facilityInfo188", None),
ByteField("facilityInfo189", None),
ByteField("facilityInfo190", None),
ByteField("facilityInfo191", None),
ByteField("facilityInfo192", None),
ByteField("facilityInfo193", None),
ByteField("facilityInfo194", None),
ByteField("facilityInfo195", None),
ByteField("facilityInfo196", None),
ByteField("facilityInfo197", None),
ByteField("facilityInfo198", None),
ByteField("facilityInfo199", None),
ByteField("facilityInfo200", None),
ByteField("facilityInfo201", None),
ByteField("facilityInfo202", None),
ByteField("facilityInfo203", None),
ByteField("facilityInfo204", None),
ByteField("facilityInfo205", None),
ByteField("facilityInfo206", None),
ByteField("facilityInfo207", None),
ByteField("facilityInfo208", None),
ByteField("facilityInfo209", None),
ByteField("facilityInfo210", None),
ByteField("facilityInfo211", None),
ByteField("facilityInfo212", None),
ByteField("facilityInfo213", None),
ByteField("facilityInfo214", None),
ByteField("facilityInfo215", None),
ByteField("facilityInfo216", None),
ByteField("facilityInfo217", None),
ByteField("facilityInfo218", None),
ByteField("facilityInfo219", None),
ByteField("facilityInfo220", None),
ByteField("facilityInfo221", None),
ByteField("facilityInfo222", None),
ByteField("facilityInfo223", None),
ByteField("facilityInfo224", None),
ByteField("facilityInfo225", None),
ByteField("facilityInfo226", None),
ByteField("facilityInfo227", None),
ByteField("facilityInfo228", None),
ByteField("facilityInfo229", None),
ByteField("facilityInfo230", None),
ByteField("facilityInfo231", None),
ByteField("facilityInfo232", None),
ByteField("facilityInfo233", None),
ByteField("facilityInfo234", None),
ByteField("facilityInfo235", None),
ByteField("facilityInfo236", None),
ByteField("facilityInfo237", None),
ByteField("facilityInfo238", None),
ByteField("facilityInfo239", None),
ByteField("facilityInfo240", None),
ByteField("facilityInfo241", None),
ByteField("facilityInfo242", None),
ByteField("facilityInfo243", None),
ByteField("facilityInfo244", None),
ByteField("facilityInfo245", None),
ByteField("facilityInfo246", None),
ByteField("facilityInfo247", None),
ByteField("facilityInfo248", None),
ByteField("facilityInfo249", None)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(7, 250, a, self.fields_desc, 1)
if self.lengthF is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
#len 2 to 5
class HighLayerCompatibility(Packet):
"""High layer compatibility Section 10.5.4.16"""
name = "High Layer Compatibility"
fields_desc = [
XByteField("lengthHLC", None),
# optional
BitField("ext", None, 1),
BitField("codingStd", None, 2),
BitField("interpret", None, 3),
BitField("presMeth", None, 2),
BitField("ext1", None, 1),
BitField("highLayerId", None, 7),
ConditionalField(BitField("ext2", 0x1, 1),
lambda pkt: pkt.ext1 == 0),
ConditionalField(BitField("exHiLayerId", 0x0, 7),
lambda pkt: pkt.ext1 == 0),
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(1, 4, a, self.fields_desc, 1)
if res[0] != 0:
p = p[:-res[0]]
if self.lengthHLC is None:
p = struct.pack(">B", len(p)-1) + p[1:]
return p + pay
#
# 10.5.4.16.1 Static conditions for the high layer
# compatibility IE contents
#
class KeypadFacility(Packet):
"""Keypad facility Section 10.5.4.17"""
name = "Keypad Facility"
fields_desc = [
BitField("spare", 0x0, 1),
BitField("keyPadInfo", 0x0, 7)
]
# len 2 to 15
class LowLayerCompatibility(Packet):
"""Low layer compatibility Section 10.5.4.18"""
name = "Low Layer Compatibility"
fields_desc = [
XByteField("lengthLLC", None),
# optional
ByteField("rest0", None),
ByteField("rest1", None),
ByteField("rest2", None),
ByteField("rest3", None),
ByteField("rest4", None),
ByteField("rest5", None),
ByteField("rest6", None),
ByteField("rest7", None),
ByteField("rest8", None),
ByteField("rest9", None),
ByteField("rest10", None),
ByteField("rest11", None),
ByteField("rest12", None)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(1, 14, a, self.fields_desc, 1)
if self.lengthLLC is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
class MoreData(Packet):
"""More data Section 10.5.4.19"""
name = "More Data"
fields_desc = [
]
class NotificationIndicator(Packet):
"""Notification indicator Section 10.5.4.20"""
name = "Notification Indicator"
fields_desc = [
BitField("ext1", 0x1, 1),
BitField("notifDesc", 0x0, 7)
]
class ProgressIndicator(Packet):
"""Progress indicator Section 10.5.4.21"""
name = "Progress Indicator"
fields_desc = [
XByteField("lengthPI", 0x2),
BitField("ext", 0x1, 1),
BitField("codingStd", 0x0, 2),
BitField("spare", 0x0, 1),
BitField("location", 0x0, 4),
BitField("ext1", 0x1, 1),
BitField("progressDesc", 0x0, 7)
]
class RecallType(Packet):
"""Recall type $(CCBS)$ Section 10.5.4.21a"""
name = "Recall Type $(CCBS)$"
fields_desc = [
BitField("spare", 0x0, 5),
BitField("recallType", 0x0, 3)
]
# len 3 to 19
class RedirectingPartyBcdNumber(Packet):
"""Redirecting party BCD number Section 10.5.4.21b"""
name = "Redirecting Party BCD Number"
fields_desc = [
XByteField("lengthRPBN", None),
BitField("ext", 0x1, 1),
BitField("typeNb", 0x0, 3),
BitField("numberingPlan", 0x0, 4),
# optional
ConditionalField(BitField("ext1", 0x1, 1),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("presId", 0x0, 2),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("spare", 0x0, 3),
lambda pkt: pkt.ext == 0),
ConditionalField(BitField("screenId", 0x0, 2),
lambda pkt: pkt.ext == 0),
BitField("nbDigit2", None, 4),
BitField("nbDigit1", None, 4),
BitField("nbDigit4", None, 4),
BitField("nbDigit3", None, 4),
BitField("nbDigit6", None, 4),
BitField("nbDigit5", None, 4),
BitField("nbDigit8", None, 4),
BitField("nbDigit7", None, 4),
BitField("nbDigit10", None, 4),
BitField("nbDigit9", None, 4),
BitField("nbDigit12", None, 4),
BitField("nbDigit11", None, 4),
BitField("nbDigit14", None, 4),
BitField("nbDigit13", None, 4),
BitField("nbDigit16", None, 4),
BitField("nbDigit15", None, 4),
BitField("nbDigit18", None, 4),
BitField("nbDigit17", None, 4),
BitField("nbDigit20", None, 4),
BitField("nbDigit19", None, 4),
BitField("nbDigit22", None, 4),
BitField("nbDigit21", None, 4),
BitField("nbDigit24", None, 4),
BitField("nbDigit23", None, 4),
BitField("nbDigit26", None, 4),
BitField("nbDigit25", None, 4),
BitField("nbDigit28", None, 4),
BitField("nbDigit27", None, 4),
BitField("nbDigit30", None, 4),
BitField("nbDigit29", None, 4),
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(2, 18, a, self.fields_desc, 1)
if res[0] != 0:
p = p[:-res[0]]
if self.lengthRPBN is None:
p = struct.pack(">B", len(p)-1) + p[1:]
return p + pay
# length 2 to 23
class RedirectingPartySubaddress(Packet):
"""Redirecting party subaddress Section 10.5.4.21c"""
name = "Redirecting Party BCD Number"
fields_desc = [
XByteField("lengthRPS", None),
# optional
BitField("ext", None, 1),
BitField("typeSub", None, 3),
BitField("oddEven", None, 1),
BitField("spare", None, 3),
ByteField("subInfo0", None),
ByteField("subInfo1", None),
ByteField("subInfo2", None),
ByteField("subInfo3", None),
ByteField("subInfo4", None),
ByteField("subInfo5", None),
ByteField("subInfo6", None),
ByteField("subInfo7", None),
ByteField("subInfo8", None),
ByteField("subInfo9", None),
ByteField("subInfo10", None),
ByteField("subInfo11", None),
ByteField("subInfo12", None),
ByteField("subInfo13", None),
ByteField("subInfo14", None),
ByteField("subInfo15", None),
ByteField("subInfo16", None),
ByteField("subInfo17", None),
ByteField("subInfo18", None),
ByteField("subInfo19", None)
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(1, 22, a, self.fields_desc, 1)
if self.lengthRPS is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
class RepeatIndicator(Packet):
"""Repeat indicator Section 10.5.4.22"""
name = "Repeat Indicator"
fields_desc = [
BitField("repeatIndic", 0x0, 4)
]
# no upper length min 2(max for L3) (251)
class SetupContainer(Packet):
"""SETUP Container $(CCBS)$ Section 10.5.4.22b"""
name = "Setup Container $(CCBS)$"
fields_desc = [
XByteField("lengthSC", None),
# optional
ByteField("mess1", None),
ByteField("mess2", None),
ByteField("mess3", None),
ByteField("mess4", None),
ByteField("mess5", None),
ByteField("mess6", None),
ByteField("mess7", None),
ByteField("mess8", None),
ByteField("mess9", None),
ByteField("mess10", None),
ByteField("mess11", None),
ByteField("mess12", None),
ByteField("mess13", None),
ByteField("mess14", None),
ByteField("mess15", None),
ByteField("mess16", None),
ByteField("mess17", None),
ByteField("mess18", None),
ByteField("mess19", None),
ByteField("mess20", None),
ByteField("mess21", None),
ByteField("mess22", None),
ByteField("mess23", None),
ByteField("mess24", None),
ByteField("mess25", None),
ByteField("mess26", None),
ByteField("mess27", None),
ByteField("mess28", None),
ByteField("mess29", None),
ByteField("mess30", None),
ByteField("mess31", None),
ByteField("mess32", None),
ByteField("mess33", None),
ByteField("mess34", None),
ByteField("mess35", None),
ByteField("mess36", None),
ByteField("mess37", None),
ByteField("mess38", None),
ByteField("mess39", None),
ByteField("mess40", None),
ByteField("mess41", None),
ByteField("mess42", None),
ByteField("mess43", None),
ByteField("mess44", None),
ByteField("mess45", None),
ByteField("mess46", None),
ByteField("mess47", None),
ByteField("mess48", None),
ByteField("mess49", None),
ByteField("mess50", None),
ByteField("mess51", None),
ByteField("mess52", None),
ByteField("mess53", None),
ByteField("mess54", None),
ByteField("mess55", None),
ByteField("mess56", None),
ByteField("mess57", None),
ByteField("mess58", None),
ByteField("mess59", None),
ByteField("mess60", None),
ByteField("mess61", None),
ByteField("mess62", None),
ByteField("mess63", None),
ByteField("mess64", None),
ByteField("mess65", None),
ByteField("mess66", None),
ByteField("mess67", None),
ByteField("mess68", None),
ByteField("mess69", None),
ByteField("mess70", None),
ByteField("mess71", None),
ByteField("mess72", None),
ByteField("mess73", None),
ByteField("mess74", None),
ByteField("mess75", None),
ByteField("mess76", None),
ByteField("mess77", None),
ByteField("mess78", None),
ByteField("mess79", None),
ByteField("mess80", None),
ByteField("mess81", None),
ByteField("mess82", None),
ByteField("mess83", None),
ByteField("mess84", None),
ByteField("mess85", None),
ByteField("mess86", None),
ByteField("mess87", None),
ByteField("mess88", None),
ByteField("mess89", None),
ByteField("mess90", None),
ByteField("mess91", None),
ByteField("mess92", None),
ByteField("mess93", None),
ByteField("mess94", None),
ByteField("mess95", None),
ByteField("mess96", None),
ByteField("mess97", None),
ByteField("mess98", None),
ByteField("mess99", None),
ByteField("mess100", None),
ByteField("mess101", None),
ByteField("mess102", None),
ByteField("mess103", None),
ByteField("mess104", None),
ByteField("mess105", None),
ByteField("mess106", None),
ByteField("mess107", None),
ByteField("mess108", None),
ByteField("mess109", None),
ByteField("mess110", None),
ByteField("mess111", None),
ByteField("mess112", None),
ByteField("mess113", None),
ByteField("mess114", None),
ByteField("mess115", None),
ByteField("mess116", None),
ByteField("mess117", None),
ByteField("mess118", None),
ByteField("mess119", None),
ByteField("mess120", None),
ByteField("mess121", None),
ByteField("mess122", None),
ByteField("mess123", None),
ByteField("mess124", None),
ByteField("mess125", None),
ByteField("mess126", None),
ByteField("mess127", None),
ByteField("mess128", None),
ByteField("mess129", None),
ByteField("mess130", None),
ByteField("mess131", None),
ByteField("mess132", None),
ByteField("mess133", None),
ByteField("mess134", None),
ByteField("mess135", None),
ByteField("mess136", None),
ByteField("mess137", None),
ByteField("mess138", None),
ByteField("mess139", None),
ByteField("mess140", None),
ByteField("mess141", None),
ByteField("mess142", None),
ByteField("mess143", None),
ByteField("mess144", None),
ByteField("mess145", None),
ByteField("mess146", None),
ByteField("mess147", None),
ByteField("mess148", None),
ByteField("mess149", None),
ByteField("mess150", None),
ByteField("mess151", None),
ByteField("mess152", None),
ByteField("mess153", None),
ByteField("mess154", None),
ByteField("mess155", None),
ByteField("mess156", None),
ByteField("mess157", None),
ByteField("mess158", None),
ByteField("mess159", None),
ByteField("mess160", None),
ByteField("mess161", None),
ByteField("mess162", None),
ByteField("mess163", None),
ByteField("mess164", None),
ByteField("mess165", None),
ByteField("mess166", None),
ByteField("mess167", None),
ByteField("mess168", None),
ByteField("mess169", None),
ByteField("mess170", None),
ByteField("mess171", None),
ByteField("mess172", None),
ByteField("mess173", None),
ByteField("mess174", None),
ByteField("mess175", None),
ByteField("mess176", None),
ByteField("mess177", None),
ByteField("mess178", None),
ByteField("mess179", None),
ByteField("mess180", None),
ByteField("mess181", None),
ByteField("mess182", None),
ByteField("mess183", None),
ByteField("mess184", None),
ByteField("mess185", None),
ByteField("mess186", None),
ByteField("mess187", None),
ByteField("mess188", None),
ByteField("mess189", None),
ByteField("mess190", None),
ByteField("mess191", None),
ByteField("mess192", None),
ByteField("mess193", None),
ByteField("mess194", None),
ByteField("mess195", None),
ByteField("mess196", None),
ByteField("mess197", None),
ByteField("mess198", None),
ByteField("mess199", None),
ByteField("mess200", None),
ByteField("mess201", None),
ByteField("mess202", None),
ByteField("mess203", None),
ByteField("mess204", None),
ByteField("mess205", None),
ByteField("mess206", None),
ByteField("mess207", None),
ByteField("mess208", None),
ByteField("mess209", None),
ByteField("mess210", None),
ByteField("mess211", None),
ByteField("mess212", None),
ByteField("mess213", None),
ByteField("mess214", None),
ByteField("mess215", None),
ByteField("mess216", None),
ByteField("mess217", None),
ByteField("mess218", None),
ByteField("mess219", None),
ByteField("mess220", None),
ByteField("mess221", None),
ByteField("mess222", None),
ByteField("mess223", None),
ByteField("mess224", None),
ByteField("mess225", None),
ByteField("mess226", None),
ByteField("mess227", None),
ByteField("mess228", None),
ByteField("mess229", None),
ByteField("mess230", None),
ByteField("mess231", None),
ByteField("mess232", None),
ByteField("mess233", None),
ByteField("mess234", None),
ByteField("mess235", None),
ByteField("mess236", None),
ByteField("mess237", None),
ByteField("mess238", None),
ByteField("mess239", None),
ByteField("mess240", None),
ByteField("mess241", None),
ByteField("mess242", None),
ByteField("mess243", None),
ByteField("mess244", None),
ByteField("mess245", None),
ByteField("mess246", None),
ByteField("mess247", None),
ByteField("mess248", None),
ByteField("mess249", None),
]
def post_build(self, p, pay):
a = [getattr(self, fld.name) for fld in self.fields_desc]
res = adapt(1, 250, a, self.fields_desc, 1)
if self.lengthSC is None:
p = struct.pack(">B", res[1]) + p[1:]
if res[0] != 0:
p = p[:-res[0]]
return p + pay
class Signal(Packet):
"""Signal Section 10.5.4.23"""
name = "Signal"
fields_desc = [
ByteField("sigValue", 0x0)
]
# length 2 to max for L3 message (251)
class SsVersionIndicator(Packet):
    """SS Version Indicator Section 10.5.4.24

    One dynamic length octet followed by up to 249 optional info octets
    (``info1`` .. ``info249``).  The repetitive field list is generated
    instead of being written out by hand; the resulting ``fields_desc``
    is identical to the original 250 literal entries.
    """
    name = "SS Version Indicator"
    fields_desc = [XByteField("lengthSVI", None)] + [
        # optional info octets; None means "not set" and is trimmed below
        ByteField("info%d" % i, None) for i in range(1, 250)
    ]

    def post_build(self, p, pay):
        """Fill in the dynamic length octet and drop trailing unset bytes.

        ``adapt`` (module-level helper) returns
        (number of trailing unset bytes, actual length value).
        """
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(1, 250, a, self.fields_desc, 1)
        if self.lengthSVI is None:
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
# length 3 to 35 or 131
class UserUser(Packet):
    """User-user Section 10.5.4.25

    Length 3 to 35 or 131 octets depending on the message type.  The
    length octet is left dynamic so the user can pick any value
    => more fuzzing options.  The repetitive optional info octets
    (``userUserInfo1`` .. ``userUserInfo131``) are generated instead of
    being written out by hand; the resulting ``fields_desc`` is
    identical to the original literal entries.
    """
    name = "User-User"
    fields_desc = [
        XByteField("lengthUU", None),   # dynamic length octet
        ByteField("userUserPD", 0x0),   # protocol discriminator
    ] + [
        # optional info octets (entries past 32 form the "long packet")
        ByteField("userUserInfo%d" % i, None) for i in range(1, 132)
    ]

    def post_build(self, p, pay):
        """Fill in the dynamic length octet and drop trailing unset bytes.

        ``adapt`` (module-level helper) returns
        (number of trailing unset bytes, actual length value).
        """
        a = [getattr(self, fld.name) for fld in self.fields_desc]
        res = adapt(2, 133, a, self.fields_desc, 1)
        if self.lengthUU is None:
            p = struct.pack(">B", res[1]) + p[1:]
        if res[0] != 0:
            p = p[:-res[0]]
        return p + pay
class AlertingPattern(Packet):
    """Alerting Pattern 10.5.4.26"""
    # Fixed-size IE: length octet (default 0x3) + one half-octet value.
    name = "Alerting Pattern"
    fields_desc = [
        XByteField("lengthAP", 0x3),
        BitField("spare", 0x0, 4),
        BitField("alertingValue", 0x0, 4)
    ]
class AllowedActions(Packet):
    """Allowed actions $(CCBS)$ Section 10.5.4.26"""
    # NOTE(review): the length field is named "lengthAP" — looks
    # copy-pasted from AlertingPattern above; renaming would change the
    # packet's field API, so it is only flagged here. TODO confirm
    # intended name (e.g. "lengthAA") against the spec/users.
    name = "Allowed Actions $(CCBS)$"
    fields_desc = [
        XByteField("lengthAP", 0x3),
        BitField("CCBS", 0x0, 1),   # 1 = CCBS activation possible
        BitField("spare", 0x0, 7)
    ]
#
# 10.5.5 GPRS mobility management information elements
#
class AttachType(Packet):
    """Attach type Section 10.5.5.2"""
    # Only 4 bits are declared (1 spare + 3 type) — presumably a
    # half-octet IE sharing its octet with another field; TODO confirm.
    name = "Attach Type"
    fields_desc = [
        BitField("spare", 0x0, 1),
        BitField("type", 0x1, 3)
    ]
if __name__ == "__main__":
    # Drop into an interactive Scapy shell with this addon's symbols loaded.
    interact(mydict=globals(), mybanner="Scapy GSM-UM (Air) Addon")
|
mytliulei/Scapy
|
scapy/contrib/gsm_um.py
|
Python
|
apache-2.0
| 449,096
|
# -*- coding: utf-8 -*-
#
# ufo2fdk documentation build configuration file, created by
# sphinx-quickstart on Sun Jan 25 09:05:02 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# PEP 8: one import per line (was "import sys, os").
import os
import sys

# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('.'))

# General configuration
# ---------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'ufo2fdk'
copyright = u'2009, Type Supply LLC'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# Options for HTML output
# -----------------------

# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'ufo2fdkdoc'

# Options for LaTeX output
# ------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# u'' instead of the Python-2-only ur'' literal: the strings contain no
# backslashes, so the values are identical, and the file stays importable
# under Python 3 (where ur'' is a syntax error).
latex_documents = [
    ('index', 'ufo2fdk.tex', u'ufo2fdk Documentation',
     u'Tal Leming', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
|
anthrotype/ufo2fdk
|
documentation/source/conf.py
|
Python
|
mit
| 6,016
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import yaml
from snapcraft.internal.states._state import State
def _stage_state_constructor(loader, node):
    """Rebuild a StageState from its YAML mapping representation."""
    mapping = loader.construct_mapping(node)
    return StageState(**mapping)


# Register the constructor so yaml.load can deserialize !StageState nodes.
yaml.add_constructor(u'!StageState', _stage_state_constructor)
class StageState(State):
    """Lifecycle state persisted after a part's *stage* step.

    Serialized to/from YAML via the ``!StageState`` tag (see the
    module-level constructor registration).
    """

    yaml_tag = u'!StageState'

    def __init__(self, files, directories, part_properties=None, project=None):
        """Record what the part contributed to the staging area.

        :param files: files staged by this part.
        :param directories: directories staged by this part.
        :param part_properties: part's properties (passed to base State).
        :param project: project options (passed to base State).
        """
        super().__init__(part_properties, project)
        self.files = files
        self.directories = directories

    def properties_of_interest(self, part_properties):
        """Extract the properties concerning this step from part_properties.

        The properties of interest to the stage step are the `stage` keyword
        (used to filter out files with a white or blacklist) and the
        `filesets` definitions it may reference.
        """
        return {
            'stage': part_properties.get('stage', ['*']) or ['*'],
            'filesets': part_properties.get('filesets', {}) or {},
        }

    def project_options_of_interest(self, project):
        """Extract the options concerning this step from the project.

        The stage step doesn't care about any project options.
        """
        return {}
|
3v1n0/snapcraft
|
snapcraft/internal/states/_stage_state.py
|
Python
|
gpl-3.0
| 1,846
|
## pygame - Python Game Library
## Copyright (C) 2007 Marcus von Appen
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Marcus von Appen
## mva@sysfault.org
"""pygame module for accessing surface pixel data using array interfaces
Functions to convert pixel data between pygame Surfaces and arrays. This
module will only be functional when pygame can use the external Numpy or
Numeric packages.
Every pixel is stored as a single integer value to represent the red,
green, and blue colors. The 8bit images use a value that looks into a
colormap. Pixels with higher depth use a bit packing process to place
three or four values into a single number.
The arrays are indexed by the X axis first, followed by the Y
axis. Arrays that treat the pixels as a single integer are referred to
as 2D arrays. This module can also separate the red, green, and blue
color values into separate indices. These types of arrays are referred
to as 3D arrays, and the last index is 0 for red, 1 for green, and 2 for
blue.
Supported array types are
numpy
numeric (deprecated; will be removed in Pygame 1.9.3.)
The default will be numpy, if installed. Otherwise, Numeric will be set
as default if installed, and a deprecation warning will be issued. If
neither numpy nor Numeric are installed, the module will raise an
ImportError.
The array type to use can be changed at runtime using the use_arraytype()
method, which requires one of the above types as string.
Note: numpy and Numeric are not completely compatible. Certain array
manipulations, which work for one type, might behave differently or even
completely break for the other.
Additionally, in contrast to Numeric, numpy does use unsigned 16-bit
integers. Images with 16-bit data will be treated as unsigned
integers. Numeric instead uses signed integers for the representation,
which is important to keep in mind, if you use the module's functions
and wonder about the values.
"""
# Try to import the necessary modules.
import pygame._numpysurfarray as numpysf
from pygame.pixelcopy import array_to_surface, make_surface as pc_make_surface
def blit_array(surface, array):
    """pygame.surfarray.blit_array(Surface, array): return None

    Copy values from *array* straight into *surface*, replacing every
    pixel.  Faster than converting the array to a Surface and blitting.
    The array must match the Surface's dimensions; only integer, ascii
    character and record arrays are accepted.  The Surface is locked
    temporarily while the values are copied.
    """
    return numpysf.blit_array(surface, array)
def array2d(surface):
    """pygame.surfarray.array2d (Surface): return array

    Copy the pixels of *surface* into a new 2D array, one integer per
    pixel.  The surface's bit depth controls the integer size; any pixel
    format works.  The Surface is locked temporarily while pixels are
    copied (see Surface.lock).
    """
    return numpysf.array2d(surface)
def pixels2d(surface):
    """pygame.surfarray.pixels2d (Surface): return array

    Create a 2D array that directly references the pixel data of
    *surface* — no copy is made, so changes to the array change the
    Surface.  24-bit Surfaces cannot be referenced; all other depths
    can.  The Surface stays locked for the lifetime of the returned
    array (see Surface.lock).
    """
    return numpysf.pixels2d(surface)
def array3d(surface):
    """pygame.surfarray.array3d (Surface): return array

    Copy the pixels of *surface* into a new 3D array (last axis is
    red/green/blue).  The surface's bit depth controls the integer
    size; any pixel format works.  The Surface is locked temporarily
    while pixels are copied (see Surface.lock).
    """
    return numpysf.array3d(surface)
def pixels3d(surface):
    """pygame.surfarray.pixels3d (Surface): return array

    Create a 3D array that directly references the pixel data of
    *surface* — no copy is made, so changes to the array change the
    Surface.  Only 24-bit and 32-bit Surfaces can be referenced.  The
    Surface stays locked for the lifetime of the returned array (see
    Surface.lock).
    """
    return numpysf.pixels3d(surface)
def array_alpha(surface):
    """pygame.surfarray.array_alpha (Surface): return array

    Copy the per-pixel alpha values (degree of transparency) of
    *surface* into a new 2D array.  Works for any Surface format;
    Surfaces without a pixel alpha yield an all-opaque array.  The
    Surface is locked temporarily while pixels are copied (see
    Surface.lock).
    """
    return numpysf.array_alpha(surface)
def pixels_alpha(surface):
    """pygame.surfarray.pixels_alpha (Surface): return array

    Create a 2D array that directly references the alpha values of
    *surface* — no copy is made, so changes to the array change the
    Surface.  Only works on 32-bit Surfaces with a per-pixel alpha.
    The Surface stays locked for the lifetime of the returned array.
    """
    return numpysf.pixels_alpha(surface)
def pixels_red(surface):
    """pygame.surfarray.pixels_red (Surface): return array

    Create a 2D array that directly references the red channel of
    *surface* — no copy is made, so changes to the array change the
    Surface.  Only works on 24-bit or 32-bit Surfaces.  The Surface
    stays locked for the lifetime of the returned array.
    """
    return numpysf.pixels_red(surface)
def pixels_green(surface):
    """pygame.surfarray.pixels_green (Surface): return array

    Create a 2D array that directly references the green channel of
    *surface* — no copy is made, so changes to the array change the
    Surface.  Only works on 24-bit or 32-bit Surfaces.  The Surface
    stays locked for the lifetime of the returned array.
    """
    return numpysf.pixels_green(surface)
def pixels_blue(surface):
    """pygame.surfarray.pixels_blue (Surface): return array

    Create a 2D array that directly references the blue channel of
    *surface* — no copy is made, so changes to the array change the
    Surface.  Only works on 24-bit or 32-bit Surfaces.  The Surface
    stays locked for the lifetime of the returned array.
    """
    return numpysf.pixels_blue(surface)
def array_colorkey(surface):
    """pygame.surfarray.array_colorkey (Surface): return array

    Build a new 2D array of colorkey transparency values: pixels
    matching the Surface's colorkey become fully transparent, all
    others fully opaque.  Works on any Surface format; with no colorkey
    set, a solid opaque array is returned.  The Surface is locked
    temporarily while pixels are copied.
    """
    return numpysf.array_colorkey(surface)
def make_surface(array):
    """pygame.surfarray.make_surface (array): return Surface

    Copy *array* into a new Surface whose format best matches the
    array's data.  The array may be 2D or 3D with any sized integers.
    """
    return numpysf.make_surface(array)
def map_array(surface, array):
    """pygame.surfarray.map_array (Surface, array3d): return array2d

    Convert a 3D (per-channel) array into a 2D (packed-pixel) array,
    using the pixel format of *surface* to control the conversion.
    Palette surface formats are not supported.
    """
    return numpysf.map_array(surface, array)
def use_arraytype(arraytype):
    """pygame.surfarray.use_arraytype (arraytype): return None

    DEPRECATED — only numpy arrays are supported now; any value other
    than "numpy" (case-insensitive) raises ValueError.
    """
    if arraytype.lower() != "numpy":
        raise ValueError("invalid array type")
def get_arraytype():
    """pygame.surfarray.get_arraytype (): return str

    DEPRECATED — numpy is the only supported array type, so this
    always returns "numpy".
    """
    return "numpy"
def get_arraytypes():
    """pygame.surfarray.get_arraytypes (): return tuple

    DEPRECATED — numpy is the only supported array type, so this
    always returns the one-element tuple ("numpy",).
    """
    return ("numpy",)
|
mark-me/Pi-Jukebox
|
venv/Lib/site-packages/pygame/surfarray.py
|
Python
|
agpl-3.0
| 10,257
|
"""
This is the courseware context_processor module.
This is meant to simplify the process of sending user preferences (espec. time_zone and pref-lang)
to the templates without having to append every view file.
"""
from openedx.core.djangoapps.request_cache import get_cache
from openedx.core.djangoapps.user_api.errors import UserAPIInternalError, UserNotFound
from openedx.core.djangoapps.user_api.preferences.api import get_user_preferences
# Maps template-context variable name -> user-preference key to look up.
RETRIEVABLE_PREFERENCES = {
    'user_timezone': 'time_zone',
    'user_language': 'pref-lang'
}
# Request-cache bucket used to memoize the preferences for one request.
CACHE_NAME = "context_processor.user_timezone_preferences"
def user_timezone_locale_prefs(request):
    """
    Checks if request has an authenticated user.
    If so, sends set (or none if unset) time_zone and language prefs.

    This interacts with the DateUtils to either display preferred or attempt to determine
    system/browser set time_zones and languages

    :param request: the current request; preferences are only looked up
        when it carries an authenticated user.
    :returns: dict with 'user_timezone' and 'user_language' (each possibly
        None), memoized in the request cache.
    """
    cached_value = get_cache(CACHE_NAME)
    if not cached_value:
        user_prefs = {
            'user_timezone': None,
            'user_language': None,
        }
        if hasattr(request, 'user') and request.user.is_authenticated:
            try:
                user_preferences = get_user_preferences(request.user)
            except (UserNotFound, UserAPIInternalError):
                # Lookup failed: cache the Nones so we don't retry on
                # every template render within this request.
                cached_value.update(user_prefs)
            else:
                # .items() instead of the Python-2-only .iteritems():
                # identical behaviour on Python 2 and valid on Python 3.
                user_prefs = {
                    key: user_preferences.get(pref_name, None)
                    for key, pref_name in RETRIEVABLE_PREFERENCES.items()
                }
                cached_value.update(user_prefs)
    return cached_value
|
BehavioralInsightsTeam/edx-platform
|
lms/djangoapps/courseware/context_processor.py
|
Python
|
agpl-3.0
| 1,626
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
# Extension name as registered in the OpenGL extension registry.
_EXTENSION_NAME = 'GL_ARB_shader_stencil_export'
def _f( function ):
    # Bind *function* as a GL entry point belonging to this extension,
    # with the standard per-call GL error checker attached.
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_ARB_shader_stencil_export',error_checker=_errors._error_checker)
|
stack-of-tasks/rbdlpy
|
tutorial/lib/python2.7/site-packages/OpenGL/raw/GL/ARB/shader_stencil_export.py
|
Python
|
lgpl-3.0
| 513
|
"""Config flow for MQTT."""
from collections import OrderedDict
import logging
import queue
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PAYLOAD,
CONF_PORT,
CONF_PROTOCOL,
CONF_USERNAME,
)
from .const import (
ATTR_PAYLOAD,
ATTR_QOS,
ATTR_RETAIN,
ATTR_TOPIC,
CONF_BIRTH_MESSAGE,
CONF_BROKER,
CONF_DISCOVERY,
CONF_WILL_MESSAGE,
DATA_MQTT_CONFIG,
DEFAULT_BIRTH,
DEFAULT_DISCOVERY,
DEFAULT_WILL,
)
from .util import MQTT_WILL_BIRTH_SCHEMA
_LOGGER = logging.getLogger(__name__)
@config_entries.HANDLERS.register("mqtt")
class FlowHandler(config_entries.ConfigFlow):
    """Handle a config flow."""
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
    # Populated by async_step_hassio with the add-on discovery payload;
    # read back in async_step_hassio_confirm.
    _hassio_discovery = None
    @staticmethod
    def async_get_options_flow(config_entry):
        """Get the options flow for this handler."""
        return MQTTOptionsFlowHandler(config_entry)
    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user."""
        # MQTT is a single-instance integration: refuse a second entry.
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        return await self.async_step_broker()
    async def async_step_broker(self, user_input=None):
        """Confirm the setup."""
        errors = {}
        if user_input is not None:
            # try_connection blocks on network I/O, so run it in the
            # executor pool rather than the event loop.
            can_connect = await self.hass.async_add_executor_job(
                try_connection,
                user_input[CONF_BROKER],
                user_input[CONF_PORT],
                user_input.get(CONF_USERNAME),
                user_input.get(CONF_PASSWORD),
            )
            if can_connect:
                return self.async_create_entry(
                    title=user_input[CONF_BROKER], data=user_input
                )
            errors["base"] = "cannot_connect"
        # First display (or failed connect): (re)show the broker form.
        fields = OrderedDict()
        fields[vol.Required(CONF_BROKER)] = str
        fields[vol.Required(CONF_PORT, default=1883)] = vol.Coerce(int)
        fields[vol.Optional(CONF_USERNAME)] = str
        fields[vol.Optional(CONF_PASSWORD)] = str
        fields[vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY)] = bool
        return self.async_show_form(
            step_id="broker", data_schema=vol.Schema(fields), errors=errors
        )
    async def async_step_import(self, user_input):
        """Import a config entry.
        Special type of import, we're not actually going to store any data.
        Instead, we're going to rely on the values that are in config file.
        """
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        return self.async_create_entry(title="configuration.yaml", data={})
    async def async_step_hassio(self, discovery_info):
        """Receive a Hass.io discovery."""
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        # Stash the payload; the user still confirms in the next step.
        self._hassio_discovery = discovery_info
        return await self.async_step_hassio_confirm()
    async def async_step_hassio_confirm(self, user_input=None):
        """Confirm a Hass.io discovery."""
        errors = {}
        if user_input is not None:
            data = self._hassio_discovery
            can_connect = await self.hass.async_add_executor_job(
                try_connection,
                data[CONF_HOST],
                data[CONF_PORT],
                data.get(CONF_USERNAME),
                data.get(CONF_PASSWORD),
                data.get(CONF_PROTOCOL),
            )
            if can_connect:
                return self.async_create_entry(
                    title=data["addon"],
                    data={
                        CONF_BROKER: data[CONF_HOST],
                        CONF_PORT: data[CONF_PORT],
                        CONF_USERNAME: data.get(CONF_USERNAME),
                        CONF_PASSWORD: data.get(CONF_PASSWORD),
                        CONF_PROTOCOL: data.get(CONF_PROTOCOL),
                        CONF_DISCOVERY: user_input[CONF_DISCOVERY],
                    },
                )
            errors["base"] = "cannot_connect"
        return self.async_show_form(
            step_id="hassio_confirm",
            description_placeholders={"addon": self._hassio_discovery["addon"]},
            data_schema=vol.Schema(
                {vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): bool}
            ),
            errors=errors,
        )
class MQTTOptionsFlowHandler(config_entries.OptionsFlow):
    """Handle MQTT options."""
    def __init__(self, config_entry):
        """Initialize MQTT options flow."""
        self.config_entry = config_entry
        # Broker settings validated in async_step_broker, written to the
        # config entry in async_step_options once the whole flow succeeds.
        self.broker_config = {}
        self.options = dict(config_entry.options)
    async def async_step_init(self, user_input=None):
        """Manage the MQTT options."""
        return await self.async_step_broker()
    async def async_step_broker(self, user_input=None):
        """Manage the MQTT options."""
        errors = {}
        current_config = self.config_entry.data
        # YAML configuration acts as the fallback source for form defaults.
        yaml_config = self.hass.data.get(DATA_MQTT_CONFIG, {})
        if user_input is not None:
            can_connect = await self.hass.async_add_executor_job(
                try_connection,
                user_input[CONF_BROKER],
                user_input[CONF_PORT],
                user_input.get(CONF_USERNAME),
                user_input.get(CONF_PASSWORD),
            )
            if can_connect:
                self.broker_config.update(user_input)
                return await self.async_step_options()
            errors["base"] = "cannot_connect"
        fields = OrderedDict()
        current_broker = current_config.get(CONF_BROKER, yaml_config.get(CONF_BROKER))
        current_port = current_config.get(CONF_PORT, yaml_config.get(CONF_PORT))
        current_user = current_config.get(CONF_USERNAME, yaml_config.get(CONF_USERNAME))
        current_pass = current_config.get(CONF_PASSWORD, yaml_config.get(CONF_PASSWORD))
        fields[vol.Required(CONF_BROKER, default=current_broker)] = str
        fields[vol.Required(CONF_PORT, default=current_port)] = vol.Coerce(int)
        fields[
            vol.Optional(
                CONF_USERNAME,
                description={"suggested_value": current_user},
            )
        ] = str
        fields[
            vol.Optional(
                CONF_PASSWORD,
                description={"suggested_value": current_pass},
            )
        ] = str
        return self.async_show_form(
            step_id="broker",
            data_schema=vol.Schema(fields),
            errors=errors,
        )
    async def async_step_options(self, user_input=None):
        """Manage the MQTT options."""
        errors = {}
        current_config = self.config_entry.data
        yaml_config = self.hass.data.get(DATA_MQTT_CONFIG, {})
        options_config = {}
        if user_input is not None:
            bad_birth = False
            bad_will = False
            if "birth_topic" in user_input:
                birth_message = {
                    ATTR_TOPIC: user_input["birth_topic"],
                    ATTR_PAYLOAD: user_input.get("birth_payload", ""),
                    ATTR_QOS: user_input["birth_qos"],
                    ATTR_RETAIN: user_input["birth_retain"],
                }
                try:
                    birth_message = MQTT_WILL_BIRTH_SCHEMA(birth_message)
                    options_config[CONF_BIRTH_MESSAGE] = birth_message
                except vol.Invalid:
                    errors["base"] = "bad_birth"
                    bad_birth = True
            # An empty dict means "birth message disabled"; this deliberately
            # overrides any topic validated just above.
            if not user_input["birth_enable"]:
                options_config[CONF_BIRTH_MESSAGE] = {}
            if "will_topic" in user_input:
                will_message = {
                    ATTR_TOPIC: user_input["will_topic"],
                    ATTR_PAYLOAD: user_input.get("will_payload", ""),
                    ATTR_QOS: user_input["will_qos"],
                    ATTR_RETAIN: user_input["will_retain"],
                }
                try:
                    will_message = MQTT_WILL_BIRTH_SCHEMA(will_message)
                    options_config[CONF_WILL_MESSAGE] = will_message
                except vol.Invalid:
                    errors["base"] = "bad_will"
                    bad_will = True
            # Same convention for the will message: {} disables it.
            if not user_input["will_enable"]:
                options_config[CONF_WILL_MESSAGE] = {}
            options_config[CONF_DISCOVERY] = user_input[CONF_DISCOVERY]
            if not bad_birth and not bad_will:
                # Persist broker settings plus options on the entry itself.
                updated_config = {}
                updated_config.update(self.broker_config)
                updated_config.update(options_config)
                self.hass.config_entries.async_update_entry(
                    self.config_entry, data=updated_config
                )
                # NOTE(review): data=None (not {}) — everything was already
                # written via async_update_entry above; confirm None is the
                # intended sentinel here.
                return self.async_create_entry(title="", data=None)
        # Form defaults: entry data first, then YAML, then built-in defaults.
        birth = {
            **DEFAULT_BIRTH,
            **current_config.get(
                CONF_BIRTH_MESSAGE, yaml_config.get(CONF_BIRTH_MESSAGE, {})
            ),
        }
        will = {
            **DEFAULT_WILL,
            **current_config.get(
                CONF_WILL_MESSAGE, yaml_config.get(CONF_WILL_MESSAGE, {})
            ),
        }
        discovery = current_config.get(
            CONF_DISCOVERY, yaml_config.get(CONF_DISCOVERY, DEFAULT_DISCOVERY)
        )
        fields = OrderedDict()
        fields[vol.Optional(CONF_DISCOVERY, default=discovery)] = bool
        # Birth message is disabled if CONF_BIRTH_MESSAGE = {}
        fields[
            vol.Optional(
                "birth_enable",
                default=CONF_BIRTH_MESSAGE not in current_config
                or current_config[CONF_BIRTH_MESSAGE] != {},
            )
        ] = bool
        fields[
            vol.Optional(
                "birth_topic", description={"suggested_value": birth[ATTR_TOPIC]}
            )
        ] = str
        fields[
            vol.Optional(
                "birth_payload", description={"suggested_value": birth[CONF_PAYLOAD]}
            )
        ] = str
        fields[vol.Optional("birth_qos", default=birth[ATTR_QOS])] = vol.In([0, 1, 2])
        fields[vol.Optional("birth_retain", default=birth[ATTR_RETAIN])] = bool
        # Will message is disabled if CONF_WILL_MESSAGE = {}
        fields[
            vol.Optional(
                "will_enable",
                default=CONF_WILL_MESSAGE not in current_config
                or current_config[CONF_WILL_MESSAGE] != {},
            )
        ] = bool
        fields[
            vol.Optional(
                "will_topic", description={"suggested_value": will[ATTR_TOPIC]}
            )
        ] = str
        fields[
            vol.Optional(
                "will_payload", description={"suggested_value": will[CONF_PAYLOAD]}
            )
        ] = str
        fields[vol.Optional("will_qos", default=will[ATTR_QOS])] = vol.In([0, 1, 2])
        fields[vol.Optional("will_retain", default=will[ATTR_RETAIN])] = bool
        return self.async_show_form(
            step_id="options",
            data_schema=vol.Schema(fields),
            errors=errors,
        )
def try_connection(broker, port, username, password, protocol="3.1"):
    """Test if we can connect to an MQTT broker.

    Returns True when the broker accepts the connection within five
    seconds, False on rejection or timeout.
    """
    # pylint: disable=import-outside-toplevel
    import paho.mqtt.client as mqtt

    proto = mqtt.MQTTv31 if protocol == "3.1" else mqtt.MQTTv311
    client = mqtt.Client(protocol=proto)
    if username and password:
        client.username_pw_set(username, password)

    # The connect callback fires on paho's network thread; hand the
    # outcome back through a one-slot queue.
    outcome = queue.Queue(maxsize=1)

    def on_connect(client_, userdata, flags, result_code):
        """Handle connection result."""
        outcome.put(result_code == mqtt.CONNACK_ACCEPTED)

    client.on_connect = on_connect
    client.connect_async(broker, port)
    client.loop_start()
    try:
        return outcome.get(timeout=5)
    except queue.Empty:
        # No callback within the deadline: treat as unreachable.
        return False
    finally:
        client.disconnect()
        client.loop_stop()
|
tboyce021/home-assistant
|
homeassistant/components/mqtt/config_flow.py
|
Python
|
apache-2.0
| 12,201
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_PUBSUB_KEY
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_PUBSUB_KEY)
class PubSubSystemTest(GoogleSystemTest):
    """System tests that run the example Pub/Sub DAGs end to end."""

    def _run_cloud_dag(self, dag_id):
        # Every Pub/Sub example DAG lives in the shared cloud DAG folder.
        self.run_dag(dag_id=dag_id, dag_folder=CLOUD_DAG_FOLDER)

    @provide_gcp_context(GCP_PUBSUB_KEY)
    def test_run_example_sensor_dag(self):
        self._run_cloud_dag("example_gcp_pubsub_sensor")

    @provide_gcp_context(GCP_PUBSUB_KEY)
    def test_run_example_operator_dag(self):
        self._run_cloud_dag("example_gcp_pubsub_operator")
|
airbnb/airflow
|
tests/providers/google/cloud/operators/test_pubsub_system.py
|
Python
|
apache-2.0
| 1,462
|
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2005-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
# Andreas Hansson
from MemObject import MemObject
from m5.params import *
class BaseBus(MemObject):
    """Abstract base SimObject for buses: vector ports for masters and
    slaves plus shared timing/width parameters."""
    type = 'BaseBus'
    # Abstract: only NoncoherentBus/CoherentBus are instantiated directly.
    abstract = True
    slave = VectorSlavePort("vector port for connecting masters")
    master = VectorMasterPort("vector port for connecting slaves")
    clock = Param.Clock("1GHz", "bus clock speed")
    header_cycles = Param.Int(1, "cycles of overhead per transaction")
    width = Param.Int(64, "bus width (bytes)")
    block_size = Param.Int(64, "The default block size if not set by " \
                           "any connected module")
    # The default port can be left unconnected, or be used to connect
    # a default slave port
    default = MasterPort("Port for connecting an optional default slave")
    # The default port can be used unconditionally, or based on
    # address range, in which case it may overlap with other
    # ports. The default range is always checked first, thus creating
    # a two-level hierarchical lookup. This is useful e.g. for the PCI
    # bus configuration.
    use_default_range = Param.Bool(False, "Perform address mapping for " \
                                   "the default port")
class NoncoherentBus(BaseBus):
    """Bus variant without cache-coherence support (e.g. below the caches)."""
    type = 'NoncoherentBus'
class CoherentBus(BaseBus):
    """Bus variant that participates in the cache-coherence protocol."""
    type = 'CoherentBus'
|
silkyar/570_Big_Little
|
src/mem/Bus.py
|
Python
|
bsd-3-clause
| 3,484
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.apps.registry import Apps, apps as global_apps
from django.db import models
from django.db.models.options import DEFAULT_NAMES, normalize_together
from django.db.models.fields.related import do_pending_lookups
from django.db.models.fields.proxy import OrderWrt
from django.conf import settings
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.module_loading import import_string
class InvalidBasesError(ValueError):
    """Raised when a ModelState's bases cannot (yet) be resolved to classes."""
    pass
class ProjectState(object):
    """
    Represents the entire project's overall state.
    This is the item that is passed around - we do it here rather than at the
    app level so that cross-app FKs/etc. resolve properly.
    """
    def __init__(self, models=None, real_apps=None):
        # Maps (app_label, lowercased model name) -> ModelState.
        self.models = models or {}
        # Lazily-built Apps registry; populated/cached by render().
        self.apps = None
        # Apps to include from main registry, usually unmigrated ones
        self.real_apps = real_apps or []
    def add_model_state(self, model_state):
        """Register a ModelState under its (app_label, name) key."""
        self.models[(model_state.app_label, model_state.name.lower())] = model_state
    def clone(self):
        "Returns an exact copy of this ProjectState"
        return ProjectState(
            models=dict((k, v.clone()) for k, v in self.models.items()),
            real_apps=self.real_apps,
        )
    def render(self, include_real=None, ignore_swappable=False, skip_cache=False):
        "Turns the project state into actual models in a new Apps"
        # NOTE(review): include_real is accepted but never referenced in this
        # body — likely kept for signature compatibility; confirm.
        if self.apps is None or skip_cache:
            # Any apps in self.real_apps should have all their models included
            # in the render. We don't use the original model instances as there
            # are some variables that refer to the Apps object.
            # FKs/M2Ms from real apps are also not included as they just
            # mess things up with partial states (due to lack of dependencies)
            real_models = []
            for app_label in self.real_apps:
                app = global_apps.get_app_config(app_label)
                for model in app.get_models():
                    real_models.append(ModelState.from_model(model, exclude_rels=True))
            # Populate the app registry with a stub for each application.
            app_labels = set(model_state.app_label for model_state in self.models.values())
            self.apps = Apps([AppConfigStub(label) for label in sorted(self.real_apps + list(app_labels))])
            # We keep trying to render the models in a loop, ignoring invalid
            # base errors, until the size of the unrendered models doesn't
            # decrease by at least one, meaning there's a base dependency loop/
            # missing base.
            unrendered_models = list(self.models.values()) + real_models
            while unrendered_models:
                new_unrendered_models = []
                for model in unrendered_models:
                    try:
                        model.render(self.apps)
                    except InvalidBasesError:
                        new_unrendered_models.append(model)
                if len(new_unrendered_models) == len(unrendered_models):
                    raise InvalidBasesError("Cannot resolve bases for %r\nThis can happen if you are inheriting models from an app with migrations (e.g. contrib.auth)\n in an app with no migrations; see https://docs.djangoproject.com/en/1.7/topics/migrations/#dependencies for more" % new_unrendered_models)
                unrendered_models = new_unrendered_models
            # make sure apps has no dangling references
            if self.apps._pending_lookups:
                # There's some lookups left. See if we can first resolve them
                # ourselves - sometimes fields are added after class_prepared is sent
                for lookup_model, operations in self.apps._pending_lookups.items():
                    try:
                        model = self.apps.get_model(lookup_model[0], lookup_model[1])
                    except LookupError:
                        if "%s.%s" % (lookup_model[0], lookup_model[1]) == settings.AUTH_USER_MODEL and ignore_swappable:
                            continue
                        # Raise an error with a best-effort helpful message
                        # (only for the first issue). Error message should look like:
                        # "ValueError: Lookup failed for model referenced by
                        # field migrations.Book.author: migrations.Author"
                        raise ValueError("Lookup failed for model referenced by field {field}: {model[0]}.{model[1]}".format(
                            field=operations[0][1],
                            model=lookup_model,
                        ))
                    else:
                        do_pending_lookups(model)
        try:
            return self.apps
        finally:
            # With skip_cache the freshly-built registry is thrown away so the
            # next call rebuilds from scratch.
            if skip_cache:
                self.apps = None
    @classmethod
    def from_apps(cls, apps):
        "Takes in an Apps and returns a ProjectState matching it"
        app_models = {}
        for model in apps.get_models(include_swapped=True):
            model_state = ModelState.from_model(model)
            app_models[(model_state.app_label, model_state.name.lower())] = model_state
        return cls(app_models)
    def __eq__(self, other):
        # Equal iff the same model keys, same real apps, and each ModelState
        # compares equal to its counterpart.
        if set(self.models.keys()) != set(other.models.keys()):
            return False
        if set(self.real_apps) != set(other.real_apps):
            return False
        return all(model == other.models[key] for key, model in self.models.items())
    def __ne__(self, other):
        return not (self == other)
class AppConfigStub(AppConfig):
    """
    Stubs a Django AppConfig. Only provides a label, and a dict of models.
    """
    # Not used, but required by AppConfig.__init__
    path = ''
    def __init__(self, label):
        # No real module backs the stub, hence name=None.
        super(AppConfigStub, self).__init__(label, None)
    def import_models(self, all_models):
        # Adopt the registry-provided model dict instead of importing modules.
        self.models = all_models
class ModelState(object):
    """
    Represents a Django Model. We don't use the actual Model class
    as it's not designed to have its options changed - instead, we
    mutate this one and then render it into a Model as required.
    Note that while you are allowed to mutate .fields, you are not allowed
    to mutate the Field instances inside there themselves - you must instead
    assign new ones, as these are not detached during a clone.
    """
    def __init__(self, app_label, name, fields, options=None, bases=None):
        self.app_label = app_label
        self.name = force_text(name)
        # List of (field_name, field_instance) 2-tuples; order matters.
        self.fields = fields
        self.options = options or {}
        self.bases = bases or (models.Model, )
        # Sanity-check that fields is NOT a dict. It must be ordered.
        if isinstance(self.fields, dict):
            raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.")
        # Sanity-check that fields are NOT already bound to a model.
        for name, field in fields:
            if hasattr(field, 'model'):
                raise ValueError(
                    'ModelState.fields cannot be bound to a model - "%s" is.' % name
                )
    @classmethod
    def from_model(cls, model, exclude_rels=False):
        """
        Feed me a model, get a ModelState representing it out.
        """
        # Deconstruct the fields
        fields = []
        for field in model._meta.local_fields:
            if getattr(field, "rel", None) and exclude_rels:
                continue
            if isinstance(field, OrderWrt):
                continue
            # Rebuild each field from its deconstructed form so the state
            # holds fresh, unbound Field instances.
            name, path, args, kwargs = field.deconstruct()
            field_class = import_string(path)
            try:
                fields.append((name, field_class(*args, **kwargs)))
            except TypeError as e:
                raise TypeError("Couldn't reconstruct field %s on %s.%s: %s" % (
                    name,
                    model._meta.app_label,
                    model._meta.object_name,
                    e,
                ))
        if not exclude_rels:
            for field in model._meta.local_many_to_many:
                name, path, args, kwargs = field.deconstruct()
                field_class = import_string(path)
                try:
                    fields.append((name, field_class(*args, **kwargs)))
                except TypeError as e:
                    raise TypeError("Couldn't reconstruct m2m field %s on %s: %s" % (
                        name,
                        model._meta.object_name,
                        e,
                    ))
        # Extract the options
        options = {}
        for name in DEFAULT_NAMES:
            # Ignore some special options
            if name in ["apps", "app_label"]:
                continue
            elif name in model._meta.original_attrs:
                if name == "unique_together":
                    ut = model._meta.original_attrs["unique_together"]
                    options[name] = set(normalize_together(ut))
                elif name == "index_together":
                    it = model._meta.original_attrs["index_together"]
                    options[name] = set(normalize_together(it))
                else:
                    options[name] = model._meta.original_attrs[name]
        # Force-convert all options to text_type (#23226)
        options = cls.force_text_recursive(options)
        # If we're ignoring relationships, remove all field-listing model
        # options (that option basically just means "make a stub model")
        if exclude_rels:
            for key in ["unique_together", "index_together", "order_with_respect_to"]:
                if key in options:
                    del options[key]
        def flatten_bases(model):
            # Expand abstract bases into their own (concrete) bases.
            bases = []
            for base in model.__bases__:
                if hasattr(base, "_meta") and base._meta.abstract:
                    bases.extend(flatten_bases(base))
                else:
                    bases.append(base)
            return bases
        # We can't rely on __mro__ directly because we only want to flatten
        # abstract models and not the whole tree. However by recursing on
        # __bases__ we may end up with duplicates and ordering issues, we
        # therefore discard any duplicates and reorder the bases according
        # to their index in the MRO.
        flattened_bases = sorted(set(flatten_bases(model)), key=lambda x: model.__mro__.index(x))
        # Make our record
        bases = tuple(
            (
                "%s.%s" % (base._meta.app_label, base._meta.model_name)
                if hasattr(base, "_meta") else
                base
            )
            for base in flattened_bases
        )
        # Ensure at least one base inherits from models.Model
        if not any((isinstance(base, six.string_types) or issubclass(base, models.Model)) for base in bases):
            bases = (models.Model,)
        return cls(
            model._meta.app_label,
            model._meta.object_name,
            fields,
            options,
            bases,
        )
    @classmethod
    def force_text_recursive(cls, value):
        # Recursively coerce all strings inside lists/tuples/sets/dicts to
        # text, preserving the container type.
        if isinstance(value, six.string_types):
            return smart_text(value)
        elif isinstance(value, list):
            return [cls.force_text_recursive(x) for x in value]
        elif isinstance(value, tuple):
            return tuple(cls.force_text_recursive(x) for x in value)
        elif isinstance(value, set):
            return set(cls.force_text_recursive(x) for x in value)
        elif isinstance(value, dict):
            return dict(
                (cls.force_text_recursive(k), cls.force_text_recursive(v))
                for k, v in value.items()
            )
        return value
    def construct_fields(self):
        "Deep-clone the fields using deconstruction"
        for name, field in self.fields:
            _, path, args, kwargs = field.deconstruct()
            field_class = import_string(path)
            yield name, field_class(*args, **kwargs)
    def clone(self):
        "Returns an exact copy of this ModelState"
        return self.__class__(
            app_label=self.app_label,
            name=self.name,
            fields=list(self.construct_fields()),
            options=dict(self.options),
            bases=self.bases,
        )
    def render(self, apps):
        "Creates a Model object from our current state into the given apps"
        # First, make a Meta object
        meta_contents = {'app_label': self.app_label, "apps": apps}
        meta_contents.update(self.options)
        meta = type(str("Meta"), tuple(), meta_contents)
        # Then, work out our bases
        try:
            bases = tuple(
                (apps.get_model(base) if isinstance(base, six.string_types) else base)
                for base in self.bases
            )
        except LookupError:
            raise InvalidBasesError("Cannot resolve one or more bases from %r" % (self.bases,))
        # Turn fields into a dict for the body, add other bits
        body = dict(self.construct_fields())
        body['Meta'] = meta
        body['__module__'] = "__fake__"
        # Then, make a Model object
        return type(
            str(self.name),
            bases,
            body,
        )
    def get_field_by_name(self, name):
        """Return the field instance stored under *name*; raise ValueError if absent."""
        for fname, field in self.fields:
            if fname == name:
                return field
        raise ValueError("No field called %s on model %s" % (name, self.name))
    def __repr__(self):
        return "<ModelState: '%s.%s'>" % (self.app_label, self.name)
    def __eq__(self, other):
        # Fields compare by name plus deconstructed signature (path/args/kwargs),
        # not by instance identity.
        return (
            (self.app_label == other.app_label) and
            (self.name == other.name) and
            (len(self.fields) == len(other.fields)) and
            all((k1 == k2 and (f1.deconstruct()[1:] == f2.deconstruct()[1:])) for (k1, f1), (k2, f2) in zip(self.fields, other.fields)) and
            (self.options == other.options) and
            (self.bases == other.bases)
        )
    def __ne__(self, other):
        return not (self == other)
|
domenicosolazzo/practice-django
|
venv/lib/python2.7/site-packages/django/db/migrations/state.py
|
Python
|
mit
| 14,253
|
"""Print NAME to stdout every INTERVAL seconds, forever.

Reads two environment variables at startup:
    INTERVAL -- seconds to sleep between prints (must parse as int;
                startup fails with TypeError/ValueError otherwise,
                preserving the original behavior)
    NAME     -- the word to print (prints ``None`` if unset, as before)
"""
import os
import time

interval = int(os.getenv("INTERVAL"))
word = os.getenv("NAME")

while True:
    time.sleep(interval)
    # Parenthesized single-argument print: output is identical on
    # Python 2 and the statement is also valid Python 3 syntax
    # (the original bare ``print word`` is a SyntaxError on Python 3).
    print(word)
|
GolangDorks/gobox
|
example/test2.py
|
Python
|
mit
| 136
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, unittest, json
from frappe.test_runner import make_test_records_for_doctype
from frappe.core.doctype.doctype.doctype import InvalidFieldNameError
test_dependencies = ["Custom Field", "Property Setter"]
class TestCustomizeForm(unittest.TestCase):
def insert_custom_field(self):
frappe.delete_doc_if_exists("Custom Field", "User-test_custom_field")
frappe.get_doc({
"doctype": "Custom Field",
"dt": "User",
"label": "Test Custom Field",
"description": "A Custom Field for Testing",
"fieldtype": "Select",
"in_list_view": 1,
"options": "\nCustom 1\nCustom 2\nCustom 3",
"default": "Custom 3",
"insert_after": frappe.get_meta('User').fields[-1].fieldname
}).insert()
def setUp(self):
self.insert_custom_field()
frappe.db.commit()
frappe.clear_cache(doctype="User")
def tearDown(self):
frappe.delete_doc("Custom Field", "User-test_custom_field")
frappe.db.commit()
frappe.clear_cache(doctype="User")
def get_customize_form(self, doctype=None):
d = frappe.get_doc("Customize Form")
if doctype:
d.doc_type = doctype
d.run_method("fetch_to_customize")
return d
def test_fetch_to_customize(self):
d = self.get_customize_form()
self.assertEquals(d.doc_type, None)
self.assertEquals(len(d.get("fields")), 0)
d = self.get_customize_form("Event")
self.assertEquals(d.doc_type, "Event")
self.assertEquals(len(d.get("fields")), 28)
d = self.get_customize_form("User")
self.assertEquals(d.doc_type, "User")
self.assertEquals(len(d.get("fields")), len(frappe.get_doc("DocType", d.doc_type).fields) + 1)
self.assertEquals(d.get("fields")[-1].fieldname, "test_custom_field")
self.assertEquals(d.get("fields", {"fieldname": "location"})[0].in_list_view, 1)
return d
def test_save_customization_property(self):
d = self.get_customize_form("User")
self.assertEquals(frappe.db.get_value("Property Setter",
{"doc_type": "User", "property": "allow_copy"}, "value"), None)
d.allow_copy = 1
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Property Setter",
{"doc_type": "User", "property": "allow_copy"}, "value"), '1')
d.allow_copy = 0
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Property Setter",
{"doc_type": "User", "property": "allow_copy"}, "value"), None)
def test_save_customization_field_property(self):
d = self.get_customize_form("User")
self.assertEquals(frappe.db.get_value("Property Setter",
{"doc_type": "User", "property": "reqd", "field_name": "location"}, "value"), None)
location_field = d.get("fields", {"fieldname": "location"})[0]
location_field.reqd = 1
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Property Setter",
{"doc_type": "User", "property": "reqd", "field_name": "location"}, "value"), '1')
location_field = d.get("fields", {"fieldname": "location"})[0]
location_field.reqd = 0
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Property Setter",
{"doc_type": "User", "property": "reqd", "field_name": "location"}, "value"), None)
def test_save_customization_custom_field_property(self):
d = self.get_customize_form("User")
self.assertEquals(frappe.db.get_value("Custom Field", "User-test_custom_field", "reqd"), 0)
custom_field = d.get("fields", {"fieldname": "test_custom_field"})[0]
custom_field.reqd = 1
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Custom Field", "User-test_custom_field", "reqd"), 1)
custom_field = d.get("fields", {"is_custom_field": True})[0]
custom_field.reqd = 0
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Custom Field", "User-test_custom_field", "reqd"), 0)
def test_save_customization_new_field(self):
d = self.get_customize_form("User")
last_fieldname = d.fields[-1].fieldname
d.append("fields", {
"label": "Test Add Custom Field Via Customize Form",
"fieldtype": "Data",
"__islocal": 1
})
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Custom Field",
"User-test_add_custom_field_via_customize_form", "fieldtype"), "Data")
self.assertEquals(frappe.db.get_value("Custom Field",
"User-test_add_custom_field_via_customize_form", 'insert_after'), last_fieldname)
frappe.delete_doc("Custom Field", "User-test_add_custom_field_via_customize_form")
self.assertEquals(frappe.db.get_value("Custom Field",
"User-test_add_custom_field_via_customize_form"), None)
def test_save_customization_remove_field(self):
d = self.get_customize_form("User")
custom_field = d.get("fields", {"fieldname": "test_custom_field"})[0]
d.get("fields").remove(custom_field)
d.run_method("save_customization")
self.assertEquals(frappe.db.get_value("Custom Field", custom_field.name), None)
frappe.local.test_objects["Custom Field"] = []
make_test_records_for_doctype("Custom Field")
def test_reset_to_defaults(self):
d = frappe.get_doc("Customize Form")
d.doc_type = "User"
d.run_method('reset_to_defaults')
self.assertEquals(d.get("fields", {"fieldname": "location"})[0].in_list_view, 0)
frappe.local.test_objects["Property Setter"] = []
make_test_records_for_doctype("Property Setter")
def test_set_allow_on_submit(self):
d = self.get_customize_form("User")
d.get("fields", {"fieldname": "first_name"})[0].allow_on_submit = 1
d.get("fields", {"fieldname": "test_custom_field"})[0].allow_on_submit = 1
d.run_method("save_customization")
d = self.get_customize_form("User")
# don't allow for standard fields
self.assertEquals(d.get("fields", {"fieldname": "first_name"})[0].allow_on_submit or 0, 0)
# allow for custom field
self.assertEquals(d.get("fields", {"fieldname": "test_custom_field"})[0].allow_on_submit, 1)
def test_title_field_pattern(self):
    """Exercise validation of the title-field format string on Web Form."""
    form = self.get_customize_form("Web Form")
    title_field = form.get("fields", {"fieldname": "title"})[0]

    # A formatter naming an unknown field must be rejected.
    title_field.options = """{doc_type} - {introduction_test}"""
    self.assertRaises(InvalidFieldNameError, form.run_method, "save_customization")

    # A space inside the braces is not a valid fieldname either.
    title_field.options = """{doc_type} - {introduction text}"""
    self.assertRaises(InvalidFieldNameError, form.run_method, "save_customization")

    # A correct fieldname saves cleanly.
    title_field.options = """{doc_type} - {introduction_text}"""
    form.run_method("save_customization")

    # Doubled braces are treated as literals and must also be accepted.
    title_field.options = """{{ {doc_type} }} - {introduction_text}"""
    form.run_method("save_customization")

    # Restore the default (no custom title pattern).
    title_field.options = None
    form.run_method("save_customization")
|
vqw/frappe
|
frappe/custom/doctype/customize_form/test_customize_form.py
|
Python
|
mit
| 6,711
|
"""Add a field for user salts
Revision ID: 590a0265a5f
Revises: 41a80a9e472
Create Date: 2015-05-03 10:36:23.196312
"""
# revision identifiers, used by Alembic.
revision = '590a0265a5f'
down_revision = '41a80a9e472'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable ``person.password_salt`` column (VARCHAR(64))."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('person', sa.Column('password_salt', sa.String(length=64), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Drop ``person.password_salt`` again (reverses upgrade())."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('person', 'password_salt')
    ### end Alembic commands ###
|
jhesketh/zookeepr
|
alembic/versions/590a0265a5f_.py
|
Python
|
gpl-2.0
| 636
|
# Performance test for running
#
# 1. Regular TriaxialTest with 3 independent dispatchers (geom, phys, constitutive law)
# 2. TriaxialTest with InteractionLoop (common loop and functor cache)
#
# Run the test like this:
#
# yade-trunk-opt-multi -j1 triax-perf.table triax-perf.py
#
# The -j1 ensures that only 1 job will run at time
# (even if other cores are free, access to memory is limiting if running multiple jobs at time)
#
# You have to collect the results by hand from log files, or run sh mkTextTable.sh and use
# triax-perf.ods to get comparison
#
# Read per-run parameters (``fast``) from the batch table; the defaults
# apply when no table row is supplied (noTableOk=True).
utils.readParamsFromTable(fast=False,noTableOk=True)
TriaxialTest(numberOfGrains=50000,fast=fast,noFiles=True).load()
O.run(10,True) # filter out initialization
# Only time steady-state steps: enable timing after the warm-up run.
O.timingEnabled=True
O.run(200,True)
from yade import timing
timing.stats()
# Python 2 print statement; O.bexSyncCount counts ForceContainer syncs.
print 'ForceContainer synced %d times'%(O.bexSyncCount)
|
bcharlas/mytrunk
|
scripts/checks-and-tests/triax-perf/triax-perf.py
|
Python
|
gpl-2.0
| 856
|
import http_server
from SOAPpy.SOAP import *
# Alias the SOAPpy fault class under the short name used in this module.
Fault = faultType
import string, sys
# Module-wide SOAP configuration; debug=1 echoes requests/responses to stdout.
Config = SOAPConfig(debug=1)
class soap_handler:
    """Medusa (http_server) handler that dispatches SOAP RPC requests.

    POST bodies are buffered by a ``collector`` instance, which calls
    continue_request() once the full Content-Length has arrived.  The
    envelope is parsed, the target function or object method is resolved
    from funcmap/objmap, and a SOAP response (or Fault) is pushed back.
    """

    def __init__(self, encoding='UTF-8', config=Config, namespace=None):
        # Default namespace used when callers register without one.
        self.namespace = namespace
        # namespace -> registered object instance (method dispatch).
        self.objmap = {}
        # namespace -> {function name -> callable}.
        self.funcmap = {}
        self.config = config
        self.encoding = encoding

    def match (self, request):
        # Accept every request; this handler is installed as a catch-all.
        return 1

    def handle_request (self, request):
        [path, params, query, fragment] = request.split_uri()

        if request.command == 'post':
            # Buffer the POST body; the collector invokes continue_request()
            # when the complete body has been received.
            request.collector = collector(self, request)
        else:
            # Only POST can carry a SOAP envelope.
            request.error(400)

    def continue_request(self, data, request):
        """Parse the buffered SOAP envelope *data* and answer *request*."""
        # Everthing that follows is cripped from do_POST().
        if self.config.debug:
            print "\n***RECEIVING***\n", data, "*" * 13 + "\n"
            sys.stdout.flush()

        try:
            r, header, body = parseSOAPRPC(data, header=1, body=1)

            method = r._name
            args = r._aslist
            kw = r._asdict
            ns = r._ns
            resp = ""
            # For faults messages
            if ns:
                nsmethod = "%s:%s" % (ns, method)
            else:
                nsmethod = method

            try:
                # First look for registered functions
                if self.funcmap.has_key(ns) and \
                    self.funcmap[ns].has_key(method):
                    f = self.funcmap[ns][method]
                else: # Now look at registered objects
                    # Check for nested attributes
                    if method.find(".") != -1:
                        t = self.objmap[ns]
                        l = method.split(".")
                        for i in l:
                            t = getattr(t,i)
                        f = t
                    else:
                        f = getattr(self.objmap[ns], method)
            except:
                # Lookup failed: report a SOAP Client fault with the
                # original exception type/value in the detail.
                if self.config.debug:
                    import traceback
                    traceback.print_exc ()

                resp = buildSOAP(Fault("%s:Client" % NS.ENV_T,
                        "No method %s found" % nsmethod,
                        "%s %s" % tuple(sys.exc_info()[0:2])),
                    encoding = self.encoding, config = self.config)
                status = 500
            else:
                try:
                    # If it's wrapped to indicate it takes keywords
                    # send it keywords
                    if header:
                        x = HeaderHandler(header)

                    if isinstance(f,MethodSig):
                        c = None
                        if f.context:  # Build context object
                            # NOTE(review): ``d``, ``self.connection`` and
                            # ``self.headers`` are not defined anywhere in
                            # this class (code copied from SOAPpy's
                            # do_POST) -- this branch would raise NameError/
                            # AttributeError if a context method were ever
                            # registered.  Confirm before relying on it.
                            c = SOAPContext(header, body, d, self.connection, self.headers,
                                self.headers["soapaction"])

                        if f.keywords:
                            tkw = {}
                            # This is lame, but have to de-unicode keywords
                            for (k,v) in kw.items():
                                tkw[str(k)] = v
                            if c:
                                tkw["_SOAPContext"] = c
                            fr = apply(f,(),tkw)
                        else:
                            if c:
                                fr = apply(f,args,{'_SOAPContext':c})
                            else:
                                fr = apply(f,args,{})
                    else:
                        fr = apply(f,args,{})

                    # voidType results are serialized bare; everything else
                    # is wrapped in a conventional {'Result': ...} struct.
                    if type(fr) == type(self) and isinstance(fr, voidType):
                        resp = buildSOAP(kw = {'%sResponse' % method:fr},
                            encoding = self.encoding,
                            config = self.config)
                    else:
                        resp = buildSOAP(kw =
                            {'%sResponse' % method:{'Result':fr}},
                            encoding = self.encoding,
                            config = self.config)
                except Fault, e:
                    # The invoked callable raised an explicit SOAP fault.
                    resp = buildSOAP(e, config = self.config)
                    status = 500
                except:
                    # Unexpected error during invocation: Server fault.
                    if self.config.debug:
                        import traceback
                        traceback.print_exc ()

                    resp = buildSOAP(Fault("%s:Server" % NS.ENV_T, \
                        "Method %s failed." % nsmethod,
                        "%s %s" % tuple(sys.exc_info()[0:2])),
                        encoding = self.encoding,
                        config = self.config)
                    status = 500
                else:
                    status = 200
        except Fault,e:
            # Envelope parsing produced a SOAP fault.
            resp = buildSOAP(e, encoding = self.encoding,
                config = self.config)
            status = 500
        except:
            # internal error, report as HTTP server error
            if self.config.debug:
                import traceback
                traceback.print_exc ()

            request.error(500)
            #self.send_response(500)
            #self.end_headers()
        else:
            # Push the serialized SOAP response back on the channel.
            # NOTE(review): ``status`` (200/500) is computed above but never
            # sent; the commented-out send_response() calls suggest this was
            # lost when porting from BaseHTTPServer to medusa.
            request['Content-Type'] = 'text/xml; charset="%s"' % self.encoding
            request.push(resp)
            request.done()

            # got a valid SOAP response
            #self.send_response(status)

            #self.send_header("Content-type",
            #    'text/xml; charset="%s"' % self.encoding)
            #self.send_header("Content-length", str(len(resp)))
            #self.end_headers()

            if self.config.debug:
                print "\n***SENDING***\n", resp, "*" * 13 + "\n"
                sys.stdout.flush()

            """
            # We should be able to shut down both a regular and an SSL
            # connection, but under Python 2.1, calling shutdown on an
            # SSL connections drops the output, so this work-around.
            # This should be investigated more someday.

            if self.config.SSLserver and \
                isinstance(self.connection, SSL.Connection):
                self.connection.set_shutdown(SSL.SSL_SENT_SHUTDOWN |
                    SSL.SSL_RECEIVED_SHUTDOWN)
            else:
                self.connection.shutdown(1)
            """

    def registerObject(self, object, namespace = ''):
        # Register *object* so its methods are callable under *namespace*.
        if namespace == '': namespace = self.namespace
        self.objmap[namespace] = object

    def registerFunction(self, function, namespace = '', funcName = None):
        # Register a bare function; funcName defaults to its __name__.
        if not funcName : funcName = function.__name__
        if namespace == '': namespace = self.namespace
        if self.funcmap.has_key(namespace):
            self.funcmap[namespace][funcName] = function
        else:
            self.funcmap[namespace] = {funcName : function}
class collector:
    """Gathers the body of a POST/PUT request until Content-Length bytes
    have arrived, then hands the complete body to the SOAP handler.

    Works with a medusa-style ``request`` object exposing get_header(),
    error() and a ``channel`` with set_terminator().
    """

    def __init__(self, handler, request):
        self.handler = handler
        self.request = request
        self.data = ''

        # make sure there's a content-length header -- without it we cannot
        # know where the body ends, so answer 411 (Length Required).
        cl = request.get_header('content-length')

        if not cl:
            request.error(411)
        else:
            # int() replaces the long-deprecated string.atoi() (identical
            # behavior for decimal strings, and portable to Python 3).
            cl = int(cl)
            # using a 'numeric' terminator: collect exactly cl bytes before
            # found_terminator() fires.
            self.request.channel.set_terminator(cl)

    def collect_incoming_data(self, data):
        # Accumulate body chunks as they arrive on the channel.
        self.data = self.data + data

    def found_terminator(self):
        # set the terminator back to the default line terminator, then hand
        # the complete body to the SOAP handler for dispatch.
        self.request.channel.set_terminator('\r\n\r\n')
        self.handler.continue_request(self.data, self.request)
if __name__ == '__main__':
    # Demo server: registers a toy object and serves SOAP on port 10080.
    import asyncore
    import http_server

    class Thing:
        def badparam(self, param):
            # Returns 1 for the magic value, otherwise a SOAP fault.
            if param == 'good param':
                return 1
            else:
                return Fault(faultstring='bad param')

        def dt(self, aDateTime):
            # Echo back the dateTime argument unchanged.
            return aDateTime

    thing = Thing()
    soaph = soap_handler()
    soaph.registerObject(thing)

    hs = http_server.http_server('', 10080)
    hs.install_handler(soaph)

    # Run the asyncore event loop forever (blocks).
    asyncore.loop()
|
intip/da-apps
|
plugins/da_centrallogin/modules/soappy/contrib/soap_handler.py
|
Python
|
gpl-2.0
| 8,099
|
import sys
import numpy as np
import torch

# torch.nn.modules useful for defining a MLIAPPY model.
from lammps.mliap.pytorch import TorchWrapper, IgnoreElems

# Read coefficients
# (skip_header=6 drops the metadata lines; the rest is one number per line:
# bias first, then the linear weights -- assumed from the slicing below.)
coeffs = np.genfromtxt("Ta06A.mliap.model",skip_header=6)

# Write coefficients to a pytorch linear model
bias = coeffs[0]
weights = coeffs[1:]
lin = torch.nn.Linear(weights.shape[0],1)
lin.to(torch.float64)
with torch.autograd.no_grad():
    # Overwrite the randomly initialized parameters in place (no autograd).
    lin.weight.set_(torch.from_numpy(weights).unsqueeze(0))
    lin.bias.set_(torch.as_tensor(bias,dtype=torch.float64).unsqueeze(0))

# Wrap the pytorch model for usage with mliappy coupling.
model = IgnoreElems(lin) # The linear module does not use the types.
n_descriptors = lin.weight.shape[1]
n_elements = 1
linked_model = TorchWrapper(model,n_descriptors=n_descriptors,n_elements=n_elements)
# Serialize the wrapped model for LAMMPS to load.
torch.save(linked_model,"Ta06A.mliap.pytorch.model.pt")
|
jeremiahyan/lammps
|
examples/mliap/convert_mliap_Ta06A.py
|
Python
|
gpl-2.0
| 874
|
from django.conf.urls import patterns, url

# Map the site root to the 'index' view of nodeshot's default UI.
# NOTE(review): patterns() with string view names was deprecated in
# Django 1.8 and removed in 1.10 -- confirm the pinned Django version.
urlpatterns = patterns('nodeshot.ui.default.views',
    url(r'^$', 'index', name='index'),  # noqa
)
|
sephiroth6/nodeshot
|
nodeshot/ui/default/urls.py
|
Python
|
gpl-3.0
| 146
|
{
'name': 'Base Kanban',
'category': 'Hidden',
'description': """
Odoo Web kanban view.
========================
""",
'version': '2.0',
'depends': ['web'],
'data' : [
'views/web_kanban.xml',
],
'qweb' : [
'static/src/xml/*.xml',
],
'auto_install': True
}
|
ghandiosm/Test
|
addons/web_kanban/__openerp__.py
|
Python
|
gpl-3.0
| 312
|
import ocl as cam
import camvtk
import time
import vtk
import datetime
if __name__ == "__main__":
    # --- scene setup: VTK window, camera, ambient light ------------------
    myscreen = camvtk.VTKScreen()
    myscreen.setAmbient(20,20,20)
    myscreen.camera.SetPosition(20, 30, 50)
    myscreen.camera.SetFocalPoint(5, 5, 0)

    # Load the STL model and display it as a grey wireframe.
    stl = camvtk.STLSurf(filename="stl/demo.stl")
    #stl = camvtk.STLSurf(filename="demo2.stl")
    print "STL surface read"
    myscreen.addActor(stl)
    stl.SetWireframe()
    stl.SetColor((0.5,0.5,0.5))
    #stl.SetFlat()

    # Convert the VTK polydata into an OpenCAMLib STLSurf.
    polydata = stl.src.GetOutput()
    s= cam.STLSurf()
    camvtk.vtkPolyData2OCLSTL(polydata, s)
    print "STLSurf with ", s.size(), " triangles"

    # Cylindrical cutter used for the drop-cutter runs.
    cutterDiameter=1
    cutter = cam.CylCutter(cutterDiameter)
    #print cutter.str()
    #print cc.type

    # Regular XY grid of cutter locations; cutter starts at z=-10.
    minx=0
    dx=1
    maxx=10
    miny=0
    dy=1
    maxy=10
    z=-10
    # Bucket size for the kd-tree triangle search.
    bucketSize = 20

    #pftp = cam.ParallelFinish()
    #pftp.initCLPoints(minx,dx,maxx,miny,dy,maxy,z)
    #pftp.initSTLSurf(s, bucketSize)
    #pftp.dropCutterSTL1(cutter)
    #print " made ", pftp.dcCalls, " drop-cutter calls"
    #exit

    # Run the kd-tree accelerated drop-cutter variant.
    pf2 = cam.ParallelFinish()
    pf2.initCLPoints(minx,dx,maxx,miny,dy,maxy,z)
    pf2.initSTLSurf(s, bucketSize)
    pf2.dropCutterSTL2(cutter)
    print " made ", pf2.dcCalls, " drop-cutter calls"

    #clpoints = pftp.getCLPoints()
    #ccpoints = pftp.getCCPoints()
    clpoints = pf2.getCLPoints()
    ccpoints = pf2.getCCPoints()
    #CLPointGrid(minx,dx,maxx,miny,dy,maxy,z)

    # Counters per cutter-contact (CC) type.
    nv=0
    nn=0
    ne=0
    nf=0

    # On-screen text overlays: clock (t), CL coords (t2), triangle counts
    # (t3) and camera info (t4).
    t = camvtk.Text()
    t.SetPos( (myscreen.width-200, myscreen.height-30) )
    myscreen.addActor( t)
    t2 = camvtk.Text()
    t2.SetPos( (myscreen.width-200, 30) )
    myscreen.addActor( t2)
    t3 = camvtk.Text()
    t3.SetPos( (30, 30))
    myscreen.addActor( t3)
    t4 = camvtk.Text()
    t4.SetPos( (30, myscreen.height-60))
    myscreen.addActor( t4)

    n=0
    precl = cam.Point()
    #w2if = vtk.vtkWindowToImageFilter()
    #w2if.SetInput(myscreen.renWin)
    #lwr = vtk.vtkPNGWriter()
    #lwr.SetInput( w2if.GetOutput() )
    #w2if.Modified()
    #lwr.SetFileName("tux1.png")

    # Animate one frame per (cutter location, cutter contact) pair.
    for cl,cc in zip(clpoints,ccpoints):
        camEye = myscreen.camera.GetFocalPoint()
        camPos = myscreen.camera.GetPosition()
        postext = "(%3.3f, %3.3f, %3.3f)" % (camPos[0], camPos[1], camPos[2])
        eyetext = "(%3.3f, %3.3f, %3.3f)" % (camEye[0], camEye[1], camEye[2])
        camtext = "Camera LookAt: "+eyetext+"\nCamera Pos: "+ postext
        t4.SetText(camtext)
        t.SetText(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        xtext = "%3.3f" % cl.x
        ytext = "%3.3f" % cl.y
        ztext = "%3.3f" % cl.z
        t2.SetText( "X: " + xtext + "\nY: " + ytext + "\nZ: " + ztext )

        # Color the CL marker sphere by the type of cutter contact.
        if cc.type==cam.CCType.FACET:
            nf+=1
            col = (0,1,1)
        elif cc.type == cam.CCType.VERTEX:
            nv+=1
            col = (0,1,0)
        elif cc.type == cam.CCType.EDGE:
            ne+=1
            col = (1,0,0)
        elif cc.type == cam.CCType.NONE:
            #print "type=NONE!"
            nn+=1
            col = (1,1,1)

        #if cl.isInside(t):
        #    col = (0, 1, 0)
        #else:
        #    col = (1, 0, 0)

        # Highlight (red) the triangles the kd-tree reports under the cutter.
        trilist = pf2.getTrianglesUnderCutter(cl, cutter)
        #print "at cl=", cl.str() , " where len(trilist)=", len(trilist)
        t3.SetText("Total Triangles: "+ str(s.size()) +"\nUnder Cutter (red): "+str(len(trilist)))
        stl2 = camvtk.STLSurf(filename=None, triangleList=trilist, color=(1,0,0)) # a new surface with only triangles under cutter
        stl2.SetWireframe()
        #stl2.SetFlat()
        myscreen.addActor(stl2)
        trilist=[]

        # Draw the cutter itself and the colored CL marker.
        cutactor = camvtk.Cylinder(center=(cl.x,cl.y,cl.z),
                                   radius=cutterDiameter/2,
                                   height=20,
                                   rotXYZ=(90,0,0),
                                   color=(0.7,0,1))
        myscreen.addActor( cutactor )
        myscreen.addActor( camvtk.Sphere(center=(cl.x,cl.y,cl.z) ,radius=0.03, color=col) )
        """
        if n==0:
            precl = cl
        else:
            d = cl-precl
            if (d.norm() < 90):
                myscreen.addActor( camvtk.Line( p1=(precl.x, precl.y, precl.z), p2=(cl.x, cl.y, cl.z), color=(0,1,1) ) )
            precl = cl
        """
        n=n+1
        #myscreen.addActor( camvtk.Point(center=(cl2.x,cl2.y,cl2.z+0.2) , color=(0.6,0.2,0.9)) )
        #myscreen.addActor( camvtk.Point(center=(cc.x,cc.y,cc.z), color=col) )
        #print cc.type
        #myscreen.camera.Azimuth( 0.2 )
        time.sleep(0.1)
        myscreen.render()
        #w2if.Modified()
        #lwr.SetFileName("kdbig"+ ('%05d' % n)+".png")
        #lwr.Write()
        #raw_input("Press Enter to continue")
        # Remove the per-frame actors before the next iteration.
        myscreen.removeActor(stl2)
        myscreen.removeActor( cutactor )

    # Summary: the per-type counts should sum to the number of CL points.
    print "none=",nn," vertex=",nv, " edge=",ne, " facet=",nf, " sum=", nn+nv+ne+nf
    print len(clpoints), " cl points evaluated"
    #lwr.Write()

    # Final fly-around: rotate the camera one degree per frame.
    for n in range(1,36):
        t.SetText(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
        myscreen.camera.Azimuth( 1 )
        time.sleep(0.01)
        myscreen.render()
        #lwr.SetFileName("kd_frame"+ ('%03d' % n)+".png")
        #w2if.Modified()
        #lwr.Write()

    # Hand control to the VTK interactor (blocks until window closed).
    myscreen.iren.Start()
    raw_input("Press Enter to terminate")
|
tectronics/opencamlib
|
scripts/kdtree_movie2.py
|
Python
|
gpl-3.0
| 5,564
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2015 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Data used by setup.py and scripts/freeze.py."""
import sys
import re
import ast
import os
import os.path
import subprocess
# Make the repository root importable when running this script directly.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))

# Python 2/3 compatibility: builtin open() only accepts ``encoding`` on
# Python 3, so fall back to codecs.open() on Python 2.
if sys.hexversion >= 0x03000000:
    _open = open
else:
    import codecs
    _open = codecs.open

# Repository root: the parent of the directory containing this script.
BASEDIR = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                       os.path.pardir)
def read_file(name):
    """Return the full text of the file *name*, decoded as UTF-8."""
    with _open(name, 'r', encoding='utf-8') as fileobj:
        return fileobj.read()
def _get_constant(name):
    """Read a __magic__ constant from qutebrowser/__init__.py.

    We don't import qutebrowser here because it can go wrong for multiple
    reasons; instead the value is pulled straight out of the source text
    with a regex and evaluated safely via ast.literal_eval.

    Args:
        name: The name of the constant to read (without the underscores).

    Return:
        The value of the constant.
    """
    pattern = re.compile(r'__{}__\s+=\s+(.*)'.format(re.escape(name)))
    init_path = os.path.join(BASEDIR, 'qutebrowser', '__init__.py')
    match = pattern.search(read_file(init_path))
    return ast.literal_eval(match.group(1))
def _git_str():
    """Try to find out git version.

    Return:
        A string "<commit-id> (<date>)", or None if there was an error or
        we're not in a git repo.
    """
    # Bail out early when there is no checkout to interrogate.
    if BASEDIR is None:
        return None
    if not os.path.isdir(os.path.join(BASEDIR, ".git")):
        return None
    try:
        commit = subprocess.check_output(
            ['git', 'describe', '--tags', '--dirty', '--always'],
            cwd=BASEDIR).decode('UTF-8').strip()
        timestamp = subprocess.check_output(
            ['git', 'show', '-s', '--format=%ci', 'HEAD'],
            cwd=BASEDIR).decode('UTF-8').strip()
    except (subprocess.CalledProcessError, OSError):
        return None
    return '{} ({})'.format(commit, timestamp)
def write_git_file():
    """Write the git-commit-id file with the current commit."""
    # Fall back to an empty marker file when git info is unavailable.
    commit_info = _git_str()
    if commit_info is None:
        commit_info = ''
    target = os.path.join(BASEDIR, 'qutebrowser', 'git-commit-id')
    with _open(target, 'w', encoding='ascii') as outfile:
        outfile.write(commit_info)
# Keyword arguments handed to setuptools.setup() by setup.py; most values
# are read from qutebrowser/__init__.py via _get_constant().
setupdata = {
    'name': 'qutebrowser',
    # Version string assembled from the version_info tuple.
    'version': '.'.join(map(str, _get_constant('version_info'))),
    'description': _get_constant('description'),
    'long_description': read_file('README.asciidoc'),
    'url': 'http://www.qutebrowser.org/',
    'requires': ['pypeg2', 'jinja2', 'pygments', 'PyYAML'],
    'author': _get_constant('author'),
    'author_email': _get_constant('email'),
    'license': _get_constant('license'),
    'classifiers': [
        'Development Status :: 3 - Alpha',
        'Environment :: X11 Applications :: Qt',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: GNU General Public License v3 or later '
            '(GPLv3+)',
        'Natural Language :: English',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: Microsoft :: Windows :: Windows XP',
        'Operating System :: Microsoft :: Windows :: Windows 7',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Internet',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Browsers',
    ],
    'keywords': 'pyqt browser web qt webkit',
}
|
Kingdread/qutebrowser
|
scripts/setupcommon.py
|
Python
|
gpl-3.0
| 4,251
|
"""Consolidate account sync status columns
Revision ID: 4f57260602c9
Revises: 5143154fb1a2
Create Date: 2014-07-17 06:07:08.339740
"""
# revision identifiers, used by Alembic.
revision = '4f57260602c9'
down_revision = '4b4c5579c083'
from alembic import op
import sqlalchemy as sa
from bson import json_util
def upgrade():
    """Fold sync_start_time/sync_end_time into one JSON _sync_status column."""
    from inbox.sqlalchemy_ext.util import JSON, MutableDict
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    from inbox.models.session import session_scope
    from sqlalchemy.ext.declarative import declarative_base

    # New nullable JSON column that will hold the consolidated status dict.
    op.add_column('account',
                  sa.Column('_sync_status', MutableDict.as_mutable(JSON()),
                            default={}, nullable=True))

    # Reflect the live schema so accounts can be queried without app models.
    Base = declarative_base()
    Base.metadata.reflect(engine)

    class Account(Base):
        __table__ = Base.metadata.tables['account']

    with session_scope(versioned=False) \
            as db_session:
        for acct in db_session.query(Account):
            # NOTE(review): str() turns a NULL timestamp into the literal
            # string 'None', and json_util.dumps() stores a JSON *string*
            # inside a JSON column -- confirm downstream readers expect
            # this encoding.
            d = dict(sync_start_time=str(acct.sync_start_time),
                     sync_end_time=str(acct.sync_end_time))
            acct._sync_status = json_util.dumps(d)

        db_session.commit()

    # The old columns are superseded by _sync_status.
    op.drop_column('account', 'sync_start_time')
    op.drop_column('account', 'sync_end_time')
def downgrade():
    """Irreversible migration: the dropped columns cannot be restored."""
    raise Exception("Clocks don't rewind, we don't undo.")
|
nylas/sync-engine
|
migrations/versions/057_consolidate_account_sync_status_columns.py
|
Python
|
agpl-3.0
| 1,396
|
# -*- coding: utf-8 -*-
# © 2015 Antiun Ingenieria S.L. - Antonio Espinosa
# © 2015 Antiun Ingenieria S.L. - Jairo Llopis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import models, api
import logging
logger = logging.getLogger(__name__)
class NutsImport(models.TransientModel):
    """Extend the generic NUTS importer with Spain-specific state mapping."""
    _inherit = 'nuts.import'

    # NUTS code -> XML id of the matching l10n_es_toponyms province record.
    # Several island entries intentionally share one province (e.g. all
    # Balearic codes map to ES07, Canary islands to ES35/ES38).
    _es_state_map = {
        'ES111': 'l10n_es_toponyms.ES15',  # A Coruña
        'ES112': 'l10n_es_toponyms.ES27',  # Lugo
        'ES113': 'l10n_es_toponyms.ES32',  # Ourense
        'ES114': 'l10n_es_toponyms.ES36',  # Pontevedra
        'ES120': 'l10n_es_toponyms.ES33',  # Asturias
        'ES130': 'l10n_es_toponyms.ES39',  # Cantabria
        'ES211': 'l10n_es_toponyms.ES01',  # Araba/Álava
        'ES212': 'l10n_es_toponyms.ES20',  # Gipuzkoa
        'ES213': 'l10n_es_toponyms.ES48',  # Bizkaia
        'ES220': 'l10n_es_toponyms.ES31',  # Navarra
        'ES230': 'l10n_es_toponyms.ES26',  # La Rioja
        'ES241': 'l10n_es_toponyms.ES22',  # Huesca
        'ES242': 'l10n_es_toponyms.ES44',  # Teruel
        'ES243': 'l10n_es_toponyms.ES50',  # Zaragoza
        'ES300': 'l10n_es_toponyms.ES28',  # Madrid
        'ES411': 'l10n_es_toponyms.ES05',  # Ávila
        'ES412': 'l10n_es_toponyms.ES09',  # Burgos
        'ES413': 'l10n_es_toponyms.ES24',  # León
        'ES414': 'l10n_es_toponyms.ES34',  # Palencia
        'ES415': 'l10n_es_toponyms.ES37',  # Salamanca
        'ES416': 'l10n_es_toponyms.ES40',  # Segovia
        'ES417': 'l10n_es_toponyms.ES42',  # Soria
        'ES418': 'l10n_es_toponyms.ES47',  # Valladolid
        'ES419': 'l10n_es_toponyms.ES49',  # Zamora
        'ES421': 'l10n_es_toponyms.ES02',  # Albacete
        'ES422': 'l10n_es_toponyms.ES13',  # Ciudad Real
        'ES423': 'l10n_es_toponyms.ES16',  # Cuenca
        'ES424': 'l10n_es_toponyms.ES19',  # Guadalajara
        'ES425': 'l10n_es_toponyms.ES45',  # Toledo
        'ES431': 'l10n_es_toponyms.ES06',  # Badajoz
        'ES432': 'l10n_es_toponyms.ES10',  # Cáceres
        'ES511': 'l10n_es_toponyms.ES08',  # Barcelona
        'ES512': 'l10n_es_toponyms.ES17',  # Girona
        'ES513': 'l10n_es_toponyms.ES25',  # Lleida
        'ES514': 'l10n_es_toponyms.ES43',  # Tarragona
        'ES521': 'l10n_es_toponyms.ES03',  # Alicante / Alacant
        'ES522': 'l10n_es_toponyms.ES12',  # Castellón / Castelló
        'ES523': 'l10n_es_toponyms.ES46',  # Valencia / València
        'ES531': 'l10n_es_toponyms.ES07',  # Eivissa y Formentera
        'ES532': 'l10n_es_toponyms.ES07',  # Mallorca
        'ES533': 'l10n_es_toponyms.ES07',  # Menorca
        'ES611': 'l10n_es_toponyms.ES04',  # Almería
        'ES612': 'l10n_es_toponyms.ES11',  # Cádiz
        'ES613': 'l10n_es_toponyms.ES14',  # Córdoba
        'ES614': 'l10n_es_toponyms.ES18',  # Granada
        'ES615': 'l10n_es_toponyms.ES21',  # Huelva
        'ES616': 'l10n_es_toponyms.ES23',  # Jaén
        'ES617': 'l10n_es_toponyms.ES29',  # Málaga
        'ES618': 'l10n_es_toponyms.ES41',  # Sevilla
        'ES620': 'l10n_es_toponyms.ES30',  # Murcia
        'ES630': 'l10n_es_toponyms.ES51',  # Ceuta
        'ES640': 'l10n_es_toponyms.ES52',  # Melilla
        'ES703': 'l10n_es_toponyms.ES38',  # El Hierro
        'ES704': 'l10n_es_toponyms.ES35',  # Fuerteventura
        'ES705': 'l10n_es_toponyms.ES35',  # Gran Canaria
        'ES706': 'l10n_es_toponyms.ES38',  # La Gomera
        'ES707': 'l10n_es_toponyms.ES38',  # La Palma
        'ES708': 'l10n_es_toponyms.ES35',  # Lanzarote
        'ES709': 'l10n_es_toponyms.ES38',  # Tenerife
        'ESZZZ': False,  # Extra-Regio NUTS 3
    }

    @api.model
    def state_mapping(self, data, node):
        """Add ``state_id`` for Spanish level-4 NUTS entries.

        Falls through to the generic mapping for other countries/levels;
        unknown codes (or codes mapped to False) leave the mapping as-is.
        """
        mapping = super(NutsImport, self).state_mapping(data, node)
        level = data.get('level', 0)
        code = data.get('code', '')
        if self._current_country.code == 'ES' and level == 4:
            toponyms = self._es_state_map.get(code, False)
            if toponyms:
                state = self.env.ref(toponyms)
                if state:
                    mapping['state_id'] = state.id
        return mapping
|
RamonGuiuGou/l10n-spain
|
l10n_es_location_nuts/wizard/nuts_import.py
|
Python
|
agpl-3.0
| 4,129
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import threading
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.osv import osv
from openerp.api import Environment
_logger = logging.getLogger(__name__)
class procurement_compute_all(osv.osv_memory):
    """Wizard that runs the procurement scheduler for every company of
    the current user, in a background thread."""
    _name = 'procurement.order.compute.all'
    _description = 'Compute all schedulers'

    def _procure_calculation_all(self, cr, uid, ids, context=None):
        """Run the scheduler for each company; executed in a worker thread.

        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        with Environment.manage():
            proc_obj = self.pool.get('procurement.order')
            #As this function is in a new thread, i need to open a new cursor, because the old one may be closed
            new_cr = self.pool.cursor()
            scheduler_cron_id = self.pool['ir.model.data'].get_object_reference(new_cr, SUPERUSER_ID, 'procurement', 'ir_cron_scheduler_action')[1]
            # Avoid to run the scheduler multiple times in the same time
            try:
                with tools.mute_logger('openerp.sql_db'):
                    # Row-lock the cron record; NOWAIT raises immediately if
                    # another worker already holds the lock.
                    new_cr.execute("SELECT id FROM ir_cron WHERE id = %s FOR UPDATE NOWAIT", (scheduler_cron_id,))
            except Exception:
                # Lock not acquired: another scheduler run is in progress.
                _logger.info('Attempt to run procurement scheduler aborted, as already running')
                new_cr.rollback()
                new_cr.close()
                return {}

            user = self.pool.get('res.users').browse(new_cr, uid, uid, context=context)
            comps = [x.id for x in user.company_ids]
            for comp in comps:
                proc_obj.run_scheduler(new_cr, uid, use_new_cursor=new_cr.dbname, company_id = comp, context=context)
            #close the new cursor
            new_cr.close()
            return {}

    def procure_calculation(self, cr, uid, ids, context=None):
        """Launch the computation in a background thread and close the
        client window immediately.

        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        """
        threaded_calculation = threading.Thread(target=self._procure_calculation_all, args=(cr, uid, ids, context))
        threaded_calculation.start()
        return {'type': 'ir.actions.act_window_close'}
|
addition-it-solutions/project-all
|
addons/procurement/wizard/schedulers_all.py
|
Python
|
agpl-3.0
| 3,388
|
class C(object):
    """Minimal class whose no-op method is called by global_func below.

    NOTE(review): judging by its simplicity, this looks like a debugger
    step-out test fixture -- keep the structure exactly as-is.
    """
    def method(self):
        pass
def global_func(c):
    """Invoke ``c.method()``; *c* must expose a no-argument method()."""
    c.method()
# Module-level driver: construct an instance and exercise it on import.
c = C()
global_func(c)
|
Microsoft/PTVS
|
Python/Tests/GlassTests/PythonTests/Python/StepOut/py_mod.py
|
Python
|
apache-2.0
| 113
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================
"""XLA LiteralProto utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.xla import xla_data_pb2
from tensorflow.compiler.xla.python_api import types
from tensorflow.compiler.xla.python_api import xla_shape
def ConvertLiteralToNumpyArray(literal):
  """Converts a XLA literal to a Numpy array (tuples map to Python tuples)."""
  element_type = literal.shape.element_type
  # Tuples are converted element-wise, recursively.
  if element_type == xla_data_pb2.TUPLE:
    return tuple(
        ConvertLiteralToNumpyArray(subliteral)
        for subliteral in literal.tuple_literals)

  type_record = types.MAP_XLA_TYPE_TO_RECORD[element_type]
  if not literal.shape.dimensions:
    # Scalar literal: the repeated field holds exactly one element.
    return np.array(
        getattr(literal, type_record.literal_field_name)[0],
        type_record.numpy_dtype)
  else:
    # Infer the proper Numpy order from the LiteralProto's layout. The repeated
    # field representing the array's content in the Literal is linearized.
    # Reading is done in two steps:
    #
    # 1. Read the array as 1D from the LiteralProto repeated field.
    # 2. Reshape the array to its proper shape, using the right order depending
    #    on the LiteralProto's layout.
    layout_order = literal.shape.layout.minor_to_major
    numpy_shape = tuple(literal.shape.dimensions)
    # BUG FIX: on Python 3, ``range()`` is a lazy object that never compares
    # equal to the repeated proto field, so both branches failed and every
    # multi-dimensional literal raised NotImplementedError. Compare against
    # concrete lists instead (correct on Python 2 and 3).
    rank = len(literal.shape.dimensions)
    if layout_order == list(range(rank)):
      numpy_reshaper = lambda arr: arr.reshape(numpy_shape, order='F')
    elif layout_order == list(range(rank - 1, -1, -1)):
      numpy_reshaper = lambda arr: arr.reshape(numpy_shape, order='C')
    else:
      raise NotImplementedError('Unsupported layout: {0}'.format(layout_order))
    ndarray = np.array(
        getattr(literal, type_record.literal_field_name),
        copy=False,
        dtype=type_record.numpy_dtype)
    return numpy_reshaper(ndarray)
def _ConvertNumpyArrayToLiteral(ndarray):
  """Converts a (non-tuple) Numpy array to a XLA literal."""
  type_record = types.MAP_DTYPE_TO_RECORD[str(ndarray.dtype)]
  literal = xla_data_pb2.LiteralProto()
  literal.shape.CopyFrom(xla_shape.CreateShapeFromNumpy(ndarray).message)

  if ndarray.ndim == 0:
    # Scalar: store the single element.  .item() replaces np.asscalar(),
    # which was deprecated in NumPy 1.16 and removed in 1.23; behavior is
    # identical (asscalar was implemented as a.item()).
    getattr(literal, type_record.literal_field_name).append(
        ndarray.astype(type_record.literal_field_type).item())
  else:
    # Ndarrays with boolean dtypes need special type conversion with protobufs
    if ndarray.dtype in {np.bool_, np.dtype('bool')}:
      for element in np.nditer(ndarray):
        getattr(literal, type_record.literal_field_name).append(
            type_record.literal_field_type(element))
    else:
      # 'A' preserves the in-memory linearization expected by the layout.
      ndarray_flat = ndarray.ravel(order='A')
      getattr(literal, type_record.literal_field_name).extend(ndarray_flat)
  return literal
def ConvertNumpyArrayToLiteral(value):
  """Converts a Numpy array or a nested tuple thereof to an XLA literal."""
  # Non-tuple values go straight to the array converter.
  if not isinstance(value, tuple):
    return _ConvertNumpyArrayToLiteral(value)
  # Tuples become a tuple literal whose components are converted recursively.
  literal = xla_data_pb2.LiteralProto()
  literal.shape.CopyFrom(xla_shape.CreateShapeFromNumpy(value).message)
  for component in value:
    literal.tuple_literals.add().CopyFrom(ConvertNumpyArrayToLiteral(component))
  return literal
|
lukeiwanski/tensorflow
|
tensorflow/compiler/xla/python_api/xla_literal.py
|
Python
|
apache-2.0
| 3,881
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.platform import googletest
from tensorflow.python.platform import resource_loader
class DefaultResourceLoaderTest(googletest.TestCase):
    """Tests for the default resource_loader behavior."""

    def test_exception(self):
        # load_resource must raise IOError for a nonexistent path.
        with self.assertRaises(IOError):
            resource_loader.load_resource("/fake/file/path/dne")
# Standard googletest entry point when run as a script.
if __name__ == "__main__":
    googletest.main()
|
sachinpro/sachinpro.github.io
|
tensorflow/python/platform/resource_loader_test.py
|
Python
|
apache-2.0
| 1,122
|
from AnyQt.QtWidgets import QAction, QToolButton
from .. import test
from ..toolgrid import ToolGrid
class TestToolGrid(test.QAppTestCase):
    """GUI tests for ToolGrid: action insertion, removal and layout order."""

    def test_tool_grid(self):
        w = ToolGrid()
        w.show()
        self.app.processEvents()

        def buttonsOrderedVisual():
            # Process layout events so the buttons have right positions
            self.app.processEvents()
            buttons = w.findChildren(QToolButton)
            # Row-major visual order: top-to-bottom, then left-to-right.
            return sorted(buttons, key=lambda b: (b.y(), b.x()))

        def buttonsOrderedLogical():
            return list(map(w.buttonForAction, w.actions()))

        def assertOrdered():
            # The logical action order must match the on-screen order.
            self.assertSequenceEqual(buttonsOrderedLogical(),
                                     buttonsOrderedVisual())

        action_a = QAction("A", w)
        action_b = QAction("B", w)
        action_c = QAction("C", w)
        action_d = QAction("D", w)

        # Insert before and after existing actions.
        w.addAction(action_b)
        w.insertAction(0, action_a)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b])
        assertOrdered()

        w.addAction(action_d)
        w.insertAction(action_d, action_c)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_c, action_d])
        assertOrdered()

        # Removal keeps the remaining actions in order.
        w.removeAction(action_c)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_d])
        assertOrdered()

        w.removeAction(action_a)
        self.assertSequenceEqual(w.actions(),
                                 [action_b, action_d])
        assertOrdered()

        w.insertAction(0, action_a)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_d])
        assertOrdered()

        # Ordering must survive a column-count change.
        w.setColumnCount(2)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_d])
        assertOrdered()

        w.insertAction(2, action_c)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_c, action_d])
        assertOrdered()

        w.clear()

        # test no 'before' action edge case
        w.insertAction(0, action_a)
        self.assertIs(action_a, w.actions()[0])
        w.insertAction(1, action_b)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b])

        w.clear()

        # setActions replaces the whole action set in one call.
        w.setActions([action_a, action_b, action_c, action_d])
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_c, action_d])
        assertOrdered()

        triggered_actions = []

        def p(action):
            print(action.text())

        # Triggering an action must reach all connected slots.
        w.actionTriggered.connect(p)
        w.actionTriggered.connect(triggered_actions.append)
        action_a.trigger()

        w.show()
        # NOTE(review): exec_() blocks until the window is closed -- this
        # test appears intended for interactive/manual runs.
        self.app.exec_()
|
cheral/orange3
|
Orange/canvas/gui/tests/test_toolgrid.py
|
Python
|
bsd-2-clause
| 2,931
|
#=======================================================================
# BitStruct.py
#=======================================================================
from __future__ import print_function
from Bits import Bits
#=======================================================================
# MetaBitStruct
#=======================================================================
class MetaBitStruct( type ):
    """Metaclass that turns a BitStructDefinition into BitStruct classes.

    ``__init__`` captures the user's class dict; ``__call__`` intercepts
    instantiation of the definition class and instead creates (and returns
    an instance of) a new BitStruct subclass whose BitField attributes have
    been converted into bit-slice properties.

    NOTE(review): this is Python 2-only code (``__metaclass__`` protocol,
    tuple-unpacking lambda passed to ``list.sort`` as a cmp function).
    """
    #---------------------------------------------------------------------
    # __new__
    #---------------------------------------------------------------------
    #def __new__( meta, classname, supers, classdict ):
    #  #print( "- Meta NEW", classname ) # DEBUG
    #  return type.__new__( meta, classname, supers, classdict )

    #---------------------------------------------------------------------
    # __init__
    #---------------------------------------------------------------------
    # Use __init__ instead of __new__ because we are saving the classdict
    # for later use during __call__. Class attributes and instance
    # methods don't show up in the classdict during __new__!
    def __init__( meta, classname, supers, classdict ):
        #print( "- Meta INIT", classname ) # DEBUG
        # Save the classdict of the BitStructDefinition class (aka. the
        # user's definition for a BitStruct). We'll need this to add the
        # class constants to the new BitStruct class we create later.
        # Private names (leading underscore) are deliberately excluded.
        #
        # TODO: should we leave __module__?
        meta._classdict = {key: val for key, val in classdict.items()
                           if not key.startswith('_')}
        return type.__init__( meta, classname, supers, classdict )

    #---------------------------------------------------------------------
    # __call__
    #---------------------------------------------------------------------
    # Takes an instantiation of type BitStructDefinition, and generates a
    # new subclass BitStruct. Returns an instance of the newly created
    # BitStruct class.
    #
    # This approach is necessary because Python properties (our bitfields)
    # are per class, not per instance. This requires creating a new class
    # type (not instance!) and dynamically adding properties to it.
    # This is necessary so that each instance of the message has its
    # properties set. More details can be found here:
    #
    #   http://stackoverflow.com/a/1633363
    #
    def __call__( self, *args, **kwargs ):
        #print( "- Meta CALL", args ) # DEBUG
        # Instantiate the user-created BitStructDefinition class
        def_inst = super( MetaBitStruct, self ).__call__( *args, **kwargs )
        # Get all the members of type BitField from the BitStructDefinition
        # instance. Sort them by order of declaration (stored by the
        # BitField objects). TODO: sort objects in dictionary..
        # NOTE(review): the cmp sorts *descending* by declaration id, so the
        # last-declared field gets the lowest bit positions and the
        # first-declared field ends up in the most-significant bits --
        # presumably intentional; confirm.
        fields = [(name, obj) for name, obj in
                  def_inst.__dict__.items() if isinstance( obj, BitField )]
        fields.sort( lambda (n1, o1), (n2, o2) : cmp(o2.id, o1.id) )
        # Get the total size of the BitStruct
        nbits = sum( [ f.nbits for name, f in fields ] )
        # Create the new BitStruct class, then instantiate it.
        # Class name encodes the constructor args, e.g. "MyMsg_32_32".
        name_prfx = def_inst.__class__.__name__
        name_sufx = '_'.join(str(x) for x in args)
        class_name = "{}_{}".format( name_prfx, name_sufx )
        bitstruct_class = type( class_name, ( BitStruct, ), self._classdict )
        # Keep track of bit positions for each bitfield
        start_pos = 0
        bitstruct_class._bitfields = {}
        # Transform attributes containing BitField objects into properties,
        # when accessed they return slices of the underlying value
        for attr_name, bitfield in fields:
            # Calculate address range, update start_pos
            end_pos = start_pos + bitfield.nbits
            addr = slice( start_pos, end_pos )
            start_pos = end_pos
            # Add slice to bitfields
            bitstruct_class._bitfields[ attr_name ] = addr
            # Create a getter to assign to the property.
            # A factory function is used so that each property captures its
            # own addr (avoids the late-binding closure pitfall).
            def create_getter( addr ):
                return lambda self : self.__getitem__( addr )
            # Create a setter to assign to the property
            # TODO: not needed when returning ConnectionSlice and accessing .value
            def create_setter( addr ):
                return lambda self, value: self.__setitem__( addr, value )
            # Add the property to the class
            setattr( bitstruct_class, attr_name,
                     property( create_getter( addr ),
                               create_setter( addr )
                             )
                   )
        # Preserve a user-supplied __str__, if any, on the generated class.
        if '__str__' in def_inst.__class__.__dict__:
            bitstruct_class.__str__ = def_inst.__class__.__dict__['__str__']
        # Return an instance of the new BitStruct class
        bitstruct_inst = bitstruct_class( nbits )
        # TODO: hack for verilog translation!
        # Record enough metadata to re-instantiate the definition later.
        bitstruct_inst._module = def_inst.__class__.__module__
        bitstruct_inst._classname = def_inst.__class__.__name__
        bitstruct_inst._instantiate = '{class_name}{args}'.format(
            class_name = def_inst.__class__.__name__,
            args = args,
        )
        # Keyword arguments are not supported by the metadata above.
        assert not kwargs
        return bitstruct_inst
#=======================================================================
# BitStructDefinition
#=======================================================================
# Users wishing to define a new BitStruct type should create a new class
# which subclasses BitStructDefinition, then define fields using the
# BitField type (below). The parameterizable BitStructDefinition defined
# by the user is then used to create new classes of type BitStruct.
#
class BitStructDefinition( object ):
    """Base class for user-defined BitStruct types.

    Subclass this and declare fields with BitField; instantiating the
    subclass is intercepted by MetaBitStruct, which returns an instance of
    a freshly generated BitStruct class instead.
    """
    # Python 2 metaclass hook (ignored on Python 3).
    __metaclass__ = MetaBitStruct
#=======================================================================
# BitField
#=======================================================================
# Defines a bit field when creating a BitStructDefinition.
#
class BitField( object ):
    """Declares one field of a BitStructDefinition.

    Each instance records its width in bits plus a monotonically
    increasing id so declaration order can be recovered later.
    """
    # Class-level counter stamping declaration order onto instances.
    # http://stackoverflow.com/a/2014002
    ids = 0

    def __init__( self, nbits ):
        self.nbits = nbits
        self.id    = BitField.ids
        BitField.ids = self.id + 1
#=======================================================================
# BitStruct
#=======================================================================
# Superclass of BitStruct classes/objects generated by calling classes
# of type BitStructDefinition.
#
class BitStruct( Bits ):
    """Superclass of BitStruct classes/objects generated by calling classes
    of type BitStructDefinition (see MetaBitStruct)."""

    #---------------------------------------------------------------------
    # bitfields
    #---------------------------------------------------------------------
    # Allow interrogation of all bitfields in a BitStruct.
    #
    @property
    def bitfields( self ):
        # Maps field name -> bit slice; populated by MetaBitStruct.
        return self._bitfields

    #---------------------------------------------------------------------
    # __call__
    #---------------------------------------------------------------------
    # Allows BitStruct objects to act as both instances and types. Calling
    # a BitStruct instance generates a new instance of that object. This
    # allows the following syntax
    #
    #   # MyBitStruct acting as a type
    #   dtype = MyBitStruct( 32, 32 )
    #   msg_inst1 = dtype()
    #
    #   # MyBitStruct acting as an instance
    #   msg_inst2 = MyBitStruct( 32, 32 )
    #   msg_inst.fieldA = 12
    #   msg_inst.fieldB = 32
    #
    def __call__( self ):
        #print( "-CALL", type( self ) )
        # Fresh instance of the same generated class and bit width.
        return type( self )( self.nbits )

    #---------------------------------------------------------------------
    # __hash__
    #---------------------------------------------------------------------
    def __hash__( self ):
        # Structs hash equal iff generated class name and raw value match.
        return hash( (self.__class__.__name__, self._uint) )
|
Abhinav117/pymtl
|
pymtl/datatypes/BitStruct.py
|
Python
|
bsd-3-clause
| 7,460
|
# stdlib
import os
from random import random, randrange
import subprocess
import sys
import time
import unittest
import urllib as url
# 3p
from nose.plugins.attrib import attr
# project
# needed because of the subprocess calls
sys.path.append(os.getcwd())
from ddagent import Application
from util import Watchdog
@attr(requires='core_integration')
class TestWatchdog(unittest.TestCase):
    """Test watchdog in various conditions.

    Each scenario re-launches this file in a subprocess (see the
    ``__main__`` dispatch at the bottom of the module) and checks how long
    the child survives before the Watchdog kills it.
    """
    # Multiplier applied to each scenario's nominal timeout to absorb
    # scheduling jitter on loaded machines.
    JITTER_FACTOR = 2

    def test_watchdog(self):
        """Verify that watchdog kills ourselves even when spinning
        Verify that watchdog kills ourselves when hanging
        """
        # Busy loop: the child spins forever and must be killed (non-zero
        # exit turns into CalledProcessError).
        start = time.time()
        try:
            subprocess.check_call(["python", __file__, "busy"], stderr=subprocess.STDOUT)
            raise Exception("Should have died with an error")
        except subprocess.CalledProcessError:
            duration = int(time.time() - start)
            self.assertTrue(duration < self.JITTER_FACTOR * 5)

        # Start pseudo web server so the "net" child has something to hang
        # on. NOTE(review): the nc process is never terminated or reaped.
        subprocess.Popen(["nc", "-l", "31834"])
        start = time.time()
        try:
            subprocess.check_call(["python", __file__, "net"])
            raise Exception("Should have died with an error")
        except subprocess.CalledProcessError:
            duration = int(time.time() - start)
            self.assertTrue(duration < self.JITTER_FACTOR * 5)

        # Normal loop, should run 5 times: resets every second keep the
        # watchdog quiet, so the child must exit cleanly.
        start = time.time()
        try:
            subprocess.check_call(["python", __file__, "normal"])
            duration = int(time.time() - start)
            self.assertTrue(duration < self.JITTER_FACTOR * 5)
        except subprocess.CalledProcessError:
            self.fail("Watchdog killed normal process after %s seconds" % int(time.time() - start))

        # Fast tornado, not killed
        start = time.time()
        p = subprocess.Popen(["python", __file__, "fast"])
        p.wait()
        duration = int(time.time() - start)
        # should die as soon as flush_trs has been called
        self.assertTrue(duration < self.JITTER_FACTOR * 10)

        # Slow tornado, killed by the Watchdog
        start = time.time()
        p = subprocess.Popen(["python", __file__, "slow"])
        p.wait()
        duration = int(time.time() - start)
        self.assertTrue(duration < self.JITTER_FACTOR * 4)
class MockTxManager(object):
    """Transaction-manager stub whose flush stalls, then exits the process."""
    def flush(self):
        "Pretend to flush for a long time"
        # Sleep long enough for a short Watchdog (< 5s) to fire first;
        # exit cleanly (status 0) only if it never does.
        time.sleep(5)
        sys.exit(0)
class MemoryHogTxManager(object):
    """Transaction-manager stub that allocates memory without bound.

    The flush loop keeps resetting the watchdog so the process is never
    killed for hanging -- presumably to exercise a memory-based kill in
    Watchdog; confirm Watchdog supports one.
    """
    def __init__(self, watchdog):
        # Watchdog reset on every iteration of flush().
        self._watchdog = watchdog
    def flush(self):
        rand_data = []
        while True:
            # Append ~30 hex chars (15 random bytes) per iteration.
            rand_data.append('%030x' % randrange(256**15))
            self._watchdog.reset()
class PseudoAgent(object):
    """Same logic as the agent, simplified.

    NOTE: Python 2 module (print statement below, ``urllib`` imported
    as ``url``).
    """
    def busy_run(self):
        # Spin forever without further watchdog resets: the 5s watchdog
        # must kill us.
        w = Watchdog(5)
        w.reset()
        while True:
            random()
    def hanging_net(self):
        # Block on an HTTP request to the local "nc" pseudo-server that
        # never answers; the 5s watchdog must kill us while hanging.
        w = Watchdog(5)
        w.reset()
        x = url.urlopen("http://localhost:31834")
        print "ERROR Net call returned", x
        return True
    def normal_run(self):
        # Reset the 2s watchdog every second for 5 seconds: must survive.
        w = Watchdog(2)
        w.reset()
        for i in range(5):
            time.sleep(1)
            w.reset()
    def slow_tornado(self):
        # 4s watchdog vs a flush that sleeps 5s (MockTxManager): the
        # watchdog fires first.
        a = Application(12345, {"bind_host": "localhost"})
        a._watchdog = Watchdog(4)
        a._tr_manager = MockTxManager()
        a.run()
    def fast_tornado(self):
        # 6s watchdog vs a 5s flush: the flush exits the process first.
        a = Application(12345, {"bind_host": "localhost"})
        a._watchdog = Watchdog(6)
        a._tr_manager = MockTxManager()
        a.run()
if __name__ == "__main__":
    # Child-process entry points used by TestWatchdog.test_watchdog: the
    # test re-runs this file with one of the scenario names below.
    if sys.argv[1] == "busy":
        a = PseudoAgent()
        a.busy_run()
    elif sys.argv[1] == "net":
        a = PseudoAgent()
        a.hanging_net()
    elif sys.argv[1] == "normal":
        a = PseudoAgent()
        a.normal_run()
    elif sys.argv[1] == "slow":
        a = PseudoAgent()
        a.slow_tornado()
    elif sys.argv[1] == "fast":
        a = PseudoAgent()
        a.fast_tornado()
    elif sys.argv[1] == "test":
        # NOTE(review): TestWatchdog defines no runTest method, so
        # TestWatchdog() (default methodName='runTest') raises here.
        t = TestWatchdog()
        t.runTest()
    elif sys.argv[1] == "memory":
        # NOTE(review): PseudoAgent has no use_lots_of_memory attribute --
        # this branch raises AttributeError. MemoryHogTxManager looks like
        # the intended helper; confirm.
        a = PseudoAgent()
        a.use_lots_of_memory()
|
huhongbo/dd-agent
|
tests/core/test_watchdog.py
|
Python
|
bsd-3-clause
| 4,224
|
# Copyright (c) 2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.models import HyObject, _wrappers
class HyComplex(HyObject, complex):
    """
    Internal representation of a Hy Complex. May raise a ValueError as if
    complex(foo) was called, given HyComplex(foo).
    """

    def __new__(cls, number, *args, **kwargs):
        # Coerce first so invalid input raises ValueError exactly like
        # complex(); any extra positional/keyword arguments are accepted
        # but ignored.
        return super(HyComplex, cls).__new__(cls, complex(number))

# Register so the wrapper machinery maps builtin complex -> HyComplex.
_wrappers[complex] = HyComplex
|
gilch/hy
|
hy/models/complex.py
|
Python
|
mit
| 1,510
|
"""
JPEG picture parser.
Information:
- APP14 documents
http://partners.adobe.com/public/developer/en/ps/sdk/5116.DCT_Filter.pdf
http://java.sun.com/j2se/1.5.0/docs/api/javax/imageio/metadata/doc-files/jpeg_metadata.html#color
- APP12:
http://search.cpan.org/~exiftool/Image-ExifTool/lib/Image/ExifTool/TagNames.pod
Author: Victor Stinner
"""
from hachoir_core.error import HachoirError
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet, ParserError,
UInt8, UInt16, Enum,
Bit, Bits, NullBits, NullBytes,
String, RawBytes)
from hachoir_parser.image.common import PaletteRGB
from hachoir_core.endian import BIG_ENDIAN
from hachoir_core.text_handler import textHandler, hexadecimal
from hachoir_parser.image.exif import Exif
from hachoir_parser.image.photoshop_metadata import PhotoshopMetadata
# Upper search bound (bytes) when scanning for the trailing EOI marker.
MAX_FILESIZE = 100 * 1024 * 1024

# The four tables (hash/sum for color/grayscale JPEG) comes
# from ImageMagick project
QUALITY_HASH_COLOR = (
    1020, 1015, 932, 848, 780, 735, 702, 679, 660, 645,
    632, 623, 613, 607, 600, 594, 589, 585, 581, 571,
    555, 542, 529, 514, 494, 474, 457, 439, 424, 410,
    397, 386, 373, 364, 351, 341, 334, 324, 317, 309,
    299, 294, 287, 279, 274, 267, 262, 257, 251, 247,
    243, 237, 232, 227, 222, 217, 213, 207, 202, 198,
    192, 188, 183, 177, 173, 168, 163, 157, 153, 148,
    143, 139, 132, 128, 125, 119, 115, 108, 104, 99,
    94, 90, 84, 79, 74, 70, 64, 59, 55, 49,
    45, 40, 34, 30, 25, 20, 15, 11, 6, 4,
    0)
QUALITY_SUM_COLOR = (
    32640,32635,32266,31495,30665,29804,29146,28599,28104,27670,
    27225,26725,26210,25716,25240,24789,24373,23946,23572,22846,
    21801,20842,19949,19121,18386,17651,16998,16349,15800,15247,
    14783,14321,13859,13535,13081,12702,12423,12056,11779,11513,
    11135,10955,10676,10392,10208, 9928, 9747, 9564, 9369, 9193,
    9017, 8822, 8639, 8458, 8270, 8084, 7896, 7710, 7527, 7347,
    7156, 6977, 6788, 6607, 6422, 6236, 6054, 5867, 5684, 5495,
    5305, 5128, 4945, 4751, 4638, 4442, 4248, 4065, 3888, 3698,
    3509, 3326, 3139, 2957, 2775, 2586, 2405, 2216, 2037, 1846,
    1666, 1483, 1297, 1109, 927, 735, 554, 375, 201, 128,
    0)
QUALITY_HASH_GRAY = (
    510, 505, 422, 380, 355, 338, 326, 318, 311, 305,
    300, 297, 293, 291, 288, 286, 284, 283, 281, 280,
    279, 278, 277, 273, 262, 251, 243, 233, 225, 218,
    211, 205, 198, 193, 186, 181, 177, 172, 168, 164,
    158, 156, 152, 148, 145, 142, 139, 136, 133, 131,
    129, 126, 123, 120, 118, 115, 113, 110, 107, 105,
    102, 100, 97, 94, 92, 89, 87, 83, 81, 79,
    76, 74, 70, 68, 66, 63, 61, 57, 55, 52,
    50, 48, 44, 42, 39, 37, 34, 31, 29, 26,
    24, 21, 18, 16, 13, 11, 8, 6, 3, 2,
    0)
QUALITY_SUM_GRAY = (
    16320,16315,15946,15277,14655,14073,13623,13230,12859,12560,
    12240,11861,11456,11081,10714,10360,10027, 9679, 9368, 9056,
    8680, 8331, 7995, 7668, 7376, 7084, 6823, 6562, 6345, 6125,
    5939, 5756, 5571, 5421, 5240, 5086, 4976, 4829, 4719, 4616,
    4463, 4393, 4280, 4166, 4092, 3980, 3909, 3835, 3755, 3688,
    3621, 3541, 3467, 3396, 3323, 3247, 3170, 3096, 3021, 2952,
    2874, 2804, 2727, 2657, 2583, 2509, 2437, 2362, 2290, 2211,
    2136, 2068, 1996, 1915, 1858, 1773, 1692, 1620, 1552, 1477,
    1398, 1326, 1251, 1179, 1109, 1031, 961, 884, 814, 736,
    667, 592, 518, 441, 369, 292, 221, 151, 86, 64,
    0)

# Maps zig-zag coefficient index -> natural (row-major) position in the
# 8x8 block, as in libjpeg's jpeg_natural_order table.
JPEG_NATURAL_ORDER = (
    0, 1, 8, 16, 9, 2, 3, 10,
    17, 24, 32, 25, 18, 11, 4, 5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13, 6, 7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63)
class JpegChunkApp0(FieldSet):
    """APP0 (JFIF) chunk body: version, pixel density and optional thumbnail."""
    UNIT_NAME = {
        0: "pixels",
        1: "dots per inch",
        2: "dots per cm",
    }
    def createFields(self):
        yield String(self, "jfif", 5, "JFIF string", charset="ASCII")
        if self["jfif"].value != "JFIF\0":
            raise ParserError(
                "Stream doesn't look like JPEG chunk (wrong JFIF signature)")
        yield UInt8(self, "ver_maj", "Major version")
        yield UInt8(self, "ver_min", "Minor version")
        yield Enum(UInt8(self, "units", "Units"), self.UNIT_NAME)
        # Unit 0 means the two 16-bit values carry an aspect ratio rather
        # than a physical density.
        if self["units"].value == 0:
            yield UInt16(self, "aspect_x", "Aspect ratio (X)")
            yield UInt16(self, "aspect_y", "Aspect ratio (Y)")
        else:
            yield UInt16(self, "x_density", "X density")
            yield UInt16(self, "y_density", "Y density")
        yield UInt8(self, "thumb_w", "Thumbnail width")
        yield UInt8(self, "thumb_h", "Thumbnail height")
        thumb_size = self["thumb_w"].value * self["thumb_h"].value
        if thumb_size != 0:
            # NOTE(review): a 256-entry palette plus w*h bytes implies a
            # palettized thumbnail; JFIF thumbnails are normally 24-bit RGB
            # (3*w*h bytes) -- confirm against real samples.
            yield PaletteRGB(self, "thumb_palette", 256)
            yield RawBytes(self, "thumb_data", thumb_size, "Thumbnail data")
class Ducky(FieldSet):
    """One "Ducky" information block: a type tag, then optional sized data."""
    BLOCK_TYPE = {
        0: "end",
        1: "Quality",
        2: "Comment",
        3: "Copyright",
    }
    def createFields(self):
        yield Enum(UInt16(self, "type"), self.BLOCK_TYPE)
        if self["type"].value == 0:
            # Terminator block: no size or data follow.
            return
        yield UInt16(self, "size")
        data_len = self["size"].value
        if data_len:
            yield RawBytes(self, "data", data_len)
class APP12(FieldSet):
    """
    The JPEG APP12 "Picture Info" segment was used by some older cameras, and
    contains ASCII-based meta information.
    """
    def createFields(self):
        yield String(self, "ducky", 5, '"Ducky" string', charset="ASCII")
        # Consume "Ducky" blocks until the chunk payload is exhausted.
        while not self.eof:
            yield Ducky(self, "item[]")
class StartOfFrame(FieldSet):
    """SOF chunk body: sample precision, image dimensions, then one
    (id, high, low) byte triple per component."""
    def createFields(self):
        yield UInt8(self, "precision")
        yield UInt16(self, "height")
        yield UInt16(self, "width")
        yield UInt8(self, "nr_components")
        for _ in range(self["nr_components"].value):
            yield UInt8(self, "component_id[]")
            yield UInt8(self, "high[]")
            yield UInt8(self, "low[]")
class Comment(FieldSet):
    """COM chunk: NUL-stripped free-form text filling the whole payload."""
    def createFields(self):
        nbytes = self.size // 8
        yield String(self, "comment", nbytes, strip="\0")
class AdobeChunk(FieldSet):
    """APP14 (Adobe) chunk: DCT encoder version, flags and colorspace
    transformation code."""
    COLORSPACE_TRANSFORMATION = {
        1: "YCbCr (converted from RGB)",
        2: "YCCK (converted from CMYK)",
    }
    def createFields(self):
        # Python 2: readBytes() returns a str, compared to a str literal.
        # Anything without the "Adobe" signature is kept as raw bytes.
        if self.stream.readBytes(self.absolute_address, 5) != "Adobe":
            yield RawBytes(self, "raw", self.size//8, "Raw data")
            return
        yield String(self, "adobe", 5, "\"Adobe\" string", charset="ASCII")
        yield UInt16(self, "version", "DCT encoder version")
        yield Enum(Bit(self, "flag00"),
            {False: "Chop down or subsampling", True: "Blend"})
        yield NullBits(self, "flags0_reserved", 15)
        yield NullBytes(self, "flags1", 2)
        yield Enum(UInt8(self, "color_transform", "Colorspace transformation code"), self.COLORSPACE_TRANSFORMATION)
class StartOfScan(FieldSet):
    """SOS chunk body: per-component selector pairs, then three raw bytes."""
    def createFields(self):
        yield UInt8(self, "nr_components")
        for index in range(self["nr_components"].value):
            comp_id = UInt8(self, "component_id[]")
            yield comp_id
            # NOTE(review): the JPEG spec only requires scan component ids
            # to match the SOF ids, not to lie in [1..nr_components] -- this
            # check may reject valid files; confirm.
            if not(1 <= comp_id.value <= self["nr_components"].value):
                raise ParserError("JPEG error: Invalid component-id")
            yield UInt8(self, "value[]")
        yield RawBytes(self, "raw", 3) # TODO: What's this???
class RestartInterval(FieldSet):
    """DRI chunk body: number of MCUs between restart markers."""
    def createFields(self):
        yield UInt16(self, "interval", "Restart interval")
class QuantizationTable(FieldSet):
    """One quantization table: precision/index nibbles, then 64 coefficients."""
    def createFields(self):
        # Code based on function get_dqt() (jdmarker.c from libjpeg62)
        yield Bits(self, "is_16bit", 4)
        yield Bits(self, "index", 4)
        if self["index"].value >= 4:
            raise ParserError("Invalid quantification index (%s)" % self["index"].value)
        # 16-bit precision flag selects the coefficient width.
        if self["is_16bit"].value:
            coeff_type = UInt16
        else:
            coeff_type = UInt8
        # Coefficients are stored in zig-zag order; name each field by its
        # natural (row-major) position. Python 2: xrange.
        for index in xrange(64):
            natural = JPEG_NATURAL_ORDER[index]
            yield coeff_type(self, "coeff[%u]" % natural)
    def createDescription(self):
        return "Quantification table #%u" % self["index"].value
class DefineQuantizationTable(FieldSet):
    """DQT chunk body: one or more quantization tables back to back."""
    def createFields(self):
        while self.current_size < self.size:
            yield QuantizationTable(self, "qt[]")
class JpegChunk(FieldSet):
    """One JPEG marker segment: 0xFF, a type byte, then (for most types) a
    16-bit size and a payload handled by a type-specific sub-parser."""
    TAG_SOI = 0xD8
    TAG_EOI = 0xD9
    TAG_SOS = 0xDA
    TAG_DQT = 0xDB
    TAG_DRI = 0xDD
    # Marker byte -> (field name, description, payload parser or None).
    TAG_INFO = {
        0xC4: ("huffman[]", "Define Huffman Table (DHT)", None),
        0xD8: ("start_image", "Start of image (SOI)", None),
        0xD9: ("end_image", "End of image (EOI)", None),
        0xDA: ("start_scan", "Start Of Scan (SOS)", StartOfScan),
        0xDB: ("quantization[]", "Define Quantization Table (DQT)", DefineQuantizationTable),
        0xDC: ("nb_line", "Define number of Lines (DNL)", None),
        0xDD: ("restart_interval", "Define Restart Interval (DRI)", RestartInterval),
        0xE0: ("app0", "APP0", JpegChunkApp0),
        0xE1: ("exif", "Exif metadata", Exif),
        0xEC: ("app12", "APP12", APP12),
        0xED: ("photoshop", "Photoshop", PhotoshopMetadata),
        0xEE: ("adobe", "Image encoding information for DCT filters (Adobe)", AdobeChunk),
        0xFE: ("comment[]", "Comment", Comment),
    }
    START_OF_FRAME = {
        0xC0: u"Baseline",
        0xC1: u"Extended sequential",
        0xC2: u"Progressive",
        0xC3: u"Lossless",
        0xC5: u"Differential sequential",
        0xC6: u"Differential progressive",
        0xC7: u"Differential lossless",
        0xC9: u"Extended sequential, arithmetic coding",
        0xCA: u"Progressive, arithmetic coding",
        0xCB: u"Lossless, arithmetic coding",
        0xCD: u"Differential sequential, arithmetic coding",
        0xCE: u"Differential progressive, arithmetic coding",
        0xCF: u"Differential lossless, arithmetic coding",
    }
    # Register every SOFn marker with the shared StartOfFrame parser.
    # Python 2: dict.iteritems.
    for key, text in START_OF_FRAME.iteritems():
        TAG_INFO[key] = ("start_frame", "Start of frame (%s)" % text.lower(), StartOfFrame)

    def __init__(self, parent, name, description=None):
        FieldSet.__init__(self, parent, name, description)
        tag = self["type"].value
        if tag == 0xE1:
            # Hack for Adobe extension: XAP metadata (as XML)
            # APP1 can hold either Exif or XMP/XAP: peek 6 bytes past the
            # 4-byte header+type+size prefix (32 bits) to tell them apart.
            # (Python 2 code: 'bytes' here shadows the builtin.)
            bytes = self.stream.readBytes(self.absolute_address + 32, 6)
            if bytes == "Exif\0\0":
                self._name = "exif"
                self._description = "EXIF"
                self._parser = Exif
            else:
                self._parser = None
        elif tag in self.TAG_INFO:
            self._name, self._description, self._parser = self.TAG_INFO[tag]
        else:
            self._parser = None

    def createFields(self):
        yield textHandler(UInt8(self, "header", "Header"), hexadecimal)
        if self["header"].value != 0xFF:
            raise ParserError("JPEG: Invalid chunk header!")
        yield textHandler(UInt8(self, "type", "Type"), hexadecimal)
        tag = self["type"].value
        # SOI and EOI are bare markers: no size field, no payload.
        if tag in (self.TAG_SOI, self.TAG_EOI):
            return
        yield UInt16(self, "size", "Size")
        size = (self["size"].value - 2)  # the size field counts itself
        if 0 < size:
            if self._parser:
                yield self._parser(self, "content", "Chunk content", size=size*8)
            else:
                yield RawBytes(self, "data", size, "Data")

    def createDescription(self):
        return "Chunk: %s" % self["type"].display
class JpegFile(Parser):
    """Top-level JPEG parser: marker chunks up to Start Of Scan, then the
    entropy-coded image data and (usually) a trailing EOI marker."""
    endian = BIG_ENDIAN
    PARSER_TAGS = {
        "id": "jpeg",
        "category": "image",
        "file_ext": ("jpg", "jpeg"),
        "mime": (u"image/jpeg",),
        "magic": (
            ("\xFF\xD8\xFF\xE0", 0), # (Start Of Image, APP0)
            ("\xFF\xD8\xFF\xE1", 0), # (Start Of Image, EXIF)
            ("\xFF\xD8\xFF\xEE", 0), # (Start Of Image, Adobe)
        ),
        "min_size": 22*8,
        "description": "JPEG picture",
        "subfile": "skip",
    }

    def validate(self):
        # Cheap validation: SOI signature, then three parseable chunks.
        if self.stream.readBytes(0, 2) != "\xFF\xD8":
            return "Invalid file signature"
        try:
            for index, field in enumerate(self):
                chunk_type = field["type"].value
                if chunk_type not in JpegChunk.TAG_INFO:
                    return "Unknown chunk type: 0x%02X (chunk #%s)" % (chunk_type, index)
                if index == 2:
                    # Only check 3 fields
                    break
        except HachoirError:
            return "Unable to parse at least three chunks"
        return True

    def createFields(self):
        # Parse marker chunks until the Start Of Scan marker.
        while not self.eof:
            chunk = JpegChunk(self, "chunk[]")
            yield chunk
            if chunk["type"].value == JpegChunk.TAG_SOS:
                # TODO: Read JPEG image data...
                break
        # TODO: is it possible to handle piped input?
        if self._size is None:
            raise NotImplementedError
        has_end = False
        size = (self._size - self.current_size) // 8
        if size:
            # Peel off a trailing EOI marker (0xFFD9, last 16 bits) so it
            # can be exposed as its own chunk.
            if 2 < size \
            and self.stream.readBytes(self._size - 16, 2) == "\xff\xd9":
                has_end = True
                size -= 2
            yield RawBytes(self, "data", size, "JPEG data")
        if has_end:
            yield JpegChunk(self, "chunk[]")

    def createDescription(self):
        desc = "JPEG picture"
        # NOTE(review): chunks are named "chunk[]"/"start_frame" per
        # TAG_INFO, so the path "sof/content" looks like it can never
        # match and the dimensions may never be appended; confirm.
        if "sof/content" in self:
            header = self["sof/content"]
            desc += ": %ux%u pixels" % (header["width"].value, header["height"].value)
        return desc

    def createContentSize(self):
        # NOTE(review): no field is ever created with the name "end"
        # (see createFields), so this first branch looks dead; confirm.
        if "end" in self:
            return self["end"].absolute_address + self["end"].size
        if "data" not in self:
            return None
        start = self["data"].absolute_address
        end = self.stream.searchBytes("\xff\xd9", start, MAX_FILESIZE*8)
        if end is not None:
            # searchBytes returns a bit address; +16 bits covers the
            # 2-byte EOI marker itself.
            return end + 16
        return None
|
pwnieexpress/raspberry_pwn
|
src/pentest/metagoofil/hachoir_parser/image/jpeg.py
|
Python
|
gpl-3.0
| 14,287
|
# Copyright (c) 2014 Roger Light <roger@atchoo.org>
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# and Eclipse Distribution License v1.0 which accompany this distribution.
#
# The Eclipse Public License is available at
# http://www.eclipse.org/legal/epl-v10.html
# and the Eclipse Distribution License is available at
# http://www.eclipse.org/org/documents/edl-v10.php.
#
# Contributors:
# Roger Light - initial API and implementation
"""
This module provides some helper functions to allow straightforward publishing
of messages in a one-shot manner. In other words, they are useful for the
situation where you have a single/multiple messages you want to publish to a
broker, then disconnect and nothing else is required.
"""
import paho.mqtt.client as paho
import paho.mqtt as mqtt
def _do_publish(c):
"""Internal function"""
m = c._userdata.pop()
if type(m) is dict:
topic = m['topic']
try:
payload = m['payload']
except KeyError:
payload = None
try:
qos = m['qos']
except KeyError:
qos = 0
try:
retain = m['retain']
except KeyError:
retain = False
elif type(m) is tuple:
(topic, payload, qos, retain) = m
else:
raise ValueError('message must be a dict or a tuple')
c.publish(topic, payload, qos, retain)
def _on_connect(c, userdata, flags, rc):
    """Internal callback: kick off the first publish once connected,
    or raise if the broker refused the connection."""
    if rc != 0:
        raise mqtt.MQTTException(paho.connack_string(rc))
    _do_publish(c)
def _on_publish(c, userdata, mid):
"""Internal callback"""
if len(userdata) == 0:
c.disconnect()
else:
_do_publish(c)
def multiple(msgs, hostname="localhost", port=1883, client_id="", keepalive=60,
             will=None, auth=None, tls=None, protocol=paho.MQTTv311, transport="tcp"):
    """Publish multiple messages to a broker, then disconnect cleanly.

    This function creates an MQTT client, connects to a broker and publishes a
    list of messages. Once the messages have been delivered, it disconnects
    cleanly from the broker.

    msgs : a list of messages to publish. Each message is either a dict or a
           tuple.
           If a dict, only the topic must be present. Default values will be
           used for any missing arguments. The dict must be of the form:
           msg = {'topic':"<topic>", 'payload':"<payload>", 'qos':<qos>,
           'retain':<retain>}
           topic must be present and may not be empty.
           If payload is "", None or not present then a zero length payload
           will be published.
           If qos is not present, the default of 0 is used.
           If retain is not present, the default of False is used.
           If a tuple, then it must be of the form:
           ("<topic>", "<payload>", qos, retain)
    hostname : a string containing the address of the broker to connect to.
               Defaults to localhost.
    port : the port to connect to the broker on. Defaults to 1883.
    client_id : the MQTT client id to use. If "" or None, the Paho library will
                generate a client id automatically.
    keepalive : the keepalive timeout value for the client. Defaults to 60
                seconds.
    will : a dict containing will parameters for the client: will = {'topic':
           "<topic>", 'payload':"<payload">, 'qos':<qos>, 'retain':<retain>}.
           Topic is required, all other parameters are optional and will
           default to None, 0 and False respectively.
           Defaults to None, which indicates no will should be used.
    auth : a dict containing authentication parameters for the client:
           auth = {'username':"<username>", 'password':"<password>"}
           Username is required, password is optional and will default to None
           if not provided.
           Defaults to None, which indicates no authentication is to be used.
    tls : a dict containing TLS configuration parameters for the client:
          dict = {'ca_certs':"<ca_certs>", 'certfile':"<certfile>",
          'keyfile':"<keyfile>", 'tls_version':"<tls_version>",
          'ciphers':"<ciphers">}
          ca_certs is required, all other parameters are optional and will
          default to None if not provided, which results in the client using
          the default behaviour - see the paho.mqtt.client documentation.
          Defaults to None, which indicates that TLS should not be used.
    transport : set to "tcp" to use the default setting of transport which is
                raw TCP. Set to "websockets" to use WebSockets as the transport.
    """
    if not isinstance(msgs, list):
        raise ValueError('msgs must be a list')

    # The msgs list itself is the client userdata: _on_connect/_on_publish
    # drain it one message at a time, then disconnect.
    client = paho.Client(client_id=client_id,
                         userdata=msgs, protocol=protocol, transport=transport)
    client.on_publish = _on_publish
    client.on_connect = _on_connect

    if auth is not None:
        # 'username' is required (KeyError if absent); 'password' optional.
        client.username_pw_set(auth['username'], auth.get('password'))

    if will is not None:
        # 'topic' is required; the rest default to no payload, qos 0,
        # not retained.
        client.will_set(will['topic'], will.get('payload'),
                        will.get('qos', 0), will.get('retain', False))

    if tls is not None:
        # 'ca_certs' is required; every other field defaults to None so
        # paho falls back to its own defaults.
        client.tls_set(tls['ca_certs'], tls.get('certfile'),
                       tls.get('keyfile'),
                       tls_version=tls.get('tls_version'),
                       ciphers=tls.get('ciphers'))

    client.connect(hostname, port, keepalive)
    # Runs until _on_publish calls disconnect() after the last message.
    client.loop_forever()
def single(topic, payload=None, qos=0, retain=False, hostname="localhost",
           port=1883, client_id="", keepalive=60, will=None, auth=None,
           tls=None, protocol=paho.MQTTv311, transport="tcp"):
    """Publish a single message to a broker, then disconnect cleanly.

    This function creates an MQTT client, connects to a broker and publishes a
    single message. Once the message has been delivered, it disconnects cleanly
    from the broker.

    topic : the only required argument must be the topic string to which the
            payload will be published.
    payload : the payload to be published. If "" or None, a zero length payload
              will be published.
    qos : the qos to use when publishing, default to 0.
    retain : set the message to be retained (True) or not (False).
    hostname : a string containing the address of the broker to connect to.
               Defaults to localhost.
    port : the port to connect to the broker on. Defaults to 1883.
    client_id : the MQTT client id to use. If "" or None, the Paho library will
                generate a client id automatically.
    keepalive : the keepalive timeout value for the client. Defaults to 60
                seconds.
    will : a dict containing will parameters for the client: will = {'topic':
           "<topic>", 'payload':"<payload">, 'qos':<qos>, 'retain':<retain>}.
           Topic is required, all other parameters are optional and will
           default to None, 0 and False respectively.
           Defaults to None, which indicates no will should be used.
    auth : a dict containing authentication parameters for the client:
           auth = {'username':"<username>", 'password':"<password>"}
           Username is required, password is optional and will default to None
           if not provided.
           Defaults to None, which indicates no authentication is to be used.
    tls : a dict containing TLS configuration parameters for the client:
          dict = {'ca_certs':"<ca_certs>", 'certfile':"<certfile>",
          'keyfile':"<keyfile>", 'tls_version':"<tls_version>",
          'ciphers':"<ciphers">}
          ca_certs is required, all other parameters are optional and will
          default to None if not provided, which results in the client using
          the default behaviour - see the paho.mqtt.client documentation.
          Defaults to None, which indicates that TLS should not be used.
    transport : set to "tcp" to use the default setting of transport which is
                raw TCP. Set to "websockets" to use WebSockets as the transport.
    """
    # Delegate to multiple() with a one-element message list.
    multiple([{'topic': topic, 'payload': payload, 'qos': qos,
               'retain': retain}],
             hostname, port, client_id, keepalive, will, auth, tls,
             protocol, transport)
|
MrBramme/Mqtt-Hyperion-Remote
|
modules/paho/mqtt/publish.py
|
Python
|
mit
| 9,169
|
# encoding: UTF-8
import time
from eventEngine import *
from vtConstant import *
########################################################################
class VtGateway(object):
"""交易接口"""
#----------------------------------------------------------------------
def __init__(self, eventEngine, gatewayName):
    """Constructor.

    eventEngine: event engine this gateway publishes events to.
    gatewayName: identifier of this gateway instance.
    """
    self.eventEngine = eventEngine
    self.gatewayName = gatewayName
#----------------------------------------------------------------------
def onTick(self, tick):
"""市场行情推送"""
# 通用事件
event1 = Event(type_=EVENT_TICK)
event1.dict_['data'] = tick
self.eventEngine.put(event1)
# 特定合约代码的事件
event2 = Event(type_=EVENT_TICK+tick.vtSymbol)
event2.dict_['data'] = tick
self.eventEngine.put(event2)
#----------------------------------------------------------------------
def onTrade(self, trade):
"""成交信息推送"""
# 通用事件
event1 = Event(type_=EVENT_TRADE)
event1.dict_['data'] = trade
self.eventEngine.put(event1)
# 特定合约的成交事件
event2 = Event(type_=EVENT_TRADE+trade.vtSymbol)
event2.dict_['data'] = trade
self.eventEngine.put(event2)
#----------------------------------------------------------------------
def onOrder(self, order):
"""订单变化推送"""
# 通用事件
event1 = Event(type_=EVENT_ORDER)
event1.dict_['data'] = order
self.eventEngine.put(event1)
# 特定订单编号的事件
event2 = Event(type_=EVENT_ORDER+order.vtOrderID)
event2.dict_['data'] = order
self.eventEngine.put(event2)
#----------------------------------------------------------------------
def onPosition(self, position):
"""持仓信息推送"""
# 通用事件
event1 = Event(type_=EVENT_POSITION)
event1.dict_['data'] = position
self.eventEngine.put(event1)
# 特定合约代码的事件
event2 = Event(type_=EVENT_POSITION+position.vtSymbol)
event2.dict_['data'] = position
self.eventEngine.put(event2)
#----------------------------------------------------------------------
def onAccount(self, account):
"""账户信息推送"""
# 通用事件
event1 = Event(type_=EVENT_ACCOUNT)
event1.dict_['data'] = account
self.eventEngine.put(event1)
# 特定合约代码的事件
event2 = Event(type_=EVENT_ACCOUNT+account.vtAccountID)
event2.dict_['data'] = account
self.eventEngine.put(event2)
#----------------------------------------------------------------------
def onError(self, error):
"""错误信息推送"""
# 通用事件
event1 = Event(type_=EVENT_ERROR)
event1.dict_['data'] = error
self.eventEngine.put(event1)
#----------------------------------------------------------------------
def onLog(self, log):
"""日志推送"""
# 通用事件
event1 = Event(type_=EVENT_LOG)
event1.dict_['data'] = log
self.eventEngine.put(event1)
#----------------------------------------------------------------------
def onContract(self, contract):
"""合约基础信息推送"""
# 通用事件
event1 = Event(type_=EVENT_CONTRACT)
event1.dict_['data'] = contract
self.eventEngine.put(event1)
#----------------------------------------------------------------------
def connect(self):
"""连接"""
pass
#----------------------------------------------------------------------
def subscribe(self, subscribeReq):
"""订阅行情"""
pass
#----------------------------------------------------------------------
def sendOrder(self, orderReq):
"""发单"""
pass
#----------------------------------------------------------------------
def cancelOrder(self, cancelOrderReq):
"""撤单"""
pass
#----------------------------------------------------------------------
def qryAccount(self):
"""查询账户资金"""
pass
#----------------------------------------------------------------------
def qryPosition(self):
"""查询持仓"""
pass
#----------------------------------------------------------------------
def close(self):
"""关闭"""
pass
########################################################################
class VtBaseData(object):
    """Base class for data objects pushed through gateway callbacks; other data classes inherit from it."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.gatewayName = EMPTY_STRING         # name of the gateway that produced this object
        self.rawData = None                     # raw payload from the underlying API, if any
########################################################################
class VtTickData(VtBaseData):
    """Tick (market snapshot) data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtTickData, self).__init__()

        # identity
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange code
        self.vtSymbol = EMPTY_STRING            # unique vt id, usually symbol.exchange

        # trade data
        self.lastPrice = EMPTY_FLOAT            # last traded price
        self.lastVolume = EMPTY_INT             # volume of the latest trade
        self.volume = EMPTY_INT                 # total volume today
        self.openInterest = EMPTY_INT           # open interest
        self.time = EMPTY_STRING                # time, e.g. 11:20:56.5
        self.date = EMPTY_STRING                # date, e.g. 20151009

        # daily statistics
        self.openPrice = EMPTY_FLOAT            # today's open
        self.highPrice = EMPTY_FLOAT            # today's high
        self.lowPrice = EMPTY_FLOAT             # today's low
        self.preClosePrice = EMPTY_FLOAT        # previous close
        self.upperLimit = EMPTY_FLOAT           # limit-up price
        self.lowerLimit = EMPTY_FLOAT           # limit-down price

        # five-level depth quotes
        self.bidPrice1 = EMPTY_FLOAT
        self.bidPrice2 = EMPTY_FLOAT
        self.bidPrice3 = EMPTY_FLOAT
        self.bidPrice4 = EMPTY_FLOAT
        self.bidPrice5 = EMPTY_FLOAT
        self.askPrice1 = EMPTY_FLOAT
        self.askPrice2 = EMPTY_FLOAT
        self.askPrice3 = EMPTY_FLOAT
        self.askPrice4 = EMPTY_FLOAT
        self.askPrice5 = EMPTY_FLOAT
        self.bidVolume1 = EMPTY_INT
        self.bidVolume2 = EMPTY_INT
        self.bidVolume3 = EMPTY_INT
        self.bidVolume4 = EMPTY_INT
        self.bidVolume5 = EMPTY_INT
        self.askVolume1 = EMPTY_INT
        self.askVolume2 = EMPTY_INT
        self.askVolume3 = EMPTY_INT
        self.askVolume4 = EMPTY_INT
        self.askVolume5 = EMPTY_INT
########################################################################
class VtTradeData(VtBaseData):
    """Trade (fill) data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtTradeData, self).__init__()

        # identity
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange code
        self.vtSymbol = EMPTY_STRING            # unique vt id, usually symbol.exchange
        self.tradeID = EMPTY_STRING             # trade id
        self.vtTradeID = EMPTY_STRING           # unique vt trade id, usually gatewayName.tradeID
        self.orderID = EMPTY_STRING             # order id
        self.vtOrderID = EMPTY_STRING           # unique vt order id, usually gatewayName.orderID

        # fill details
        self.direction = EMPTY_UNICODE          # trade direction
        self.offset = EMPTY_UNICODE             # open/close flag
        self.price = EMPTY_FLOAT                # fill price
        self.volume = EMPTY_INT                 # fill volume
        self.tradeTime = EMPTY_STRING           # fill time
########################################################################
class VtOrderData(VtBaseData):
    """Order data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtOrderData, self).__init__()

        # identity
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange code
        self.vtSymbol = EMPTY_STRING            # unique vt id, usually symbol.exchange
        self.orderID = EMPTY_STRING             # order id
        self.vtOrderID = EMPTY_STRING           # unique vt order id, usually gatewayName.orderID

        # order details
        self.direction = EMPTY_UNICODE          # order direction
        self.offset = EMPTY_UNICODE             # open/close flag
        self.price = EMPTY_FLOAT                # order price
        self.totalVolume = EMPTY_INT            # total order volume
        self.tradedVolume = EMPTY_INT           # filled volume
        self.status = EMPTY_UNICODE             # order status
        self.orderTime = EMPTY_STRING           # submit time
        self.cancelTime = EMPTY_STRING          # cancel time

        # CTP/LTS specific
        self.frontID = EMPTY_INT                # front machine id
        self.sessionID = EMPTY_INT              # session id
########################################################################
class VtPositionData(VtBaseData):
    """Position data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtPositionData, self).__init__()

        # identity
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange code
        self.vtSymbol = EMPTY_STRING            # unique vt id, symbol.exchange

        # position details
        self.direction = EMPTY_STRING           # position direction
        self.position = EMPTY_INT               # position size
        self.frozen = EMPTY_INT                 # frozen quantity
        self.price = EMPTY_FLOAT                # average position price
        self.vtPositionName = EMPTY_STRING      # unique vt position key, usually vtSymbol.direction

        # added 2015-10-20
        self.ydPosition = EMPTY_INT             # yesterday's position
########################################################################
class VtAccountData(VtBaseData):
    """Account data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtAccountData, self).__init__()

        # identity
        self.accountID = EMPTY_STRING           # account id
        self.vtAccountID = EMPTY_STRING         # unique vt account id, usually gatewayName.accountID

        # balances
        self.preBalance = EMPTY_FLOAT           # yesterday's settled balance
        self.balance = EMPTY_FLOAT              # account balance
        self.available = EMPTY_FLOAT            # available funds
        self.commission = EMPTY_FLOAT           # today's commission
        self.margin = EMPTY_FLOAT               # margin in use
        self.closeProfit = EMPTY_FLOAT          # realized PnL
        self.positionProfit = EMPTY_FLOAT       # unrealized PnL
########################################################################
class VtErrorData(VtBaseData):
    """Error data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtErrorData, self).__init__()

        self.errorID = EMPTY_STRING             # error code
        self.errorMsg = EMPTY_UNICODE           # error message
        self.additionalInfo = EMPTY_UNICODE     # extra information
        self.errorTime = time.strftime('%X', time.localtime())  # time the error object was created
########################################################################
class VtLogData(VtBaseData):
    """Log data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtLogData, self).__init__()

        self.logTime = time.strftime('%X', time.localtime())    # time the log entry was created
        self.logContent = EMPTY_UNICODE                         # log message
########################################################################
class VtContractData(VtBaseData):
    """Contract (instrument) metadata."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        super(VtContractData, self).__init__()

        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange code
        self.vtSymbol = EMPTY_STRING            # unique vt id, usually symbol.exchange
        self.name = EMPTY_UNICODE               # contract display name
        self.productClass = EMPTY_UNICODE       # product type
        self.size = EMPTY_INT                   # contract multiplier
        self.priceTick = EMPTY_FLOAT            # minimum price increment

        # option specific
        self.strikePrice = EMPTY_FLOAT          # option strike price
        self.underlyingSymbol = EMPTY_STRING    # underlying contract symbol
        self.optionType = EMPTY_UNICODE         # option type (call/put)
########################################################################
class VtSubscribeReq(object):
    """Request object passed in when subscribing to market data."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange

        # IB specific
        self.productClass = EMPTY_UNICODE       # product type
        self.currency = EMPTY_STRING            # contract currency
        self.expiry = EMPTY_STRING              # expiry date
        self.strikePrice = EMPTY_FLOAT          # strike price
        self.optionType = EMPTY_UNICODE         # option type
########################################################################
class VtOrderReq(object):
    """Request object passed in when sending an order."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange
        self.price = EMPTY_FLOAT                # order price
        self.volume = EMPTY_INT                 # order volume

        self.priceType = EMPTY_STRING           # price type (limit/market/...)
        self.direction = EMPTY_STRING           # buy/sell
        self.offset = EMPTY_STRING              # open/close

        # IB specific
        self.productClass = EMPTY_UNICODE       # product type
        self.currency = EMPTY_STRING            # contract currency
        self.expiry = EMPTY_STRING              # expiry date
        self.strikePrice = EMPTY_FLOAT          # strike price
        self.optionType = EMPTY_UNICODE         # option type
########################################################################
class VtCancelOrderReq(object):
    """Request object passed in when cancelling an order."""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.symbol = EMPTY_STRING              # contract symbol
        self.exchange = EMPTY_STRING            # exchange

        # mainly used by CTP/LTS style interfaces
        self.orderID = EMPTY_STRING             # order id
        self.frontID = EMPTY_STRING             # front machine id
        self.sessionID = EMPTY_STRING           # session id
|
akeyong/vnpy
|
vn.tutorial/tick2trade/vn.trader_t2t/vtGateway.py
|
Python
|
mit
| 16,962
|
from django.contrib import admin

from socialregistration.contrib.github.models import GithubProfile

# Expose GithubProfile in the Django admin with the default ModelAdmin.
admin.site.register(GithubProfile)
|
lgapontes/django-socialregistration
|
socialregistration/contrib/github/admin.py
|
Python
|
mit
| 136
|
#!/usr/bin/env python
#
# file-torture.py - Simple torture test for file notificatins in Nautilus
# Copyright (C) 2006 Federico Mena-Quintero
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Author: Federico Mena-Quintero <federico@novell.com>
import random
import os
import sys
import optparse
import time
output_dir = ""
random_gen = None
verbose = False
extensions = (".doc", ".gif", ".jpg", ".png", ".xls", ".odt", ".odp", ".ods", ".txt", ".zip", ".gz")
files = []
directories = []
def get_random_file_index ():
    """Return a random index into the global `files` list, or -1 when empty."""
    count = len (files)
    if count == 0:
        return -1
    return random_gen.randrange (count)
def get_random_directory_index ():
    """Return a random index into the global `directories` list, or -1 when empty."""
    count = len (directories)
    if count == 0:
        return -1
    return random_gen.randrange (count)
def get_random_filename ():
    """Build a random 20-letter basename with one of the known extensions."""
    letters = "abcdefghijklmnopqrstuvwxyz"
    stem = "".join (letters[random_gen.randrange (26)] for _ in range (20))
    return stem + extensions[random_gen.randrange (len (extensions))]
def get_random_path ():
    """Return a random file path inside the output directory."""
    name = get_random_filename ()
    return os.path.join (output_dir, name)
def op_create_file ():
filename = get_random_path ()
files.append (filename)
f = open (filename, "w")
f.close ()
if verbose:
print 'create file %s' % filename
return True
def op_move_file ():
idx = get_random_file_index ()
if idx == -1:
return False
new_name = get_random_path ()
old_name = files[idx]
os.rename (old_name, new_name)
files[idx] = new_name
if verbose:
print 'rename file %s to %s' % (old_name, new_name)
return True
def op_delete_file ():
idx = get_random_file_index ()
if idx == -1:
return False
filename = files[idx]
os.unlink (filename)
files.pop (idx)
if verbose:
print 'delete file %s' % filename
return True
def op_write_file ():
idx = get_random_file_index ()
if idx == -1:
return False
name = files[idx]
f = open (name, "a")
f.write ("blah blah blah blah blah blah blah\n")
f.close ()
if verbose:
print 'write to file %s' % name
return True
def op_create_dir ():
name = get_random_path ()
os.mkdir (name)
directories.append (name)
if verbose:
print 'create directory %s' % name
return True
def op_move_dir ():
idx = get_random_directory_index ()
if idx == -1:
return False
new_name = get_random_path ()
old_name = directories[idx]
os.rename (old_name, new_name)
directories[idx] = new_name
if verbose:
print 'move directory %s to %s' % (old_name, new_name)
return True
def op_delete_dir ():
idx = get_random_directory_index ()
if idx == -1:
return False
name = directories[idx]
os.rmdir (name)
directories.pop (idx)
if verbose:
print 'delete directory %s' % name
return True
def op_file_to_dir ():
idx = get_random_file_index ()
if idx == -1:
return False
name = files[idx]
os.unlink (name)
files.pop (idx)
os.mkdir (name)
directories.append (name)
if verbose:
print 'file to dir %s' % name
return True
def op_dir_to_file ():
idx = get_random_directory_index ()
if idx == -1:
return False
name = directories[idx]
os.rmdir (name)
directories.pop (idx)
f = open (name, "w")
f.close ()
files.append (name)
if verbose:
print 'dir to file %s' % name
return True
# Table of all mutation operations; the main loop picks one at random.
# Each op returns True if it did something, False if it had nothing to act on.
operations = (
    op_create_file,
    op_move_file,
    op_delete_file,
    op_write_file,
    op_create_dir,
    op_move_dir,
    op_delete_dir,
    op_file_to_dir,
    op_dir_to_file,
    )
def main ():
option_parser = optparse.OptionParser (usage="usage: %prog -o <dirname>")
option_parser.add_option ("-o",
"--output", dest="output",
metavar="FILE",
help="Name of output directory")
option_parser.add_option ("-s",
"--seed", dest="seed",
metavar="NUMBER",
help="Random number seed")
option_parser.add_option ("",
"--no-sleep", dest="sleep_enabled", action="store_false", default=True,
help="Disable short sleeps between operations. Will use a lot of CPU!")
option_parser.add_option ("-v",
"--verbose", dest="verbose", action="store_true", default=False,
help="Enable verbose output")
(options, args) = option_parser.parse_args ()
if not options.output:
print 'Please specify an output directory with "-o outputdir"'
return 1
sleep_enabled = options.sleep_enabled
if len (args) != 0:
print 'No extra arguments are supported'
return 1
global output_dir
global random_gen
global verbose
verbose = options.verbose
random_gen = random.Random ()
if options.seed:
seed = int (options.seed)
else:
seed = int (time.time ())
print 'Use "--seed=%s" to reproduce this run' % seed
random_gen.seed (seed)
if sleep_enabled:
print 'Using short sleeps between operations (use --no-sleep to disable)'
else:
print 'Disabling short sleeps between operations'
output_dir = options.output
try:
os.mkdir (output_dir)
except:
1 # nothing
while True:
op = operations [random_gen.randrange (len (operations))]
op ()
if sleep_enabled:
time.sleep (random_gen.random () / 100)
return 0
if __name__ == "__main__":
sys.exit (main ())
|
mssurajkaiga/nautilus-3.8.1
|
test/file-torture.py
|
Python
|
gpl-2.0
| 6,455
|
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection, DhcpOptions
class TestDescribeDhcpOptions(AWSMockServiceTestCase):
    """DescribeDhcpOptions: verify request parameters and response parsing.

    Note: uses assertEqual (assertEquals is a deprecated unittest alias).
    """

    connection_class = VPCConnection

    def default_body(self):
        # Canned EC2 response: one options set; domain-name-servers is
        # deliberately split across two <item> entries to exercise merging.
        return """
            <DescribeDhcpOptionsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <dhcpOptionsSet>
                  <item>
                     <dhcpOptionsId>dopt-7a8b9c2d</dhcpOptionsId>
                     <dhcpConfigurationSet>
                        <item>
                           <key>domain-name</key>
                           <valueSet>
                              <item>
                                 <value>example.com</value>
                              </item>
                           </valueSet>
                        </item>
                        <item>
                           <key>domain-name-servers</key>
                           <valueSet>
                              <item>
                                 <value>10.2.5.1</value>
                              </item>
                           </valueSet>
                        </item>
                        <item>
                           <key>domain-name-servers</key>
                           <valueSet>
                              <item>
                                 <value>10.2.5.2</value>
                              </item>
                           </valueSet>
                        </item>
                     </dhcpConfigurationSet>
                     <tagSet/>
                  </item>
               </dhcpOptionsSet>
            </DescribeDhcpOptionsResponse>
        """

    def test_get_all_dhcp_options(self):
        self.set_http_response(status_code=200)
        api_response = self.service_connection.get_all_dhcp_options(['dopt-7a8b9c2d'],
                                                                    [('key', 'domain-name')])
        self.assert_request_parameters({
            'Action': 'DescribeDhcpOptions',
            'DhcpOptionsId.1': 'dopt-7a8b9c2d',
            'Filter.1.Name': 'key',
            'Filter.1.Value.1': 'domain-name'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(len(api_response), 1)
        self.assertIsInstance(api_response[0], DhcpOptions)
        self.assertEqual(api_response[0].id, 'dopt-7a8b9c2d')
        self.assertEqual(api_response[0].options['domain-name'], ['example.com'])
        self.assertEqual(api_response[0].options['domain-name-servers'], ['10.2.5.1', '10.2.5.2'])
class TestCreateDhcpOptions(AWSMockServiceTestCase):
    """CreateDhcpOptions: verify request serialization of every option kind.

    Note: uses assertEqual (assertEquals is a deprecated unittest alias).
    """

    connection_class = VPCConnection

    def default_body(self):
        # Canned EC2 response echoing the created option set
        return """
            <CreateDhcpOptionsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <dhcpOptions>
                  <dhcpOptionsId>dopt-7a8b9c2d</dhcpOptionsId>
                  <dhcpConfigurationSet>
                     <item>
                        <key>domain-name</key>
                        <valueSet>
                           <item>
                              <value>example.com</value>
                           </item>
                        </valueSet>
                     </item>
                     <item>
                        <key>domain-name-servers</key>
                        <valueSet>
                           <item>
                              <value>10.2.5.1</value>
                           </item>
                           <item>
                              <value>10.2.5.2</value>
                           </item>
                        </valueSet>
                     </item>
                     <item>
                        <key>ntp-servers</key>
                        <valueSet>
                           <item>
                              <value>10.12.12.1</value>
                           </item>
                           <item>
                              <value>10.12.12.2</value>
                           </item>
                        </valueSet>
                     </item>
                     <item>
                        <key>netbios-name-servers</key>
                        <valueSet>
                           <item>
                              <value>10.20.20.1</value>
                           </item>
                        </valueSet>
                     </item>
                     <item>
                        <key>netbios-node-type</key>
                        <valueSet>
                           <item>
                              <value>2</value>
                           </item>
                        </valueSet>
                     </item>
                  </dhcpConfigurationSet>
                  <tagSet/>
               </dhcpOptions>
            </CreateDhcpOptionsResponse>
        """

    def test_create_dhcp_options(self):
        self.set_http_response(status_code=200)
        # mixes list, tuple and bare-string values to exercise normalization
        api_response = self.service_connection.create_dhcp_options(
            domain_name='example.com', domain_name_servers=['10.2.5.1', '10.2.5.2'],
            ntp_servers=('10.12.12.1', '10.12.12.2'),
            netbios_name_servers='10.20.20.1',
            netbios_node_type='2')
        self.assert_request_parameters({
            'Action': 'CreateDhcpOptions',
            'DhcpConfiguration.1.Key': 'domain-name',
            'DhcpConfiguration.1.Value.1': 'example.com',
            'DhcpConfiguration.2.Key': 'domain-name-servers',
            'DhcpConfiguration.2.Value.1': '10.2.5.1',
            'DhcpConfiguration.2.Value.2': '10.2.5.2',
            'DhcpConfiguration.3.Key': 'ntp-servers',
            'DhcpConfiguration.3.Value.1': '10.12.12.1',
            'DhcpConfiguration.3.Value.2': '10.12.12.2',
            'DhcpConfiguration.4.Key': 'netbios-name-servers',
            'DhcpConfiguration.4.Value.1': '10.20.20.1',
            'DhcpConfiguration.5.Key': 'netbios-node-type',
            'DhcpConfiguration.5.Value.1': '2'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertIsInstance(api_response, DhcpOptions)
        self.assertEqual(api_response.id, 'dopt-7a8b9c2d')
        self.assertEqual(api_response.options['domain-name'], ['example.com'])
        self.assertEqual(api_response.options['domain-name-servers'], ['10.2.5.1', '10.2.5.2'])
        self.assertEqual(api_response.options['ntp-servers'], ['10.12.12.1', '10.12.12.2'])
        self.assertEqual(api_response.options['netbios-name-servers'], ['10.20.20.1'])
        self.assertEqual(api_response.options['netbios-node-type'], ['2'])
class TestDeleteDhcpOptions(AWSMockServiceTestCase):
    """DeleteDhcpOptions: verify request parameters and boolean result.

    Note: uses assertEqual (assertEquals is a deprecated unittest alias).
    """

    connection_class = VPCConnection

    def default_body(self):
        return """
            <DeleteDhcpOptionsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <return>true</return>
            </DeleteDhcpOptionsResponse>
        """

    def test_delete_dhcp_options(self):
        self.set_http_response(status_code=200)
        api_response = self.service_connection.delete_dhcp_options('dopt-7a8b9c2d')
        self.assert_request_parameters({
            'Action': 'DeleteDhcpOptions',
            'DhcpOptionsId': 'dopt-7a8b9c2d'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(api_response, True)
class TestAssociateDhcpOptions(AWSMockServiceTestCase):
    """AssociateDhcpOptions: verify request parameters and boolean result.

    Note: uses assertEqual (assertEquals is a deprecated unittest alias).
    """

    connection_class = VPCConnection

    def default_body(self):
        return """
            <AssociateDhcpOptionsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
               <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
               <return>true</return>
            </AssociateDhcpOptionsResponse>
        """

    def test_associate_dhcp_options(self):
        self.set_http_response(status_code=200)
        api_response = self.service_connection.associate_dhcp_options(
            'dopt-7a8b9c2d', 'vpc-1a2b3c4d')
        self.assert_request_parameters({
            'Action': 'AssociateDhcpOptions',
            'DhcpOptionsId': 'dopt-7a8b9c2d',
            'VpcId': 'vpc-1a2b3c4d'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(api_response, True)

if __name__ == '__main__':
    unittest.main()
|
harshilasu/LinkurApp
|
y/google-cloud-sdk/platform/gsutil/third_party/boto/tests/unit/vpc/test_dhcpoptions.py
|
Python
|
gpl-3.0
| 8,763
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Redeclare the markup-type choices on Release.content_markup_type."""

    dependencies = [
        ('downloads', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='release',
            name='content_markup_type',
            # NOTE(review): choices look like markup-filter names used by the
            # content field machinery — confirm against the model definition
            field=models.CharField(max_length=30, default='restructuredtext', choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown'), ('restructuredtext', 'Restructured Text')]),
            preserve_default=True,
        ),
    ]
|
lebronhkh/pythondotorg
|
downloads/migrations/0002_auto_20150416_1853.py
|
Python
|
apache-2.0
| 599
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""maintain history for compatibility with earlier migrations
Revision ID: 13eb55f81627
Revises: 1507a7289a2f
Create Date: 2015-08-23 05:12:49.732174
"""
# revision identifiers, used by Alembic.
revision = '13eb55f81627'
down_revision = '1507a7289a2f'
branch_labels = None
depends_on = None
def upgrade():
    """No-op: this revision exists only to keep the migration history linear."""
    pass
def downgrade():
    """No-op: nothing to revert for this placeholder revision."""
    pass
|
apache/incubator-airflow
|
airflow/migrations/versions/13eb55f81627_for_compatibility.py
|
Python
|
apache-2.0
| 1,136
|
"""0MQ Constants."""
# Copyright (c) PyZMQ Developers.
# Distributed under the terms of the Modified BSD License.
from zmq.backend import constants
from zmq.utils.constant_names import (
base_names,
switched_sockopt_names,
int_sockopt_names,
int64_sockopt_names,
bytes_sockopt_names,
fd_sockopt_names,
ctx_opt_names,
msg_opt_names,
)
#-----------------------------------------------------------------------------
# Python module level constants
#-----------------------------------------------------------------------------
__all__ = [
'int_sockopts',
'int64_sockopts',
'bytes_sockopts',
'ctx_opts',
'ctx_opt_names',
'DRAFT_API',
]
# Re-export the backend's draft-API flag.
DRAFT_API = constants.DRAFT_API

# Option-value sets, populated below; used by get/setsockopt machinery to
# decide how each option's value is (de)serialized.
int_sockopts = set()
int64_sockopts = set()
bytes_sockopts = set()
fd_sockopts = set()
ctx_opts = set()
msg_opts = set()

# Before libzmq 3.0 several socket options were 64-bit ints; afterwards they
# became plain ints — sort the "switched" names into the right bucket.
if constants.VERSION < 30000:
    int64_sockopt_names.extend(switched_sockopt_names)
else:
    int_sockopt_names.extend(switched_sockopt_names)
# sentinel for "backend does not define this name"
_UNDEFINED = -9999


def _add_constant(name, container=None):
    """Define *name* at module level if the backend provides it.

    Optionally records the value in *container* (one of the option sets
    used by the get/setsockopt checkers).  Returns the value, or None
    when the backend does not define the constant.
    """
    value = getattr(constants, name, _UNDEFINED)
    if value == _UNDEFINED:
        return
    __all__.append(name)
    globals()[name] = value
    if container is not None:
        container.add(value)
    return value
# Materialize every known constant name from the backend, sorting each
# socket/context/message option into the set matching its value type.
for name in base_names:
    _add_constant(name)

for name in int_sockopt_names:
    _add_constant(name, int_sockopts)

for name in int64_sockopt_names:
    _add_constant(name, int64_sockopts)

for name in bytes_sockopt_names:
    _add_constant(name, bytes_sockopts)

for name in fd_sockopt_names:
    _add_constant(name, fd_sockopts)

for name in ctx_opt_names:
    _add_constant(name, ctx_opts)

for name in msg_opt_names:
    _add_constant(name, msg_opts)
# ensure some aliases are always defined
aliases = [
    ('DONTWAIT', 'NOBLOCK'),
    ('XREQ', 'DEALER'),
    ('XREP', 'ROUTER'),
]

for group in aliases:
    undefined = set()
    found = None
    # figure out which name(s) in the alias group the backend defined
    for name in group:
        value = getattr(constants, name, -1)
        if value != -1:
            found = value
        else:
            undefined.add(name)
    # point every missing alias at the value of the name that exists
    if found is not None:
        for name in undefined:
            globals()[name] = found
            __all__.append(name)
|
swn1/pyzmq
|
zmq/sugar/constants.py
|
Python
|
bsd-3-clause
| 2,355
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage A10 Networks slb virtual server objects
(c) 2014, Mischa Peters <mpeters@a10networks.com>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: a10_virtual_server
version_added: 1.8
short_description: Manage A10 Networks devices' virtual servers
description:
- Manage slb virtual server objects on A10 Networks devices via aXAPI
author: "Mischa Peters (@mischapeters)"
extends_documentation_fragment: a10
options:
virtual_server:
description:
- SLB virtual server name.
required: true
default: null
aliases: ['vip', 'virtual']
virtual_server_ip:
description:
- SLB virtual server IP address.
required: false
default: null
aliases: ['ip', 'address']
virtual_server_status:
description:
- SLB virtual server status.
required: false
default: enable
aliases: ['status']
choices: ['enabled', 'disabled']
virtual_server_ports:
description:
- A list of ports to create for the virtual server. Each list item should be a
dictionary which specifies the C(port:) and C(type:), but can also optionally
specify the C(service_group:) as well as the C(status:). See the examples
below for details. This parameter is required when C(state) is C(present).
required: false
'''
EXAMPLES = '''
# Create a new virtual server
- a10_virtual_server:
host: a10.mydomain.com
username: myadmin
password: mypassword
virtual_server: vserver1
virtual_server_ip: 1.1.1.1
virtual_server_ports:
- port: 80
protocol: TCP
service_group: sg-80-tcp
- port: 443
protocol: HTTPS
service_group: sg-443-https
- port: 8080
protocol: http
status: disabled
'''
VALID_PORT_FIELDS = ['port', 'protocol', 'service_group', 'status']

def validate_ports(module, ports):
    """Validate and normalize virtual-server port definitions in place.

    Each item in *ports* must be a dict containing at least 'port' and
    'protocol'.  'port' is coerced to int, 'protocol' is converted to the
    aXAPI integer form, 'status' is normalized (defaulting to enabled=1)
    and 'service_group' defaults to ''.  Any invalid entry aborts the
    module run via module.fail_json().
    """
    for item in ports:
        for key in item:
            if key not in VALID_PORT_FIELDS:
                module.fail_json(msg="invalid port field (%s), must be one of: %s" % (key, ','.join(VALID_PORT_FIELDS)))

        # validate the port number is present and an integer
        if 'port' in item:
            try:
                item['port'] = int(item['port'])
            except (TypeError, ValueError):
                # was a bare "except:", which would also swallow
                # KeyboardInterrupt/SystemExit
                module.fail_json(msg="port definitions must be integers")
        else:
            module.fail_json(msg="port definitions must define the port field")

        # validate the port protocol is present, and convert it to
        # the internal API integer value (and validate it)
        if 'protocol' in item:
            protocol = axapi_get_vport_protocol(item['protocol'])
            if not protocol:
                module.fail_json(msg="invalid port protocol, must be one of: %s" % ','.join(AXAPI_VPORT_PROTOCOLS))
            else:
                item['protocol'] = protocol
        else:
            module.fail_json(msg="port definitions must define the port protocol (%s)" % ','.join(AXAPI_VPORT_PROTOCOLS))

        # convert the status to the internal API integer value
        if 'status' in item:
            item['status'] = axapi_enabled_disabled(item['status'])
        else:
            item['status'] = 1

        # ensure the service_group field is at least present
        if 'service_group' not in item:
            item['service_group'] = ''
def main():
argument_spec = a10_argument_spec()
argument_spec.update(url_argument_spec())
argument_spec.update(
dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
virtual_server=dict(type='str', aliases=['vip', 'virtual'], required=True),
virtual_server_ip=dict(type='str', aliases=['ip', 'address'], required=True),
virtual_server_status=dict(type='str', default='enabled', aliases=['status'], choices=['enabled', 'disabled']),
virtual_server_ports=dict(type='list', required=True),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False
)
host = module.params['host']
username = module.params['username']
password = module.params['password']
state = module.params['state']
write_config = module.params['write_config']
slb_virtual = module.params['virtual_server']
slb_virtual_ip = module.params['virtual_server_ip']
slb_virtual_status = module.params['virtual_server_status']
slb_virtual_ports = module.params['virtual_server_ports']
if slb_virtual is None:
module.fail_json(msg='virtual_server is required')
validate_ports(module, slb_virtual_ports)
axapi_base_url = 'https://%s/services/rest/V2.1/?format=json' % host
session_url = axapi_authenticate(module, axapi_base_url, username, password)
slb_virtual_data = axapi_call(module, session_url + '&method=slb.virtual_server.search', json.dumps({'name': slb_virtual}))
slb_virtual_exists = not axapi_failure(slb_virtual_data)
changed = False
if state == 'present':
json_post = {
'virtual_server': {
'name': slb_virtual,
'address': slb_virtual_ip,
'status': axapi_enabled_disabled(slb_virtual_status),
'vport_list': slb_virtual_ports,
}
}
# before creating/updating we need to validate that any
# service groups defined in the ports list exist since
# since the API will still create port definitions for
# them while indicating a failure occurred
checked_service_groups = []
for port in slb_virtual_ports:
if 'service_group' in port and port['service_group'] not in checked_service_groups:
# skip blank service group entries
if port['service_group'] == '':
continue
result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': port['service_group']}))
if axapi_failure(result):
module.fail_json(msg="the service group %s specified in the ports list does not exist" % port['service_group'])
checked_service_groups.append(port['service_group'])
if not slb_virtual_exists:
result = axapi_call(module, session_url + '&method=slb.virtual_server.create', json.dumps(json_post))
if axapi_failure(result):
module.fail_json(msg="failed to create the virtual server: %s" % result['response']['err']['msg'])
changed = True
else:
def needs_update(src_ports, dst_ports):
    """Report whether dst_ports is missing or mismatching any src port.

    Returns True when some port in ``src_ports`` either has no entry in
    ``dst_ports`` with the same 'port' number, or the first such entry
    differs in one of the VALID_PORT_FIELDS; otherwise returns False.
    """
    for wanted in src_ports:
        # Locate the first destination entry with a matching port number.
        match = None
        for existing in dst_ports:
            if wanted['port'] == existing['port']:
                match = existing
                break
        if match is None:
            # Port missing entirely on the other side.
            return True
        if any(wanted[field] != match[field] for field in VALID_PORT_FIELDS):
            # Same port number but a differing field value.
            return True
    # every port from the src exists in the dst, and none of them were different
    return False
defined_ports = slb_virtual_data.get('virtual_server', {}).get('vport_list', [])
# we check for a needed update both ways, in case ports
# are missing from either the ones specified by the user
# or from those on the device
if needs_update(defined_ports, slb_virtual_ports) or needs_update(slb_virtual_ports, defined_ports):
result = axapi_call(module, session_url + '&method=slb.virtual_server.update', json.dumps(json_post))
if axapi_failure(result):
module.fail_json(msg="failed to create the virtual server: %s" % result['response']['err']['msg'])
changed = True
# if we changed things, get the full info regarding
# the service group for the return data below
if changed:
result = axapi_call(module, session_url + '&method=slb.virtual_server.search', json.dumps({'name': slb_virtual}))
else:
result = slb_virtual_data
elif state == 'absent':
if slb_virtual_exists:
result = axapi_call(module, session_url + '&method=slb.virtual_server.delete', json.dumps({'name': slb_virtual}))
changed = True
else:
result = dict(msg="the virtual server was not present")
# if the config has changed, save the config unless otherwise requested
if changed and write_config:
write_result = axapi_call(module, session_url + '&method=system.action.write_memory')
if axapi_failure(write_result):
module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])
# log out of the session nicely and exit
axapi_call(module, session_url + '&method=session.close')
module.exit_json(changed=changed, content=result)
# standard ansible module imports
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.a10 import *
if __name__ == '__main__':
main()
|
lekum/ansible-modules-extras
|
network/a10/a10_virtual_server.py
|
Python
|
gpl-3.0
| 10,358
|
from gen import *

##########
# shared #
##########

# State variables: three zone temperatures x1..x3 plus the clock tau that
# drives the periodic (every 1 time unit) mode re-evaluation.
flow_var[0] = """
(declare-fun tau () Real)
(declare-fun x1 () Real)
(declare-fun x2 () Real)
(declare-fun x3 () Real)
"""
# Eight flows: one per on/off combination of the three heaters.  The
# "heating" form drives a zone toward its setpoint (100/200/300); the
# negated form lets the zone decay through the coupling terms only.
flow_dec[0] = """
(define-ode flow_1 ((= d/dt[x1] (* 0.015 (- 100 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3)))))
(= d/dt[x2] (* 0.045 (- 200 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3)))))
(= d/dt[x3] (* 0.03 (- 300 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2)))))
(= d/dt[tau] 1)))
(define-ode flow_2 ((= d/dt[x1] (* 0.015 (- 100 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3)))))
(= d/dt[x2] (* 0.045 (- 200 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3)))))
(= d/dt[x3] (* -0.03 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2))))
(= d/dt[tau] 1)))
(define-ode flow_3 ((= d/dt[x1] (* 0.015 (- 100 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3)))))
(= d/dt[x2] (* -0.045 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3))))
(= d/dt[x3] (* 0.03 (- 300 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2)))))
(= d/dt[tau] 1)))
(define-ode flow_4 ((= d/dt[x1] (* 0.015 (- 100 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3)))))
(= d/dt[x2] (* -0.045 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3))))
(= d/dt[x3] (* -0.03 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2))))
(= d/dt[tau] 1)))
(define-ode flow_5 ((= d/dt[x1] (* -0.015 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3))))
(= d/dt[x2] (* 0.045 (- 200 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3)))))
(= d/dt[x3] (* 0.03 (- 300 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2)))))
(= d/dt[tau] 1)))
(define-ode flow_6 ((= d/dt[x1] (* -0.015 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3))))
(= d/dt[x2] (* 0.045 (- 200 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3)))))
(= d/dt[x3] (* -0.03 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2))))
(= d/dt[tau] 1)))
(define-ode flow_7 ((= d/dt[x1] (* -0.015 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3))))
(= d/dt[x2] (* -0.045 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3))))
(= d/dt[x3] (* 0.03 (- 300 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2)))))
(= d/dt[tau] 1)))
(define-ode flow_8 ((= d/dt[x1] (* -0.015 (+ (* (- 1 0.03) x1) (* 0.01 x2) (* 0.02 x3))))
(= d/dt[x2] (* -0.045 (+ (* (- 1 0.06) x2) (* 0.01 x1) (* 0.05 x3))))
(= d/dt[x3] (* -0.03 (+ (* (- 1 0.07) x3) (* 0.02 x1) (* 0.05 x2))))
(= d/dt[tau] 1)))
"""
# Per-step variables: one _0/_t pair per state variable, a Boolean heater
# mode per zone, and the step duration time_{0}.
state_dec[0] = """
(declare-fun time_{0} () Real)
(declare-fun tau_{0}_0 () Real)
(declare-fun tau_{0}_t () Real)
(declare-fun mode1_{0} () Bool)
(declare-fun x1_{0}_0 () Real)
(declare-fun x1_{0}_t () Real)
(declare-fun mode2_{0} () Bool)
(declare-fun x2_{0}_0 () Real)
(declare-fun x2_{0}_t () Real)
(declare-fun mode3_{0} () Bool)
(declare-fun x3_{0}_0 () Real)
(declare-fun x3_{0}_t () Real)
"""
# Variable bounds for each step.
state_val[0] = """
(assert (<= 0 time_{0})) (assert (<= time_{0} 1))
(assert (<= 0 tau_{0}_0)) (assert (<= tau_{0}_0 1))
(assert (<= 0 tau_{0}_t)) (assert (<= tau_{0}_t 1))
(assert (<= -20 x1_{0}_0)) (assert (<= x1_{0}_0 100))
(assert (<= -20 x1_{0}_t)) (assert (<= x1_{0}_t 100))
(assert (<= -20 x2_{0}_0)) (assert (<= x2_{0}_0 100))
(assert (<= -20 x2_{0}_t)) (assert (<= x2_{0}_t 100))
(assert (<= -20 x3_{0}_0)) (assert (<= x3_{0}_0 100))
(assert (<= -20 x3_{0}_t)) (assert (<= x3_{0}_t 100))
"""
# Continuous evolution: tau stays in [0, 1] throughout the step and the
# mode Booleans select which of the eight flows to integrate.
cont_cond[0] = ["""
(assert (and (>= tau_{0}_0 0) (<= tau_{0}_0 1)
(>= tau_{0}_t 0) (<= tau_{0}_t 1)
(forall_t 1 [0 time_{0}] (>= tau_{0}_t 0))
(forall_t 2 [0 time_{0}] (<= tau_{0}_t 1))))
(assert (or (and (= mode1_{0} true) (= mode2_{0} true) (= mode3_{0} true)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_1)))
(and (= mode1_{0} true) (= mode2_{0} true) (= mode3_{0} false)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_2)))
(and (= mode1_{0} true) (= mode2_{0} false) (= mode3_{0} true)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_3)))
(and (= mode1_{0} true) (= mode2_{0} false) (= mode3_{0} false)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_4)))
(and (= mode1_{0} false) (= mode2_{0} true) (= mode3_{0} true)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_5)))
(and (= mode1_{0} false) (= mode2_{0} true) (= mode3_{0} false)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_6)))
(and (= mode1_{0} false) (= mode2_{0} false) (= mode3_{0} true)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_7)))
(and (= mode1_{0} false) (= mode2_{0} false) (= mode3_{0} false)
(= [x1_{0}_t x2_{0}_t x3_{0}_t tau_{0}_t]
(integral 0. time_{0} [x1_{0}_0 x2_{0}_0 x3_{0}_0 tau_{0}_0] flow_8)))))"""]
# Discrete jump (taken when tau reaches 1): state is continuous across the
# jump, tau resets to 0, and each heater switches on iff its zone
# temperature is at or below 20.
jump_cond[0] = ["""
(assert (and (= tau_{0}_t 1) (= tau_{1}_0 0)))
(assert (and (= x1_{1}_0 x1_{0}_t)))
(assert (or (and (<= x1_{0}_t 20) (= mode1_{1} true))
(and (> x1_{0}_t 20) (= mode1_{1} false))))
(assert (and (= x2_{1}_0 x2_{0}_t)))
(assert (or (and (<= x2_{0}_t 20) (= mode2_{1} true))
(and (> x2_{0}_t 20) (= mode2_{1} false))))
(assert (and (= x3_{1}_0 x3_{0}_t)))
(assert (or (and (<= x3_{0}_t 20) (= mode3_{1} true))
(and (> x3_{0}_t 20) (= mode3_{1} false))))"""]

#############
# Init/Goal #
#############

# Start near the end of a control period with all temperatures within
# (20 - 9, 20 + 9); the goal is the negation, i.e. some zone escaping the
# band (inductive safety check).
init_cond = """
(assert (< 0.99 tau_{0}_0))
(assert
(and (> x1_{0}_0 (- 20 9)) (< x1_{0}_0 (+ 20 9))
(> x2_{0}_0 (- 20 9)) (< x2_{0}_0 (+ 20 9))
(> x3_{0}_0 (- 20 9)) (< x3_{0}_0 (+ 20 9))))
"""
goal_cond = """
(assert (< 0.99 tau_{0}_t))
(assert (not
(and (> x1_{0}_t (- 20 9)) (< x1_{0}_t (+ 20 9))
(> x2_{0}_t (- 20 9)) (< x2_{0}_t (+ 20 9))
(> x3_{0}_t (- 20 9)) (< x3_{0}_t (+ 20 9)))))
"""

import sys

try:
    bound = int(sys.argv[1])
# Narrowed from a bare `except:`, which also swallowed SystemExit and
# KeyboardInterrupt.  Only a missing or non-integer argument should
# trigger the usage message.
except (IndexError, ValueError):
    print("Usage:", sys.argv[0], "<Bound>")
else:
    generate(bound, 1, [0], 0, init_cond, goal_cond)
|
fran-penedo/dreal
|
benchmarks/network/thermostat/thermostat-triple-ind.py
|
Python
|
gpl-3.0
| 6,756
|
import unittest
from captcha import fields, forms, models, widgets
class TestCase(unittest.TestCase):
def test_something(self):
raise NotImplementedError('Test not implemented. Bad developer!')
|
ckprice/bedrock
|
vendor-local/src/django-recaptcha/captcha/tests.py
|
Python
|
mpl-2.0
| 209
|
#!/usr/bin/env python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Starter script for Cinder OS API."""
import eventlet
eventlet.monkey_patch()
import logging as python_logging
import sys
from cinder import objects
from oslo_config import cfg
from oslo_log import log as logging
from oslo_reports import guru_meditation_report as gmr
from oslo_reports import opts as gmr_opts
from cinder import i18n
i18n.enable_lazy()
# Need to register global_opts
from cinder.common import config
from cinder import rpc
from cinder import service
from cinder import utils
from cinder import version
CONF = cfg.CONF
def main():
    """Configure the Cinder OS API service and block until it exits.

    Order matters here: option registration and CONF parsing must happen
    before logging setup, and rpc.init before the WSGI service is launched.
    """
    objects.register_all()
    gmr_opts.set_defaults(CONF)
    CONF(sys.argv[1:], project='cinder',
         version=version.version_string())
    config.set_middleware_defaults()
    logging.setup(CONF, "cinder")
    python_logging.captureWarnings(True)
    utils.monkey_patch()
    gmr.TextGuruMeditation.setup_autorun(version, conf=CONF)
    rpc.init(CONF)

    # Fork per-worker API processes and wait on them.
    process_launcher = service.process_launcher()
    api_server = service.WSGIService('osapi_volume')
    process_launcher.launch_service(api_server, workers=api_server.workers)
    process_launcher.wait()
|
phenoxim/cinder
|
cinder/cmd/api.py
|
Python
|
apache-2.0
| 1,845
|
#! /usr/bin/env python2
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
LIBRARIES BUILD
"""
import sys
from time import time
from os.path import join, abspath, dirname
# Be sure that the tools directory is in the search path
ROOT = abspath(join(dirname(__file__), ".."))
sys.path.insert(0, ROOT)
from workspace_tools.toolchains import TOOLCHAINS
from workspace_tools.targets import TARGET_NAMES, TARGET_MAP
from workspace_tools.options import get_default_options_parser
from workspace_tools.build_api import build_mbed_libs, build_lib
from workspace_tools.build_api import mcu_toolchain_matrix
from workspace_tools.build_api import static_analysis_scan, static_analysis_scan_lib, static_analysis_scan_library
from workspace_tools.build_api import print_build_results
from workspace_tools.settings import CPPCHECK_CMD, CPPCHECK_MSG_FORMAT
if __name__ == '__main__':
# NOTE(review): Python 2 script (print statements, `except Exception, e`);
# indentation was lost in extraction, code text preserved as-is.
start = time()
# Parse Options
parser = get_default_options_parser()
# Extra libraries
parser.add_option("-r", "--rtos",
action="store_true",
dest="rtos",
default=False,
help="Compile the rtos")
parser.add_option("--rpc",
action="store_true",
dest="rpc",
default=False,
help="Compile the rpc library")
parser.add_option("-e", "--eth",
action="store_true", dest="eth",
default=False,
help="Compile the ethernet library")
parser.add_option("-U", "--usb_host",
action="store_true",
dest="usb_host",
default=False,
help="Compile the USB Host library")
parser.add_option("-u", "--usb",
action="store_true",
dest="usb",
default=False,
help="Compile the USB Device library")
parser.add_option("-d", "--dsp",
action="store_true",
dest="dsp",
default=False,
help="Compile the DSP library")
parser.add_option("-F", "--fat",
action="store_true",
dest="fat",
default=False,
help="Compile FS and SD card file system library")
parser.add_option("-b", "--ublox",
action="store_true",
dest="ublox",
default=False,
help="Compile the u-blox library")
parser.add_option("", "--cpputest",
action="store_true",
dest="cpputest_lib",
default=False,
help="Compiles 'cpputest' unit test library (library should be on the same directory level as mbed repository)")
parser.add_option("-D", "",
action="append",
dest="macros",
help="Add a macro definition")
parser.add_option("-S", "--supported-toolchains",
action="store_true",
dest="supported_toolchains",
default=False,
help="Displays supported matrix of MCUs and toolchains")
parser.add_option("", "--cppcheck",
action="store_true",
dest="cppcheck_validation",
default=False,
help="Forces 'cppcheck' static code analysis")
parser.add_option('-f', '--filter',
dest='general_filter_regex',
default=None,
help='For some commands you can use filter to filter out results')
parser.add_option("-j", "--jobs", type="int", dest="jobs",
default=1, help="Number of concurrent jobs (default 1). Use 0 for auto based on host machine's number of CPUs")
parser.add_option("-v", "--verbose",
action="store_true",
dest="verbose",
default=False,
help="Verbose diagnostic output")
parser.add_option("--silent",
action="store_true",
dest="silent",
default=False,
help="Silent diagnostic output (no copy, compile notification)")
parser.add_option("-x", "--extra-verbose-notifications",
action="store_true",
dest="extra_verbose_notify",
default=False,
help="Makes compiler more verbose, CI friendly.")
(options, args) = parser.parse_args()
# NOTE(review): options.mcu / options.tool / options.options / options.clean
# come from get_default_options_parser — confirm in workspace_tools.options.
# Only prints matrix of supported toolchains
if options.supported_toolchains:
print mcu_toolchain_matrix(platform_filter=options.general_filter_regex)
exit(0)
# Get target list
if options.mcu:
mcu_list = (options.mcu).split(",")
for mcu in mcu_list:
if mcu not in TARGET_NAMES:
print "Given MCU '%s' not into the supported list:\n%s" % (mcu, TARGET_NAMES)
sys.exit(1)
targets = mcu_list
else:
targets = TARGET_NAMES
# Get toolchains list
if options.tool:
toolchain_list = (options.tool).split(",")
for tc in toolchain_list:
if tc not in TOOLCHAINS:
print "Given toolchain '%s' not into the supported list:\n%s" % (tc, TOOLCHAINS)
sys.exit(1)
toolchains = toolchain_list
else:
toolchains = TOOLCHAINS
# Get libraries list
libraries = []
# Additional Libraries
if options.rtos:
libraries.extend(["rtx", "rtos"])
if options.rpc:
libraries.extend(["rpc"])
if options.eth:
libraries.append("eth")
if options.usb:
libraries.append("usb")
if options.usb_host:
libraries.append("usb_host")
if options.dsp:
libraries.extend(["cmsis_dsp", "dsp"])
if options.fat:
libraries.extend(["fat"])
if options.ublox:
libraries.extend(["rtx", "rtos", "usb_host", "ublox"])
if options.cpputest_lib:
libraries.extend(["cpputest"])
# Build results
failures = []
successes = []
skipped = []
# Two mutually exclusive modes: cppcheck static analysis only, or the
# actual mbed/library builds for every (toolchain, target) pair.
# CPPCHECK code validation
if options.cppcheck_validation:
for toolchain in toolchains:
for target in targets:
try:
mcu = TARGET_MAP[target]
# CMSIS and MBED libs analysis
static_analysis_scan(mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT, verbose=options.verbose, jobs=options.jobs)
for lib_id in libraries:
# Static check for library
static_analysis_scan_lib(lib_id, mcu, toolchain, CPPCHECK_CMD, CPPCHECK_MSG_FORMAT,
options=options.options,
extra_verbose=options.extra_verbose_notify, verbose=options.verbose, jobs=options.jobs, clean=options.clean,
macros=options.macros)
pass
except Exception, e:
if options.verbose:
import traceback
traceback.print_exc(file=sys.stdout)
sys.exit(1)
print e
else:
# Build
for toolchain in toolchains:
for target in targets:
tt_id = "%s::%s" % (toolchain, target)
try:
mcu = TARGET_MAP[target]
lib_build_res = build_mbed_libs(mcu, toolchain,
options=options.options,
extra_verbose=options.extra_verbose_notify,
verbose=options.verbose,
silent=options.silent,
jobs=options.jobs,
clean=options.clean,
macros=options.macros)
for lib_id in libraries:
build_lib(lib_id, mcu, toolchain,
options=options.options,
extra_verbose=options.extra_verbose_notify,
verbose=options.verbose,
silent=options.silent,
clean=options.clean,
macros=options.macros,
jobs=options.jobs)
if lib_build_res:
successes.append(tt_id)
else:
skipped.append(tt_id)
except Exception, e:
if options.verbose:
import traceback
traceback.print_exc(file=sys.stdout)
sys.exit(1)
failures.append(tt_id)
print e
# Write summary of the builds
print
print "Completed in: (%.2f)s" % (time() - start)
print
for report, report_name in [(successes, "Build successes:"),
(skipped, "Build skipped:"),
(failures, "Build failures:"),
]:
if report:
print print_build_results(report, report_name),
# Non-zero exit code when any build failed, so CI can detect it.
if failures:
sys.exit(1)
|
nabilbendafi/mbed
|
workspace_tools/build.py
|
Python
|
apache-2.0
| 10,294
|
# Copyright (c) 2012 OpenStack Foundation
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from novaclient.v1_1.client import Client # noqa
|
tylertian/Openstack
|
openstack F/python-novaclient/novaclient/v1_1/__init__.py
|
Python
|
apache-2.0
| 693
|
# Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from jenkins_jobs.modules import general
from tests import base
class TestCaseModuleGeneral(base.BaseScenariosTestCase):
    """Run the shared scenario harness over the 'general' module fixtures."""

    # The harness discovers one scenario per fixture file in this directory.
    fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
    scenarios = base.get_scenarios(fixtures_path)
    klass = general.General
|
dataxu/jenkins-job-builder
|
tests/general/test_general.py
|
Python
|
apache-2.0
| 973
|
from bokeh.core.properties import value
from bokeh.io import show, output_file
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure
from bokeh.transform import dodge
# Standalone HTML output file for the example.
output_file("dodged_bars.html")
# Categories on the x-axis and one data series per year.
fruits = ['Apples', 'Pears', 'Nectarines', 'Plums', 'Grapes', 'Strawberries']
years = ['2015', '2016', '2017']
data = {'fruits' : fruits,
'2015' : [2, 1, 4, 3, 2, 4],
'2016' : [5, 3, 3, 2, 4, 6],
'2017' : [3, 2, 4, 4, 5, 3]}
source = ColumnDataSource(data=data)
# Categorical x-range; fixed y-range (0, 10) leaves headroom above the bars.
p = figure(x_range=fruits, y_range=(0, 10), plot_height=250, title="Fruit Counts by Year",
toolbar_location=None, tools="")
# dodge() offsets each year's bars from the category center (-0.25 / 0 / +0.25)
# so the three series sit side by side instead of overlapping.
p.vbar(x=dodge('fruits', -0.25, range=p.x_range), top='2015', width=0.2, source=source,
color="#c9d9d3", legend=value("2015"))
p.vbar(x=dodge('fruits', 0.0, range=p.x_range), top='2016', width=0.2, source=source,
color="#718dbf", legend=value("2016"))
p.vbar(x=dodge('fruits', 0.25, range=p.x_range), top='2017', width=0.2, source=source,
color="#e84d60", legend=value("2017"))
# Cosmetic tweaks: padding at the range edges, no vertical gridlines,
# legend laid out horizontally in the top-left corner.
p.x_range.range_padding = 0.1
p.xgrid.grid_line_color = None
p.legend.location = "top_left"
p.legend.orientation = "horizontal"
show(p)
|
dennisobrien/bokeh
|
sphinx/source/docs/user_guide/examples/categorical_bar_dodged.py
|
Python
|
bsd-3-clause
| 1,196
|
"""
Support for SCSGate switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.scsgate/
"""
import logging
import homeassistant.components.scsgate as scsgate
from homeassistant.components.switch import SwitchDevice
from homeassistant.const import ATTR_ENTITY_ID
DEPENDENCIES = ['scsgate']
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """Set up both flavours of SCSGate switch from the platform config."""
    logger = logging.getLogger(__name__)

    _setup_traditional_switches(
        logger=logger,
        config=config,
        add_devices_callback=add_devices_callback)

    _setup_scenario_switches(logger=logger, config=config, hass=hass)
def _setup_traditional_switches(logger, config, add_devices_callback):
    """Create the 'traditional' switches and register them with SCSGate."""
    switches = []

    for _, entity_info in (config.get('traditional') or {}).items():
        scs_id = entity_info['scs_id']
        if scs_id in scsgate.SCSGATE.devices:
            # Already known to the gate; don't add it twice.
            continue

        logger.info(
            "Adding %s scsgate.traditional_switch", entity_info['name'])
        switches.append(SCSGateSwitch(
            name=entity_info['name'],
            scs_id=scs_id,
            logger=logger))

    add_devices_callback(switches)
    scsgate.SCSGATE.add_devices_to_register(switches)
def _setup_scenario_switches(logger, config, hass):
    """Create the scenario switches and hand them straight to SCSGate."""
    scenario = config.get("scenario")
    if not scenario:
        return

    for _, entity_info in scenario.items():
        scs_id = entity_info['scs_id']
        if scs_id in scsgate.SCSGATE.devices:
            # Already registered with the gate.
            continue

        logger.info(
            "Adding %s scsgate.scenario_switch", entity_info['name'])
        scsgate.SCSGATE.add_device(SCSGateScenarioSwitch(
            name=entity_info['name'],
            scs_id=scs_id,
            logger=logger,
            hass=hass))
class SCSGateSwitch(SwitchDevice):
    """A toggleable switch driven through the SCSGate bridge."""

    def __init__(self, scs_id, name, logger):
        """Initialize the switch."""
        self._name = name
        self._scs_id = scs_id
        self._toggled = False
        self._logger = logger

    @property
    def scs_id(self):
        """Return the SCS ID."""
        return self._scs_id

    @property
    def should_poll(self):
        """State changes are pushed by the gate, so never poll."""
        return False

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    @property
    def is_on(self):
        """Return true if switch is on."""
        return self._toggled

    def _request_state(self, toggled):
        """Queue a toggle task on the gate and mirror the state locally."""
        from scsgate.tasks import ToggleStatusTask

        scsgate.SCSGATE.append_task(
            ToggleStatusTask(target=self._scs_id, toggled=toggled))
        self._toggled = toggled
        self.update_ha_state()

    def turn_on(self, **kwargs):
        """Turn the device on."""
        self._request_state(True)

    def turn_off(self, **kwargs):
        """Turn the device off."""
        self._request_state(False)

    def process_event(self, message):
        """Handle a SCSGate message related with this switch."""
        if self._toggled == message.toggled:
            self._logger.info(
                "Switch %s, ignoring message %s because state already active",
                self._scs_id, message)
            # Nothing changed, ignoring
            return

        self._toggled = message.toggled
        self.update_ha_state()

        # Announce externally-triggered changes on the event bus.
        self.hass.bus.fire(
            'button_pressed', {
                ATTR_ENTITY_ID: self._scs_id,
                'state': "on" if self._toggled else "off",
            }
        )
class SCSGateScenarioSwitch:
    """A SCSGate scenario switch.

    Always reports an 'off' state; toggling it only serves to fire
    scenario events on the Home Assistant bus.
    """

    def __init__(self, scs_id, name, logger, hass):
        """Initialize the scenario."""
        self._name = name
        self._scs_id = scs_id
        self._logger = logger
        self._hass = hass

    @property
    def scs_id(self):
        """Return the SCS ID."""
        return self._scs_id

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    def process_event(self, message):
        """Translate a SCSGate message into a scenario_switch_triggered event."""
        from scsgate.messages import ScenarioTriggeredMessage, StateMessage

        if isinstance(message, StateMessage):
            scenario_id = message.bytes[4]
        elif isinstance(message, ScenarioTriggeredMessage):
            scenario_id = message.scenario
        else:
            self._logger.warn(
                "Scenario switch: received unknown message %s",
                message)
            return

        self._hass.bus.fire(
            'scenario_switch_triggered', {
                ATTR_ENTITY_ID: int(self._scs_id),
                'scenario_id': int(scenario_id, 16),
            }
        )
|
mikaelboman/home-assistant
|
homeassistant/components/switch/scsgate.py
|
Python
|
mit
| 5,534
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_pppoe_interface
short_description: Configure the PPPoE interfaces in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and pppoe_interface category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_pppoe_interface:
description:
- Configure the PPPoE interfaces.
default: null
type: dict
suboptions:
ac_name:
description:
- PPPoE AC name.
type: str
auth_type:
description:
- PPP authentication type to use.
type: str
choices:
- auto
- pap
- chap
- mschapv1
- mschapv2
device:
description:
- Name for the physical interface. Source system.interface.name.
type: str
dial_on_demand:
description:
- Enable/disable dial on demand to dial the PPPoE interface when packets are routed to the PPPoE interface.
type: str
choices:
- enable
- disable
disc_retry_timeout:
description:
- PPPoE discovery init timeout value in (0-4294967295 sec).
type: int
idle_timeout:
description:
- PPPoE auto disconnect after idle timeout (0-4294967295 sec).
type: int
ipunnumbered:
description:
- PPPoE unnumbered IP.
type: str
ipv6:
description:
- Enable/disable IPv6 Control Protocol (IPv6CP).
type: str
choices:
- enable
- disable
lcp_echo_interval:
description:
- PPPoE LCP echo interval in (0-4294967295 sec).
type: int
lcp_max_echo_fails:
description:
- Maximum missed LCP echo messages before disconnect (0-4294967295).
type: int
name:
description:
- Name of the PPPoE interface.
required: true
type: str
padt_retry_timeout:
description:
- PPPoE terminate timeout value in (0-4294967295 sec).
type: int
password:
description:
- Enter the password.
type: str
pppoe_unnumbered_negotiate:
description:
- Enable/disable PPPoE unnumbered negotiation.
type: str
choices:
- enable
- disable
service_name:
description:
- PPPoE service name.
type: str
username:
description:
- User name.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure the PPPoE interfaces.
fortios_system_pppoe_interface:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_pppoe_interface:
ac_name: "<your_own_value>"
auth_type: "auto"
device: "<your_own_value> (source system.interface.name)"
dial_on_demand: "enable"
disc_retry_timeout: "7"
idle_timeout: "8"
ipunnumbered: "<your_own_value>"
ipv6: "enable"
lcp_echo_interval: "11"
lcp_max_echo_fails: "12"
name: "default_name_13"
padt_retry_timeout: "14"
password: "<your_own_value>"
pppoe_unnumbered_negotiate: "enable"
service_name: "<your_own_value>"
username: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open a session on the FortiGate described by the module parameters.

    Honours the optional 'https' flag (HTTPS on unless explicitly
    disabled) and 'ssl_verify' for certificate checking.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    verify = data['ssl_verify']

    fos.debug('on')
    # HTTPS stays on unless the caller passed an explicit falsy value.
    fos.https('off' if ('https' in data and not data['https']) else 'on')
    fos.login(host, username, password, verify=verify)
def filter_system_pppoe_interface_data(json):
    """Reduce the playbook dict to the options the API understands.

    Unknown keys and keys whose value is None are dropped; everything
    else is copied through unchanged.
    """
    option_list = ['ac_name', 'auth_type', 'device',
                   'dial_on_demand', 'disc_retry_timeout', 'idle_timeout',
                   'ipunnumbered', 'ipv6', 'lcp_echo_interval',
                   'lcp_max_echo_fails', 'name', 'padt_retry_timeout',
                   'password', 'pppoe_unnumbered_negotiate', 'service_name',
                   'username']

    return {option: json[option]
            for option in option_list
            if option in json and json[option] is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from snake_case to hyphen-case.

    FortiOS expects hyphenated attribute names while Ansible argument
    specs use underscores.  Dicts are rebuilt with translated keys,
    list elements are converted in place, and scalars are returned
    untouched.
    """
    if isinstance(data, list):
        # Assign back through the index: the previous `for elem in data`
        # form only rebound the loop variable, so converted dicts were
        # discarded and lists of dicts were never translated.
        for index, elem in enumerate(data):
            data[index] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for key, value in data.items():
            new_data[key.replace('_', '-')] = underscore_to_hyphen(value)
        data = new_data
    return data
def system_pppoe_interface(data, fos):
    """Create, update or delete a system/pppoe-interface object.

    For state 'present' the filtered, hyphenated payload is pushed with
    fos.set(); for state 'absent' the object named by the payload's
    'name' key is removed with fos.delete(). Returns the API response.
    """
    vdom = data['vdom']
    desired_state = data['state']
    payload = underscore_to_hyphen(
        filter_system_pppoe_interface_data(data['system_pppoe_interface']))

    if desired_state == "present":
        return fos.set('system',
                       'pppoe-interface',
                       data=payload,
                       vdom=vdom)

    if desired_state == "absent":
        return fos.delete('system',
                          'pppoe-interface',
                          mkey=payload['name'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True when the API response indicates success.

    A DELETE that came back 404 also counts as success: the object was
    already absent, which is the desired end state.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
    """Dispatch to the system/pppoe-interface handler and summarize.

    :param data: module parameters
    :param fos: connected FortiOS API handle
    :return: tuple (is_error, has_changed, api_response)
    :raises ValueError: when no ``system_pppoe_interface`` payload was
        supplied. Previously this path crashed with an unhelpful
        ``NameError``/``UnboundLocalError`` because ``resp`` was never
        assigned before being used.
    """
    if data['system_pppoe_interface']:
        resp = system_pppoe_interface(data, fos)
    else:
        raise ValueError('missing the required parameter: system_pppoe_interface')

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Module entry point: parse arguments, connect and apply the config."""
    # Argument spec mirrors the DOCUMENTATION block; option sub-spec under
    # system_pppoe_interface matches filter_system_pppoe_interface_data().
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "system_pppoe_interface": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "ac_name": {"required": False, "type": "str"},
                "auth_type": {"required": False, "type": "str",
                              "choices": ["auto", "pap", "chap",
                                          "mschapv1", "mschapv2"]},
                "device": {"required": False, "type": "str"},
                "dial_on_demand": {"required": False, "type": "str",
                                   "choices": ["enable", "disable"]},
                "disc_retry_timeout": {"required": False, "type": "int"},
                "idle_timeout": {"required": False, "type": "int"},
                "ipunnumbered": {"required": False, "type": "str"},
                "ipv6": {"required": False, "type": "str",
                         "choices": ["enable", "disable"]},
                "lcp_echo_interval": {"required": False, "type": "int"},
                "lcp_max_echo_fails": {"required": False, "type": "int"},
                "name": {"required": True, "type": "str"},
                "padt_retry_timeout": {"required": False, "type": "int"},
                "password": {"required": False, "type": "str"},
                "pppoe_unnumbered_negotiate": {"required": False, "type": "str",
                                               "choices": ["enable", "disable"]},
                "service_name": {"required": False, "type": "str"},
                "username": {"required": False, "type": "str"}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    # (all three credentials must be supplied explicitly to use it).
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        # HTTPAPI transport: reuse the persistent connection socket set up
        # by Ansible's connection plugin.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_system(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy transport: direct fortiosapi login/logout around the call.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_system(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
|
kvar/ansible
|
lib/ansible/modules/network/fortios/fortios_system_pppoe_interface.py
|
Python
|
gpl-3.0
| 13,917
|
""" Overrides for Docker-based devstack. """
from .devstack import * # pylint: disable=wildcard-import, unused-wildcard-import
# Docker does not support the syslog socket at /dev/log. Rely on the console.
LOGGING['handlers']['local'] = LOGGING['handlers']['tracking'] = {
'class': 'logging.NullHandler',
}
LOGGING['loggers']['tracking']['handlers'] = ['console']
LMS_BASE = 'edx.devstack.lms:18000'
CMS_BASE = 'edx.devstack.studio:18010'
LMS_ROOT_URL = 'http://{}'.format(LMS_BASE)
FEATURES.update({
'ENABLE_COURSEWARE_INDEX': False,
'ENABLE_LIBRARY_INDEX': False,
'ENABLE_DISCUSSION_SERVICE': True,
})
CREDENTIALS_SERVICE_USERNAME = 'credentials_worker'
OAUTH_OIDC_ISSUER = '{}/oauth2'.format(LMS_ROOT_URL)
JWT_AUTH.update({
'JWT_SECRET_KEY': 'lms-secret',
'JWT_ISSUER': OAUTH_OIDC_ISSUER,
'JWT_AUDIENCE': 'lms-key',
})
|
BehavioralInsightsTeam/edx-platform
|
cms/envs/devstack_docker.py
|
Python
|
agpl-3.0
| 858
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from .geom import geom
class geom_text(geom):
    """Geom that draws text labels at (x, y) positions.

    Required aesthetics: ``label``, ``x`` and ``y``. Optional aesthetics
    mirror ggplot2's geom_text; ``angle`` and ``lineheight`` are renamed
    to matplotlib's ``rotation`` and ``linespacing``.
    """
    DEFAULT_AES = {'alpha': None, 'angle': 0, 'color': 'black', 'family': None,
                   'fontface': 1, 'hjust': None, 'size': 12, 'vjust': None,
                   'lineheight': 1.2}
    REQUIRED_AES = {'label', 'x', 'y'}
    DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity', 'parse': False}

    _aes_renames = {'angle': 'rotation', 'lineheight': 'linespacing'}
    _units = {'alpha', 'color', 'family', 'size'}

    def _plot_unit(self, pinfo, ax):
        """Draw one group of labels on *ax* and grow the axes to fit them."""
        x = pinfo.pop('x')
        y = pinfo.pop('y')
        label = pinfo.pop('label')

        # TODO: Deal with the fontface
        # from ggplot2
        # 1 = plain, 2 = bold, 3 = italic, 4 = bold italic
        # "plain", "bold", "italic", "oblique", and "bold.italic"
        pinfo.pop('fontface')

        # Guard the empty case up front: max()/min() below would raise
        # on an empty sequence, and there is nothing to draw anyway.
        if len(x) == 0:
            return

        # plt.text does not resize axes, must do manually
        xmax = max(x)
        xmin = min(x)
        ymax = max(y)
        ymin = min(y)

        # Pad the data extent by 10% on every side so labels are not
        # clipped at the axes boundary.
        margin = 0.1
        xmargin = (xmax - xmin) * margin
        ymargin = (ymax - ymin) * margin
        xmax = xmax + xmargin
        xmin = xmin - xmargin
        ymax = ymax + ymargin
        ymin = ymin - ymargin

        # Take current plotting dimension in account for the case that we
        # work on a special dataframe just for this geom!
        if self.data is not None:  # idiom fix: was `not self.data is None`
            cxmin, cxmax = ax.get_xlim()
            cymin, cymax = ax.get_ylim()
            # there is a problem if geom_text is the first plot, as
            # then the dimension are 0-1 for all axis :-(
            xmax = max(xmax, cxmax)
            xmin = min(xmin, cxmin)
            ymax = max(ymax, cymax)
            ymin = min(ymin, cymin)

        # TODO: Fix the defaults for this
        # try out 0.5
        # hjust/vjust shift the anchor point; without them the label is
        # centered on (x, y).
        if pinfo['hjust'] is not None:
            x = (np.array(x) + pinfo['hjust']).tolist()
        else:
            pinfo['horizontalalignment'] = 'center'

        if pinfo['vjust'] is not None:
            y = (np.array(y) + pinfo['vjust']).tolist()
        else:
            pinfo['verticalalignment'] = 'center'

        # hjust/vjust are not matplotlib kwargs; drop them before ax.text.
        del pinfo['hjust']
        del pinfo['vjust']

        for x_g, y_g, s in zip(x, y, label):
            ax.text(x_g, y_g, s, **pinfo)

        # TODO: Find out why this isn't working as desired
        # resize axes
        ax.axis([xmin, xmax, ymin, ymax])
|
kmather73/ggplot
|
ggplot/geoms/geom_text.py
|
Python
|
bsd-2-clause
| 2,616
|
# Copyright (c) 2013, Ricardo Andrade
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ..core import SparseGP
from .. import likelihoods
from .. import kern
from ..likelihoods import likelihood
from ..inference.latent_function_inference import expectation_propagation_dtc
class SparseGPClassification(SparseGP):
    """
    Sparse Gaussian Process model for classification

    This is a thin wrapper around the sparse_GP class, with a set of sensible defaults

    :param X: input observations
    :param Y: observed values
    :param likelihood: a GPy likelihood, defaults to Bernoulli
    :param kernel: a GPy kernel, defaults to RBF
    :param Z: inducing inputs, defaults to a random subset of X
    :param num_inducing: number of inducing points to pick when Z is None
    :param Y_metadata: metadata passed through to the likelihood
    :rtype: model object
    """

    def __init__(self, X, Y=None, likelihood=None, kernel=None, Z=None, num_inducing=10, Y_metadata=None):
        if kernel is None:
            kernel = kern.RBF(X.shape[1])

        # Bug fix: the original unconditionally overwrote `likelihood`
        # with Bernoulli, silently discarding a caller-supplied likelihood
        # even though the docstring advertises it as configurable.
        if likelihood is None:
            likelihood = likelihoods.Bernoulli()

        if Z is None:
            # Pick a random subset of the inputs as inducing points.
            i = np.random.permutation(X.shape[0])[:num_inducing]
            Z = X[i].copy()
        else:
            assert Z.shape[1] == X.shape[1]

        SparseGP.__init__(self, X, Y, Z, kernel, likelihood,
                          inference_method=expectation_propagation_dtc.EPDTC(),
                          name='SparseGPClassification', Y_metadata=Y_metadata)
|
ptonner/GPy
|
GPy/models/sparse_gp_classification.py
|
Python
|
bsd-3-clause
| 1,855
|
#!/usr/bin/env python
"""
================
sMRI: FSReconAll
================

This script, smri_fsreconall.py, demonstrates the ability to use the
create_reconall_workflow function to create a workflow and then run it on a
set of subjects and then make an average subject::

    python smri_fsreconall.py

For an example on how to call FreeSurfer's reconall script in Nipype
see smri_freesurfer.py.

Import necessary modules from nipype.
"""
import os

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.workflows.smri.freesurfer import create_reconall_workflow
from nipype.interfaces.freesurfer.utils import MakeAverageSubject
from nipype.interfaces.utility import IdentityInterface
"""
Assign the tutorial directory
"""
tutorial_dir = os.path.abspath('smri_fsreconall_tutorial')
if not os.path.isdir(tutorial_dir):
    os.mkdir(tutorial_dir)
"""
Define the workflow directories
"""
subject_list = ['s1', 's3']
data_dir = os.path.abspath('data')
# FreeSurfer SUBJECTS_DIR for the recon-all outputs.
subjects_dir = os.path.join(tutorial_dir, 'subjects_dir')
if not os.path.exists(subjects_dir):
    os.mkdir(subjects_dir)

wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.join(tutorial_dir, 'workdir')
"""
Create inputspec
"""
# iterables fans the workflow out: one run per subject in subject_list.
inputspec = pe.Node(interface=IdentityInterface(['subject_id']),
                    name="inputspec")
inputspec.iterables = ("subject_id", subject_list)
"""
Grab data
"""
datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                               outfields=['struct']),
                     name='datasource')
datasource.inputs.base_directory = data_dir
# Expects data laid out as data/<subject_id>/struct.nii
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
# NOTE(review): subject_id is also connected from inputspec below, which
# supersedes this static assignment per iteration — confirm intent.
datasource.inputs.subject_id = subject_list
datasource.inputs.sort_filelist = True
wf.connect(inputspec, 'subject_id', datasource, 'subject_id')
"""
Run recon-all
"""
recon_all = create_reconall_workflow()
recon_all.inputs.inputspec.subjects_dir = subjects_dir
wf.connect(datasource, 'struct', recon_all, 'inputspec.T1_files')
wf.connect(inputspec, 'subject_id', recon_all, 'inputspec.subject_id')
"""
Make average subject
"""
# JoinNode collapses the per-subject fan-out back into one node that
# receives the list of all subject ids.
average = pe.JoinNode(interface=MakeAverageSubject(),
                      joinsource="inputspec",
                      joinfield="subjects_ids",
                      name="average")
average.inputs.subjects_dir = subjects_dir

wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, 'subjects_ids')

wf.run("MultiProc", plugin_args={'n_procs': 4})
|
BrainIntensive/OnlineBrainIntensive
|
resources/nipype/nipype/examples/smri_fsreconall.py
|
Python
|
mit
| 2,537
|
# (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for function
:func:`iris.fileformats.grib._load_convert.generating_process`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised
# before importing anything else.
import iris.tests as tests
from iris.fileformats.grib._load_convert import generating_process
class TestGeneratingProcess(tests.IrisTest):
    """Check the warning behaviour of generating_process()."""

    def setUp(self):
        # Intercept warnings.warn so emitted messages can be inspected.
        self.warn_patch = self.patch('warnings.warn')

    def test_nowarn(self):
        # By default (warn_on_unsupported off) no warning is raised.
        generating_process(None)
        self.assertEqual(self.warn_patch.call_count, 0)

    def test_warn(self):
        module = 'iris.fileformats.grib._load_convert'
        self.patch(module + '.options.warn_on_unsupported', True)
        generating_process(None)
        emitted = [args[0][0] for args in self.warn_patch.call_args_list]
        # Each expected message fragment must match exactly one warning.
        expected_fragments = [
            'Unable to translate type of generating process',
            'Unable to translate background generating process',
            'Unable to translate forecast generating process']
        for fragment in expected_fragments:
            hits = [message for message in emitted if fragment in message]
            self.assertEqual(len(hits), 1)
            emitted.remove(hits[0])


if __name__ == '__main__':
    tests.main()
|
mo-g/iris
|
lib/iris/tests/unit/fileformats/grib/load_convert/test_generating_process.py
|
Python
|
gpl-3.0
| 2,101
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables
from horizon import tabs
from horizon.utils import memoized
from horizon.utils.urlresolvers import reverse # noqa
from horizon import workflows
from openstack_dashboard.contrib.sahara.api import sahara as saharaclient
import openstack_dashboard.contrib.sahara.content.data_processing. \
data_sources.tables as ds_tables
import openstack_dashboard.contrib.sahara.content.data_processing. \
data_sources.tabs as _tabs
import openstack_dashboard.contrib.sahara.content.data_processing. \
data_sources.workflows.create as create_flow
import openstack_dashboard.contrib.sahara.content.data_processing. \
data_sources.workflows.edit as edit_flow
LOG = logging.getLogger(__name__)
class DataSourcesView(tables.DataTableView):
    """Tabular listing of the project's Sahara data sources."""
    table_class = ds_tables.DataSourcesTable
    template_name = 'project/data_processing.data_sources/data_sources.html'
    page_title = _("Data Sources")

    def get_data(self):
        """Fetch data sources; on API failure show an error and render empty."""
        try:
            data_sources = saharaclient.data_source_list(self.request)
        except Exception:
            # Degrade gracefully: empty table plus a user-facing error
            # message instead of a server error page.
            data_sources = []
            exceptions.handle(self.request,
                              _("Unable to fetch data sources."))
        return data_sources
class CreateDataSourceView(workflows.WorkflowView):
    """Modal workflow view for creating a new data source."""
    workflow_class = create_flow.CreateDataSource
    # On success, return to the creation form (shown as an AJAX modal).
    success_url = \
        "horizon:project:data_processing.data-sources:create-data-source"
    classes = ("ajax-modal",)
    template_name = "project/data_processing.data_sources/create.html"
    page_title = _("Create Data Source")
class EditDataSourceView(CreateDataSourceView):
    """Modal workflow view for editing an existing data source."""
    workflow_class = edit_flow.EditDataSource
    page_title = _("Edit Data Source")

    def get_context_data(self, **kwargs):
        # Expose the edited object's id to the template.
        context = super(EditDataSourceView, self).get_context_data(**kwargs)
        context["data_source_id"] = kwargs["data_source_id"]
        return context

    def get_initial(self):
        # Seed the workflow with the id taken from the URL kwargs.
        initial = super(EditDataSourceView, self).get_initial()
        initial['data_source_id'] = self.kwargs['data_source_id']
        return initial
class DataSourceDetailsView(tabs.TabView):
    """Tabbed detail page for a single data source."""
    tab_group_class = _tabs.DataSourceDetailsTabs
    template_name = 'project/data_processing.data_sources/details.html'
    page_title = _("Data Source Details")

    @memoized.memoized_method
    def get_object(self):
        """Fetch the data source from the URL's id (memoized per request)."""
        ds_id = self.kwargs["data_source_id"]
        try:
            return saharaclient.data_source_get(self.request, ds_id)
        except Exception:
            # Redirect back to the listing with an error message rather
            # than rendering a broken detail page.
            msg = _('Unable to retrieve details for data source "%s".') % ds_id
            redirect = reverse(
                "horizon:project:data_processing.data_sources:data-sources")
            exceptions.handle(self.request, msg, redirect=redirect)

    def get_context_data(self, **kwargs):
        context = super(DataSourceDetailsView, self).get_context_data(**kwargs)
        context['data_source'] = self.get_object()
        return context
|
FNST-OpenStack/horizon
|
openstack_dashboard/contrib/sahara/content/data_processing/data_sources/views.py
|
Python
|
apache-2.0
| 3,628
|
# Enable Python coverage for subprocesses. See:
# http://nedbatchelder.com/code/coverage/subprocess.html
try:
    import coverage
except ImportError:
    # coverage is an optional test dependency; without it this hook is a no-op.
    pass
else:
    # Fix: call outside the try block so an ImportError raised *inside*
    # process_startup() is no longer silently swallowed.
    coverage.process_startup()
|
kellinm/anaconda
|
tests/usercustomize.py
|
Python
|
gpl-2.0
| 191
|
# coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import octoprint.plugin
import octoprint.plugin.core
from octoprint.settings import valid_boolean_trues
from octoprint.server.util.flask import restricted_access
from octoprint.server import admin_permission
from octoprint.util.pip import PipCaller, UnknownPip
from flask import jsonify, make_response
from flask.ext.babel import gettext
import logging
import sarge
import sys
import requests
import re
import os
import pkg_resources
class PluginManagerPlugin(octoprint.plugin.SimpleApiPlugin,
octoprint.plugin.TemplatePlugin,
octoprint.plugin.AssetPlugin,
octoprint.plugin.SettingsPlugin,
octoprint.plugin.StartupPlugin,
octoprint.plugin.BlueprintPlugin):
def __init__(self):
    # Plugin keys whose enable/disable/install/uninstall only takes
    # effect after a restart are queued in these sets.
    self._pending_enable = set()
    self._pending_disable = set()
    self._pending_install = set()
    self._pending_uninstall = set()

    # Constructed in initialize(); wraps command line pip invocations.
    self._pip_caller = None
    # Pip version threshold used by _call_pip() for
    # --process-dependency-links handling.
    self._pip_version_dependency_links = pkg_resources.parse_version("1.5")

    # Cached state of the remote plugin repository listing.
    self._repository_available = False
    self._repository_plugins = []
    self._repository_cache_path = None
    self._repository_cache_ttl = 0
def initialize(self):
    # Dedicated logger for raw pip/console output, fed by _log().
    self._console_logger = logging.getLogger("octoprint.plugins.pluginmanager.console")
    self._repository_cache_path = os.path.join(self.get_plugin_data_folder(), "plugins.json")
    # Setting is stored in minutes; keep the TTL in seconds internally.
    self._repository_cache_ttl = self._settings.get_int(["repository_ttl"]) * 60

    self._pip_caller = PipCaller(configured=self._settings.get(["pip"]))
    # Route pip's output through our console logger / frontend stream.
    self._pip_caller.on_log_call = self._log_call
    self._pip_caller.on_log_stdout = self._log_stdout
    self._pip_caller.on_log_stderr = self._log_stderr
##~~ Body size hook
def increase_upload_bodysize(self, current_max_body_sizes, *args, **kwargs):
    """Hook: raise the maximum request body size for plugin archive uploads."""
    max_archive_size = 50 * 1024 * 1024  # 50 MB
    return [("POST", r"/upload_archive", max_archive_size)]
##~~ StartupPlugin
def on_startup(self, host, port):
    # Mirror the pip/plugin console stream into its own size-capped
    # logfile (2 MB, rotating) next to the regular plugin logs.
    # NOTE(review): uses logging.handlers although only `logging` is
    # imported at the top of this file — works if another module imports
    # logging.handlers first; confirm.
    console_logging_handler = logging.handlers.RotatingFileHandler(self._settings.get_plugin_logfile_path(postfix="console"), maxBytes=2*1024*1024)
    console_logging_handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
    console_logging_handler.setLevel(logging.DEBUG)

    self._console_logger.addHandler(console_logging_handler)
    self._console_logger.setLevel(logging.DEBUG)
    # Keep console output out of the root/octoprint loggers.
    self._console_logger.propagate = False

    # Load the cached repository listing if one is present on disk.
    self._repository_available = self._fetch_repository_from_disk()
##~~ SettingsPlugin
def get_settings_defaults(self):
    """Default plugin settings."""
    return {
        "repository": "http://plugins.octoprint.org/plugins.json",
        "repository_ttl": 24 * 60,  # minutes
        "pip": None,
        "dependency_links": False,
        "hidden": [],
    }
def on_settings_save(self, data):
    octoprint.plugin.SettingsPlugin.on_settings_save(self, data)

    # Recompute the cache TTL (minutes -> seconds) from the saved value.
    self._repository_cache_ttl = self._settings.get_int(["repository_ttl"]) * 60
    # Force the pip caller to re-detect its configuration on next use,
    # in case the configured pip command changed.
    self._pip_caller.refresh = True
##~~ AssetPlugin
def get_assets(self):
    """Web assets (JS/CSS/LESS) shipped by this plugin."""
    return {
        "js": ["js/pluginmanager.js"],
        "css": ["css/pluginmanager.css"],
        "less": ["less/pluginmanager.less"],
    }
##~~ TemplatePlugin
def get_template_configs(self):
    """Register the settings dialog template with custom JS bindings."""
    settings_dialog = dict(type="settings",
                           name=gettext("Plugin Manager"),
                           template="pluginmanager_settings.jinja2",
                           custom_bindings=True)
    return [settings_dialog]
##~~ BlueprintPlugin
@octoprint.plugin.BlueprintPlugin.route("/upload_archive", methods=["POST"])
@restricted_access
@admin_permission.require(403)
def upload_archive(self):
    """Blueprint endpoint: install a plugin from an uploaded archive file.

    The upload middleware stores the file on disk and passes its path and
    original name via form values; the archive is copied to a temp file,
    handed to command_install(path=...) and always cleaned up afterwards.
    """
    import flask

    input_name = "file"
    # Form field names are derived from OctoPrint's configured upload
    # path/name suffixes (e.g. "file.path" / "file.name").
    input_upload_path = input_name + "." + self._settings.global_get(["server", "uploads", "pathSuffix"])
    input_upload_name = input_name + "." + self._settings.global_get(["server", "uploads", "nameSuffix"])
    if input_upload_path not in flask.request.values or input_upload_name not in flask.request.values:
        return flask.make_response("No file included", 400)
    upload_path = flask.request.values[input_upload_path]
    upload_name = flask.request.values[input_upload_name]

    # NOTE(review): filter() used with len()/indexing — Python 2 list
    # semantics; under Python 3 this would need list(filter(...)).
    exts = filter(lambda x: upload_name.lower().endswith(x), (".zip", ".tar.gz", ".tgz", ".tar"))
    if not len(exts):
        return flask.make_response("File doesn't have a valid extension for a plugin archive", 400)
    ext = exts[0]

    import tempfile
    import shutil
    import os

    # Keep the original extension so pip recognizes the archive type;
    # delete=False because command_install reopens it by name.
    archive = tempfile.NamedTemporaryFile(delete=False, suffix="{ext}".format(**locals()))
    try:
        archive.close()
        shutil.copy(upload_path, archive.name)
        return self.command_install(path=archive.name, force="force" in flask.request.values and flask.request.values["force"] in valid_boolean_trues)
    finally:
        # Best effort cleanup of the temporary copy.
        try:
            os.remove(archive.name)
        except Exception as e:
            self._logger.warn("Could not remove temporary file {path} again: {message}".format(path=archive.name, message=str(e)))
##~~ SimpleApiPlugin
def get_api_commands(self):
    """Simple API commands and their required payload fields."""
    return dict(install=["url"],
                uninstall=["plugin"],
                enable=["plugin"],
                disable=["plugin"],
                refresh_repository=[])
def on_api_get(self, request):
    """API GET: list installed plugins, repository contents and host info."""
    if not admin_permission.can():
        return make_response("Insufficient rights", 403)

    plugins = self._plugin_manager.plugins

    result = []
    for name, plugin in plugins.items():
        result.append(self._to_external_representation(plugin))

    # Optional query parameter to force a repository re-fetch.
    if "refresh_repository" in request.values and request.values["refresh_repository"] in valid_boolean_trues:
        self._repository_available = self._refresh_repository()

    return jsonify(plugins=result, repository=dict(available=self._repository_available, plugins=self._repository_plugins), os=self._get_os(), octoprint=self._get_octoprint_version())
def on_api_command(self, command, data):
    """API POST dispatcher for install/uninstall/enable/disable commands."""
    if not admin_permission.can():
        return make_response("Insufficient rights", 403)

    if self._printer.is_printing() or self._printer.is_paused():
        # do not update while a print job is running
        return make_response("Printer is currently printing or paused", 409)

    if command == "install":
        url = data["url"]
        # "plugin" marks this install as a reinstall of a known plugin.
        plugin_name = data["plugin"] if "plugin" in data else None
        return self.command_install(url=url,
                                    force="force" in data and data["force"] in valid_boolean_trues,
                                    dependency_links="dependency_links" in data and data["dependency_links"] in valid_boolean_trues,
                                    reinstall=plugin_name)

    elif command == "uninstall":
        plugin_name = data["plugin"]
        if not plugin_name in self._plugin_manager.plugins:
            return make_response("Unknown plugin: %s" % plugin_name, 404)

        plugin = self._plugin_manager.plugins[plugin_name]
        return self.command_uninstall(plugin)

    elif command == "enable" or command == "disable":
        plugin_name = data["plugin"]
        if not plugin_name in self._plugin_manager.plugins:
            return make_response("Unknown plugin: %s" % plugin_name, 404)

        plugin = self._plugin_manager.plugins[plugin_name]
        return self.command_toggle(plugin, command)
def command_install(self, url=None, path=None, force=False, reinstall=None, dependency_links=False):
    """Install a plugin via pip from a URL or a local archive path.

    Runs pip, parses its output to identify the newly installed package,
    matches it against the plugin manager's discovered plugins and
    reports the result to the frontend. Returns a JSON response.
    """
    if url is not None:
        pip_args = ["install", sarge.shell_quote(url)]
    elif path is not None:
        pip_args = ["install", sarge.shell_quote(path)]
    else:
        raise ValueError("Either url or path must be provided")

    if dependency_links or self._settings.get_boolean(["dependency_links"]):
        pip_args.append("--process-dependency-links")

    all_plugins_before = self._plugin_manager.find_plugins()

    success_string = "Successfully installed"
    failure_string = "Could not install"

    try:
        returncode, stdout, stderr = self._call_pip(pip_args)
    except:
        self._logger.exception("Could not install plugin from %s" % url)
        return make_response("Could not install plugin from url, see the log for more details", 500)
    else:
        if force:
            # Second pass with reinstall flags when a forced install was
            # requested (e.g. same version already present).
            pip_args += ["--ignore-installed", "--force-reinstall", "--no-deps"]
            try:
                returncode, stdout, stderr = self._call_pip(pip_args)
            except:
                self._logger.exception("Could not install plugin from %s" % url)
                return make_response("Could not install plugin from url, see the log for more details", 500)

    # NOTE(review): filter(...)[-1] and the `in installed` checks on the
    # map() result below rely on Python 2 list semantics; under Python 3
    # these would need explicit list() wrapping.
    try:
        result_line = filter(lambda x: x.startswith(success_string) or x.startswith(failure_string), stdout)[-1]
    except IndexError:
        result = dict(result=False, reason="Could not parse output from pip")
        self._send_result_notification("install", result)
        return jsonify(result)

    # The final output of a pip install command looks something like this:
    #
    #   Successfully installed OctoPrint-Plugin-1.0 Dependency-One-0.1 Dependency-Two-9.3
    #
    # or this:
    #
    #   Successfully installed OctoPrint-Plugin Dependency-One Dependency-Two
    #   Cleaning up...
    #
    # So we'll need to fetch the "Successfully installed" line, strip the "Successfully" part, then split by whitespace
    # and strip to get all installed packages.
    #
    # We then need to iterate over all known plugins and see if either the package name or the package name plus
    # version number matches one of our installed packages. If it does, that's our installed plugin.
    #
    # Known issue: This might return the wrong plugin if more than one plugin was installed through this
    # command (e.g. due to pulling in another plugin as dependency). It should be safe for now though to
    # consider this a rare corner case. Once it becomes a real problem we'll just extend the plugin manager
    # so that it can report on more than one installed plugin.

    result_line = result_line.strip()
    if not result_line.startswith(success_string):
        result = dict(result=False, reason="Pip did not report successful installation")
        self._send_result_notification("install", result)
        return jsonify(result)

    installed = map(lambda x: x.strip(), result_line[len(success_string):].split(" "))
    all_plugins_after = self._plugin_manager.find_plugins(existing=dict(), ignore_uninstalled=False)

    for key, plugin in all_plugins_after.items():
        if plugin.origin is None or plugin.origin.type != "entry_point":
            continue

        package_name = plugin.origin.package_name
        package_version = plugin.origin.package_version
        versioned_package = "{package_name}-{package_version}".format(**locals())

        if package_name in installed or versioned_package in installed:
            # exact match, we are done here
            new_plugin_key = key
            new_plugin = plugin
            break
        else:
            # it might still be a version that got stripped by python's package resources, e.g. 1.4.5a0 => 1.4.5a
            found = False

            for inst in installed:
                if inst.startswith(versioned_package):
                    found = True
                    break

            if found:
                new_plugin_key = key
                new_plugin = plugin
                break
    else:
        # for-else: no plugin matched the pip output at all.
        self._logger.warn("The plugin was installed successfully, but couldn't be found afterwards to initialize properly during runtime. Please restart OctoPrint.")
        result = dict(result=True, url=url, needs_restart=True, needs_refresh=True, was_reinstalled=False, plugin="unknown")
        self._send_result_notification("install", result)
        return jsonify(result)

    self._plugin_manager.mark_plugin(new_plugin_key, uninstalled=False)
    self._plugin_manager.reload_plugins()

    needs_restart = self._plugin_manager.is_restart_needing_plugin(new_plugin) or new_plugin_key in all_plugins_before or reinstall is not None
    needs_refresh = new_plugin.implementation and isinstance(new_plugin.implementation, octoprint.plugin.ReloadNeedingPlugin)

    self._plugin_manager.log_all_plugins()

    result = dict(result=True, url=url, needs_restart=needs_restart, needs_refresh=needs_refresh, was_reinstalled=new_plugin_key in all_plugins_before or reinstall is not None, plugin=self._to_external_representation(new_plugin))
    self._send_result_notification("install", result)
    return jsonify(result)
def command_uninstall(self, plugin):
    """Uninstall *plugin* via pip or by deleting its folder/file.

    Refuses to remove the plugin manager itself or bundled plugins.
    When no restart is required the plugin is also disabled and unloaded
    immediately. Returns a JSON response describing the outcome.
    """
    if plugin.key == "pluginmanager":
        return make_response("Can't uninstall Plugin Manager", 400)

    if plugin.bundled:
        return make_response("Bundled plugins cannot be uninstalled", 400)

    if plugin.origin is None:
        self._logger.warn(u"Trying to uninstall plugin {plugin} but origin is unknown".format(**locals()))
        return make_response("Could not uninstall plugin, its origin is unknown")

    if plugin.origin.type == "entry_point":
        # plugin is installed through entry point, need to use pip to uninstall it
        origin = plugin.origin[3]
        if origin is None:
            origin = plugin.origin[2]

        pip_args = ["uninstall", "--yes", origin]
        try:
            self._call_pip(pip_args)
        except:
            self._logger.exception(u"Could not uninstall plugin via pip")
            return make_response("Could not uninstall plugin via pip, see the log for more details", 500)

    elif plugin.origin.type == "folder":
        import os
        import shutil
        full_path = os.path.realpath(plugin.location)

        if os.path.isdir(full_path):
            # plugin is installed via a plugin folder, need to use rmtree to get rid of it
            self._log_stdout(u"Deleting plugin from {folder}".format(folder=plugin.location))
            shutil.rmtree(full_path)
        elif os.path.isfile(full_path):
            self._log_stdout(u"Deleting plugin from {file}".format(file=plugin.location))
            os.remove(full_path)

            # single-file plugin: also drop its compiled bytecode file
            if full_path.endswith(".py"):
                pyc_file = "{full_path}c".format(**locals())
                if os.path.isfile(pyc_file):
                    os.remove(pyc_file)

    else:
        self._logger.warn(u"Trying to uninstall plugin {plugin} but origin is unknown ({plugin.origin.type})".format(**locals()))
        return make_response("Could not uninstall plugin, its origin is unknown")

    needs_restart = self._plugin_manager.is_restart_needing_plugin(plugin)
    needs_refresh = plugin.implementation and isinstance(plugin.implementation, octoprint.plugin.ReloadNeedingPlugin)

    self._plugin_manager.mark_plugin(plugin.key, uninstalled=True)

    if not needs_restart:
        try:
            self._plugin_manager.disable_plugin(plugin.key, plugin=plugin)
        except octoprint.plugin.core.PluginLifecycleException as e:
            self._logger.exception(u"Problem disabling plugin {name}".format(name=plugin.key))
            result = dict(result=False, uninstalled=True, disabled=False, unloaded=False, reason=e.reason)
            self._send_result_notification("uninstall", result)
            return jsonify(result)

        try:
            self._plugin_manager.unload_plugin(plugin.key)
        except octoprint.plugin.core.PluginLifecycleException as e:
            self._logger.exception(u"Problem unloading plugin {name}".format(name=plugin.key))
            result = dict(result=False, uninstalled=True, disabled=True, unloaded=False, reason=e.reason)
            self._send_result_notification("uninstall", result)
            return jsonify(result)

    self._plugin_manager.reload_plugins()

    result = dict(result=True, needs_restart=needs_restart, needs_refresh=needs_refresh, plugin=self._to_external_representation(plugin))
    self._send_result_notification("uninstall", result)
    return jsonify(result)
def command_toggle(self, plugin, command):
    """Enable or disable *plugin* (command is "enable" or "disable")."""
    if plugin.key == "pluginmanager":
        return make_response("Can't enable/disable Plugin Manager", 400)

    needs_restart = self._plugin_manager.is_restart_needing_plugin(plugin)
    needs_refresh = plugin.implementation and isinstance(plugin.implementation, octoprint.plugin.ReloadNeedingPlugin)

    # Toggling back a still-pending opposite toggle cancels out, so no
    # restart/refresh needs to be reported to the frontend in that case.
    pending = ((command == "disable" and plugin.key in self._pending_enable) or (command == "enable" and plugin.key in self._pending_disable))
    needs_restart_api = needs_restart and not pending
    needs_refresh_api = needs_refresh and not pending

    try:
        if command == "disable":
            self._mark_plugin_disabled(plugin, needs_restart=needs_restart)
        elif command == "enable":
            self._mark_plugin_enabled(plugin, needs_restart=needs_restart)
    except octoprint.plugin.core.PluginLifecycleException as e:
        self._logger.exception(u"Problem toggling enabled state of {name}: {reason}".format(name=plugin.key, reason=e.reason))
        result = dict(result=False, reason=e.reason)
    except octoprint.plugin.core.PluginNeedsRestart:
        result = dict(result=True, needs_restart=True, needs_refresh=True, plugin=self._to_external_representation(plugin))
    else:
        result = dict(result=True, needs_restart=needs_restart_api, needs_refresh=needs_refresh_api, plugin=self._to_external_representation(plugin))

    self._send_result_notification(command, result)
    return jsonify(result)
def _send_result_notification(self, action, result):
    """Push an action's result to the frontend via the plugin message bus."""
    message = dict(type="result", action=action)
    message.update(result)
    self._plugin_manager.send_plugin_message(self._identifier, message)
def _call_pip(self, args):
if self._pip_caller is None or not self._pip_caller.available:
raise RuntimeError(u"No pip available, can't operate".format(**locals()))
if "--process-dependency-links" in args:
self._log_message(u"Installation needs to process external dependencies, that might make it take a bit longer than usual depending on the pip version")
if self._pip_caller < self._pip_version_dependency_links:
args.remove("--process-dependency-links")
return self._pip_caller.execute(*args)
# Thin convenience wrappers around _log: each tags its lines with a stream
# identifier and a one-character console prefix.

def _log_message(self, *lines):
    # "*" prefix, "message" stream: general status messages.
    self._log(lines, prefix=u"*", stream="message")

def _log_call(self, *lines):
    # blank prefix, "call" stream: the command line of an external call.
    self._log(lines, prefix=u" ", stream="call")

def _log_stdout(self, *lines):
    # ">" prefix, "stdout" stream: stdout output of an external call.
    self._log(lines, prefix=u">", stream="stdout")

def _log_stderr(self, *lines):
    # "!" prefix, "stderr" stream: stderr output of an external call.
    self._log(lines, prefix=u"!", stream="stderr")
def _log(self, lines, prefix=None, stream=None, strip=True):
if strip:
lines = map(lambda x: x.strip(), lines)
self._plugin_manager.send_plugin_message(self._identifier, dict(type="loglines", loglines=[dict(line=line, stream=stream) for line in lines]))
for line in lines:
self._console_logger.debug(u"{prefix} {line}".format(**locals()))
def _mark_plugin_enabled(self, plugin, needs_restart=False):
disabled_list = list(self._settings.global_get(["plugins", "_disabled"]))
if plugin.key in disabled_list:
disabled_list.remove(plugin.key)
self._settings.global_set(["plugins", "_disabled"], disabled_list)
self._settings.save(force=True)
if not needs_restart:
self._plugin_manager.enable_plugin(plugin.key)
else:
if plugin.key in self._pending_disable:
self._pending_disable.remove(plugin.key)
elif not plugin.enabled and plugin.key not in self._pending_enable:
self._pending_enable.add(plugin.key)
def _mark_plugin_disabled(self, plugin, needs_restart=False):
disabled_list = list(self._settings.global_get(["plugins", "_disabled"]))
if not plugin.key in disabled_list:
disabled_list.append(plugin.key)
self._settings.global_set(["plugins", "_disabled"], disabled_list)
self._settings.save(force=True)
if not needs_restart:
self._plugin_manager.disable_plugin(plugin.key)
else:
if plugin.key in self._pending_enable:
self._pending_enable.remove(plugin.key)
elif plugin.enabled and plugin.key not in self._pending_disable:
self._pending_disable.add(plugin.key)
def _fetch_repository_from_disk(self):
repo_data = None
if os.path.isfile(self._repository_cache_path):
import time
mtime = os.path.getmtime(self._repository_cache_path)
if mtime + self._repository_cache_ttl >= time.time() > mtime:
try:
import json
with open(self._repository_cache_path) as f:
repo_data = json.load(f)
self._logger.info("Loaded plugin repository data from disk, was still valid")
except:
self._logger.exception("Error while loading repository data from {}".format(self._repository_cache_path))
return self._refresh_repository(repo_data=repo_data)
def _fetch_repository_from_url(self):
    """Fetch the plugin repository index from the configured URL.

    On success the result is also cached to disk. Returns the parsed
    repository data, or None when the download fails.
    """
    import requests

    repository_url = self._settings.get(["repository"])
    try:
        response = requests.get(repository_url)
        self._logger.info("Loaded plugin repository data from {}".format(repository_url))
    except Exception as e:
        self._logger.exception("Could not fetch plugins from repository at {repository_url}: {message}".format(repository_url=repository_url, message=str(e)))
        return None

    repo_data = response.json()

    # Caching is best effort only -- a failure to write is just logged.
    try:
        import json
        with open(self._repository_cache_path, "w+b") as f:
            json.dump(repo_data, f)
    except Exception as e:
        self._logger.exception("Error while saving repository data to {}: {}".format(self._repository_cache_path, str(e)))

    return repo_data
def _refresh_repository(self, repo_data=None):
if repo_data is None:
repo_data = self._fetch_repository_from_url()
if repo_data is None:
return False
current_os = self._get_os()
octoprint_version = self._get_octoprint_version()
if "-" in octoprint_version:
octoprint_version = octoprint_version[:octoprint_version.find("-")]
def map_repository_entry(entry):
result = dict(entry)
if not "follow_dependency_links" in result:
result["follow_dependency_links"] = False
result["is_compatible"] = dict(
octoprint=True,
os=True
)
if "compatibility" in entry:
if "octoprint" in entry["compatibility"] and entry["compatibility"]["octoprint"] is not None and len(entry["compatibility"]["octoprint"]):
result["is_compatible"]["octoprint"] = self._is_octoprint_compatible(octoprint_version, entry["compatibility"]["octoprint"])
if "os" in entry["compatibility"] and entry["compatibility"]["os"] is not None and len(entry["compatibility"]["os"]):
result["is_compatible"]["os"] = self._is_os_compatible(current_os, entry["compatibility"]["os"])
return result
self._repository_plugins = map(map_repository_entry, repo_data)
return True
def _is_octoprint_compatible(self, octoprint_version_string, compatibility_entries):
    """
    Tests if the current ``octoprint_version`` is compatible to any of the
    provided ``compatibility_entries``.
    """
    comparison_operators = ("<", "<=", "!=", "==", ">=", ">", "~=", "===")
    octoprint_version = pkg_resources.parse_version(octoprint_version_string)

    for spec in compatibility_entries:
        # A bare version without an operator is treated as a minimum
        # requirement, i.e. "1.2" becomes ">=1.2".
        if not any(spec.startswith(op) for op in comparison_operators):
            spec = ">={}".format(spec)
        requirement = next(pkg_resources.parse_requirements("OctoPrint" + spec))
        if octoprint_version in requirement:
            return True
    return False
def _is_os_compatible(self, current_os, compatibility_entries):
    """
    Tests if the ``current_os`` matches any of the provided ``compatibility_entries``.

    :param current_os: OS identifier as returned by :meth:`_get_os`
    :param compatibility_entries: collection of supported OS identifiers
    :return: True if ``current_os`` is contained in the entries
    """
    return current_os in compatibility_entries
def _get_os(self):
if sys.platform == "win32":
return "windows"
elif sys.platform == "linux2":
return "linux"
elif sys.platform == "darwin":
return "macos"
else:
return "unknown"
def _get_octoprint_version(self):
    """Return the currently running OctoPrint version string."""
    from octoprint._version import get_versions
    return get_versions()["version"]
def _to_external_representation(self, plugin):
return dict(
key=plugin.key,
name=plugin.name,
description=plugin.description,
author=plugin.author,
version=plugin.version,
url=plugin.url,
license=plugin.license,
bundled=plugin.bundled,
enabled=plugin.enabled,
pending_enable=(not plugin.enabled and plugin.key in self._pending_enable),
pending_disable=(plugin.enabled and plugin.key in self._pending_disable),
pending_install=(plugin.key in self._pending_install),
pending_uninstall=(plugin.key in self._pending_uninstall)
)
# Plugin metadata picked up by OctoPrint's plugin subsystem.
__plugin_name__ = "Plugin Manager"
__plugin_author__ = "Gina Häußge"
__plugin_url__ = "https://github.com/foosel/OctoPrint/wiki/Plugin:-Plugin-Manager"
__plugin_description__ = "Allows installing and managing OctoPrint plugins"
__plugin_license__ = "AGPLv3"
def __plugin_load__():
    """Instantiate the plugin implementation and register its hooks."""
    global __plugin_implementation__
    global __plugin_hooks__

    __plugin_implementation__ = PluginManagerPlugin()
    __plugin_hooks__ = {
        "octoprint.server.http.bodysize": __plugin_implementation__.increase_upload_bodysize
    }
|
jneves/OctoPrint
|
src/octoprint/plugins/pluginmanager/__init__.py
|
Python
|
agpl-3.0
| 23,854
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import unittest
from telemetry import decorators
from telemetry.internal.platform.profiler import vtune_profiler
from telemetry.testing import options_for_unittests
from telemetry.testing import simple_mock
from telemetry.testing import tab_test_case
class MockPopen(object):
  """Minimal stand-in for subprocess.Popen with canned output/return code."""

  def __init__(self, returncode, stdout=None, stderr=None):
    self.returncode = returncode
    self.stdout = stdout
    self.stderr = stderr

  def communicate(self):
    """Mimic Popen.communicate(): hand back the canned (stdout, stderr)."""
    return self.stdout, self.stderr

  def wait(self):
    """Mimic Popen.wait(): hand back the canned return code."""
    return self.returncode
class MockSubprocess(object):
  """Stand-in for the subprocess module that counts VTune command kinds."""

  def __init__(self):
    self.PIPE = simple_mock.MockObject()
    self.STDOUT = simple_mock.MockObject()
    self._num_collect_calls = 0
    self._num_stop_calls = 0

  @property
  def num_collect_calls(self):
    return self._num_collect_calls

  @property
  def num_stop_calls(self):
    return self._num_stop_calls

  def Popen(self, cmd, **_):
    self._AnalyzeCommand(cmd)
    return MockPopen(0)

  def call(self, cmd):
    self._AnalyzeCommand(cmd)

  def _AnalyzeCommand(self, cmd):
    # Classify the command line and bump the matching counter.
    if MockSubprocess._IsCollectCommand(cmd):
      self._num_collect_calls += 1
    elif MockSubprocess._IsStopCommand(cmd):
      self._num_stop_calls += 1

  @staticmethod
  def _IsCollectCommand(cmd):
    return '-collect' in cmd

  @staticmethod
  def _IsStopCommand(cmd):
    # A stop command looks like [..., '-command', 'stop', ...].
    try:
      stop_idx = cmd.index('-command') + 1
    except ValueError:
      return False
    return stop_idx < len(cmd) and cmd[stop_idx] == 'stop'
class TestVTuneProfiler(unittest.TestCase):
  # Exercises VTuneProfiler.is_supported() against a mocked-out subprocess
  # module so the check does not require an actual VTune installation.

  def testVTuneProfilerIsSupported(self):
    options = options_for_unittests.GetCopy()

    # is_supported() probes for the VTune binary via Popen; expect exactly
    # that call and hand back a successful result.
    mock_subprocess = simple_mock.MockObject()
    mock_subprocess.ExpectCall(
        'Popen').WithArgs(simple_mock.DONT_CARE).WillReturn(MockPopen(0))
    mock_subprocess.SetAttribute('PIPE', simple_mock.MockObject())
    mock_subprocess.SetAttribute('STDOUT', simple_mock.MockObject())

    # Swap the real subprocess module for the mock for the duration of the
    # check; restored in the finally block below.
    real_subprocess = vtune_profiler.subprocess
    vtune_profiler.subprocess = mock_subprocess

    if options.browser_type.startswith('android'):
      # On Android we're querying if 'su' is available.
      mock_subprocess.ExpectCall('Popen').WithArgs(
          simple_mock.DONT_CARE).WillReturn(MockPopen(0, 'su', None))

    try:
      # is_supported() may legitimately be False off-Linux or on CrOS,
      # hence the disjunction.
      self.assertTrue(
          vtune_profiler.VTuneProfiler.is_supported(options.browser_type) or
          sys.platform != 'linux2' or
          options.browser_type.startswith('cros'))
    finally:
      # Always restore the real subprocess module.
      vtune_profiler.subprocess = real_subprocess
class TestVTuneProfilerTabTestCase(tab_test_case.TabTestCase):
  # This test is only meant to be run if VTune is installed locally. Please
  # run it locally if you are modifying related code, but it's disabled on the
  # bots because they don't have VTune. See crbug.com/437085
  @decorators.Disabled('all')
  def testVTuneProfiler(self):
    # Runs a profile against a real browser tab with subprocess mocked out,
    # then checks that every '-collect' start was matched by a stop command.
    mock_subprocess = MockSubprocess()
    real_subprocess = vtune_profiler.subprocess
    vtune_profiler.subprocess = mock_subprocess

    try:
      # pylint: disable=protected-access
      profiler = vtune_profiler.VTuneProfiler(self._browser._browser_backend,
                                              self._browser._platform_backend,
                                              'tmp',
                                              {})
      profiler.CollectProfile()
      self.assertEqual(mock_subprocess.num_collect_calls,
                       mock_subprocess.num_stop_calls)
    finally:
      # Restore the real subprocess module even if the profiler raises.
      vtune_profiler.subprocess = real_subprocess
|
hujiajie/chromium-crosswalk
|
tools/telemetry/telemetry/internal/platform/profiler/vtune_profiler_unittest.py
|
Python
|
bsd-3-clause
| 3,671
|
"""Build wheels/sdists by installing build deps to a temporary environment.
"""
import os
import logging
from pip._vendor import pytoml
import shutil
from subprocess import check_call
import sys
from sysconfig import get_paths
from tempfile import mkdtemp
from .wrappers import Pep517HookCaller
log = logging.getLogger(__name__)
def _load_pyproject(source_dir):
    """Read pyproject.toml from ``source_dir``.

    Returns a ``(requires, build_backend)`` tuple taken from the
    ``[build-system]`` table.
    """
    pyproject_path = os.path.join(source_dir, 'pyproject.toml')
    with open(pyproject_path) as f:
        build_system = pytoml.load(f)['build-system']
    return build_system['requires'], build_system['build-backend']
class BuildEnvironment(object):
    """Context manager to install build deps in a simple temporary environment

    On ``__enter__`` a temporary prefix is created and ``PATH`` /
    ``PYTHONPATH`` are pointed at it; on ``__exit__`` both variables are
    restored and the prefix is (optionally) deleted.

    Based on code I wrote for pip, which is MIT licensed.
    """
    # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
    #
    # Permission is hereby granted, free of charge, to any person obtaining
    # a copy of this software and associated documentation files (the
    # "Software"), to deal in the Software without restriction, including
    # without limitation the rights to use, copy, modify, merge, publish,
    # distribute, sublicense, and/or sell copies of the Software, and to
    # permit persons to whom the Software is furnished to do so, subject to
    # the following conditions:
    #
    # The above copyright notice and this permission notice shall be
    # included in all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

    # Filesystem path of the temporary prefix; None until __enter__ runs.
    path = None

    def __init__(self, cleanup=True):
        # cleanup: whether to delete the temporary prefix on exit.
        self._cleanup = cleanup

    def __enter__(self):
        """Create the temporary prefix and point PATH/PYTHONPATH at it."""
        self.path = mkdtemp(prefix='pep517-build-env-')
        log.info('Temporary build environment: %s', self.path)

        # Remember the original values so __exit__ can restore them exactly.
        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        # Prepend the env's scripts dir so installed entry points win.
        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # purelib and platlib may coincide; only join them when distinct.
        if install_dirs['purelib'] == install_dirs['platlib']:
            lib_dirs = install_dirs['purelib']
        else:
            lib_dirs = install_dirs['purelib'] + os.pathsep + \
                install_dirs['platlib']
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        return self

    def pip_install(self, reqs):
        """Install dependencies into this env by calling pip in a subprocess"""
        if not reqs:
            return
        log.info('Calling pip to install %s', reqs)
        check_call([
            sys.executable, '-m', 'pip', 'install', '--ignore-installed',
            '--prefix', self.path] + list(reqs))

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Remove the temporary prefix and restore the saved environment."""
        needs_cleanup = (
            self._cleanup and
            self.path is not None and
            os.path.isdir(self.path)
        )
        if needs_cleanup:
            shutil.rmtree(self.path)

        # Restore PATH/PYTHONPATH; pop entirely if they were unset before.
        if self.save_path is None:
            os.environ.pop('PATH', None)
        else:
            os.environ['PATH'] = self.save_path

        if self.save_pythonpath is None:
            os.environ.pop('PYTHONPATH', None)
        else:
            os.environ['PYTHONPATH'] = self.save_pythonpath
def build_wheel(source_dir, wheel_dir, config_settings=None):
    """Build a wheel from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str wheel_dir: Target directory to create wheel in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    config_settings = config_settings if config_settings is not None else {}

    requires, backend = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend)

    with BuildEnvironment() as env:
        # Static requirements from pyproject.toml first, then whatever the
        # backend itself reports as additionally required.
        env.pip_install(requires)
        env.pip_install(hooks.get_requires_for_build_wheel(config_settings))
        return hooks.build_wheel(wheel_dir, config_settings)
def build_sdist(source_dir, sdist_dir, config_settings=None):
    """Build an sdist from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str sdist_dir: Target directory to place sdist in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    config_settings = config_settings if config_settings is not None else {}

    requires, backend = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend)

    with BuildEnvironment() as env:
        # Static requirements from pyproject.toml first, then whatever the
        # backend itself reports as additionally required.
        env.pip_install(requires)
        env.pip_install(hooks.get_requires_for_build_sdist(config_settings))
        return hooks.build_sdist(sdist_dir, config_settings)
|
Karosuo/Linux_tools
|
xls_handlers/xls_sum_venv/lib/python3.6/site-packages/pip/_vendor/pep517/envbuild.py
|
Python
|
gpl-3.0
| 5,763
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import oauth2client.django_orm
class Migration(migrations.Migration):
dependencies = [
('auth', '0006_require_contenttypes_0002'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='AccountModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
('type', models.CharField(max_length=16)),
('email', models.EmailField(max_length=254)),
('access_token', models.TextField(max_length=2048)),
('root', models.CharField(max_length=256)),
('is_active', models.IntegerField()),
('quota', models.BigIntegerField()),
('used_space', models.BigIntegerField()),
('assigned_space', models.BigIntegerField()),
('status', models.IntegerField(default=models.BigIntegerField())),
],
),
migrations.CreateModel(
name='ActivityLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activity', models.CharField(max_length=512)),
('created_timestamp', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('street', models.CharField(max_length=128, error_messages={b'required': b'Please specify the street name!'})),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, error_messages={b'required': b'Please enter the category name!'})),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('parent', models.ForeignKey(to='crowdsourcing.Category', null=True)),
],
),
migrations.CreateModel(
name='City',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, error_messages={b'required': b'Please specify the city!'})),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Country',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, error_messages={b'required': b'Please specify the country!'})),
('code', models.CharField(max_length=8, error_messages={b'required': b'Please specify the country code!'})),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='CredentialsModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('credential', oauth2client.django_orm.CredentialsField(null=True)),
('account', models.ForeignKey(to='crowdsourcing.AccountModel')),
],
),
migrations.CreateModel(
name='Currency',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=32)),
('iso_code', models.CharField(max_length=8)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='FlowModel',
fields=[
('id', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
('flow', oauth2client.django_orm.FlowField(null=True)),
],
),
migrations.CreateModel(
name='Friendship',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Language',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, error_messages={b'required': b'Please specify the language!'})),
('iso_code', models.CharField(max_length=8)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Module',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, error_messages={b'required': b'Please enter the module name!'})),
('description', models.TextField(error_messages={b'required': b'Please enter the module description!'})),
('keywords', models.TextField()),
('status', models.IntegerField(default=1, choices=[(1, b'Created'), (2, b'In Review'), (3, b'In Progress'), (4, b'Finished')])),
('repetition', models.IntegerField()),
('module_timeout', models.IntegerField()),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='ModuleCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(to='crowdsourcing.Category')),
('module', models.ForeignKey(to='crowdsourcing.Module')),
],
),
migrations.CreateModel(
name='ModuleRating',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('value', models.IntegerField()),
('last_updated', models.DateTimeField(auto_now=True)),
('module', models.ForeignKey(to='crowdsourcing.Module')),
],
),
migrations.CreateModel(
name='ModuleReview',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('annonymous', models.BooleanField(default=False)),
('comments', models.TextField()),
('last_updated', models.DateTimeField(auto_now=True)),
('module', models.ForeignKey(to='crowdsourcing.Module')),
],
),
migrations.CreateModel(
name='PasswordResetModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('reset_key', models.CharField(max_length=40)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, error_messages={b'required': b'Please enter the project name!'})),
('start_date', models.DateTimeField(auto_now_add=True)),
('end_date', models.DateTimeField(auto_now_add=True)),
('description', models.CharField(default=b'', max_length=1024)),
('keywords', models.TextField()),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='ProjectCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('category', models.ForeignKey(to='crowdsourcing.Category')),
('project', models.ForeignKey(to='crowdsourcing.Project')),
],
),
migrations.CreateModel(
name='ProjectRequester',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('project', models.ForeignKey(to='crowdsourcing.Project')),
],
),
migrations.CreateModel(
name='Qualification',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('type', models.IntegerField(default=1, choices=[(1, b'Strict'), (2, b'Flexible')])),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('module', models.ForeignKey(to='crowdsourcing.Module')),
],
),
migrations.CreateModel(
name='QualificationItem',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('attribute', models.CharField(max_length=128)),
('operator', models.CharField(max_length=128)),
('value1', models.CharField(max_length=128)),
('value2', models.CharField(max_length=128)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('qualification', models.ForeignKey(to='crowdsourcing.Qualification')),
],
),
migrations.CreateModel(
name='Region',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, error_messages={b'required': b'Please specify the region!'})),
('code', models.CharField(max_length=16, error_messages={b'required': b'Please specify the region code!'})),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='RegistrationModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_key', models.CharField(max_length=40)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Requester',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='RequesterRanking',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('requester_name', models.CharField(max_length=128)),
('requester_payRank', models.FloatField()),
('requester_fairRank', models.FloatField()),
('requester_speedRank', models.FloatField()),
('requester_communicationRank', models.FloatField()),
('requester_numberofReviews', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=32, error_messages={b'unique': b'The role %(value)r already exists. Please provide another name!', b'required': b'Please specify the role name!'})),
('is_active', models.BooleanField(default=True)),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='Skill',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, error_messages={b'required': b'Please enter the skill name!'})),
('description', models.CharField(max_length=512, error_messages={b'required': b'Please enter the skill description!'})),
('verified', models.BooleanField(default=False)),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('parent', models.ForeignKey(to='crowdsourcing.Skill', null=True)),
],
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.IntegerField(default=1, choices=[(1, b'Created'), (2, b'Accepted'), (3, b'Assigned'), (4, b'Finished')])),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('price', models.FloatField(default=0)),
('module', models.ForeignKey(to='crowdsourcing.Module')),
],
),
migrations.CreateModel(
name='TaskWorker',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('task', models.ForeignKey(to='crowdsourcing.Task')),
],
),
migrations.CreateModel(
name='TaskWorkerResult',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('result', models.TextField()),
('status', models.IntegerField(default=1, choices=[(1, b'Created'), (2, b'Accepted'), (3, b'Rejected')])),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('task_worker', models.ForeignKey(to='crowdsourcing.TaskWorker')),
],
),
migrations.CreateModel(
name='Template',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, error_messages={b'required': b'Please enter the template name!'})),
('source_html', models.TextField()),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('owner', models.ForeignKey(to='crowdsourcing.Requester')),
],
),
migrations.CreateModel(
name='TemplateItem',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128, error_messages={b'required': b'Please enter the name of the template item!'})),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('template', models.ForeignKey(to='crowdsourcing.Template')),
],
),
migrations.CreateModel(
name='TemplateItemProperties',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('attribute', models.CharField(max_length=128)),
('operator', models.CharField(max_length=128)),
('value1', models.CharField(max_length=128)),
('value2', models.CharField(max_length=128)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('template_item', models.ForeignKey(to='crowdsourcing.TemplateItem')),
],
),
migrations.CreateModel(
name='TemporaryFlowModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('type', models.CharField(max_length=16)),
('email', models.EmailField(max_length=254)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserCountry',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('country', models.ForeignKey(to='crowdsourcing.Country')),
],
),
migrations.CreateModel(
name='UserLanguage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('language', models.ForeignKey(to='crowdsourcing.Language')),
],
),
migrations.CreateModel(
name='UserPreferences',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('login_alerts', models.SmallIntegerField(default=0)),
('last_updated', models.DateTimeField(auto_now=True)),
('currency', models.ForeignKey(to='crowdsourcing.Currency')),
('language', models.ForeignKey(to='crowdsourcing.Language')),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('gender', models.CharField(max_length=1, choices=[(b'M', b'Male'), (b'F', b'Female')])),
('birthday', models.DateField(null=True, error_messages={b'invalid': b'Please enter a correct date format'})),
('verified', models.BooleanField(default=False)),
('picture', models.BinaryField(null=True)),
('deleted', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('worker_alias', models.CharField(max_length=32, error_messages={b'required': b'Please enter an alias!'})),
('requester_alias', models.CharField(max_length=32, error_messages={b'required': b'Please enter an alias!'})),
('address', models.ForeignKey(to='crowdsourcing.Address', null=True)),
('friends', models.ManyToManyField(to='crowdsourcing.UserProfile', through='crowdsourcing.Friendship')),
('languages', models.ManyToManyField(to='crowdsourcing.Language', through='crowdsourcing.UserLanguage')),
('nationality', models.ManyToManyField(to='crowdsourcing.Country', through='crowdsourcing.UserCountry')),
],
),
migrations.CreateModel(
name='UserRole',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('role', models.ForeignKey(to='crowdsourcing.Role')),
('user_profile', models.ForeignKey(to='crowdsourcing.UserProfile')),
],
),
migrations.CreateModel(
name='Worker',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('deleted', models.BooleanField(default=False)),
('profile', models.OneToOneField(to='crowdsourcing.UserProfile')),
],
),
migrations.CreateModel(
name='WorkerModuleApplication',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.IntegerField(default=1, choices=[(1, b'Created'), (2, b'Accepted'), (3, b'Rejected')])),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('module', models.ForeignKey(to='crowdsourcing.Module')),
('worker', models.ForeignKey(to='crowdsourcing.Worker')),
],
),
migrations.CreateModel(
name='WorkerSkill',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('level', models.IntegerField(null=True)),
('verified', models.BooleanField(default=False)),
('created_timestamp', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('skill', models.ForeignKey(to='crowdsourcing.Skill')),
('worker', models.ForeignKey(to='crowdsourcing.Worker')),
],
),
migrations.AddField(
model_name='worker',
name='skills',
field=models.ManyToManyField(to='crowdsourcing.Skill', through='crowdsourcing.WorkerSkill'),
),
migrations.AddField(
model_name='userprofile',
name='roles',
field=models.ManyToManyField(to='crowdsourcing.Role', through='crowdsourcing.UserRole'),
),
migrations.AddField(
model_name='userprofile',
name='user',
field=models.OneToOneField(to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='userlanguage',
name='user',
field=models.ForeignKey(to='crowdsourcing.UserProfile'),
),
migrations.AddField(
model_name='usercountry',
name='user',
field=models.ForeignKey(to='crowdsourcing.UserProfile'),
),
migrations.AddField(
model_name='taskworkerresult',
name='template_item',
field=models.ForeignKey(to='crowdsourcing.TemplateItem'),
),
migrations.AddField(
model_name='taskworker',
name='worker',
field=models.ForeignKey(to='crowdsourcing.Worker'),
),
migrations.AddField(
model_name='requester',
name='profile',
field=models.OneToOneField(to='crowdsourcing.UserProfile'),
),
migrations.AddField(
model_name='projectrequester',
name='requester',
field=models.ForeignKey(to='crowdsourcing.Requester'),
),
migrations.AddField(
model_name='project',
name='categories',
field=models.ManyToManyField(to='crowdsourcing.Category', through='crowdsourcing.ProjectCategory'),
),
migrations.AddField(
model_name='project',
name='collaborators',
field=models.ManyToManyField(to='crowdsourcing.Requester', through='crowdsourcing.ProjectRequester'),
),
migrations.AddField(
model_name='project',
name='owner',
field=models.ForeignKey(related_name='project_owner', to='crowdsourcing.Requester'),
),
migrations.AddField(
model_name='modulereview',
name='worker',
field=models.ForeignKey(to='crowdsourcing.Worker'),
),
migrations.AddField(
model_name='modulerating',
name='worker',
field=models.ForeignKey(to='crowdsourcing.Worker'),
),
migrations.AddField(
model_name='module',
name='categories',
field=models.ManyToManyField(to='crowdsourcing.Category', through='crowdsourcing.ModuleCategory'),
),
migrations.AddField(
model_name='module',
name='owner',
field=models.ForeignKey(to='crowdsourcing.Requester'),
),
migrations.AddField(
model_name='module',
name='project',
field=models.ForeignKey(to='crowdsourcing.Project'),
),
migrations.AddField(
model_name='friendship',
name='user_source',
field=models.ForeignKey(related_name='user_source', to='crowdsourcing.UserProfile'),
),
migrations.AddField(
model_name='friendship',
name='user_target',
field=models.ForeignKey(related_name='user_target', to='crowdsourcing.UserProfile'),
),
migrations.AddField(
model_name='country',
name='region',
field=models.ForeignKey(to='crowdsourcing.Region'),
),
migrations.AddField(
model_name='city',
name='country',
field=models.ForeignKey(to='crowdsourcing.Country'),
),
migrations.AddField(
model_name='address',
name='city',
field=models.ForeignKey(to='crowdsourcing.City'),
),
migrations.AddField(
model_name='address',
name='country',
field=models.ForeignKey(to='crowdsourcing.Country'),
),
migrations.AddField(
model_name='activitylog',
name='author',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='accountmodel',
name='owner',
field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='projectcategory',
unique_together=set([('project', 'category')]),
),
migrations.AlterUniqueTogether(
name='modulereview',
unique_together=set([('worker', 'module')]),
),
migrations.AlterUniqueTogether(
name='modulerating',
unique_together=set([('worker', 'module')]),
),
migrations.AlterUniqueTogether(
name='modulecategory',
unique_together=set([('category', 'module')]),
),
]
|
rakshit-agrawal/crowdsource-platform
|
crowdsourcing/migrations/0001_initial.py
|
Python
|
mit
| 30,183
|
from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the Website module's desktop sections (Documents and Setup)."""

    def doctype(name, description):
        # Each entry links one DocType to the short description shown in the UI.
        return {
            "type": "doctype",
            "name": name,
            "description": description,
        }

    documents = [
        doctype("Web Page", _("Content web page.")),
        doctype("Blog Post", _("Single Post (article).")),
        doctype("Web Form", _("User editable form on Website.")),
        doctype("Blogger", _("User ID of a blog writer.")),
        doctype("Website Slideshow", _("Embed image slideshows in website pages.")),
    ]

    setup = [
        doctype("Website Settings", _("Setup of top navigation bar, footer and logo.")),
        doctype("Style Settings", _("Setup of fonts and background.")),
        doctype("Website Script", _("Javascript to append to the head section of the page.")),
        doctype("Blog Settings", _("Write titles and introductions to your blog.")),
        doctype("Blog Category", _("Categorize blog posts.")),
        doctype("About Us Settings", _("Settings for About Us Page.")),
        doctype("Contact Us Settings", _("Settings for Contact Us Page.")),
        doctype("Website Theme", _("List of themes for Website.")),
        doctype("Social Login Keys", _("Enter keys to enable login via Facebook, Google, GitHub.")),
    ]

    return [
        {
            "label": _("Documents"),
            "icon": "icon-star",
            "items": documents,
        },
        {
            "label": _("Setup"),
            "icon": "icon-cog",
            "items": setup,
        },
    ]
|
mbauskar/tele-frappe
|
frappe/config/website.py
|
Python
|
mit
| 2,031
|
# Configuration file for jointcal
from lsst.meas.algorithms import LoadIndexedReferenceObjectsTask

# Select external catalogs for Astrometry
# Astrometric reference: the 'pan-starrs' indexed reference catalog.
config.astrometryRefObjLoader.retarget(LoadIndexedReferenceObjectsTask)
config.astrometryRefObjLoader.ref_dataset_name='pan-starrs'
# Map camera filters to the reference-catalog band used for matching.
# NOTE(review): 'u' falls back to 'g' and 'i2' to 'i' — presumably the
# closest available Pan-STARRS bands; confirm these fallbacks are intended.
config.astrometryRefObjLoader.filterMap = {
    'u':'g',
    'g':'g',
    'r':'r',
    'i':'i',
    'i2': 'i',
    'z':'z',
    'y':'y',
}

# Select external catalogs for Photometry
# NOTE(review): the original inline comment read "comment out to run the
# photometric calibration"; as written, True *enables* photometry, so it
# presumably meant "set False / comment out to skip" — confirm.
config.doPhotometry = True # comment out to run the photometric calibration
config.photometryRefObjLoader.retarget(LoadIndexedReferenceObjectsTask)
config.photometryRefObjLoader.ref_dataset_name='sdss'
# Photometric reference bands in the 'sdss' catalog.
# NOTE(review): 'y' maps to 'Z' — SDSS has no y band; confirm intended.
config.photometryRefObjLoader.filterMap = {
    'u': 'U',
    'g': 'G',
    'r': 'R',
    'i': 'I',
    'i2': 'I',
    'z': 'Z',
    'y': 'Z',
}

# These are the default values
# Minimum allowed signal-to-noise ratio for sources used for matching
# (in the flux specified by sourceFluxType); <= 0 for no limit
# config.sourceSelector['matcher'].minSnr = 40.0
# Minimum allowed signal-to-noise ratio for sources used for matching
# (in the flux specified by sourceFluxType); <= 0 for no limit
# config.sourceSelector['astrometry'].minSnr = 10.0
|
DarkEnergyScienceCollaboration/ReprocessingTaskForce
|
config/w_2017_40/jointcalConfig.py
|
Python
|
gpl-2.0
| 1,208
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
USAGE: %(program)s SIZE_OF_JOBS_QUEUE
Dispatcher process which orchestrates distributed LSI computations. Run this \
script only once, on any node in your cluster.
Example: python -m gensim.models.lsi_dispatcher
"""
from __future__ import with_statement
import os, sys, logging, threading, time
from six import iteritems, itervalues
try:
from Queue import Queue
except ImportError:
from queue import Queue
import Pyro4
from gensim import utils
# Module-level logger for the dispatcher.
logger = logging.getLogger("gensim.models.lsi_dispatcher")

# How many jobs (=chunks of N documents) to keep "pre-fetched" in a queue?
# A small number is usually enough, unless iteration over the corpus is very very
# slow (slower than the actual computation of LSI), in which case you can override
# this value from command line. ie. run "python ./lsi_dispatcher.py 100"
MAX_JOBS_QUEUE = 10

# timeout for the Queue object put/get blocking methods.
# it should really be infinity, but then keyboard interrupts don't work.
# so this is really just a hack, see http://bugs.python.org/issue1360
HUGE_TIMEOUT = 365 * 24 * 60 * 60  # one year
class Dispatcher(object):
    """
    Dispatcher object that communicates and coordinates individual workers.

    There should never be more than one dispatcher running at any one time.

    NOTE: logging calls use lazy %-style arguments (``logger.info("x=%s", x)``)
    instead of eager string interpolation, so the format work is skipped when
    the corresponding log level is disabled.
    """

    def __init__(self, maxsize=0):
        """
        Note that the constructor does not fully initialize the dispatcher;
        use the `initialize()` function to populate it with workers etc.

        `maxsize` bounds the job queue created in `initialize()` (0 = unbounded).
        """
        self.maxsize = maxsize
        self.workers = {}
        self.callback = None  # a pyro proxy to this object (unknown at init time, but will be set later)

    @Pyro4.expose
    def initialize(self, **model_params):
        """
        `model_params` are parameters used to initialize individual workers (gets
        handed all the way down to worker.initialize()).

        Discovers all `gensim.lsi_worker` entries registered with the Pyro name
        server; unresponsive workers are dropped from the name server.

        Raises RuntimeError when no responsive worker is found.
        """
        self.jobs = Queue(maxsize=self.maxsize)
        self.lock_update = threading.Lock()
        self._jobsdone = 0
        self._jobsreceived = 0
        # locate all available workers and store their proxies, for subsequent RMI calls
        self.workers = {}
        with utils.getNS() as ns:
            self.callback = Pyro4.Proxy('PYRONAME:gensim.lsi_dispatcher')  # = self
            for name, uri in iteritems(ns.list(prefix='gensim.lsi_worker')):
                try:
                    worker = Pyro4.Proxy(uri)
                    workerid = len(self.workers)
                    # make time consuming methods work asynchronously
                    logger.info("registering worker #%i from %s", workerid, uri)
                    worker.initialize(workerid, dispatcher=self.callback, **model_params)
                    self.workers[workerid] = worker
                except Pyro4.errors.PyroError:
                    logger.exception("unresponsive worker at %s, deleting it from the name server", uri)
                    ns.remove(name)
        if not self.workers:
            raise RuntimeError('no workers found; run some lsi_worker scripts on your machines first!')

    @Pyro4.expose
    def getworkers(self):
        """
        Return pyro URIs of all registered workers.
        """
        return [worker._pyroUri for worker in itervalues(self.workers)]

    @Pyro4.expose
    def getjob(self, worker_id):
        """Hand the next queued job to the requesting worker (blocks up to 1s)."""
        logger.info("worker #%i requesting a new job", worker_id)
        job = self.jobs.get(block=True, timeout=1)
        logger.info("worker #%i got a new job (%i left)", worker_id, self.jobs.qsize())
        return job

    @Pyro4.expose
    def putjob(self, job):
        """Enqueue a new job; blocks (up to HUGE_TIMEOUT) while the queue is full."""
        self._jobsreceived += 1
        self.jobs.put(job, block=True, timeout=HUGE_TIMEOUT)
        logger.info("added a new job (len(queue)=%i items)", self.jobs.qsize())

    @Pyro4.expose
    def getstate(self):
        """
        Merge projections from across all workers and return the final projection.

        Blocks until every job handed out has been reported done.
        """
        logger.info("end of input, assigning all remaining jobs")
        logger.debug("jobs done: %s, jobs received: %s", self._jobsdone, self._jobsreceived)
        while self._jobsdone < self._jobsreceived:
            time.sleep(0.5)  # check every half a second
        # TODO: merge in parallel, so that we're done in `log_2(workers)` merges,
        # and not `workers - 1` merges!
        # but merging only takes place once, after all input data has been processed,
        # so the overall effect would be small... compared to the amount of coding :-)
        logger.info("merging states from %i workers", len(self.workers))
        workers = list(self.workers.items())
        result = workers[0][1].getstate()
        for workerid, worker in workers[1:]:
            logger.info("pulling state from worker %s", workerid)
            result.merge(worker.getstate())
        logger.info("sending out merged projection")
        return result

    @Pyro4.expose
    def reset(self):
        """
        Initialize all workers for a new decomposition.
        """
        for workerid, worker in iteritems(self.workers):
            logger.info("resetting worker %s", workerid)
            worker.reset()
            worker.requestjob()
        self._jobsdone = 0
        self._jobsreceived = 0

    @Pyro4.expose
    @Pyro4.oneway
    @utils.synchronous('lock_update')
    def jobdone(self, workerid):
        """
        A worker has finished its job. Log this event and then asynchronously
        transfer control back to the worker.

        In this way, control flow basically oscillates between dispatcher.jobdone()
        and worker.requestjob().
        """
        self._jobsdone += 1
        logger.info("worker #%s finished job #%i", workerid, self._jobsdone)
        worker = self.workers[workerid]
        worker.requestjob()  # tell the worker to ask for another job, asynchronously (one-way)

    def jobsdone(self):
        """Wrap self._jobsdone, needed for remote access through proxies"""
        return self._jobsdone

    @Pyro4.oneway
    def exit(self):
        """
        Terminate all registered workers and then the dispatcher.
        """
        for workerid, worker in iteritems(self.workers):
            logger.info("terminating worker %s", workerid)
            worker.exit()
        logger.info("terminating dispatcher")
        os._exit(0)  # exit the whole process (not just this thread ala sys.exit())
#endclass Dispatcher
def main():
    """Parse the optional queue-size argument and start the dispatcher daemon."""
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
    logger.info("running %s", " ".join(sys.argv))

    program = os.path.basename(sys.argv[0])

    # BUG FIX: the original guard was `if len(sys.argv) < 1`, which can never
    # be true (argv always contains at least the program name), so the usage
    # text was unreachable. Print it on an explicit help request instead —
    # previously `-h`/`--help` crashed in int() below.
    if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
        print(globals()["__doc__"] % locals())
        sys.exit(0)

    # The queue size argument is optional; fall back to the module default.
    if len(sys.argv) < 2:
        maxsize = MAX_JOBS_QUEUE
    else:
        maxsize = int(sys.argv[1])

    # Register this dispatcher with the Pyro name server and serve requests.
    utils.pyro_daemon('gensim.lsi_dispatcher', Dispatcher(maxsize=maxsize))

    logger.info("finished running %s", program)
|
duyet-website/api.duyet.net
|
lib/gensim/models/lsi_dispatcher.py
|
Python
|
mit
| 7,220
|
#!/usr/bin/python
"""Skeleton runner: import the solution, then dispatch the registered asserts."""
from TestHandler import TestHandler

# Both aliases are kept: templates refer to the handler as either `Test` or `test`.
test = Test = TestHandler()

# Importing the solution module is what registers its code with the handler.
# noinspection PyUnresolvedReferences
import solution

test.dispatch_asserts()
|
bionikspoon/Codewars-Challenges
|
python/skeleton/main.py
|
Python
|
mit
| 168
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, William L Thomson Jr
# (c) 2013, Yap Sok Ann
# Written by Yap Sok Ann <sokann@gmail.com>
# Modified by William L. Thomson Jr. <wlt@o-sinc.com>
# Based on apt module written by Matthew Williams <matthew@flowroute.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Metadata consumed by Ansible's documentation tooling: metadata format 1.1,
# module in 'preview' status, maintained by the community.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: portage
short_description: Package manager for Gentoo
description:
- Manages Gentoo packages
version_added: "1.6"
options:
package:
description:
- Package atom or set, e.g. C(sys-apps/foo) or C(>foo-2.13) or C(@world)
state:
description:
- State of the package atom
default: "present"
choices: [ "present", "installed", "emerged", "absent", "removed", "unmerged", "latest" ]
update:
description:
- Update packages to the best version available (--update)
type: bool
default: 'no'
deep:
description:
- Consider the entire dependency tree of packages (--deep)
type: bool
default: 'no'
newuse:
description:
- Include installed packages where USE flags have changed (--newuse)
type: bool
default: 'no'
changed_use:
description:
- Include installed packages where USE flags have changed, except when
- flags that the user has not enabled are added or removed
- (--changed-use)
type: bool
default: 'no'
version_added: 1.8
oneshot:
description:
- Do not add the packages to the world file (--oneshot)
type: bool
default: 'no'
noreplace:
description:
- Do not re-emerge installed packages (--noreplace)
type: bool
default: 'yes'
nodeps:
description:
- Only merge packages but not their dependencies (--nodeps)
type: bool
default: 'no'
onlydeps:
description:
- Only merge packages' dependencies but not the packages (--onlydeps)
type: bool
default: 'no'
depclean:
description:
- Remove packages not needed by explicitly merged packages (--depclean)
- If no package is specified, clean up the world's dependencies
- Otherwise, --depclean serves as a dependency aware version of --unmerge
type: bool
default: 'no'
quiet:
description:
- Run emerge in quiet mode (--quiet)
type: bool
default: 'no'
verbose:
description:
- Run emerge in verbose mode (--verbose)
type: bool
default: 'no'
sync:
description:
- Sync package repositories first
- If yes, perform "emerge --sync"
- If web, perform "emerge-webrsync"
choices: [ "web", "yes", "no" ]
getbinpkg:
description:
- Prefer packages specified at PORTAGE_BINHOST in make.conf
type: bool
default: 'no'
usepkgonly:
description:
- Merge only binaries (no compiling). This sets getbinpkg=yes.
type: bool
default: 'no'
keepgoing:
description:
- Continue as much as possible after an error.
type: bool
default: 'no'
version_added: 2.3
jobs:
description:
- Specifies the number of packages to build simultaneously.
- "Since version 2.6: Value of 0 or False resets any previously added"
- --jobs setting values
version_added: 2.3
loadavg:
description:
- Specifies that no new builds should be started if there are
- other builds running and the load average is at least LOAD
- "Since version 2.6: Value of 0 or False resets any previously added"
- --load-average setting values
version_added: 2.3
quietbuild:
description:
- Redirect all build output to logs alone, and do not display it
- on stdout (--quiet-build)
type: bool
default: 'no'
version_added: 2.6
quietfail:
description:
- Suppresses display of the build log on stdout (--quiet-fail)
- Only the die message and the path of the build log will be
- displayed on stdout.
type: bool
default: 'no'
version_added: 2.6
requirements: [ gentoolkit ]
author:
- "William L Thomson Jr (@wltjr)"
- "Yap Sok Ann (@sayap)"
- "Andrew Udvare (@Tatsh)"
'''
EXAMPLES = '''
# Make sure package foo is installed
- portage:
package: foo
state: present
# Make sure package foo is not installed
- portage:
package: foo
state: absent
# Update package foo to the "latest" version ( os specific alternative to latest )
- portage:
package: foo
update: yes
# Install package foo using PORTAGE_BINHOST setup
- portage:
package: foo
getbinpkg: yes
# Re-install world from binary packages only and do not allow any compiling
- portage:
package: '@world'
usepkgonly: yes
# Sync repositories and update world
- portage:
package: '@world'
update: yes
deep: yes
sync: yes
# Remove unneeded packages
- portage:
depclean: yes
# Remove package foo if it is not explicitly needed
- portage:
package: foo
state: absent
depclean: yes
'''
import os
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
def query_package(module, package, action):
    """Dispatch a presence query: package sets (leading '@') vs. plain atoms."""
    handler = query_set if package.startswith('@') else query_atom
    return handler(module, package, action)
def query_atom(module, atom, action):
    """Return True if `atom` is installed, i.e. `equery list <atom>` exits 0."""
    rc, _out, _err = module.run_command('%s list %s' % (module.equery_path, atom))
    return rc == 0
def query_set(module, set, action):
    """Return True if the named package set appears in the world_sets file.

    Built-in system sets are never reported as present, and attempting to
    unmerge one is a hard module failure.
    """
    system_sets = (
        '@live-rebuild',
        '@module-rebuild',
        '@preserved-rebuild',
        '@security',
        '@selected',
        '@system',
        '@world',
        '@x11-module-rebuild',
    )
    if set in system_sets:
        if action == 'unmerge':
            module.fail_json(msg='set %s cannot be removed' % set)
        return False

    world_sets_path = '/var/lib/portage/world_sets'
    if not os.path.exists(world_sets_path):
        return False

    rc, _out, _err = module.run_command('grep %s %s' % (set, world_sets_path))
    return rc == 0
def sync_repositories(module, webrsync=False):
    """Sync portage repositories via `emerge --sync` (or `emerge-webrsync`).

    Check mode is not supported and exits immediately; a non-zero exit status
    from the sync command is reported as a module failure.
    """
    if module.check_mode:
        module.exit_json(msg='check mode not supported by sync')

    if webrsync:
        cmd = '%s --quiet' % module.get_bin_path('emerge-webrsync', required=True)
    else:
        cmd = '%s --sync --quiet --ask=n' % module.emerge_path

    rc, _out, _err = module.run_command(cmd)
    if rc != 0:
        module.fail_json(msg='could not sync package repositories')
# Note: In the 3 functions below, equery is done one-by-one, but emerge is done
# in one go. If that is not desirable, split the packages into multiple tasks
# instead of joining them together with comma.
def emerge_packages(module, packages):
    """Run emerge command against given list of atoms.

    Exits the module via exit_json/fail_json:
      * with --noreplace and no update/latest requested, exits unchanged when
        every atom is already installed;
      * in check mode, exits reporting that packages would be installed;
      * otherwise runs emerge with the requested flags and reports whether
        anything was (or would be) emerged.
    """
    p = module.params

    # With --noreplace and no update requested, skip emerge entirely when
    # every atom is already installed. (The original re-tested p['noreplace']
    # inside the loop — redundant, it is already guaranteed by this guard.)
    if p['noreplace'] and not (p['update'] or p['state'] == 'latest'):
        for package in packages:
            if not query_package(module, package, 'emerge'):
                break
        else:
            module.exit_json(changed=False, msg='Packages already present.')

    if module.check_mode:
        module.exit_json(changed=True, msg='Packages would be installed.')

    args = []
    # Boolean module parameters that translate 1:1 into emerge flags.
    emerge_flags = {
        'update': '--update',
        'deep': '--deep',
        'newuse': '--newuse',
        'changed_use': '--changed-use',
        'oneshot': '--oneshot',
        'noreplace': '--noreplace',
        'nodeps': '--nodeps',
        'onlydeps': '--onlydeps',
        'quiet': '--quiet',
        'verbose': '--verbose',
        'getbinpkg': '--getbinpkg',
        'usepkgonly': '--usepkgonly',
        'usepkg': '--usepkg',
        'keepgoing': '--keep-going',
        'quietbuild': '--quiet-build',
        'quietfail': '--quiet-fail',
    }
    for flag, arg in emerge_flags.items():
        if p[flag]:
            args.append(arg)

    # state=latest implies --update. (The original also truth-tested
    # p['state'] first — redundant, the comparison covers None/empty.)
    if p['state'] == 'latest':
        args.append("--update")

    if p['usepkg'] and p['usepkgonly']:
        module.fail_json(msg='Use only one of usepkg, usepkgonly')

    # Value-taking emerge options: None omits the option entirely, while a
    # falsy value (0 / 0.0) adds the bare flag to reset any configured value.
    emerge_flags = {
        'jobs': '--jobs',
        'loadavg': '--load-average',
    }
    for flag, arg in emerge_flags.items():
        flag_val = p[flag]
        if flag_val is None:
            # Fallback to default: don't use this argument at all.
            continue
        if not flag_val:
            # If the value is 0 or 0.0: add the flag, but not the value.
            args.append(arg)
            continue
        # Add the --flag value pair.
        args.extend((arg, to_native(flag_val)))

    cmd, (rc, out, err) = run_emerge(module, packages, *args)
    if rc != 0:
        module.fail_json(
            cmd=cmd, rc=rc, stdout=out, stderr=err,
            msg='Packages not installed.',
        )

    # Check for SSH error with PORTAGE_BINHOST, since rc is still 0 despite
    # this error
    if (p['usepkgonly'] or p['getbinpkg']) \
            and 'Permission denied (publickey).' in err:
        module.fail_json(
            cmd=cmd, rc=rc, stdout=out, stderr=err,
            msg='Please check your PORTAGE_BINHOST configuration in make.conf '
                'and your SSH authorized_keys file',
        )

    changed = True
    for line in out.splitlines():
        # "Emerging (1 of ...)" means at least one package was actually built.
        if re.match(r'(?:>+) Emerging (?:binary )?\(1 of', line):
            msg = 'Packages installed.'
            break
        elif module.check_mode and re.match(r'\[(binary|ebuild)', line):
            msg = 'Packages would be installed.'
            break
    else:
        changed = False
        msg = 'No packages installed.'

    module.exit_json(
        changed=changed, cmd=cmd, rc=rc, stdout=out, stderr=err,
        msg=msg,
    )
def unmerge_packages(module, packages):
    """Unmerge (remove) the given atoms, exiting unchanged if none is installed."""
    p = module.params

    # Nothing to do when no listed package is actually installed.
    if not any(query_package(module, package, 'unmerge') for package in packages):
        module.exit_json(changed=False, msg='Packages already absent.')

    args = ['--unmerge'] + ['--%s' % flag for flag in ('quiet', 'verbose') if p[flag]]

    cmd, (rc, out, err) = run_emerge(module, packages, *args)

    if rc != 0:
        module.fail_json(
            cmd=cmd, rc=rc, stdout=out, stderr=err,
            msg='Packages not removed.',
        )

    module.exit_json(
        changed=True, cmd=cmd, rc=rc, stdout=out, stderr=err,
        msg='Packages removed.',
    )
def cleanup_packages(module, packages):
    """Run `emerge --depclean`, optionally restricted to the given atoms.

    When atoms are given and none of them is installed, exits unchanged.
    `changed` is derived from emerge's "Number removed:" summary line.
    """
    p = module.params

    if packages and not any(query_package(module, pkg, 'unmerge') for pkg in packages):
        module.exit_json(changed=False, msg='Packages already absent.')

    args = ['--depclean'] + ['--%s' % flag for flag in ('quiet', 'verbose') if p[flag]]

    cmd, (rc, out, err) = run_emerge(module, packages, *args)
    if rc != 0:
        module.fail_json(cmd=cmd, rc=rc, stdout=out, stderr=err)

    # emerge prints e.g. "Number removed:       3"; the last such line wins.
    removed = 0
    for line in out.splitlines():
        if line.startswith('Number removed:'):
            removed = int(line.split(':')[1].strip())

    module.exit_json(
        changed=removed > 0, cmd=cmd, rc=rc, stdout=out, stderr=err,
        msg='Depclean completed.',
    )
def run_emerge(module, packages, *args):
    """Build and execute the emerge command line.

    Always answers "no" to interactive prompts; in check mode, adds
    --pretend so nothing is actually merged. Returns (cmd, (rc, out, err)).
    """
    extra = ['--ask=n']
    if module.check_mode:
        extra.append('--pretend')
    cmd = [module.emerge_path] + list(args) + extra + packages
    return cmd, module.run_command(cmd)
# Module 'state' values that request installation vs. removal; the first
# entry of the present list is also the argument-spec default.
portage_present_states = ['present', 'emerged', 'installed', 'latest']
portage_absent_states = ['absent', 'unmerged', 'removed']


def main():
    """Entry point: parse module arguments and dispatch to sync/emerge/unmerge/depclean."""
    module = AnsibleModule(
        argument_spec=dict(
            package=dict(default=None, aliases=['name'], type='list'),
            state=dict(
                default=portage_present_states[0],
                choices=portage_present_states + portage_absent_states,
            ),
            update=dict(default=False, type='bool'),
            deep=dict(default=False, type='bool'),
            newuse=dict(default=False, type='bool'),
            changed_use=dict(default=False, type='bool'),
            oneshot=dict(default=False, type='bool'),
            noreplace=dict(default=True, type='bool'),
            nodeps=dict(default=False, type='bool'),
            onlydeps=dict(default=False, type='bool'),
            depclean=dict(default=False, type='bool'),
            quiet=dict(default=False, type='bool'),
            verbose=dict(default=False, type='bool'),
            sync=dict(default=None, choices=['yes', 'web', 'no']),
            getbinpkg=dict(default=False, type='bool'),
            usepkgonly=dict(default=False, type='bool'),
            usepkg=dict(default=False, type='bool'),
            keepgoing=dict(default=False, type='bool'),
            jobs=dict(default=None, type='int'),
            loadavg=dict(default=None, type='float'),
            quietbuild=dict(default=False, type='bool'),
            quietfail=dict(default=False, type='bool'),
        ),
        required_one_of=[['package', 'sync', 'depclean']],
        mutually_exclusive=[
            ['nodeps', 'onlydeps'],
            ['quiet', 'verbose'],
            ['quietbuild', 'verbose'],
            ['quietfail', 'verbose'],
        ],
        supports_check_mode=True,
    )

    # Resolve the required portage tools up front; missing binaries abort here.
    module.emerge_path = module.get_bin_path('emerge', required=True)
    module.equery_path = module.get_bin_path('equery', required=True)

    p = module.params

    # Sync first if requested; with no packages given, syncing is the whole job.
    if p['sync'] and p['sync'].strip() != 'no':
        sync_repositories(module, webrsync=(p['sync'] == 'web'))
        if not p['package']:
            module.exit_json(msg='Sync successfully finished.')

    packages = []
    if p['package']:
        packages.extend(p['package'])

    if p['depclean']:
        # Depclean with explicit packages only makes sense for removal states.
        if packages and p['state'] not in portage_absent_states:
            module.fail_json(
                msg='Depclean can only be used with package when the state is '
                    'one of: %s' % portage_absent_states,
            )
        cleanup_packages(module, packages)
    elif p['state'] in portage_present_states:
        emerge_packages(module, packages)
    elif p['state'] in portage_absent_states:
        unmerge_packages(module, packages)


if __name__ == '__main__':
    main()
|
thaim/ansible
|
lib/ansible/modules/packaging/os/portage.py
|
Python
|
mit
| 14,711
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Maciej Delmanowski <drybjed@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: virt_net
author: "Maciej Delmanowski (@drybjed)"
version_added: "2.0"
short_description: Manage libvirt network configuration
description:
- Manage I(libvirt) networks.
options:
name:
required: true
aliases: ['network']
description:
- name of the network being managed. Note that network must be previously
defined with xml.
state:
required: false
choices: [ "active", "inactive", "present", "absent" ]
description:
- specify which state you want a network to be in.
If 'active', network will be started.
If 'present', ensure that network is present but do not change its
state; if it's missing, you need to specify xml argument.
If 'inactive', network will be stopped.
If 'undefined' or 'absent', network will be removed from I(libvirt) configuration.
command:
required: false
choices: [ "define", "create", "start", "stop", "destroy",
"undefine", "get_xml", "list_nets", "facts",
"info", "status", "modify"]
description:
- in addition to state management, various non-idempotent commands are available.
See examples.
Modify was added in version 2.1
autostart:
required: false
choices: ["yes", "no"]
description:
- Specify if a given storage pool should be started automatically on system boot.
uri:
required: false
default: "qemu:///system"
description:
- libvirt connection uri.
xml:
required: false
description:
- XML document used with the define command.
requirements:
- "python >= 2.6"
- "python-libvirt"
- "python-lxml"
'''
EXAMPLES = '''
# Define a new network
- virt_net: command=define name=br_nat xml='{{ lookup("template", "network/bridge.xml.j2") }}'
# Start a network
- virt_net: command=create name=br_nat
# List available networks
- virt_net: command=list_nets
# Get XML data of a specified network
- virt_net: command=get_xml name=br_nat
# Stop a network
- virt_net: command=destroy name=br_nat
# Undefine a network
- virt_net: command=undefine name=br_nat
# Gather facts about networks
# Facts will be available as 'ansible_libvirt_networks'
- virt_net: command=facts
# Gather information about network managed by 'libvirt' remotely using uri
- virt_net: command=info uri='{{ item }}'
with_items: libvirt_uris
register: networks
# Ensure that a network is active (needs to be defined and built first)
- virt_net: state=active name=br_nat
# Ensure that a network is inactive
- virt_net: state=inactive name=br_nat
# Ensure that a given network will be started at boot
- virt_net: autostart=yes name=br_nat
# Disable autostart for a given network
- virt_net: autostart=no name=br_nat
'''
# Module exit codes.
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE=2

import sys

# libvirt and lxml are optional imports: record their availability so the
# module can fail with a useful message instead of a raw ImportError.
try:
    import libvirt
except ImportError:
    HAS_VIRT = False
else:
    HAS_VIRT = True

try:
    from lxml import etree
except ImportError:
    HAS_XML = False
else:
    HAS_XML = True

ALL_COMMANDS = []
# Commands that operate on a single named network entry.
ENTRY_COMMANDS = ['create', 'status', 'start', 'stop',
                  'undefine', 'destroy', 'get_xml', 'define',
                  'modify' ]
# Commands that operate on the host / all networks at once.
HOST_COMMANDS = [ 'list_nets', 'facts', 'info' ]
ALL_COMMANDS.extend(ENTRY_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)

# Map libvirt's numeric status flags to the strings reported to the user.
ENTRY_STATE_ACTIVE_MAP = {
    0 : "inactive",
    1 : "active"
}
ENTRY_STATE_AUTOSTART_MAP = {
    0 : "no",
    1 : "yes"
}
ENTRY_STATE_PERSISTENT_MAP = {
    0 : "no",
    1 : "yes"
}
class EntryNotFound(Exception):
    """Raised when no libvirt network with the requested name exists."""
class LibvirtConnection(object):
    """Low-level wrapper around a libvirt connection for network entries.

    `entryid` arguments are network names; the special value -1 means
    "all networks".  In Ansible check mode the mutating methods do not
    touch libvirt: they exit early via module.exit_json(changed=True)
    when a change would have been made.
    """
    def __init__(self, uri, module):
        # module: the AnsibleModule instance (used for check_mode and exits).
        self.module = module
        conn = libvirt.open(uri)
        if not conn:
            raise Exception("hypervisor connection failure")
        self.conn = conn
    def find_entry(self, entryid):
        # entryid = -1 returns a list of everything
        results = []
        # Get active entries
        for name in self.conn.listNetworks():
            entry = self.conn.networkLookupByName(name)
            results.append(entry)
        # Get inactive entries
        for name in self.conn.listDefinedNetworks():
            entry = self.conn.networkLookupByName(name)
            results.append(entry)
        if entryid == -1:
            return results
        for entry in results:
            if entry.name() == entryid:
                return entry
        raise EntryNotFound("network %s not found" % entryid)
    def create(self, entryid):
        """Activate (start) a defined network."""
        if not self.module.check_mode:
            return self.find_entry(entryid).create()
        else:
            try:
                state = self.find_entry(entryid).isActive()
            except:
                # NOTE(review): bare except swallows unexpected errors too;
                # a missing network is reported as a would-be change.
                return self.module.exit_json(changed=True)
            if not state:
                return self.module.exit_json(changed=True)
    def modify(self, entryid, xml):
        """Apply an XML fragment to an existing network.

        Only <host> entries (DHCP section) are supported: the host is
        added when its MAC is unknown, modified when name/ip differ, and
        left alone (returns False) when already up to date.  Returns True
        when a change was made (or would be made in check mode); anything
        else fails the module.
        """
        network = self.find_entry(entryid)
        # identify what type of entry is given in the xml
        new_data = etree.fromstring(xml)
        old_data = etree.fromstring(network.XMLDesc(0))
        if new_data.tag == 'host':
            mac_addr = new_data.get('mac')
            hosts = old_data.xpath('/network/ip/dhcp/host')
            # find the one mac we're looking for
            host = None
            for h in hosts:
                if h.get('mac') == mac_addr:
                    host = h
                    break
            if host is None:
                # add the host
                if not self.module.check_mode:
                    # args: command, section, parentIndex, xml, flags
                    res = network.update(libvirt.VIR_NETWORK_UPDATE_COMMAND_ADD_LAST,
                                         libvirt.VIR_NETWORK_SECTION_IP_DHCP_HOST,
                                         -1, xml, libvirt.VIR_NETWORK_UPDATE_AFFECT_CURRENT)
                else:
                    # pretend there was a change
                    res = 0
                if res == 0:
                    return True
            else:
                # change the host
                if host.get('name') == new_data.get('name') and host.get('ip') == new_data.get('ip'):
                    return False
                else:
                    if not self.module.check_mode:
                        res = network.update(libvirt.VIR_NETWORK_UPDATE_COMMAND_MODIFY,
                                             libvirt.VIR_NETWORK_SECTION_IP_DHCP_HOST,
                                             -1, xml, libvirt.VIR_NETWORK_UPDATE_AFFECT_CURRENT)
                    else:
                        # pretend there was a change
                        res = 0
                    if res == 0:
                        return True
        # Reached for non-<host> fragments or when network.update() failed.
        self.module.fail_json(msg='updating this is not supported yet '+unicode(xml))
    def destroy(self, entryid):
        """Stop (deactivate) a network."""
        if not self.module.check_mode:
            return self.find_entry(entryid).destroy()
        else:
            if self.find_entry(entryid).isActive():
                return self.module.exit_json(changed=True)
    def undefine(self, entryid):
        """Remove a network's persistent definition."""
        if not self.module.check_mode:
            return self.find_entry(entryid).undefine()
        else:
            # NOTE(review): find_entry() raises EntryNotFound for a missing
            # network rather than returning a falsy value, so this branch is
            # effectively unreachable -- preserved as-is.
            if not self.find_entry(entryid):
                return self.module.exit_json(changed=True)
    def get_status2(self, entry):
        """Map an entry object's isActive() flag to "active"/"inactive"."""
        state = entry.isActive()
        return ENTRY_STATE_ACTIVE_MAP.get(state,"unknown")
    def get_status(self, entryid):
        """Return "active"/"inactive" for a named network.

        In check mode a missing network is reported as "inactive".
        """
        if not self.module.check_mode:
            state = self.find_entry(entryid).isActive()
            return ENTRY_STATE_ACTIVE_MAP.get(state,"unknown")
        else:
            try:
                state = self.find_entry(entryid).isActive()
                return ENTRY_STATE_ACTIVE_MAP.get(state,"unknown")
            except:
                # BUG FIX: previously returned
                # ENTRY_STATE_ACTIVE_MAP.get("inactive", "unknown"), but the
                # map is keyed by libvirt's integer flags, so the lookup
                # always missed and yielded "unknown" instead of "inactive".
                return "inactive"
    def get_uuid(self, entryid):
        return self.find_entry(entryid).UUIDString()
    def get_xml(self, entryid):
        return self.find_entry(entryid).XMLDesc(0)
    def get_forward(self, entryid):
        """Return the network's forward mode; raise ValueError if unset."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/network/forward')[0].get('mode')
        except:
            raise ValueError('Forward mode not specified')
        return result
    def get_domain(self, entryid):
        """Return the network's domain name; raise ValueError if unset."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/network/domain')[0].get('name')
        except:
            raise ValueError('Domain not specified')
        return result
    def get_macaddress(self, entryid):
        """Return the network's MAC address; raise ValueError if unset."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/network/mac')[0].get('address')
        except:
            raise ValueError('MAC address not specified')
        return result
    def get_autostart(self, entryid):
        """Return "yes"/"no" for the network's autostart flag."""
        state = self.find_entry(entryid).autostart()
        return ENTRY_STATE_AUTOSTART_MAP.get(state,"unknown")
    def get_autostart2(self, entryid):
        """Return the raw autostart flag; in check mode a missing network
        is reported as a would-be change."""
        if not self.module.check_mode:
            return self.find_entry(entryid).autostart()
        else:
            try:
                return self.find_entry(entryid).autostart()
            except:
                return self.module.exit_json(changed=True)
    def set_autostart(self, entryid, val):
        """Set the autostart flag; in check mode report whether it differs."""
        if not self.module.check_mode:
            return self.find_entry(entryid).setAutostart(val)
        else:
            try:
                state = self.find_entry(entryid).autostart()
            except:
                return self.module.exit_json(changed=True)
            if bool(state) != val:
                return self.module.exit_json(changed=True)
    def get_bridge(self, entryid):
        return self.find_entry(entryid).bridgeName()
    def get_persistent(self, entryid):
        """Return "yes"/"no" for whether the definition is persistent."""
        state = self.find_entry(entryid).isPersistent()
        return ENTRY_STATE_PERSISTENT_MAP.get(state,"unknown")
    def define_from_xml(self, entryid, xml):
        """Define a new persistent network from an XML document."""
        if not self.module.check_mode:
            return self.conn.networkDefineXML(xml)
        else:
            try:
                state = self.find_entry(entryid)
            except:
                return self.module.exit_json(changed=True)
class VirtNetwork(object):
def __init__(self, uri, module):
self.module = module
self.uri = uri
self.conn = LibvirtConnection(self.uri, self.module)
def get_net(self, entryid):
return self.conn.find_entry(entryid)
def list_nets(self, state=None):
results = []
for entry in self.conn.find_entry(-1):
if state:
if state == self.conn.get_status2(entry):
results.append(entry.name())
else:
results.append(entry.name())
return results
def state(self):
results = []
for entry in self.list_nets():
state_blurb = self.conn.get_status(entry)
results.append("%s %s" % (entry,state_blurb))
return results
def autostart(self, entryid):
return self.conn.set_autostart(entryid, True)
def get_autostart(self, entryid):
return self.conn.get_autostart2(entryid)
def set_autostart(self, entryid, state):
return self.conn.set_autostart(entryid, state)
def create(self, entryid):
return self.conn.create(entryid)
def modify(self, entryid, xml):
return self.conn.modify(entryid, xml)
def start(self, entryid):
return self.conn.create(entryid)
def stop(self, entryid):
return self.conn.destroy(entryid)
def destroy(self, entryid):
return self.conn.destroy(entryid)
def undefine(self, entryid):
return self.conn.undefine(entryid)
def status(self, entryid):
return self.conn.get_status(entryid)
def get_xml(self, entryid):
return self.conn.get_xml(entryid)
def define(self, entryid, xml):
return self.conn.define_from_xml(entryid, xml)
def info(self):
return self.facts(facts_mode='info')
def facts(self, facts_mode='facts'):
results = dict()
for entry in self.list_nets():
results[entry] = dict()
results[entry]["autostart"] = self.conn.get_autostart(entry)
results[entry]["persistent"] = self.conn.get_persistent(entry)
results[entry]["state"] = self.conn.get_status(entry)
results[entry]["bridge"] = self.conn.get_bridge(entry)
results[entry]["uuid"] = self.conn.get_uuid(entry)
try:
results[entry]["forward_mode"] = self.conn.get_forward(entry)
except ValueError as e:
pass
try:
results[entry]["domain"] = self.conn.get_domain(entry)
except ValueError as e:
pass
try:
results[entry]["macaddress"] = self.conn.get_macaddress(entry)
except ValueError as e:
pass
facts = dict()
if facts_mode == 'facts':
facts["ansible_facts"] = dict()
facts["ansible_facts"]["ansible_libvirt_networks"] = results
elif facts_mode == 'info':
facts['networks'] = results
return facts
def core(module):
    """Dispatch the requested state/command/autostart operation.

    Returns a (rc, result_dict) tuple with rc == VIRT_SUCCESS on success.
    User errors are reported via module.fail_json() (which exits).
    """
    state = module.params.get('state', None)
    name = module.params.get('name', None)
    command = module.params.get('command', None)
    uri = module.params.get('uri', None)
    xml = module.params.get('xml', None)
    autostart = module.params.get('autostart', None)
    v = VirtNetwork(uri, module)
    res = {}
    # Special case: list_nets accepts an optional state filter.
    if state and command == 'list_nets':
        res = v.list_nets(state=state)
        if not isinstance(res, dict):
            res = { command: res }
        return VIRT_SUCCESS, res
    if state:
        if not name:
            module.fail_json(msg = "state change requires a specified name")
        res['changed'] = False
        if state in [ 'active' ]:
            # BUG FIX: was `v.status(name) is not 'active'` -- an identity
            # comparison against a string literal, whose result depends on
            # interpreter string interning.  Use inequality instead.
            if v.status(name) != 'active':
                res['changed'] = True
                res['msg'] = v.start(name)
        elif state in [ 'present' ]:
            try:
                v.get_net(name)
            except EntryNotFound:
                if not xml:
                    module.fail_json(msg = "network '" + name + "' not present, but xml not specified")
                v.define(name, xml)
                res = {'changed': True, 'created': name}
        elif state in [ 'inactive' ]:
            entries = v.list_nets()
            if name in entries:
                # BUG FIX: was `is not 'inactive'` (identity comparison).
                if v.status(name) != 'inactive':
                    res['changed'] = True
                    res['msg'] = v.destroy(name)
        elif state in [ 'undefined', 'absent' ]:
            entries = v.list_nets()
            if name in entries:
                # BUG FIX: was `is not 'inactive'` (identity comparison).
                if v.status(name) != 'inactive':
                    v.destroy(name)
                res['changed'] = True
                res['msg'] = v.undefine(name)
        else:
            module.fail_json(msg="unexpected state")
        return VIRT_SUCCESS, res
    if command:
        if command in ENTRY_COMMANDS:
            if not name:
                module.fail_json(msg = "%s requires 1 argument: name" % command)
            if command in ('define', 'modify'):
                if not xml:
                    module.fail_json(msg = command+" requires xml argument")
                try:
                    v.get_net(name)
                except EntryNotFound:
                    # Network does not exist yet: define it from the XML.
                    v.define(name, xml)
                    res = {'changed': True, 'created': name}
                else:
                    if command == 'modify':
                        mod = v.modify(name, xml)
                        res = {'changed': mod, 'modified': name}
                return VIRT_SUCCESS, res
            res = getattr(v, command)(name)
            if not isinstance(res, dict):
                res = { command: res }
            return VIRT_SUCCESS, res
        elif hasattr(v, command):
            res = getattr(v, command)()
            if not isinstance(res, dict):
                res = { command: res }
            return VIRT_SUCCESS, res
        else:
            # BUG FIX: was `basecmd`, an undefined name that raised
            # NameError instead of reporting the unrecognized command.
            module.fail_json(msg="Command %s not recognized" % command)
    if autostart is not None:
        if not name:
            module.fail_json(msg = "state change requires a specified name")
        res['changed'] = False
        if autostart:
            if not v.get_autostart(name):
                res['changed'] = True
                res['msg'] = v.set_autostart(name, True)
        else:
            if v.get_autostart(name):
                res['changed'] = True
                res['msg'] = v.set_autostart(name, False)
        return VIRT_SUCCESS, res
    module.fail_json(msg="expected state or command parameter to be specified")
def main():
    """Module entry point: build the AnsibleModule, verify deps, run core()."""
    module = AnsibleModule (
        argument_spec = dict(
            name = dict(aliases=['network']),
            state = dict(choices=['active', 'inactive', 'present', 'absent']),
            command = dict(choices=ALL_COMMANDS),
            uri = dict(default='qemu:///system'),
            xml = dict(),
            autostart = dict(type='bool')
        ),
        supports_check_mode = True
    )
    if not HAS_VIRT:
        module.fail_json(
            msg='The `libvirt` module is not importable. Check the requirements.'
        )
    if not HAS_XML:
        module.fail_json(
            msg='The `lxml` module is not importable. Check the requirements.'
        )
    rc = VIRT_SUCCESS
    try:
        rc, result = core(module)
    # BUG FIX: `except Exception, e` is Python-2-only syntax; `as` works on
    # Python >= 2.6 (the documented minimum) as well as Python 3.
    except Exception as e:
        module.fail_json(msg=str(e))
    if rc != 0:  # something went wrong emit the msg
        module.fail_json(rc=rc, msg=result)
    else:
        module.exit_json(**result)
# import module snippets
# Ansible modules conventionally pull in module_utils at the bottom of the
# file, just before invoking main(), so the wildcard import is intentional.
from ansible.module_utils.basic import *
main()
|
haad/ansible-modules-extras
|
cloud/misc/virt_net.py
|
Python
|
gpl-3.0
| 18,818
|
import os
import re
import unittest
from collections import OrderedDict
from coalib.settings.Setting import (
Setting, path, path_list, url, typed_dict, typed_list, typed_ordered_dict,
glob, glob_list)
from coalib.parsing.Globbing import glob_escape
class SettingTest(unittest.TestCase):
    """Tests for ``Setting`` and the conversion helpers shipped with it."""

    def test_construction(self):
        with self.assertRaises(ValueError):
            Setting("", 2, 2)
        with self.assertRaises(TypeError):
            Setting("", "", "", from_cli=5)

    def test_path(self):
        self.uut = Setting("key", " 22\n", "." + os.path.sep, True)
        self.assertEqual(
            path(self.uut), os.path.abspath(os.path.join(".", "22")))

        abspath = os.path.abspath(".")
        self.uut = Setting("key", re.escape(abspath))
        self.assertEqual(path(self.uut), abspath)

        self.uut = Setting("key", " 22", "")
        with self.assertRaises(ValueError):
            path(self.uut)
        # An explicit origin argument overrides the (empty) stored one.
        overridden = path(self.uut, origin="test" + os.path.sep)
        self.assertEqual(
            overridden, os.path.abspath(os.path.join("test", "22")))

    def test_glob(self):
        self.uut = Setting("key", ".",
                           origin=os.path.join("test (1)", "somefile"))
        self.assertEqual(glob(self.uut),
                         glob_escape(os.path.abspath("test (1)")))

    def test_path_list(self):
        abspath = os.path.abspath(".")
        # Need to escape backslashes since we use list conversion
        self.uut = Setting("key", "., " + abspath.replace("\\", "\\\\"),
                           origin=os.path.join("test", "somefile"))
        expected = [os.path.abspath(os.path.join("test", ".")), abspath]
        self.assertEqual(path_list(self.uut), expected)

    def test_url(self):
        uut = Setting("key", "http://google.com")
        self.assertEqual(url(uut), "http://google.com")

        with self.assertRaises(ValueError):
            uut = Setting("key", "abc")
            url(uut)

    def test_glob_list(self):
        abspath = glob_escape(os.path.abspath("."))
        # Need to escape backslashes since we use list conversion
        self.uut = Setting("key", "., " + abspath.replace("\\", "\\\\"),
                           origin=os.path.join("test (1)", "somefile"))
        expected = [glob_escape(os.path.abspath(os.path.join("test (1)", "."))),
                    abspath]
        self.assertEqual(glob_list(self.uut), expected)

    def test_typed_list(self):
        self.uut = Setting("key", "1, 2, 3")
        self.assertEqual(typed_list(int)(self.uut), [1, 2, 3])

        with self.assertRaises(ValueError):
            self.uut = Setting("key", "1, a, 3")
            typed_list(int)(self.uut)

    def test_typed_dict(self):
        self.uut = Setting("key", "1, 2: t, 3")
        self.assertEqual(typed_dict(int, str, None)(self.uut),
                         {1: None, 2: "t", 3: None})

        with self.assertRaises(ValueError):
            self.uut = Setting("key", "1, a, 3")
            typed_dict(int, str, "")(self.uut)

    def test_typed_ordered_dict(self):
        self.uut = Setting("key", "1, 2: t, 3")
        self.assertEqual(typed_ordered_dict(int, str, None)(self.uut),
                         OrderedDict([(1, None), (2, "t"), (3, None)]))

        with self.assertRaises(ValueError):
            self.uut = Setting("key", "1, a, 3")
            typed_ordered_dict(int, str, "")(self.uut)

    def test_inherited_conversions(self):
        self.uut = Setting("key", " 22\n", ".", True)
        self.assertEqual(str(self.uut), "22")
        self.assertEqual(int(self.uut), 22)
        with self.assertRaises(ValueError):
            bool(self.uut)
|
MattAllmendinger/coala
|
tests/settings/SettingTest.py
|
Python
|
agpl-3.0
| 3,666
|
# Copyright: Damien Elmes <anki@ichi2.net>
# -*- coding: utf-8 -*-
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import time, re, traceback
from aqt.qt import *
from anki.sync import httpCon
from aqt.utils import showWarning
from anki.hooks import addHook, remHook
import aqt.sync # monkey-patches httplib2
def download(mw, code):
    """Download addon/deck from AnkiWeb.

    On success returns (data, fname) and the caller must stop the progress
    dialog; on failure shows a warning, finishes the dialog and returns None.
    """
    # check code is valid
    try:
        code = int(code)
    except ValueError:
        showWarning(_("Invalid code."))
        return
    # create downloading thread
    thread = Downloader(code)
    def onRecv():
        try:
            mw.progress.update(label="%dKB downloaded" % (thread.recvTotal/1024))
        except NameError:
            # some users report the following error on long downloads
            # NameError: free variable 'mw' referenced before assignment in enclosing scope
            # unsure why this is happening, but guard against throwing the
            # error
            pass
    mw.connect(thread, SIGNAL("recv"), onRecv)
    thread.start()
    mw.progress.start(immediate=True)
    # Pump the Qt event loop ourselves so the progress dialog stays
    # responsive while the worker thread runs; wait(100) sleeps up to
    # 100 ms per iteration instead of busy-spinning.
    while not thread.isFinished():
        mw.app.processEvents()
        thread.wait(100)
    if not thread.error:
        # success
        return thread.data, thread.fname
    else:
        mw.progress.finish()
        showWarning(_("Download failed: %s") % thread.error)
class Downloader(QThread):
    """Worker thread that fetches one shared item from AnkiWeb.

    After the thread finishes, results are exposed as attributes:
    `error` (None on success), `data` (response body) and `fname`
    (filename parsed from the Content-Disposition header).
    """
    def __init__(self, code):
        QThread.__init__(self)
        self.code = code
        self.error = None
    def run(self):
        # setup progress handler
        self.byteUpdate = time.time()
        self.recvTotal = 0
        def canPost():
            # Rate-limit "recv" signals to at most one per 100 ms.
            if (time.time() - self.byteUpdate) > 0.1:
                self.byteUpdate = time.time()
                return True
        def recvEvent(bytes):
            self.recvTotal += bytes
            if canPost():
                self.emit(SIGNAL("recv"))
        addHook("httpRecv", recvEvent)
        con = httpCon()
        try:
            resp, cont = con.request(
                aqt.appShared + "download/%d" % self.code)
        # NOTE: Python-2-only `except ..., e` syntax; `e[0]` indexing an
        # exception is also a Python 2 idiom (falls back to the formatted
        # traceback when it is not available).
        except Exception, e:
            exc = traceback.format_exc()
            try:
                self.error = unicode(e[0], "utf8", "ignore")
            except:
                self.error = unicode(exc, "utf8", "ignore")
            return
        finally:
            # Always unhook, on success and on failure alike.
            remHook("httpRecv", recvEvent)
        if resp['status'] == '200':
            self.error = None
            self.fname = re.match("attachment; filename=(.+)",
                                  resp['content-disposition']).group(1)
            self.data = cont
        elif resp['status'] == '403':
            self.error = _("Invalid code.")
        else:
            self.error = _("Error downloading: %s") % resp['status']
|
Arthaey/anki
|
aqt/downloader.py
|
Python
|
agpl-3.0
| 2,861
|
"""
NOTE: this API is WIP and has not yet been approved. Do not use this API without talking to Christina or Andy.
For more information, see:
https://openedx.atlassian.net/wiki/display/TNL/User+API
"""
from django.db import transaction
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import permissions
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from ..errors import UserNotFound, UserNotAuthorized, AccountUpdateError, AccountValidationError
from openedx.core.lib.api.parsers import MergePatchParser
from .api import get_account_settings, update_account_settings
from .serializers import PROFILE_IMAGE_KEY_PREFIX
class AccountView(APIView):
    """
    **Use Cases**

        Get or update a user's account information. Updates are supported
        only through merge patch.

    **Example Requests**

        GET /api/user/v1/accounts/{username}/[?view=shared]

        PATCH /api/user/v1/accounts/{username}/{"key":"value"} "application/merge-patch+json"

    **Response Values for GET**

        If no user exists with the specified username, an HTTP 404 "Not
        Found" response is returned.

        If the user makes the request for her own account, or makes a
        request for another account and has "is_staff" access, an HTTP 200
        "OK" response is returned. The response contains the following
        values.

        * bio: null or textual representation of user biographical
          information ("about me").
        * country: An ISO 3166 country code or null.
        * date_joined: The date the account was created, in the string
          format provided by datetime. For example, "2014-08-26T17:52:11Z".
        * email: Email address for the user. New email addresses must be confirmed
          via a confirmation email, so GET does not reflect the change until
          the address has been confirmed.
        * gender: One of the following values:

            * null
            * "f"
            * "m"
            * "o"

        * goals: The textual representation of the user's goals, or null.
        * is_active: Boolean representation of whether a user is active.
        * language: The user's preferred language, or null.
        * language_proficiencies: Array of language preferences. Each
          preference is a JSON object with the following keys:

            * "code": string ISO 639-1 language code e.g. "en".

        * level_of_education: One of the following values:

            * "p": PhD or Doctorate
            * "m": Master's or professional degree
            * "b": Bachelor's degree
            * "a": Associate's degree
            * "hs": Secondary/high school
            * "jhs": Junior secondary/junior high/middle school
            * "el": Elementary/primary school
            * "none": None
            * "o": Other
            * null: The user did not enter a value

        * mailing_address: The textual representation of the user's mailing
          address, or null.
        * name: The full name of the user.
        * profile_image: A JSON representation of a user's profile image
          information. This representation has the following keys.

            * "has_image": Boolean indicating whether the user has a profile
              image.
            * "image_url_*": Absolute URL to various sizes of a user's
              profile image, where '*' matches a representation of the
              corresponding image size, such as 'small', 'medium', 'large',
              and 'full'. These are configurable via PROFILE_IMAGE_SIZES_MAP.

        * requires_parental_consent: True if the user is a minor
          requiring parental consent.
        * username: The username associated with the account.
        * year_of_birth: The year the user was born, as an integer, or null.
        * account_privacy: The user's setting for sharing her personal
          profile. Possible values are "all_users" or "private".

        For all text fields, plain text instead of HTML is supported. The
        data is stored exactly as specified. Clients must HTML escape
        rendered values to avoid script injections.

        If a user who does not have "is_staff" access requests account
        information for a different user, only a subset of these fields is
        returned. The returns fields depend on the
        ACCOUNT_VISIBILITY_CONFIGURATION configuration setting and the
        visibility preference of the user for whom data is requested.

        Note that a user can view which account fields they have shared
        with other users by requesting their own username and providing
        the "view=shared" URL parameter.

    **Response Values for PATCH**

        Users can only modify their own account information. If the
        requesting user does not have the specified username and has staff
        access, the request returns an HTTP 403 "Forbidden" response. If
        the requesting user does not have staff access, the request
        returns an HTTP 404 "Not Found" response to avoid revealing the
        existence of the account.

        If no user exists with the specified username, an HTTP 404 "Not
        Found" response is returned.

        If "application/merge-patch+json" is not the specified content
        type, a 415 "Unsupported Media Type" response is returned.

        If validation errors prevent the update, this method returns a 400
        "Bad Request" response that includes a "field_errors" field that
        lists all error messages.

        If a failure at the time of the update prevents the update, a 400
        "Bad Request" error is returned. The JSON collection contains
        specific errors.

        If the update is successful, updated user account data is returned.
    """
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
    permission_classes = (permissions.IsAuthenticated,)
    parser_classes = (MergePatchParser,)
    def get(self, request, username):
        """
        GET /api/user/v1/accounts/{username}/
        """
        try:
            account_settings = get_account_settings(request, username, view=request.query_params.get('view'))
        except UserNotFound:
            # Staff see an explicit 403; others get 404 so the existence of
            # the account is not revealed.
            return Response(status=status.HTTP_403_FORBIDDEN if request.user.is_staff else status.HTTP_404_NOT_FOUND)
        return Response(account_settings)
    def patch(self, request, username):
        """
        PATCH /api/user/v1/accounts/{username}/

        Note that this implementation is the "merge patch" implementation proposed in
        https://tools.ietf.org/html/rfc7396. The content_type must be "application/merge-patch+json" or
        else an error response with status code 415 will be returned.
        """
        try:
            # Wrap update + re-read in one transaction so a failed re-read
            # does not leave a partially visible update.
            with transaction.atomic():
                update_account_settings(request.user, request.data, username=username)
                account_settings = get_account_settings(request, username)
        except UserNotAuthorized:
            # Same 403-for-staff / 404-for-others masking as in get().
            return Response(status=status.HTTP_403_FORBIDDEN if request.user.is_staff else status.HTTP_404_NOT_FOUND)
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)
        except AccountValidationError as err:
            return Response({"field_errors": err.field_errors}, status=status.HTTP_400_BAD_REQUEST)
        except AccountUpdateError as err:
            return Response(
                {
                    "developer_message": err.developer_message,
                    "user_message": err.user_message
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        return Response(account_settings)
|
Emergya/icm-openedx-educamadrid-platform-basic
|
openedx/core/djangoapps/user_api/accounts/views.py
|
Python
|
agpl-3.0
| 8,229
|
# -*- coding: utf-8 -*-
"""Tests for tracking middleware."""
import ddt
from django.contrib.auth.models import User
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import patch, sentinel
from eventtracking import tracker
from track.middleware import TrackMiddleware
@ddt.ddt
class TrackMiddlewareTestCase(TestCase):
    """ Class for checking tracking requests """
    def setUp(self):
        super(TrackMiddlewareTestCase, self).setUp()
        self.track_middleware = TrackMiddleware()
        self.request_factory = RequestFactory()
        # Stub out actual event emission so tests can assert on calls.
        patcher = patch('track.views.server_track')
        self.mock_server_track = patcher.start()
        self.addCleanup(patcher.stop)
    def test_normal_request(self):
        request = self.request_factory.get('/somewhere')
        self.track_middleware.process_request(request)
        self.assertTrue(self.mock_server_track.called)
    @ddt.unpack
    @ddt.data(
        ('HTTP_USER_AGENT', 'agent'),
        ('PATH_INFO', 'path'),
        ('HTTP_REFERER', 'referer'),
        ('HTTP_ACCEPT_LANGUAGE', 'accept_language'),
    )
    def test_request_with_latin1_characters(self, meta_key, context_key):
        """
        When HTTP headers contains latin1 characters.
        """
        request = self.request_factory.get('/somewhere')
        # pylint: disable=no-member
        request.META[meta_key] = 'test latin1 \xd3 \xe9 \xf1'  # pylint: disable=no-member
        context = self.get_context_for_request(request)
        # The bytes in the string on the right are utf8 encoded in the source file, so we decode them to construct
        # a valid unicode string.
        # NOTE: str.decode() is a Python-2-only idiom.
        self.assertEqual(context[context_key], 'test latin1 Ó é ñ'.decode('utf8'))
    def test_default_filters_do_not_render_view(self):
        # These paths are excluded from tracking by the default filters.
        for url in ['/event', '/event/1', '/login', '/heartbeat']:
            request = self.request_factory.get(url)
            self.track_middleware.process_request(request)
            self.assertFalse(self.mock_server_track.called)
            self.mock_server_track.reset_mock()
    @override_settings(TRACKING_IGNORE_URL_PATTERNS=[])
    def test_reading_filtered_urls_from_settings(self):
        # With no ignore patterns configured, /event is tracked again.
        request = self.request_factory.get('/event')
        self.track_middleware.process_request(request)
        self.assertTrue(self.mock_server_track.called)
    @override_settings(TRACKING_IGNORE_URL_PATTERNS=[r'^/some/excluded.*'])
    def test_anchoring_of_patterns_at_beginning(self):
        # Patterns are anchored at the start of the path, so '/excluded'
        # does not match '^/some/excluded.*'.
        request = self.request_factory.get('/excluded')
        self.track_middleware.process_request(request)
        self.assertTrue(self.mock_server_track.called)
        self.mock_server_track.reset_mock()
        request = self.request_factory.get('/some/excluded/url')
        self.track_middleware.process_request(request)
        self.assertFalse(self.mock_server_track.called)
    def test_default_request_context(self):
        context = self.get_context_for_path('/courses/')
        self.assertEquals(context, {
            'accept_language': '',
            'referer': '',
            'user_id': '',
            'session': '',
            'username': '',
            'ip': '127.0.0.1',
            'host': 'testserver',
            'agent': '',
            'path': '/courses/',
            'org_id': '',
            'course_id': '',
            'client_id': None,
        })
    def test_no_forward_for_header_ip_context(self):
        request = self.request_factory.get('/courses/')
        remote_addr = '127.0.0.1'
        request.META['REMOTE_ADDR'] = remote_addr
        context = self.get_context_for_request(request)
        self.assertEquals(context['ip'], remote_addr)
    def test_single_forward_for_header_ip_context(self):
        # X-Forwarded-For takes precedence over REMOTE_ADDR.
        request = self.request_factory.get('/courses/')
        remote_addr = '127.0.0.1'
        forwarded_ip = '11.22.33.44'
        request.META['REMOTE_ADDR'] = remote_addr
        request.META['HTTP_X_FORWARDED_FOR'] = forwarded_ip
        context = self.get_context_for_request(request)
        self.assertEquals(context['ip'], forwarded_ip)
    def test_multiple_forward_for_header_ip_context(self):
        # Only the first (client-most) entry of a comma-separated
        # X-Forwarded-For list is used.
        request = self.request_factory.get('/courses/')
        remote_addr = '127.0.0.1'
        forwarded_ip = '11.22.33.44, 10.0.0.1, 127.0.0.1'
        request.META['REMOTE_ADDR'] = remote_addr
        request.META['HTTP_X_FORWARDED_FOR'] = forwarded_ip
        context = self.get_context_for_request(request)
        self.assertEquals(context['ip'], '11.22.33.44')
    def get_context_for_path(self, path):
        """Extract the generated event tracking context for a given request for the given path."""
        request = self.request_factory.get(path)
        return self.get_context_for_request(request)
    def get_context_for_request(self, request):
        """Extract the generated event tracking context for the given request."""
        self.track_middleware.process_request(request)
        try:
            captured_context = tracker.get_tracker().resolve_context()
        finally:
            # Also verify the middleware cleans up its context on response.
            self.track_middleware.process_response(request, None)
            self.assertEquals(
                tracker.get_tracker().resolve_context(),
                {}
            )
        return captured_context
    def test_request_in_course_context(self):
        captured_context = self.get_context_for_path('/courses/test_org/test_course/test_run/foo')
        expected_context_subset = {
            'course_id': 'test_org/test_course/test_run',
            'org_id': 'test_org',
        }
        self.assert_dict_subset(captured_context, expected_context_subset)
    def assert_dict_subset(self, superset, subset):
        """Assert that the superset dict contains all of the key-value pairs found in the subset dict."""
        # NOTE: dict.iteritems() is Python-2-only.
        for key, expected_value in subset.iteritems():
            self.assertEquals(superset[key], expected_value)
    def test_request_with_user(self):
        user_id = 1
        username = sentinel.username
        request = self.request_factory.get('/courses/')
        request.user = User(pk=user_id, username=username)
        context = self.get_context_for_request(request)
        self.assert_dict_subset(context, {
            'user_id': user_id,
            'username': username,
        })
    def test_request_with_session(self):
        request = self.request_factory.get('/courses/')
        SessionMiddleware().process_request(request)
        request.session.save()
        session_key = request.session.session_key
        expected_session_key = self.track_middleware.encrypt_session_key(session_key)
        # Encryption should not change the key length.
        self.assertEquals(len(session_key), len(expected_session_key))
        context = self.get_context_for_request(request)
        self.assert_dict_subset(context, {
            'session': expected_session_key,
        })
    @override_settings(SECRET_KEY='85920908f28904ed733fe576320db18cabd7b6cd')
    def test_session_key_encryption(self):
        # Pins the encryption output for a fixed SECRET_KEY so accidental
        # algorithm changes are caught.
        session_key = '665924b49a93e22b46ee9365abf28c2a'
        expected_session_key = '3b81f559d14130180065d635a4f35dd2'
        encrypted_session_key = self.track_middleware.encrypt_session_key(session_key)
        self.assertEquals(encrypted_session_key, expected_session_key)
    def test_request_headers(self):
        ip_address = '10.0.0.0'
        user_agent = 'UnitTest/1.0'
        client_id_header = '123.123'
        factory = RequestFactory(
            REMOTE_ADDR=ip_address, HTTP_USER_AGENT=user_agent, HTTP_X_EDX_GA_CLIENT_ID=client_id_header
        )
        request = factory.get('/some-path')
        context = self.get_context_for_request(request)
        self.assert_dict_subset(context, {
            'ip': ip_address,
            'agent': user_agent,
            'client_id': client_id_header
        })
|
ahmedaljazzar/edx-platform
|
common/djangoapps/track/tests/test_middleware.py
|
Python
|
agpl-3.0
| 7,908
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2005, 2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Restructure Gettxt PO files produced by
:doc:`poconflicts </commands/poconflicts>` into the original directory tree
for merging using :doc:`pomerge </commands/pomerge>`.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/pomerge.html
for examples and usage instructions.
"""
import os
import sys
from translate.misc import optrecurse
from translate.storage import po
class SplitOptionParser(optrecurse.RecursiveOptionParser):
"""a specialized Option Parser for posplit"""
def parse_args(self, args=None, values=None):
"""parses the command line options, handling implicit input/output args"""
(options, args) = optrecurse.RecursiveOptionParser.parse_args(self, args, values)
if not options.output:
self.error("Output file is rquired")
return (options, args)
def set_usage(self, usage=None):
"""sets the usage string - if usage not given, uses getusagestring for each option"""
if usage is None:
self.usage = "%prog " + " ".join([self.getusagestring(option) for option in self.option_list]) + \
"\n " + \
"input directory is searched for PO files with (poconflicts) comments, all entries are written to files in a directory structure for pomerge"
else:
super(SplitOptionParser, self).set_usage(usage)
def recursiveprocess(self, options):
"""recurse through directories and process files"""
if not self.isrecursive(options.output, 'output'):
try:
self.warning("Output directory does not exist. Attempting to create")
# TODO: maybe we should only allow it to be created, otherwise
# we mess up an existing tree.
os.mkdir(options.output)
except:
self.error(optrecurse.optparse.OptionValueError("Output directory does not exist, attempt to create failed"))
if self.isrecursive(options.input, 'input') and getattr(options, "allowrecursiveinput", True):
if isinstance(options.input, list):
inputfiles = self.recurseinputfilelist(options)
else:
inputfiles = self.recurseinputfiles(options)
else:
if options.input:
inputfiles = [os.path.basename(options.input)]
options.input = os.path.dirname(options.input)
else:
inputfiles = [options.input]
self.textmap = {}
self.initprogressbar(inputfiles, options)
for inputpath in inputfiles:
fullinputpath = self.getfullinputpath(options, inputpath)
try:
success = self.processfile(options, fullinputpath)
except Exception as error:
if isinstance(error, KeyboardInterrupt):
raise self.warning("Error processing: input %s" % (fullinputpath), options, sys.exc_info())
success = False
self.reportprogress(inputpath, success)
del self.progressbar
def processfile(self, options, fullinputpath):
    """Process an individual PO file.

    For every non-header, non-plural unit marked with a ``(poconflicts)``
    comment, strip the marker comment and append the unit to the output PO
    file named in that comment, creating output subdirectories as needed.

    :param options: parsed option values (``options.output`` is the root
        of the output tree).
    :param fullinputpath: full path of the PO file to read.
    """
    inputfile = self.openinputfile(options, fullinputpath)
    inputpofile = po.pofile(inputfile)
    for pounit in inputpofile.units:
        # Guard clauses flatten the original nested ifs.
        if pounit.isheader() or pounit.hasplural():  # XXX
            continue
        if not pounit.hasmarkedcomment("poconflicts"):
            continue
        for comment in pounit.othercomments:
            if comment.find("# (poconflicts)") == 0:
                pounit.othercomments.remove(comment)
                break
        # TODO: refactor writing out
        # NOTE(review): relies on the loop above having found (and broken
        # on) a "# (poconflicts)" comment, which hasmarkedcomment should
        # guarantee — confirm that both check the same marker format.
        outputpath = comment[comment.find(")") + 2:].strip()
        self.checkoutputsubdir(options, os.path.dirname(outputpath))
        fulloutputpath = os.path.join(options.output, outputpath)
        if os.path.isfile(fulloutputpath):
            # Merge into the existing file.  Fixed: the read handle was
            # previously never closed before reopening for writing.
            with open(fulloutputpath, 'r') as outputfile:
                outputpofile = po.pofile(outputfile)
        else:
            outputpofile = po.pofile()
        outputpofile.units.append(pounit)  # TODO:perhaps check to see if it's already there...
        # Fixed: write handle is now closed deterministically via ``with``.
        with open(fulloutputpath, 'w') as outputfile:
            outputfile.write(str(outputpofile))
def main():
    """Command-line entry point: configure the parser and run it."""
    # Output file extensions will actually be determined by the comments in
    # the po files.
    pooutput = ("po", None)
    formats = {
        (None, None): pooutput,
        ("po", "po"): pooutput,
        "po": pooutput,
    }
    parser = SplitOptionParser(formats, description=__doc__)
    parser.set_usage()
    parser.run()


if __name__ == '__main__':
    main()
|
bluemini/kuma
|
vendor/packages/translate/tools/porestructure.py
|
Python
|
mpl-2.0
| 5,556
|
class APIField:
    """A single field exposed through the API.

    :param name: the field's name; also determines the field's hash, so two
        fields with the same name are interchangeable in sets/dicts.
    :param serializer: optional serializer used to render the field, or
        ``None`` to use default rendering.
    """

    def __init__(self, name, serializer=None):
        self.name = name
        self.serializer = serializer

    def __hash__(self):
        # Hash by name only: the serializer does not affect identity.
        return hash(self.name)

    def __repr__(self):
        return '<APIField {}>'.format(self.name)
|
zerolab/wagtail
|
wagtail/api/conf.py
|
Python
|
bsd-3-clause
| 256
|
# ==================================================================================================
# Copyright 2015 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
# zktraffic
|
rgs1/zktraffic
|
zktraffic/zab/__init__.py
|
Python
|
apache-2.0
| 914
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ReportProjectTaskUser(models.Model):
    # Extends the project task analysis report with timesheet hour metrics.
    _inherit = "report.project.task.user"

    # Read-only aggregate columns; values come from the SQL fragments added
    # in _select() below (``t`` is the task table alias in the base query —
    # TODO confirm against the parent report's FROM clause).
    hours_planned = fields.Float('Planned Hours', readonly=True)
    hours_effective = fields.Float('Effective Hours', readonly=True)
    remaining_hours = fields.Float('Remaining Hours', readonly=True)
    # group_operator='avg': grouped views average progress rather than sum it.
    progress = fields.Float('Progress', group_operator='avg', readonly=True)

    def _select(self):
        """Append the timesheet hour columns to the base report's SELECT."""
        return super(ReportProjectTaskUser, self)._select() + """,
            progress as progress,
            t.effective_hours as hours_effective,
            t.planned_hours - t.effective_hours - t.subtask_effective_hours as remaining_hours,
            planned_hours as hours_planned"""

    def _group_by(self):
        """Append the new columns to the base report's GROUP BY clause."""
        return super(ReportProjectTaskUser, self)._group_by() + """,
            remaining_hours,
            t.effective_hours,
            progress,
            planned_hours
        """
|
ddico/odoo
|
addons/hr_timesheet/report/project_report.py
|
Python
|
agpl-3.0
| 1,036
|