| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
from __future__ import unicode_literals
"""
This module abstracts all the bytes<-->string conversions so that the Python 2
and Python 3 code everywhere else is similar. It also has a few simple
functions that deal with the fact that bytes behave differently between the
two versions, even when using from __future__ import unicode_literals.
For example:
Python 2:
b'mybytes'[0] --> b'm' (type str)
Python 3:
b'mybytes'[0] --> 109 (type int)
So in some places we get an index of a bytes object, and the functions here
make sure the behaviour is the same in both versions of Python.
"""
import sys
try:
    # Python 2 has a cmp() builtin; referencing it raises NameError on Python 3.
    cmp = cmp
except NameError:
    # No cmp() builtin available, so this is Python 3.
    def cmp(a, b):
        return (a > b) - (a < b)
def string_to_bytestr(string):
"""
Convert a string to a bytes object. This encodes the string as well, which
will typically change ord() on each element & change the length (i.e. one
char can become 1, 2, 3 or 4 bytes).
"""
return string.encode('utf-8')
if sys.version_info >= (3,):
# Aliases that make the Python 2-only names work on Python 3.
unicode = str
long = int
range = range
unichr = chr
def iteritems(d):
return d.items()
from io import BytesIO as SimIO
def string_to_bytes(text):
"""
Convert a string to a bytes object. This is a raw conversion
so that ord() on each element remains unchanged.
Input type: string
Output type: bytes
"""
return bytes([ord(c) for c in text])
def bytes_to_string(byte_array):
"""
Inverse of string_to_bytes.
"""
return ''.join([chr(b) for b in byte_array])
def string_to_bytestr(string):
"""
Convert a string to a bytes object. This encodes the string as well, which
will typically change ord() on each element & change the length (i.e. one
char can become 1, 2, 3 or 4 bytes).
"""
return string.encode('utf-8')
def bytestr_to_string(bytestr):
"""
Inverse of string_to_bytestr.
"""
return bytes(bytestr).decode('utf-8')
def byte_chr(bytes_str):
"""
This converts a *single* input byte to a bytes object. Usually used in
conjuction with b'mybytes'[i]. See module description.
Input: 2: string/pseudo-bytes 3: int
Output: bytes
"""
return bytes([bytes_str])
def bytestr(val):
"""
Convert a *single* integer to a bytes object. Usually used like
bytestr(int).
Input: int
Output: bytes
"""
return bytes([val])
else:
# Re-export the Python 2 names so the rest of the code can import them from here.
range = xrange
unicode = unicode
long = long
unichr = unichr
def iteritems(d):
return d.iteritems()
try:
from cStringIO import StringIO as SimIO
except ImportError:
from StringIO import StringIO as SimIO
def string_to_bytes(text):
    """
    See other implementation for notes
    """
    # chr(ord(c)) yields a byte str on Python 2, so ord() is preserved and
    # the result is real bytes even with unicode_literals in effect.
    return b"".join([chr(ord(c)) for c in text])
def bytes_to_string(byte_array):
    """
    See other implementation for notes
    """
    # unichr(ord(b)) keeps ord() intact even for bytes >= 0x80, where an
    # implicit ascii coercion would raise UnicodeDecodeError.
    return ''.join([unichr(ord(b)) for b in byte_array])
def bytestr_to_string(bytestr):
"""
See other implementation for notes
"""
return unicode(bytestr, 'utf-8')
def byte_chr(bytes_str):
"""
See other implementation for notes
"""
return bytes_str
def bytestr(val):
"""
See other implementation for notes
"""
return chr(val)
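# A minimal round-trip sanity check for the helpers above (an illustrative
# sketch, not part of the original module; the expected values follow from
# the function docstrings and hold on both Python 2 and 3):
if __name__ == '__main__':
    assert bytes_to_string(string_to_bytes('abc')) == 'abc'
    assert bytestr_to_string(string_to_bytestr('caf\xe9')) == 'caf\xe9'
    assert byte_chr(b'mybytes'[0]) == b'm'
    assert bytestr(109) == b'm'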
| openx/python3-protobuf | python/google/protobuf/internal/utils.py | Python | bsd-3-clause | 3,630 |
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
import Gaffer
import GafferTest
import GafferImage
class OpenColorIOTest( unittest.TestCase ) :
fileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferTest/images/checker.exr" )
def test( self ) :
n = GafferImage.ImageReader()
n["fileName"].setValue( self.fileName )
o = GafferImage.OpenColorIO()
o["in"].setInput( n["out"] )
self.assertEqual( n["out"].image(), o["out"].image() )
o["inputSpace"].setValue( "linear" )
o["outputSpace"].setValue( "sRGB" )
self.assertNotEqual( n["out"].image(), o["out"].image() )
def testHashPassThrough( self ) :
n = GafferImage.ImageReader()
n["fileName"].setValue( self.fileName )
o = GafferImage.OpenColorIO()
o["in"].setInput( n["out"] )
self.assertEqual( n["out"].image(), o["out"].image() )
o["inputSpace"].setValue( "linear" )
o["outputSpace"].setValue( "sRGB" )
self.assertNotEqual( n["out"].image(), o["out"].image() )
o["enabled"].setValue( False )
self.assertEqual( n["out"].image(), o["out"].image() )
self.assertEqual( n["out"]['format'].hash(), o["out"]['format'].hash() )
self.assertEqual( n["out"]['dataWindow'].hash(), o["out"]['dataWindow'].hash() )
self.assertEqual( n["out"]['channelNames'].hash(), o["out"]['channelNames'].hash() )
o["enabled"].setValue( True )
o["inputSpace"].setValue( "linear" )
o["outputSpace"].setValue( "linear" )
self.assertEqual( n["out"].image(), o["out"].image() )
self.assertEqual( n["out"]['format'].hash(), o["out"]['format'].hash() )
self.assertEqual( n["out"]['dataWindow'].hash(), o["out"]['dataWindow'].hash() )
self.assertEqual( n["out"]['channelNames'].hash(), o["out"]['channelNames'].hash() )
def testImageHashPassThrough( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.fileName )
o = GafferImage.OpenColorIO()
o["in"].setInput( i["out"] )
self.assertEqual( i["out"].imageHash(), o["out"].imageHash() )
o["inputSpace"].setValue( "linear" )
o["outputSpace"].setValue( "sRGB" )
self.assertNotEqual( i["out"].imageHash(), o["out"].imageHash() )
def testChannelsAreSeparate( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( os.path.expandvars( "$GAFFER_ROOT/python/GafferTest/images/circles.exr" ) )
o = GafferImage.OpenColorIO()
o["in"].setInput( i["out"] )
o["inputSpace"].setValue( "linear" )
o["outputSpace"].setValue( "sRGB" )
self.assertNotEqual(
o["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
o["out"].channelDataHash( "G", IECore.V2i( 0 ) )
)
self.assertNotEqual(
o["out"].channelData( "R", IECore.V2i( 0 ) ),
o["out"].channelData( "G", IECore.V2i( 0 ) )
)
if __name__ == "__main__":
unittest.main()
| cedriclaunay/gaffer | python/GafferImageTest/OpenColorIOTest.py | Python | bsd-3-clause | 4,580 |
# -*- coding: utf-8 -*-
from copy import deepcopy
from cms.admin.placeholderadmin import FrontendEditableAdminMixin, \
PlaceholderAdminMixin
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import get_user_model
from parler.admin import TranslatableAdmin
from django.contrib.sites.models import Site
from .models import BlogCategory, Post
from .settings import get_setting
try:
from admin_enhancer.admin import EnhancedModelAdminMixin
except ImportError:
class EnhancedModelAdminMixin(object):
pass
class BlogCategoryAdmin(EnhancedModelAdminMixin, TranslatableAdmin):
exclude = ['parent']
_fieldsets = [
(None, {
'fields': [('name', 'slug')]
}),
('Info', {
'fields': ([], ),
'classes': ('collapse',)
}),
]
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('name',)}
def get_queryset(self, request):
current_site = Site.objects.get_current()
return BlogCategory.objects.filter(sites=current_site)
def get_fieldsets(self, request, obj=None):
fsets = deepcopy(self._fieldsets)
if get_setting('MULTISITE'):
fsets[1][1]['fields'][0].append('sites')
return fsets
def save_related(self, request, form, formsets, change):
if not form.cleaned_data['sites']:
form.cleaned_data['sites'] = [Site.objects.get_current()]
super(BlogCategoryAdmin, self).save_related(
request, form, formsets, change)
class Media:
css = {
'all': ('%sdjangocms_blog/css/%s' % (settings.STATIC_URL,
'djangocms_blog_admin.css'),)
}
# from django.contrib import admin
# from django.utils.translation import ugettext_lazy as _
# class SitesFilter(admin.SimpleListFilter):
# title = _('Site')
# parameter_name = 'sites'
#
# def lookups(self, request, model_admin):
# return (('current_site', _('Current Site')),)
#
# def queryset(self, request, queryset):
# if self.value() == 'current_site':
# return queryset.filter(sites__in=[Site.objects.get_current()])
# else:
# return queryset
class PostAdmin(EnhancedModelAdminMixin, FrontendEditableAdminMixin,
PlaceholderAdminMixin, TranslatableAdmin):
list_display = ['title', 'author', 'date_published', 'date_published_end']
# list_filter = (SitesFilter,)
date_hierarchy = 'date_published'
raw_id_fields = ['author']
frontend_editable_fields = ('title', 'abstract', 'post_text')
enhance_exclude = ('main_image', 'tags')
_fieldsets = [
(None, {
'fields': [('title', 'categories', 'publish')]
}),
('Info', {
'fields': (['slug', 'tags'],
('date_published', 'date_published_end', 'enable_comments')),
'classes': ('collapse',)
}),
('Images', {
'fields': (('main_image', 'main_image_thumbnail', 'main_image_full'),),
'classes': ('collapse',)
}),
('SEO', {
'fields': [('meta_description', 'meta_title', 'meta_keywords')],
'classes': ('collapse',)
}),
]
def formfield_for_dbfield(self, db_field, **kwargs):
field = super(PostAdmin, self).formfield_for_dbfield(db_field, **kwargs)
if db_field.name == 'meta_description':
original_attrs = field.widget.attrs
original_attrs['maxlength'] = 160
field.widget = forms.TextInput(original_attrs)
elif db_field.name == 'meta_title':
field.max_length = 70
return field
def formfield_for_manytomany(self, db_field, request=None, **kwargs):
if db_field.name == "categories":
kwargs["queryset"] = BlogCategory.objects.filter(
sites=Site.objects.get_current())
return super(PostAdmin, self).formfield_for_manytomany(
db_field, request, **kwargs)
def get_fieldsets(self, request, obj=None):
fsets = deepcopy(self._fieldsets)
if get_setting('USE_ABSTRACT'):
fsets[0][1]['fields'].append('abstract')
if not get_setting('USE_PLACEHOLDER'):
fsets[0][1]['fields'].append('post_text')
if get_setting('MULTISITE'):
fsets[1][1]['fields'][0].append('sites')
if request.user.is_superuser:
fsets[1][1]['fields'][0].append('author')
return fsets
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('title',)}
def get_queryset(self, request):
current_site = Site.objects.get_current()
return Post.objects.filter(sites=current_site)
def save_model(self, request, obj, form, change):
if not obj.author_id and get_setting('AUTHOR_DEFAULT'):
if get_setting('AUTHOR_DEFAULT') is True:
user = request.user
else:
user = get_user_model().objects.get(username=get_setting('AUTHOR_DEFAULT'))
obj.author = user
super(PostAdmin, self).save_model(request, obj, form, change)
def save_related(self, request, form, formsets, change):
if not form.cleaned_data['sites']:
form.cleaned_data['sites'] = [Site.objects.get_current()]
super(PostAdmin, self).save_related(request, form, formsets, change)
class Media:
css = {
'all': ('%sdjangocms_blog/css/%s' % (settings.STATIC_URL,
'djangocms_blog_admin.css'),)
}
admin.site.register(BlogCategory, BlogCategoryAdmin)
admin.site.register(Post, PostAdmin)
| britny/djangocms-blog | djangocms_blog/admin.py | Python | bsd-3-clause | 5,791 |
# Django settings for nadbproj project.
import os.path
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'nadbproj', # Or path to database file if using sqlite3.
'USER': 'nadbproj', # Not used with sqlite3.
'PASSWORD': 'nadbproj', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(os.path.dirname(__file__), 'staticfiles'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'zx!582g59qwpwdnds)8b$pm(v-03jgpiq1e1(ix&iyvw*)$_yi'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'nadb-sample-site.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'nadb',
'django.contrib.markup',
'django.contrib.admin',
'django.contrib.comments',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| earonne/nadb-sample-site | nadb-sample-site/settings.py | Python | bsd-3-clause | 4,940 |
from django import template
import clevercss
register = template.Library()
@register.tag(name="clevercss")
def do_clevercss(parser, token):
nodelist = parser.parse(('endclevercss',))
parser.delete_first_token()
return CleverCSSNode(nodelist)
class CleverCSSNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
output = self.nodelist.render(context)
return clevercss.convert(output)
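# Example template usage (an illustrative sketch; assumes this tag library is
# loaded in the template first, e.g. {% load clevercsstag %}, and $text_color
# is a CleverCSS variable defined in the block):
#
#   {% clevercss %}
#   div.content:
#       color: $text_color
#   {% endclevercss %}
#
# The block's rendered contents are passed through clevercss.convert(), which
# emits plain CSS.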
| amitu/gitology | src/gitology/d/templatetags/clevercsstag.py | Python | bsd-3-clause | 480 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 5, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 0); | antoinecarme/pyaf | tests/artificial/transf_None/trend_Lag1Trend/cycle_5/ar_/test_artificial_128_None_Lag1Trend_5__100.py | Python | bsd-3-clause | 260 |
import codecs
import os
from setuptools import setup, find_packages
def read(filename):
filepath = os.path.join(os.path.dirname(__file__), filename)
return codecs.open(filepath, encoding='utf-8').read()
setup(
name='lemon-filebrowser',
version='0.1.2',
license='ISC',
description="Fork of Patrick Kranzlmueller's django-filebrowser app.",
url='https://github.com/trilan/lemon-filebrowser',
author='Trilan Team',
author_email='dev@lemon.io',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
],
)
| trilan/lemon-filebrowser | setup.py | Python | bsd-3-clause | 998 |
from __future__ import print_function
from __future__ import unicode_literals
from builtins import str, bytes, dict, int
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from pattern.search import search, Pattern, Constraint
from pattern.en import parsetree, parse, Sentence
# What we call a "search word" in example 01-search.py
# is actually called a constraint, because it can contain different options.
# Options are separated by "|".
# The next search pattern retrieves words that are a noun OR an adjective:
s = parsetree("big white rabbit")
print(search("NN|JJ", s))
print("")
# This pattern yields phrases containing an adjective followed by a noun.
# Consecutive constraints are separated by a space:
print(search("JJ NN", s))
print("")
# Or a noun preceded by any number of adjectives:
print(search("JJ?+ NN", s))
print("")
# Note: NN marks singular nouns, NNS marks plural nouns.
# If you want to include both, use "NN*" as a constraint.
# This works for NN*, VB*, JJ*, RB*.
s = parsetree("When I sleep the big white rabbit will stare at my feet.")
m = search("rabbit stare at feet", s)
print(s)
print(m)
print("")
# Why does this work?
# The word "will" is included in the result, even if the pattern does not define it.
# The pattern should break when it does not encounter "stare" after "rabbit."
# It works because "will stare" is one verb chunk.
# The "stare" constraint matches the head word of the chunk ("stare"),
# so "will stare" is considered an overspecified version of "stare".
# The same happens with "my feet" and the "rabbit" constraint,
# which matches the overspecified chunk "the big white rabbit".
p = Pattern.fromstring("rabbit stare at feet", s)
p.strict = True # Now it matches only what the pattern explicitly defines (=no match).
m = p.search(s)
print(m)
print("")
# Sentence chunks can be matched by tag (e.g. NP, VP, ADJP).
# The pattern below matches anything from
# "the rabbit gnaws at your fingers" to
# "the white rabbit looks at the carrots":
p = Pattern.fromstring("rabbit VP at NP", s)
m = p.search(s)
print(m)
print("")
if m:
for w in m[0].words:
print("%s\t=> %s" % (w, m[0].constraint(w)))
print("")
print("-------------------------------------------------------------")
# Finally, constraints can also include regular expressions.
# To include them we need to use the full syntax instead of the search() function:
import re
r = re.compile(r"[0-9|\.]+") # all numbers
p = Pattern()
p.sequence.append(Constraint(words=[r]))
p.sequence.append(Constraint(tags=["NN*"]))
s = Sentence(parse("I have 9.5 rabbits."))
print(s)
print(p.search(s))
print("")
| clips/pattern | examples/04-search/02-constraint.py | Python | bsd-3-clause | 2,667 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RemoveInstanceDatabase'
db.create_table(u'maintenance_removeinstancedatabase', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('current_step', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
('status', self.gf('django.db.models.fields.IntegerField')(default=0)),
('started_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('finished_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('can_do_retry', self.gf('django.db.models.fields.BooleanField')(default=True)),
('task_schedule', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name=u'maintenance_removeinstancedatabase_related', null=True, to=orm['maintenance.TaskSchedule'])),
('task', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'remove_instances_database_manager', to=orm['notification.TaskHistory'])),
('database', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'remove_instances_database_manager', to=orm['logical.Database'])),
('instance', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'remove_instances_database_manager', to=orm['physical.Instance'])),
))
db.send_create_signal(u'maintenance', ['RemoveInstanceDatabase'])
def backwards(self, orm):
# Deleting model 'RemoveInstanceDatabase'
db.delete_table(u'maintenance_removeinstancedatabase')
models = {
u'account.organization': {
'Meta': {'object_name': 'Organization'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'external': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grafana_datasource': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'grafana_endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'grafana_hostgroup': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'grafana_orgid': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'team_organization'", 'on_delete': 'models.PROTECT', 'to': u"orm['account.Organization']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'backup.snapshot': {
'Meta': {'object_name': 'Snapshot'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backup_environment'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Environment']"}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backups'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backup_instance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Instance']"}),
'is_automatic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'purge_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'snapshopt_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'snapshot_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'start_at': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'volume': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backups'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Volume']"}),
'volume_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.addinstancestodatabase': {
'Meta': {'object_name': 'AddInstancesToDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'add_instances_to_database_manager'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number_of_instances': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_of_instances_before': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'add_instances_to_database_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_addinstancestodatabase_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasechangeparameter_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseclone': {
'Meta': {'object_name': 'DatabaseClone'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_clone'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_clone'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_clone'", 'to': u"orm['physical.DatabaseInfra']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'origin_database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'origin_databases_clone'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_clone'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_clone'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseclone_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databaseconfiguressl': {
'Meta': {'object_name': 'DatabaseConfigureSSL'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'configure_ssl'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_configure_ssl'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseconfiguressl_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasecreate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasedestroy': {
'Meta': {'object_name': 'DatabaseDestroy'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasedestroy_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasemigrate': {
'Meta': {'object_name': 'DatabaseMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['logical.Database']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_migrate'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'origin_environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Environment']"}),
'origin_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasemigrate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasemigrateengine': {
'Meta': {'object_name': 'DatabaseMigrateEngine', '_ormbases': [u'maintenance.DatabaseUpgrade']},
'current_database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engine_migrations'", 'to': u"orm['logical.Database']"}),
u'databaseupgrade_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['maintenance.DatabaseUpgrade']", 'unique': 'True', 'primary_key': 'True'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databasereinstallvm_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseresize_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaserestore_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseupgrade_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgradepatch': {
'Meta': {'object_name': 'DatabaseUpgradePatch'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades_patch'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_patch': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_minor_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.EnginePatch']"}),
'source_patch_full_version': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_patch': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_minor_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.EnginePatch']"}),
'target_patch_full_version': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades_patch'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_databaseupgradepatch_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.filermigrate': {
'Meta': {'object_name': 'FilerMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'filer_migrate'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original_export_id': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'filer_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_filermigrate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmigrate': {
'Meta': {'object_name': 'HostMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database_migrate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'hosts'", 'null': 'True', 'to': u"orm['maintenance.DatabaseMigrate']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'migrate'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'snapshot': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'snapshot_migrate'", 'null': 'True', 'to': u"orm['backup.Snapshot']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_hostmigrate_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zone': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'disable_alarms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.recreateslave': {
'Meta': {'object_name': 'RecreateSlave'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'recreate_slave'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'snapshot': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'snapshot_recreate_slave'", 'null': 'True', 'to': u"orm['backup.Snapshot']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'recreate_slave'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_recreateslave_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.removeinstancedatabase': {
'Meta': {'object_name': 'RemoveInstanceDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'remove_instances_database_manager'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'remove_instances_database_manager'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'remove_instances_database_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_removeinstancedatabase_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.restartdatabase': {
'Meta': {'object_name': 'RestartDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'restart_database_manager'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restart_database_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_restartdatabase_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.taskschedule': {
'Meta': {'object_name': 'TaskSchedule'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'task_schedules'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'method_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.updatessl': {
'Meta': {'object_name': 'UpdateSsl'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'update_ssl_manager'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'update_ssl_manager'", 'to': u"orm['notification.TaskHistory']"}),
'task_schedule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'maintenance_updatessl_related'", 'null': 'True', 'to': u"orm['maintenance.TaskSchedule']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relevance': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '1'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.cloud': {
'Meta': {'object_name': 'Cloud'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'backup_hour': ('django.db.models.fields.IntegerField', [], {}),
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'engine_patch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EnginePatch']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'maintenance_day': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'maintenance_window': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'ssl_configured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'major_version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'minor_version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginepatch': {
'Meta': {'object_name': 'EnginePatch'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'patchs'", 'to': u"orm['physical.Engine']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_initial_patch': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'patch_path': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'patch_version': ('django.db.models.fields.PositiveIntegerField', [], {}),
'required_disk_size_gb': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'environment_cloud'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Cloud']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'root_size_gb': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'ssl_expire_at': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'offerings'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recreate_slave': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_setup_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'metric_collector': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.volume': {
'Meta': {'object_name': 'Volume'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'volumes'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'total_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | globocom/database-as-a-service | dbaas/maintenance/migrations/0051_auto__add_removeinstancedatabase.py | Python | bsd-3-clause | 78,775 |
#!/usr/bin/env python
#
# newsfetch.py
#
# kindle-newsfetch is a simple Python script which fetches calibre recipes,
# turns them into Kindle newspapers using 'ebook-convert' and sends them to
# the configured Kindle e-mail address using 'calibre-smtp'.
#
# (c) 2011 Stefan Schleifer, see LICENSE-file
import sys, os
import ConfigParser
import subprocess
import glob
from datetime import datetime
import shutil
# full path to configuration file
CONFIGFILE = 'newsfetch.cfg'
# print help information
def usage():
print "\nUsage: %s <command> [options]\n" % sys.argv[0]
print "\tinit: Create configuration file."
print "\tall: Fetch and convert all configured items."
print "\tsection|-s <section_name>: Fetch and convert all items of given section."
print "\titem|-i <item_name>: Only fetch and convert item named <item_name>."
print "\tadd <recipe_name> <item_name> <section_name>: Add a new item <item_name> with recipe-id <recipe_name> to section <section_name>."
print "\tlist: Get a list of all configured items."
sys.exit(1)
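# Illustrative invocations ('news' is a hypothetical section name; 'nytimes'
# matches one of the example entries written by init):
#   ./newsfetch.py init
#   ./newsfetch.py add nytimes "New York Times" news
#   ./newsfetch.py -s news
#   ./newsfetch.py -i nytimes
#   ./newsfetch.py all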
# create configuration file
def create_configuration():
try:
i = raw_input("I'm going to ask you a few questions and create %s, is this ok (y/n)? " % CONFIGFILE)
        if i != 'y':
print "Ok, not creating configuration file. Bye!"
sys.exit(1)
config = ConfigParser.SafeConfigParser()
config.add_section('config')
config.set('config', 'KINDLE_ADDR', raw_input("Please enter your Kindle e-mail address where you want the converted files to be delivered to: "))
recipes_path = raw_input("Please enter the absolute path to the directory where your recipes are stored [%s/recipes]: " % os.getcwd())
if not recipes_path: # user chose to use default value
recipes_path = "%s/recipes" % os.getcwd()
# create the directory if it does not exist
if not os.access(recipes_path, os.W_OK): os.mkdir(recipes_path)
config.set('config', 'RECIPES_PATH', recipes_path)
output_path = raw_input("Please enter the absolute path to the directory for storing the converted files [%s/tmp]: " % os.getcwd())
if not output_path: # user chose to use default value
output_path = "%s/tmp" % os.getcwd()
# create the directory if it does not exist
if not os.access(output_path, os.W_OK): os.mkdir(output_path)
config.set('config', 'OUTPUT_PATH', output_path)
config.set('config', 'SMTP_SERVER', raw_input("Please enter the address of your desired SMTP server: "))
config.set('config', 'SMTP_USER', raw_input("Please enter the username for the given server: "))
config.set('config', 'SMTP_PW', raw_input("Please enter the password for the given user (WILL BE STORED IN PLAINTEXT!): "))
config.set('config', 'SMTP_MAILADDR', raw_input("Please enter your mail address for this server: "))
ebook_convert = raw_input("Please enter the absolute path to 'ebook-convert' [/usr/bin/ebook-convert]: ")
if not ebook_convert:
ebook_convert = '/usr/bin/ebook-convert'
config.set('config', 'EBOOK_CONVERT', ebook_convert)
calibre_smtp = raw_input("Please enter the absolute path to 'calibre-smtp' [/usr/bin/calibre-smtp]: ")
if not calibre_smtp:
calibre_smtp = '/usr/bin/calibre-smtp'
config.set('config', 'CALIBRE-SMTP', calibre_smtp)
keep_backup = raw_input("Keep backup of converted newspapers (y/n)? ")
if 'y' == keep_backup:
backup_path = raw_input("Please enter the absolute path where to store the backup [%s/backup]: " % os.getcwd())
if not backup_path:
backup_path = "%s/backup" % os.getcwd()
if not os.access(backup_path, os.W_OK): os.mkdir(backup_path)
config.set('config', 'backup_path', backup_path)
config.set('config', 'backup', 'true')
else:
config.set('config', 'backup', 'false')
config.add_section('example')
config.set('example', 'nytimes', 'New York Times')
config.set('example', 'sueddeutsche', 'Sueddeutsche Zeitung')
with open(CONFIGFILE, 'w') as configfile:
config.write(configfile)
except Exception, e:
print "Could not create %s: %s" % (CONFIGFILE, e)
else:
print "Successfully created %s. We've added a few example entries too." % CONFIGFILE
sys.exit(0)
# list all configured items with their names
def list_all_items():
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
for section in config.sections():
# ignore config and example sections
if section != 'config' and section != 'example':
print "Section: %s" % section
for recipe, name in config.items(section):
print "\t%s (%s)" % (name, recipe)
# add a new configuration item
def add_item(recipe, name, section):
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
# check if section already exists
try:
config.add_section(section)
    except (ConfigParser.DuplicateSectionError, ValueError):
pass
# entry already exists, asking whether to replace it
if config.has_option(section, recipe):
i = raw_input("Recipe %s with name %s already exists in section %s, do you want to update it (y/n)? " % (recipe, config.get(section, recipe), section))
        if i != 'y':
raise Exception("Adding item aborted by user as the item already exists.")
config.set(section, recipe, name)
with open(CONFIGFILE, 'w') as configfile:
config.write(configfile)
print "Successfully added item %s. Please add the required %s.recipe in %s now." % (name, recipe, config.get('config', 'recipes_path'))
# return a list of unique recipe names which
# should be converted in the current run
def collect_recipes(section='all', item=None):
recipes = []
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
if item is None: # no request for specific item
# all entries requested
if 'all' == section:
for section in config.sections():
if section != 'config' and section != 'example':
for recipe, name in config.items(section):
recipes.append(recipe)
else: # all entries for specific section
if config.has_section(section):
for recipe, name in config.items(section):
recipes.append(recipe)
else:
raise Exception("Section %s is not available in current configuration." % section)
else: # specific entry
for section in config.sections():
if section != 'config' and section != 'example':
for recipe, name in config.items(section):
if item == recipe:
recipes.append(item)
if 0 == len(recipes): # no such recipe found
raise Exception("Recipe named %s could not be found, please check the name and your configuration." % item)
    # Attention: we're removing duplicate entries here; users hopefully expect this behavior!
return list(set(recipes))
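# For example, mirroring the dispatch in __main__ below ('news' again being
# a hypothetical section name):
#   collect_recipes()                 # every configured recipe
#   collect_recipes('news')           # all recipes of one section
#   collect_recipes(item='nytimes')   # a single recipe by name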
# convert a list of recipes to .mobi-format using ebook-convert
def convert_recipes(recipes):
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
recipes_path = config.get('config', 'recipes_path')
output_path = config.get('config', 'output_path')
ebook_convert = config.get('config', 'ebook_convert')
for recipe in recipes:
try:
retcode = subprocess.call([ebook_convert, os.path.join(recipes_path, recipe + ".recipe"), os.path.join(output_path, recipe + ".mobi"), "--output-profile=kindle"])
if 0 != retcode:
raise Exception("Error while converting recipe %s" % recipe)
        except Exception, e:
            print "Could not convert %s: %s." % (os.path.join(recipes_path, recipe + ".recipe"), e)
# send all .mobi-files in the defined output directory
# to user via calibre-smtp
def send_ebooks():
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
calibre_smtp = config.get('config', 'calibre-smtp')
# get all .mobi-files in output-dir
files = glob.glob(config.get('config', 'output_path') + "/*.mobi")
for f in files:
try:
retcode = subprocess.call([calibre_smtp, '-r', config.get('config', 'smtp_server'), '-u', config.get('config', 'smtp_user'), '-p', config.get('config', 'smtp_pw'), '-s', 'Send to Kindle', '-a', f, '-vv', config.get('config', 'smtp_mailaddr'), config.get('config', 'kindle_addr'), 'Send to Kindle'])
if 0 != retcode:
raise Exception("Error while sending .mobi-files via calibre-smtp.")
except Exception, e:
print "Could not send convertes files via mail: %s" % e
# clean output directory
def cleanup():
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
output_path = config.get('config', 'output_path')
# get all .mobi-files in output directory
    files = glob.glob(output_path + "/*.mobi")
# create a backup of created .mobi-files?
if 'true' == config.get('config', 'backup'):
backup_path = config.get('config', 'backup_path')
for f in files:
            # prefix the filename with the current timestamp
now = datetime.now().strftime('%Y%m%d%H%M%S')
shutil.move(f, os.path.join(backup_path, now + "-" + os.path.basename(f)))
else:
# remove files
for f in files:
os.remove(f)
if '__main__' == __name__:
if not len(sys.argv) > 1:
usage()
if 'init' == sys.argv[1]:
create_configuration()
# check if configuration file exists
    # or prompt to create one
try:
with open(CONFIGFILE, 'r') as configfile:
pass
except:
i = raw_input("Neccessary configuration file %s could not be found, do you want to create it now (y/n)? " % CONFIGFILE)
if 'y' == i:
create_configuration()
else:
print "Cannot continue without configuration file. Either rerun %s and let it create the configuration file for you or create it manually. See example.cfg for possible options/values." % sys.argv[0]
sys.exit(1)
if sys.argv[1] in ['all', 'section', 'item', '-i', '-s']:
if 'section' == sys.argv[1] or '-s' == sys.argv[1]:
recipes = collect_recipes(sys.argv[2])
elif 'item' == sys.argv[1] or '-i' == sys.argv[1]:
recipes = collect_recipes(item=sys.argv[2])
else:
recipes = collect_recipes()
convert_recipes(recipes)
send_ebooks()
cleanup()
elif 'add' == sys.argv[1]: # add a new configuration item
try:
add_item(sys.argv[2], sys.argv[3], sys.argv[4])
except Exception, e:
print "Could not add new item: %s" % e
else:
print "Successfully added item to configuration."
elif 'list' == sys.argv[1]: # list all configured items
try:
list_all_items()
except Exception, e:
print "Could not list all items: %s" % e
else:
usage()
| stefanschleifer/kindle-newsfetch | newsfetch.py | Python | bsd-3-clause | 10,161 |
"""
Base backend structures.

This module defines the base classes needed to implement custom OpenID or
OAuth auth services from third parties. These custom backends must subclass
an Auth and a Backend class; check the current implementations for examples.

Also, the modules *must* define a BACKENDS dictionary with the backend name
(which is used for URL matching) and the Auth class, otherwise the backend
won't be enabled.
"""
from urllib2 import Request, urlopen, HTTPError
from urllib import urlencode
from urlparse import urlsplit
from openid.consumer.consumer import Consumer, SUCCESS, CANCEL, FAILURE
from openid.consumer.discover import DiscoveryFailure
from openid.extensions import sreg, ax
from oauth2 import Consumer as OAuthConsumer, Token, Request as OAuthRequest
from django.db import models
from django.contrib.auth import authenticate
from django.contrib.auth.backends import ModelBackend
from django.utils import simplejson
from django.utils.importlib import import_module
from django.utils.crypto import constant_time_compare, get_random_string
from django.middleware.csrf import CSRF_KEY_LENGTH
from social_auth.utils import setting, log, model_to_ctype, ctype_to_model, \
clean_partial_pipeline
from social_auth.store import DjangoOpenIDStore
from social_auth.backends.exceptions import StopPipeline, AuthException, \
AuthFailed, AuthCanceled, \
AuthUnknownError, AuthTokenError, \
AuthMissingParameter, \
AuthForbidden
from social_auth.backends.utils import build_consumer_oauth_request
if setting('SOCIAL_AUTH_USER_MODEL'):
User = models.get_model(*setting('SOCIAL_AUTH_USER_MODEL').rsplit('.', 1))
else:
from django.contrib.auth.models import User
# OpenID configuration
OLD_AX_ATTRS = [
('http://schema.openid.net/contact/email', 'old_email'),
('http://schema.openid.net/namePerson', 'old_fullname'),
('http://schema.openid.net/namePerson/friendly', 'old_nickname')
]
AX_SCHEMA_ATTRS = [
# Request both the full name and first/last components since some
# providers offer one but not the other.
('http://axschema.org/contact/email', 'email'),
('http://axschema.org/namePerson', 'fullname'),
('http://axschema.org/namePerson/first', 'first_name'),
('http://axschema.org/namePerson/last', 'last_name'),
('http://axschema.org/namePerson/friendly', 'nickname'),
]
SREG_ATTR = [
('email', 'email'),
('fullname', 'fullname'),
('nickname', 'nickname')
]
OPENID_ID_FIELD = 'openid_identifier'
SESSION_NAME = 'openid'
# key for the username in the user details dict used throughout; see the
# get_user_details method
USERNAME = 'username'
PIPELINE = setting('SOCIAL_AUTH_PIPELINE', (
'social_auth.backends.pipeline.social.social_auth_user',
'social_auth.backends.pipeline.associate.associate_by_email',
'social_auth.backends.pipeline.user.get_username',
'social_auth.backends.pipeline.user.create_user',
'social_auth.backends.pipeline.social.associate_user',
'social_auth.backends.pipeline.social.load_extra_data',
'social_auth.backends.pipeline.user.update_user_details',
))
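# A pipeline entry is a dotted path to a callable. An illustrative custom
# step (the function name is hypothetical) could look like:
#
#   def require_email(backend, details, *args, **kwargs):
#       if not details.get('email'):
#           raise StopPipeline()
#
# Returning a dict merges values into the pipeline state; raising
# StopPipeline halts the run (see SocialAuthBackend.pipeline below).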
class SocialAuthBackend(ModelBackend):
"""A django.contrib.auth backend that authenticates the user based on
a authentication provider response"""
name = '' # provider name, it's stored in database
def authenticate(self, *args, **kwargs):
"""Authenticate user using social credentials
        Authentication is performed only when this is the correct backend;
        the backend is verified by inspecting kwargs for the presence of
        the current backend name.
"""
# Validate backend and arguments. Require that the Social Auth
# response be passed in as a keyword argument, to make sure we
# don't match the username/password calling conventions of
# authenticate.
if not (self.name and kwargs.get(self.name) and 'response' in kwargs):
return None
response = kwargs.get('response')
pipeline = PIPELINE
kwargs = kwargs.copy()
kwargs['backend'] = self
if 'pipeline_index' in kwargs:
pipeline = pipeline[kwargs['pipeline_index']:]
else:
kwargs['details'] = self.get_user_details(response)
kwargs['uid'] = self.get_user_id(kwargs['details'], response)
kwargs['is_new'] = False
out = self.pipeline(pipeline, *args, **kwargs)
if not isinstance(out, dict):
return out
social_user = out.get('social_user')
if social_user:
# define user.social_user attribute to track current social
# account
user = social_user.user
user.social_user = social_user
user.is_new = out.get('is_new')
return user
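    # Usage sketch: for the OpenID backend below, this method ends up being
    # invoked as authenticate(response=response, auth=auth, openid=True),
    # the truthy 'openid' keyword matching self.name.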
def pipeline(self, pipeline, *args, **kwargs):
"""Pipeline"""
out = kwargs.copy()
if 'pipeline_index' in kwargs:
base_index = int(kwargs['pipeline_index'])
else:
base_index = 0
for idx, name in enumerate(pipeline):
out['pipeline_index'] = base_index + idx
mod_name, func_name = name.rsplit('.', 1)
try:
mod = import_module(mod_name)
except ImportError:
log('exception', 'Error importing pipeline %s', name)
else:
func = getattr(mod, func_name, None)
if callable(func):
try:
result = func(*args, **out) or {}
except StopPipeline:
# Clean partial pipeline on stop
if 'request' in kwargs:
clean_partial_pipeline(kwargs['request'])
break
if isinstance(result, dict):
out.update(result)
else:
return result
return out
def extra_data(self, user, uid, response, details):
"""Return default blank user extra data"""
return {}
def get_user_id(self, details, response):
"""Must return a unique ID from values returned on details"""
raise NotImplementedError('Implement in subclass')
def get_user_details(self, response):
"""Must return user details in a know internal struct:
{USERNAME: <username if any>,
'email': <user email if any>,
'fullname': <user full name if any>,
'first_name': <user first name if any>,
'last_name': <user last name if any>}
"""
raise NotImplementedError('Implement in subclass')
@classmethod
def tokens(cls, instance):
"""Return the tokens needed to authenticate the access to any API the
service might provide. The return value will be a dictionary with the
token type name as key and the token value.
instance must be a UserSocialAuth instance.
"""
if instance.extra_data and 'access_token' in instance.extra_data:
return {
'access_token': instance.extra_data['access_token']
}
else:
return {}
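    # Usage sketch (SomeBackend is a hypothetical subclass): given a stored
    # association row,
    #   social = user.social_auth.get(provider='some-backend')
    #   creds = SomeBackend.tokens(social)  # {'access_token': ...} or {}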
def get_user(self, user_id):
"""
Return user with given ID from the User model used by this backend
"""
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
class OAuthBackend(SocialAuthBackend):
"""OAuth authentication backend base class.
EXTRA_DATA defines a set of name that will be stored in
extra_data field. It must be a list of tuples with
name and alias.
Also settings will be inspected to get more values names that should be
stored on extra_data field. Setting name is created from current backend
name (all uppercase) plus _EXTRA_DATA.
access_token is always stored.
"""
EXTRA_DATA = None
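    # Illustrative: a subclass could declare
    #   EXTRA_DATA = [('id', 'id'), ('expires', 'expires', True)]
    # (name/alias pairs; an optional third element marks the value as
    # discardable when empty, as handled in extra_data() below).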
def get_user_id(self, details, response):
"""OAuth providers return an unique user id in response"""
return response['id']
def extra_data(self, user, uid, response, details):
"""Return access_token and extra defined names to store in
extra_data field"""
data = {'access_token': response.get('access_token', '')}
name = self.name.replace('-', '_').upper()
names = (self.EXTRA_DATA or []) + setting(name + '_EXTRA_DATA', [])
for entry in names:
if len(entry) == 2:
(name, alias), discard = entry, False
elif len(entry) == 3:
name, alias, discard = entry
elif len(entry) == 1:
name = alias = entry
else: # ???
continue
value = response.get(name)
if discard and not value:
continue
data[alias] = value
return data
class OpenIDBackend(SocialAuthBackend):
"""Generic OpenID authentication backend"""
name = 'openid'
def get_user_id(self, details, response):
"""Return user unique id provided by service"""
return response.identity_url
def values_from_response(self, response, sreg_names=None, ax_names=None):
"""Return values from SimpleRegistration response or
AttributeExchange response if present.
@sreg_names and @ax_names must be a list of name and aliases
for such name. The alias will be used as mapping key.
"""
values = {}
# Use Simple Registration attributes if provided
if sreg_names:
resp = sreg.SRegResponse.fromSuccessResponse(response)
if resp:
values.update((alias, resp.get(name) or '')
for name, alias in sreg_names)
# Use Attribute Exchange attributes if provided
if ax_names:
resp = ax.FetchResponse.fromSuccessResponse(response)
if resp:
for src, alias in ax_names:
name = alias.replace('old_', '')
values[name] = resp.getSingle(src, '') or values.get(name)
return values
def get_user_details(self, response):
"""Return user details from an OpenID request"""
values = {USERNAME: '', 'email': '', 'fullname': '',
'first_name': '', 'last_name': ''}
# update values using SimpleRegistration or AttributeExchange
# values
values.update(self.values_from_response(response,
SREG_ATTR,
OLD_AX_ATTRS + \
AX_SCHEMA_ATTRS))
fullname = values.get('fullname') or ''
first_name = values.get('first_name') or ''
last_name = values.get('last_name') or ''
if not fullname and first_name and last_name:
fullname = first_name + ' ' + last_name
elif fullname:
try: # Try to split name for django user storage
first_name, last_name = fullname.rsplit(' ', 1)
except ValueError:
last_name = fullname
values.update({'fullname': fullname, 'first_name': first_name,
'last_name': last_name,
USERNAME: values.get(USERNAME) or \
(first_name.title() + last_name.title())})
return values
def extra_data(self, user, uid, response, details):
"""Return defined extra data names to store in extra_data field.
Settings will be inspected to get more values names that should be
stored on extra_data field. Setting name is created from current
backend name (all uppercase) plus _SREG_EXTRA_DATA and
_AX_EXTRA_DATA because values can be returned by SimpleRegistration
or AttributeExchange schemas.
Both list must be a value name and an alias mapping similar to
SREG_ATTR, OLD_AX_ATTRS or AX_SCHEMA_ATTRS
"""
name = self.name.replace('-', '_').upper()
sreg_names = setting(name + '_SREG_EXTRA_DATA')
ax_names = setting(name + '_AX_EXTRA_DATA')
data = self.values_from_response(response, sreg_names, ax_names)
return data
class BaseAuth(object):
"""Base authentication class, new authenticators should subclass
and implement needed methods.
    AUTH_BACKEND   Authorization backend related to this service
"""
AUTH_BACKEND = None
def __init__(self, request, redirect):
self.request = request
# Use request because some auth providers use POST urls with needed
# GET parameters on it
self.data = request.REQUEST
self.redirect = redirect
def auth_url(self):
"""Must return redirect URL to auth provider"""
raise NotImplementedError('Implement in subclass')
def auth_html(self):
"""Must return login HTML content returned by provider"""
raise NotImplementedError('Implement in subclass')
def auth_complete(self, *args, **kwargs):
"""Completes loging process, must return user instance"""
raise NotImplementedError('Implement in subclass')
def to_session_dict(self, next_idx, *args, **kwargs):
"""Returns dict to store on session for partial pipeline."""
return {
'next': next_idx,
'backend': self.AUTH_BACKEND.name,
'args': tuple(map(model_to_ctype, args)),
'kwargs': dict((key, model_to_ctype(val))
for key, val in kwargs.iteritems())
}
def from_session_dict(self, entry, *args, **kwargs):
"""Takes session saved entry to continue pipeline and merges with
any new extra argument needed. Returns tuple with next pipeline
index entry, arguments and keyword arguments to continue the
process."""
args = args[:] + tuple(map(ctype_to_model, entry['args']))
kwargs = kwargs.copy()
kwargs.update((key, ctype_to_model(val))
for key, val in entry['kwargs'].iteritems())
return (entry['next'], args, kwargs)
def continue_pipeline(self, *args, **kwargs):
"""Continue previous halted pipeline"""
kwargs.update({
'auth': self,
self.AUTH_BACKEND.name: True
})
return authenticate(*args, **kwargs)
def request_token_extra_arguments(self):
"""Return extra arguments needed on request-token process,
setting is per backend and defined by:
<backend name in uppercase>_REQUEST_TOKEN_EXTRA_ARGUMENTS.
"""
backend_name = self.AUTH_BACKEND.name.upper().replace('-', '_')
return setting(backend_name + '_REQUEST_TOKEN_EXTRA_ARGUMENTS', {})
def auth_extra_arguments(self):
"""Return extra arguments needed on auth process, setting is per
backend and defined by:
<backend name in uppercase>_AUTH_EXTRA_ARGUMENTS.
"""
backend_name = self.AUTH_BACKEND.name.upper().replace('-', '_')
return setting(backend_name + '_AUTH_EXTRA_ARGUMENTS', {})
@property
def uses_redirect(self):
"""Return True if this provider uses redirect url method,
otherwise return false."""
return True
@classmethod
def enabled(cls):
"""Return backend enabled status, all enabled by default"""
return True
def disconnect(self, user, association_id=None):
"""Deletes current backend from user if associated.
Override if extra operations are needed.
"""
if association_id:
user.social_auth.get(id=association_id).delete()
else:
user.social_auth.filter(provider=self.AUTH_BACKEND.name).delete()
def build_absolute_uri(self, path=None):
"""Build absolute URI for given path. Replace http:// schema with
https:// if SOCIAL_AUTH_REDIRECT_IS_HTTPS is defined.
"""
uri = self.request.build_absolute_uri(path)
if setting('SOCIAL_AUTH_REDIRECT_IS_HTTPS'):
uri = uri.replace('http://', 'https://')
return uri
class OpenIdAuth(BaseAuth):
"""OpenId process handling"""
AUTH_BACKEND = OpenIDBackend
def auth_url(self):
"""Return auth URL returned by service"""
openid_request = self.setup_request(self.auth_extra_arguments())
# Construct completion URL, including page we should redirect to
return_to = self.build_absolute_uri(self.redirect)
return openid_request.redirectURL(self.trust_root(), return_to)
def auth_html(self):
"""Return auth HTML returned by service"""
openid_request = self.setup_request(self.auth_extra_arguments())
return_to = self.build_absolute_uri(self.redirect)
form_tag = {'id': 'openid_message'}
return openid_request.htmlMarkup(self.trust_root(), return_to,
form_tag_attrs=form_tag)
def trust_root(self):
"""Return trust-root option"""
return setting('OPENID_TRUST_ROOT') or self.build_absolute_uri('/')
def continue_pipeline(self, *args, **kwargs):
"""Continue previous halted pipeline"""
response = self.consumer().complete(dict(self.data.items()),
self.build_absolute_uri())
kwargs.update({
'auth': self,
'response': response,
self.AUTH_BACKEND.name: True
})
return authenticate(*args, **kwargs)
def auth_complete(self, *args, **kwargs):
"""Complete auth process"""
response = self.consumer().complete(dict(self.data.items()),
self.build_absolute_uri())
if not response:
raise AuthException(self, 'OpenID relying party endpoint')
elif response.status == SUCCESS:
kwargs.update({
'auth': self,
'response': response,
self.AUTH_BACKEND.name: True
})
return authenticate(*args, **kwargs)
elif response.status == FAILURE:
raise AuthFailed(self, response.message)
elif response.status == CANCEL:
raise AuthCanceled(self)
else:
raise AuthUnknownError(self, response.status)
def setup_request(self, extra_params=None):
"""Setup request"""
openid_request = self.openid_request(extra_params)
# Request some user details. Use attribute exchange if provider
# advertises support.
if openid_request.endpoint.supportsType(ax.AXMessage.ns_uri):
fetch_request = ax.FetchRequest()
# Mark all attributes as required, Google ignores optional ones
for attr, alias in (AX_SCHEMA_ATTRS + OLD_AX_ATTRS):
fetch_request.add(ax.AttrInfo(attr, alias=alias,
required=True))
else:
fetch_request = sreg.SRegRequest(optional=dict(SREG_ATTR).keys())
openid_request.addExtension(fetch_request)
return openid_request
def consumer(self):
"""Create an OpenID Consumer object for the given Django request."""
return Consumer(self.request.session.setdefault(SESSION_NAME, {}),
DjangoOpenIDStore())
@property
def uses_redirect(self):
"""Return true if openid request will be handled with redirect or
HTML content will be returned.
"""
return self.openid_request().shouldSendRedirect()
def openid_request(self, extra_params=None):
"""Return openid request"""
openid_url = self.openid_url()
if extra_params:
query = urlsplit(openid_url).query
openid_url += (query and '&' or '?') + urlencode(extra_params)
try:
return self.consumer().begin(openid_url)
except DiscoveryFailure, err:
raise AuthException(self, 'OpenID discovery error: %s' % err)
def openid_url(self):
"""Return service provider URL.
This base class is generic accepting a POST parameter that specifies
provider URL."""
if OPENID_ID_FIELD not in self.data:
raise AuthMissingParameter(self, OPENID_ID_FIELD)
return self.data[OPENID_ID_FIELD]
class BaseOAuth(BaseAuth):
"""OAuth base class"""
SETTINGS_KEY_NAME = ''
SETTINGS_SECRET_NAME = ''
def __init__(self, request, redirect):
"""Init method"""
super(BaseOAuth, self).__init__(request, redirect)
self.redirect_uri = self.build_absolute_uri(self.redirect)
@classmethod
def get_key_and_secret(cls):
"""Return tuple with Consumer Key and Consumer Secret for current
service provider. Must return (key, secret), order *must* be respected.
"""
return setting(cls.SETTINGS_KEY_NAME), \
setting(cls.SETTINGS_SECRET_NAME)
@classmethod
def enabled(cls):
"""Return backend enabled status by checking basic settings"""
return setting(cls.SETTINGS_KEY_NAME) and \
setting(cls.SETTINGS_SECRET_NAME)
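# Minimal sketch of a concrete BaseOAuth subclass configuration (class and
# setting names are hypothetical): get_key_and_secret() and enabled() above
# only need the two setting names to be filled in.
#
#     class ExampleOAuth(BaseOAuth):
#         SETTINGS_KEY_NAME = 'EXAMPLE_CONSUMER_KEY'
#         SETTINGS_SECRET_NAME = 'EXAMPLE_CONSUMER_SECRET'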
class ConsumerBasedOAuth(BaseOAuth):
"""Consumer based mechanism OAuth authentication, fill the needed
parameters to communicate properly with authentication service.
AUTHORIZATION_URL Authorization service url
REQUEST_TOKEN_URL Request token URL
ACCESS_TOKEN_URL Access token URL
SERVER_URL Authorization server URL
"""
AUTHORIZATION_URL = ''
REQUEST_TOKEN_URL = ''
ACCESS_TOKEN_URL = ''
SERVER_URL = ''
def auth_url(self):
"""Return redirect url"""
token = self.unauthorized_token()
name = self.AUTH_BACKEND.name + 'unauthorized_token_name'
self.request.session[name] = token.to_string()
return self.oauth_authorization_request(token).to_url()
def auth_complete(self, *args, **kwargs):
"""Return user, might be logged in"""
name = self.AUTH_BACKEND.name + 'unauthorized_token_name'
unauthed_token = self.request.session.get(name)
if not unauthed_token:
raise AuthTokenError('Missing unauthorized token')
token = Token.from_string(unauthed_token)
if token.key != self.data.get('oauth_token', 'no-token'):
raise AuthTokenError('Incorrect tokens')
try:
access_token = self.access_token(token)
except HTTPError, e:
if e.code == 400:
raise AuthCanceled(self)
else:
raise
data = self.user_data(access_token)
if data is not None:
data['access_token'] = access_token.to_string()
kwargs.update({
'auth': self,
'response': data,
self.AUTH_BACKEND.name: True
})
return authenticate(*args, **kwargs)
def unauthorized_token(self):
"""Return request for unauthorized token (first stage)"""
request = self.oauth_request(token=None, url=self.REQUEST_TOKEN_URL,
extra_params=self.request_token_extra_arguments())
response = self.fetch_response(request)
return Token.from_string(response)
def oauth_authorization_request(self, token):
"""Generate OAuth request to authorize token."""
return OAuthRequest.from_token_and_callback(token=token,
callback=self.redirect_uri,
http_url=self.AUTHORIZATION_URL,
parameters=self.auth_extra_arguments())
def oauth_request(self, token, url, extra_params=None):
"""Generate OAuth request, setups callback url"""
return build_consumer_oauth_request(self, token, url,
self.redirect_uri,
self.data.get('oauth_verifier'),
extra_params)
def fetch_response(self, request):
"""Executes request and fetchs service response"""
response = urlopen(request.to_url())
return '\n'.join(response.readlines())
def access_token(self, token):
"""Return request for access token value"""
request = self.oauth_request(token, self.ACCESS_TOKEN_URL)
return Token.from_string(self.fetch_response(request))
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
raise NotImplementedError('Implement in subclass')
@property
def consumer(self):
"""Setups consumer"""
return OAuthConsumer(*self.get_key_and_secret())
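# Illustrative sketch of a consumer-based backend (class name and provider
# URLs are hypothetical; a real backend also defines its AUTH_BACKEND):
#
#     class ExampleOAuthBackend(ConsumerBasedOAuth):
#         AUTHORIZATION_URL = 'https://example.com/oauth/authorize'
#         REQUEST_TOKEN_URL = 'https://example.com/oauth/request_token'
#         ACCESS_TOKEN_URL = 'https://example.com/oauth/access_token'
#
#         def user_data(self, access_token, *args, **kwargs):
#             # Fetch and return provider profile data here
#             return {}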
class BaseOAuth2(BaseOAuth):
"""Base class for OAuth2 providers.
OAuth2 draft details at:
http://tools.ietf.org/html/draft-ietf-oauth-v2-10
Attributes:
AUTHORIZATION_URL Authorization service url
ACCESS_TOKEN_URL Token URL
FORCE_STATE_CHECK Ensure state argument check (check issue #386
for further details)
"""
AUTHORIZATION_URL = None
ACCESS_TOKEN_URL = None
SCOPE_SEPARATOR = ' '
RESPONSE_TYPE = 'code'
SCOPE_VAR_NAME = None
DEFAULT_SCOPE = None
FORCE_STATE_CHECK = True
def csrf_token(self):
"""Generate csrf token to include as state parameter."""
return get_random_string(CSRF_KEY_LENGTH)
def auth_url(self):
"""Return redirect url"""
client_id, client_secret = self.get_key_and_secret()
args = {'client_id': client_id, 'redirect_uri': self.redirect_uri}
if self.FORCE_STATE_CHECK:
state = self.csrf_token()
args['state'] = state
self.request.session[self.AUTH_BACKEND.name + '_state'] = state
scope = self.get_scope()
if scope:
args['scope'] = self.SCOPE_SEPARATOR.join(self.get_scope())
if self.RESPONSE_TYPE:
args['response_type'] = self.RESPONSE_TYPE
args.update(self.auth_extra_arguments())
return self.AUTHORIZATION_URL + '?' + urlencode(args)
def auth_complete(self, *args, **kwargs):
"""Completes loging process, must return user instance"""
if self.data.get('error'):
error = self.data.get('error_description') or self.data['error']
raise AuthFailed(self, error)
if self.FORCE_STATE_CHECK:
if 'state' not in self.data:
raise AuthMissingParameter(self, 'state')
state = self.request.session[self.AUTH_BACKEND.name + '_state']
if not constant_time_compare(self.data['state'], state):
raise AuthForbidden(self)
client_id, client_secret = self.get_key_and_secret()
params = {'grant_type': 'authorization_code', # request auth code
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.redirect_uri}
headers = {'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'application/json'}
request = Request(self.ACCESS_TOKEN_URL, data=urlencode(params),
headers=headers)
try:
response = simplejson.loads(urlopen(request).read())
except HTTPError, e:
if e.code == 400:
raise AuthCanceled(self)
else:
raise
except (ValueError, KeyError):
raise AuthUnknownError(self)
if response.get('error'):
error = response.get('error_description') or response.get('error')
raise AuthFailed(self, error)
else:
data = self.user_data(response['access_token'], response)
response.update(data or {})
kwargs.update({
'auth': self,
'response': response,
self.AUTH_BACKEND.name: True
})
return authenticate(*args, **kwargs)
def get_scope(self):
"""Return list with needed access scope"""
scope = self.DEFAULT_SCOPE or []
if self.SCOPE_VAR_NAME:
scope = scope + setting(self.SCOPE_VAR_NAME, [])
return scope
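# Illustrative scope configuration (names hypothetical): a BaseOAuth2
# subclass with DEFAULT_SCOPE = ['basic'] and
# SCOPE_VAR_NAME = 'EXAMPLE_EXTRA_SCOPE', combined with the project setting
#
#     EXAMPLE_EXTRA_SCOPE = ['email']
#
# makes get_scope() return ['basic', 'email'].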
# Backend loading was previously performed via the
# SOCIAL_AUTH_IMPORT_BACKENDS setting - as it's no longer used,
# provide a deprecation warning.
if setting('SOCIAL_AUTH_IMPORT_BACKENDS'):
from warnings import warn
warn("SOCIAL_AUTH_IMPORT_SOURCES is deprecated")
# Cache for discovered backends.
BACKENDSCACHE = {}
def get_backends(force_load=False):
"""
Entry point to the BACKENDS cache. If BACKENDSCACHE hasn't been
populated, each of the modules referenced in
AUTHENTICATION_BACKENDS is imported and checked for a BACKENDS
definition and if enabled, added to the cache.
    Previously, all backends were loaded at import time of this module,
    which meant that backends subclassing bases found in this module
    would not have the chance to be loaded by the time they were added
    to this module's BACKENDS dict. See:
https://github.com/omab/django-social-auth/issues/204
This new approach ensures that backends are allowed to subclass from
bases in this module and still be picked up.
A force_load boolean arg is also provided so that get_backend
below can retry a requested backend that may not yet be discovered.
"""
if not BACKENDSCACHE or force_load:
for auth_backend in setting('AUTHENTICATION_BACKENDS'):
mod, cls_name = auth_backend.rsplit('.', 1)
module = import_module(mod)
backend = getattr(module, cls_name)
if issubclass(backend, SocialAuthBackend):
name = backend.name
backends = getattr(module, 'BACKENDS', {})
if name in backends and backends[name].enabled():
BACKENDSCACHE[name] = backends[name]
return BACKENDSCACHE
def get_backend(name, *args, **kwargs):
"""Returns a backend by name. Backends are stored in the BACKENDSCACHE
cache dict. If not found, each of the modules referenced in
AUTHENTICATION_BACKENDS is imported and checked for a BACKENDS
definition. If the named backend is found in the module's BACKENDS
definition, it's then stored in the cache for future access.
"""
try:
# Cached backend which has previously been discovered.
return BACKENDSCACHE[name](*args, **kwargs)
except KeyError:
# Force a reload of BACKENDS to ensure a missing
# backend hasn't been missed.
get_backends(force_load=True)
try:
return BACKENDSCACHE[name](*args, **kwargs)
except KeyError:
return None
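# Illustrative usage of the loader (the redirect path is hypothetical, and
# HttpResponseRedirect stands in for whatever the calling view returns):
#
#     backend = get_backend('openid', request, '/complete/openid/')
#     if backend is not None:
#         return HttpResponseRedirect(backend.auth_url())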
BACKENDS = {
'openid': OpenIdAuth
}
| makinacorpus/django-social-auth | social_auth/backends/__init__.py | Python | bsd-3-clause | 31,390 |
"""Phone number to time zone mapping functionality
>>> import phonenumbers
>>> from phonenumbers.timezone import time_zones_for_number
>>> ro_number = phonenumbers.parse("+40721234567", "RO")
>>> tzlist = time_zones_for_number(ro_number)
>>> len(tzlist)
1
>>> str(tzlist[0])
'Europe/Bucharest'
>>> mx_number = phonenumbers.parse("+523291234567", "GB")
>>> tzlist = time_zones_for_number(mx_number)
>>> len(tzlist)
2
>>> str(tzlist[0])
'America/Mazatlan'
>>> str(tzlist[1])
'America/Mexico_City'
"""
# Based very loosely on original Java code:
# java/geocoder/src/com/google/i18n/phonenumbers/PhoneNumberToTimeZonesMapper.java
# Copyright (C) 2013 The Libphonenumber Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .util import prnt, u, U_PLUS
from .phonenumberutil import PhoneNumberType, number_type
from .phonenumberutil import PhoneNumberFormat, format_number
from .phonenumberutil import is_number_type_geographical
try:
from .tzdata import TIMEZONE_DATA, TIMEZONE_LONGEST_PREFIX
except ImportError: # pragma no cover
    # Before the generated code exists, the tzdata/ directory is empty.
# The generation process imports this module, creating a circular
# dependency. The hack below works around this.
import os
import sys
if (os.path.basename(sys.argv[0]) == "buildmetadatafromxml.py" or
os.path.basename(sys.argv[0]) == "buildprefixdata.py"):
prnt("Failed to import generated data (but OK as during autogeneration)", file=sys.stderr)
TIMEZONE_DATA = {'4411': u('Europe/London')}
TIMEZONE_LONGEST_PREFIX = 4
else:
raise
__all__ = ['UNKNOWN_TIMEZONE', 'time_zones_for_geographical_number', 'time_zones_for_number']
# This is defined by ICU as the unknown time zone.
UNKNOWN_TIMEZONE = u("Etc/Unknown")
_UNKNOWN_TIME_ZONE_LIST = (UNKNOWN_TIMEZONE,)
def time_zones_for_geographical_number(numobj):
"""Returns a list of time zones to which a phone number belongs.
This method assumes the validity of the number passed in has already been
checked, and that the number is geo-localizable. We consider fixed-line
and mobile numbers possible candidates for geo-localization.
Arguments:
numobj -- a valid phone number for which we want to get the time zones
to which it belongs
Returns a list of the corresponding time zones or a single element list
with the default unknown time zone if no other time zone was found or if
the number was invalid"""
e164_num = format_number(numobj, PhoneNumberFormat.E164)
if not e164_num.startswith(U_PLUS): # pragma no cover
# Can only hit this arm if there's an internal error in the rest of
# the library
raise Exception("Expect E164 number to start with +")
for prefix_len in range(TIMEZONE_LONGEST_PREFIX, 0, -1):
prefix = e164_num[1:(1 + prefix_len)]
if prefix in TIMEZONE_DATA:
return TIMEZONE_DATA[prefix]
return _UNKNOWN_TIME_ZONE_LIST
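# Illustrative walk-through of the longest-prefix match above: with the
# fallback stub data (TIMEZONE_LONGEST_PREFIX == 4), a number that formats
# to "+441134960000" is tried as "4411", "441", "44", "4" in that order,
# so the first pass already matches "4411" -> Europe/London.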
def time_zones_for_number(numobj):
"""As time_zones_for_geographical_number() but explicitly checks the
validity of the number passed in.
Arguments:
numobj -- a valid phone number for which we want to get the time zones to which it belongs
Returns a list of the corresponding time zones or a single element list with the default
unknown time zone if no other time zone was found or if the number was invalid"""
ntype = number_type(numobj)
if ntype == PhoneNumberType.UNKNOWN:
return _UNKNOWN_TIME_ZONE_LIST
elif not is_number_type_geographical(ntype, numobj.country_code):
return _country_level_time_zones_for_number(numobj)
return time_zones_for_geographical_number(numobj)
def _country_level_time_zones_for_number(numobj):
"""Returns the list of time zones corresponding to the country calling code of a number.
Arguments:
numobj -- the phone number to look up
Returns a list of the corresponding time zones or a single element list with the default
unknown time zone if no other time zone was found or if the number was invalid"""
cc = str(numobj.country_code)
for prefix_len in range(TIMEZONE_LONGEST_PREFIX, 0, -1):
prefix = cc[:(1 + prefix_len)]
if prefix in TIMEZONE_DATA:
return TIMEZONE_DATA[prefix]
return _UNKNOWN_TIME_ZONE_LIST
if __name__ == '__main__': # pragma no cover
import doctest
doctest.testmod()
| vicky2135/lucious | oscar/lib/python2.7/site-packages/phonenumbers/timezone.py | Python | bsd-3-clause | 4,947 |
from __future__ import unicode_literals
from operator import attrgetter
from django.db import models
from django.test import TestCase
from .models import Answer, Dimension, Entity, Post, Question
class OrderWithRespectToTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.q1 = Question.objects.create(text="Which Beatle starts with the letter 'R'?")
Answer.objects.create(text="John", question=cls.q1)
Answer.objects.create(text="Paul", question=cls.q1)
Answer.objects.create(text="George", question=cls.q1)
Answer.objects.create(text="Ringo", question=cls.q1)
def test_default_to_insertion_order(self):
# Answers will always be ordered in the order they were inserted.
self.assertQuerysetEqual(
self.q1.answer_set.all(), [
"John", "Paul", "George", "Ringo",
],
attrgetter("text"),
)
def test_previous_and_next_in_order(self):
# We can retrieve the answers related to a particular object, in the
# order they were created, once we have a particular object.
a1 = Answer.objects.filter(question=self.q1)[0]
self.assertEqual(a1.text, "John")
self.assertEqual(a1.get_next_in_order().text, "Paul")
a2 = list(Answer.objects.filter(question=self.q1))[-1]
self.assertEqual(a2.text, "Ringo")
self.assertEqual(a2.get_previous_in_order().text, "George")
def test_item_ordering(self):
# We can retrieve the ordering of the queryset from a particular item.
a1 = Answer.objects.filter(question=self.q1)[1]
id_list = [o.pk for o in self.q1.answer_set.all()]
self.assertEqual(a1.question.get_answer_order(), id_list)
# It doesn't matter which answer we use to check the order, it will
# always be the same.
a2 = Answer.objects.create(text="Number five", question=self.q1)
self.assertEqual(
a1.question.get_answer_order(), a2.question.get_answer_order()
)
def test_change_ordering(self):
# The ordering can be altered
a = Answer.objects.create(text="Number five", question=self.q1)
# Swap the last two items in the order list
id_list = [o.pk for o in self.q1.answer_set.all()]
x = id_list.pop()
id_list.insert(-1, x)
# By default, the ordering is different from the swapped version
self.assertNotEqual(a.question.get_answer_order(), id_list)
# Change the ordering to the swapped version -
# this changes the ordering of the queryset.
a.question.set_answer_order(id_list)
self.assertQuerysetEqual(
self.q1.answer_set.all(), [
"John", "Paul", "George", "Number five", "Ringo"
],
attrgetter("text")
)
class OrderWithRespectToTests2(TestCase):
def test_recursive_ordering(self):
p1 = Post.objects.create(title='1')
p2 = Post.objects.create(title='2')
p1_1 = Post.objects.create(title="1.1", parent=p1)
p1_2 = Post.objects.create(title="1.2", parent=p1)
Post.objects.create(title="2.1", parent=p2)
p1_3 = Post.objects.create(title="1.3", parent=p1)
self.assertEqual(p1.get_post_order(), [p1_1.pk, p1_2.pk, p1_3.pk])
def test_duplicate_order_field(self):
class Bar(models.Model):
pass
class Foo(models.Model):
bar = models.ForeignKey(Bar)
order = models.OrderWrt()
class Meta:
order_with_respect_to = 'bar'
count = 0
for field in Foo._meta.local_fields:
if isinstance(field, models.OrderWrt):
count += 1
self.assertEqual(count, 1)
class TestOrderWithRespectToOneToOnePK(TestCase):
def test_set_order(self):
e = Entity.objects.create()
d = Dimension.objects.create(entity=e)
c1 = d.component_set.create()
c2 = d.component_set.create()
d.set_component_order([c1.id, c2.id])
self.assertQuerysetEqual(d.component_set.all(), [c1.id, c2.id], attrgetter('pk'))
| 52ai/django-ccsds | tests/order_with_respect_to/tests.py | Python | bsd-3-clause | 4,155 |
# Coordinate reference systems and functions.
#
# PROJ.4 is the law of this land: http://proj.osgeo.org/. But whereas PROJ.4
# coordinate reference systems are described by strings of parameters such as
#
# +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
#
# here we use mappings:
#
# {'proj': 'longlat', 'ellps': 'WGS84', 'datum': 'WGS84', 'no_defs': True}
#
def to_string(crs):
"""Turn a parameter mapping into a more conventional PROJ.4 string.
Mapping keys are tested against the ``all_proj_keys`` list. Values of
``True`` are omitted, leaving the key bare: {'no_defs': True} -> "+no_defs"
    and items where the value is otherwise not a bool, int, float, or
    string are omitted.
"""
items = []
    for k, v in sorted(filter(
            lambda x: x[0] in all_proj_keys and x[1] is not False and
                type(x[1]) in (bool, int, float, str, unicode),
            crs.items())):
items.append(
"+" + "=".join(
map(str, filter(lambda y: y and y is not True, (k, v)))) )
return " ".join(items)
def from_string(prjs):
"""Turn a PROJ.4 string into a mapping of parameters.
Bare parameters like "+no_defs" are given a value of ``True``. All keys
are checked against the ``all_proj_keys`` list.
"""
parts = [o.lstrip('+') for o in prjs.strip().split()]
def parse(v):
try:
return int(v)
except ValueError:
pass
try:
return float(v)
except ValueError:
return v
items = map(
lambda kv: len(kv) == 2 and (kv[0], parse(kv[1])) or (kv[0], True),
(p.split('=') for p in parts) )
return dict((k,v) for k, v in items if k in all_proj_keys)
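# Illustrative round trip between the two representations (sorting the
# items sidesteps dict ordering; this follows directly from the functions
# above):
#
#     >>> sorted(from_string("+proj=longlat +ellps=WGS84 +no_defs").items())
#     [('ellps', 'WGS84'), ('no_defs', True), ('proj', 'longlat')]
#     >>> to_string({'proj': 'longlat', 'ellps': 'WGS84', 'no_defs': True})
#     '+ellps=WGS84 +no_defs +proj=longlat'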
def from_epsg(code):
"""Given an integer code, returns an EPSG-like mapping.
Note: the input code is not validated against an EPSG database.
"""
if int(code) <= 0:
raise ValueError("EPSG codes are positive integers")
return {'init': "epsg:%s" % code, 'no_defs': True}
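# For example, from_epsg(4326) returns {'init': 'epsg:4326', 'no_defs': True}.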
# Below is the big list of PROJ4 parameters from
# http://trac.osgeo.org/proj/wiki/GenParms.
# It is parsed into a list of parameter keys ``all_proj_keys``.
_param_data = """
+a Semimajor radius of the ellipsoid axis
+alpha ? Used with Oblique Mercator and possibly a few others
+axis Axis orientation (new in 4.8.0)
+b Semiminor radius of the ellipsoid axis
+datum Datum name (see `proj -ld`)
+ellps Ellipsoid name (see `proj -le`)
+k Scaling factor (old name)
+k_0 Scaling factor (new name)
+lat_0 Latitude of origin
+lat_1 Latitude of first standard parallel
+lat_2 Latitude of second standard parallel
+lat_ts Latitude of true scale
+lon_0 Central meridian
+lonc ? Longitude used with Oblique Mercator and possibly a few others
+lon_wrap Center longitude to use for wrapping (see below)
+nadgrids Filename of NTv2 grid file to use for datum transforms (see below)
+no_defs Don't use the /usr/share/proj/proj_def.dat defaults file
+over Allow longitude output outside -180 to 180 range, disables wrapping (see below)
+pm Alternate prime meridian (typically a city name, see below)
+proj Projection name (see `proj -l`)
+south Denotes southern hemisphere UTM zone
+to_meter Multiplier to convert map units to 1.0m
+towgs84 3 or 7 term datum transform parameters (see below)
+units meters, US survey feet, etc.
+vto_meter vertical conversion to meters.
+vunits vertical units.
+x_0 False easting
+y_0 False northing
+zone UTM zone
+a Semimajor radius of the ellipsoid axis
+alpha ? Used with Oblique Mercator and possibly a few others
+azi
+b Semiminor radius of the ellipsoid axis
+belgium
+beta
+czech
+e Eccentricity of the ellipsoid = sqrt(1 - b^2/a^2) = sqrt( f*(2-f) )
+ellps Ellipsoid name (see `proj -le`)
+es Eccentricity of the ellipsoid squared
+f Flattening of the ellipsoid (often presented as an inverse, e.g. 1/298)
+gamma
+geoc
+guam
+h
+k Scaling factor (old name)
+K
+k_0 Scaling factor (new name)
+lat_0 Latitude of origin
+lat_1 Latitude of first standard parallel
+lat_2 Latitude of second standard parallel
+lat_b
+lat_t
+lat_ts Latitude of true scale
+lon_0 Central meridian
+lon_1
+lon_2
+lonc ? Longitude used with Oblique Mercator and possibly a few others
+lsat
+m
+M
+n
+no_cut
+no_off
+no_rot
+ns
+o_alpha
+o_lat_1
+o_lat_2
+o_lat_c
+o_lat_p
+o_lon_1
+o_lon_2
+o_lon_c
+o_lon_p
+o_proj
+over
+p
+path
+proj Projection name (see `proj -l`)
+q
+R
+R_a
+R_A Compute radius such that the area of the sphere is the same as the area of the ellipsoid
+rf Reciprocal of the ellipsoid flattening term (e.g. 298)
+R_g
+R_h
+R_lat_a
+R_lat_g
+rot
+R_V
+s
+south Denotes southern hemisphere UTM zone
+sym
+t
+theta
+tilt
+to_meter Multiplier to convert map units to 1.0m
+units meters, US survey feet, etc.
+vopt
+W
+westo
+x_0 False easting
+y_0 False northing
+zone UTM zone
"""
_lines = filter(lambda x: len(x) > 1, _param_data.split("\n"))
all_proj_keys = list(
set(line.split()[0].lstrip("+").strip() for line in _lines)
) + ['no_mayo']
| sgillies/Fiona | src/fiona/crs.py | Python | bsd-3-clause | 5,254 |
#!/usr/bin/env python
import sys
import pymongo
import json
print "import crawled json file into mongodb 'newspapers' database."
if len(sys.argv) < 3:
print "input as [collection] [json_file]"
exit(1)
connection = pymongo.Connection("localhost", 27017)
news_database = connection.newspapers
news_collection = news_database[sys.argv[1]]
json_file_name = sys.argv[2]
with open(json_file_name, mode='r') as json_file:
    items = json.loads(json_file.read())
for item in items:
news_collection.save(item)
print len(items), " items saved to mongodb."
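# Illustrative invocation (collection and file names hypothetical):
#
#     python import-to-mongodb.py articles crawled_items.json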
| ShiZhan/newspapers | utils/import-to-mongodb.py | Python | bsd-3-clause | 637 |
"""Auto-generated file, do not edit by hand. BS metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_BS = PhoneMetadata(id='BS', country_code=1, international_prefix='011',
general_desc=PhoneNumberDesc(national_number_pattern='[2589]\\d{9}', possible_number_pattern='\\d{7}(?:\\d{3})?'),
fixed_line=PhoneNumberDesc(national_number_pattern='242(?:3(?:02|[236][1-9]|4[0-24-9]|5[0-68]|7[3467]|8[0-4]|9[2-467])|461|502|6(?:0[12]|12|7[67]|8[78]|9[89])|702)\\d{4}', possible_number_pattern='\\d{7}(?:\\d{3})?', example_number='2423456789'),
mobile=PhoneNumberDesc(national_number_pattern='242(?:3(?:5[79]|[79]5)|4(?:[2-4][1-9]|5[1-8]|6[2-8]|7\\d|81)|5(?:2[45]|3[35]|44|5[1-9]|65|77)|6[34]6|727)\\d{4}', possible_number_pattern='\\d{10}', example_number='2423591234'),
toll_free=PhoneNumberDesc(national_number_pattern='242300\\d{4}|8(?:00|44|55|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='8002123456'),
premium_rate=PhoneNumberDesc(national_number_pattern='900[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='9002123456'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='5(?:00|33|44|66|77)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='5002345678'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='1',
national_prefix_for_parsing='1',
leading_digits='242')
| WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/phonenumbers/data/region_BS.py | Python | bsd-3-clause | 1,927 |
from .service import Service
class KickstarterService(Service):
def get_supporters(self):
r = self.get('kickstarter/')
self.print_supporters(r)
def print_supporters(self, s):
for supporter in s:
print("%-40s %s" % (str(supporter['name']), str(supporter['message'])))
kick = KickstarterService() | dmonn/socialize | socialize/services/kickstarter_service.py | Python | bsd-3-clause | 346 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/Users/lee/backups/code/iblah_py/ui/ui_profile_dialog.ui'
#
# Created: Fri May 6 21:47:58 2011
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_ProfileDialog(object):
def setupUi(self, ProfileDialog):
ProfileDialog.setObjectName(_fromUtf8("ProfileDialog"))
ProfileDialog.setEnabled(True)
ProfileDialog.resize(470, 300)
self.save_btn = QtGui.QPushButton(ProfileDialog)
self.save_btn.setEnabled(True)
self.save_btn.setGeometry(QtCore.QRect(330, 240, 114, 32))
self.save_btn.setObjectName(_fromUtf8("save_btn"))
self.avatar_label = QtGui.QLabel(ProfileDialog)
self.avatar_label.setGeometry(QtCore.QRect(310, 20, 130, 130))
self.avatar_label.setStyleSheet(_fromUtf8("border: 2px solid #ccc;"))
self.avatar_label.setObjectName(_fromUtf8("avatar_label"))
self.label_2 = QtGui.QLabel(ProfileDialog)
self.label_2.setGeometry(QtCore.QRect(21, 117, 26, 16))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.impresa_text_edit = QtGui.QTextEdit(ProfileDialog)
self.impresa_text_edit.setGeometry(QtCore.QRect(80, 170, 361, 51))
self.impresa_text_edit.setObjectName(_fromUtf8("impresa_text_edit"))
self.fullname_line_edit = QtGui.QLineEdit(ProfileDialog)
self.fullname_line_edit.setGeometry(QtCore.QRect(81, 117, 201, 22))
self.fullname_line_edit.setObjectName(_fromUtf8("fullname_line_edit"))
self.label_3 = QtGui.QLabel(ProfileDialog)
self.label_3.setGeometry(QtCore.QRect(21, 21, 39, 16))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_4 = QtGui.QLabel(ProfileDialog)
self.label_4.setGeometry(QtCore.QRect(21, 53, 39, 16))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.cellphone_no_line_edit = QtGui.QLineEdit(ProfileDialog)
self.cellphone_no_line_edit.setEnabled(True)
self.cellphone_no_line_edit.setGeometry(QtCore.QRect(81, 53, 201, 22))
self.cellphone_no_line_edit.setText(_fromUtf8(""))
self.cellphone_no_line_edit.setReadOnly(True)
self.cellphone_no_line_edit.setObjectName(_fromUtf8("cellphone_no_line_edit"))
self.fetion_no_line_edit = QtGui.QLineEdit(ProfileDialog)
self.fetion_no_line_edit.setEnabled(True)
self.fetion_no_line_edit.setGeometry(QtCore.QRect(81, 21, 201, 22))
self.fetion_no_line_edit.setText(_fromUtf8(""))
self.fetion_no_line_edit.setReadOnly(True)
self.fetion_no_line_edit.setObjectName(_fromUtf8("fetion_no_line_edit"))
self.label_5 = QtGui.QLabel(ProfileDialog)
self.label_5.setGeometry(QtCore.QRect(21, 85, 33, 16))
self.label_5.setObjectName(_fromUtf8("label_5"))
self.email_line_edit = QtGui.QLineEdit(ProfileDialog)
self.email_line_edit.setEnabled(True)
self.email_line_edit.setGeometry(QtCore.QRect(81, 85, 201, 22))
self.email_line_edit.setText(_fromUtf8(""))
self.email_line_edit.setReadOnly(True)
self.email_line_edit.setObjectName(_fromUtf8("email_line_edit"))
self.label_6 = QtGui.QLabel(ProfileDialog)
self.label_6.setGeometry(QtCore.QRect(21, 170, 52, 16))
self.label_6.setObjectName(_fromUtf8("label_6"))
self.retranslateUi(ProfileDialog)
QtCore.QObject.connect(self.save_btn, QtCore.SIGNAL(_fromUtf8("clicked()")), ProfileDialog.accept)
QtCore.QMetaObject.connectSlotsByName(ProfileDialog)
def retranslateUi(self, ProfileDialog):
ProfileDialog.setWindowTitle(QtGui.QApplication.translate("ProfileDialog", "Profile", None, QtGui.QApplication.UnicodeUTF8))
self.save_btn.setText(QtGui.QApplication.translate("ProfileDialog", "关闭 (&C)", None, QtGui.QApplication.UnicodeUTF8))
self.save_btn.setShortcut(QtGui.QApplication.translate("ProfileDialog", "Return", None, QtGui.QApplication.UnicodeUTF8))
self.avatar_label.setText(QtGui.QApplication.translate("ProfileDialog", "avatar", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("ProfileDialog", "姓名", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("ProfileDialog", "飞信号", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("ProfileDialog", "手机号", None, QtGui.QApplication.UnicodeUTF8))
self.label_5.setText(QtGui.QApplication.translate("ProfileDialog", "EMail", None, QtGui.QApplication.UnicodeUTF8))
self.label_6.setText(QtGui.QApplication.translate("ProfileDialog", "心情短语", None, QtGui.QApplication.UnicodeUTF8))
| williamyangcn/iBlah_py | ui/ui_profile_dialog.py | Python | bsd-3-clause | 4,945 |
import backend # hopefully fixes issues with Celery finding tasks?
| bleedingwolf/Spyglass | spyglass/__init__.py | Python | bsd-3-clause | 68 |
from setuptools import setup, find_packages
setup(
name='django-test-html-form',
version='0.1',
description="Make your Django HTML form tests more explicit and concise.",
long_description=open('README.rst').read(),
keywords='django test assert',
author='Dan Claudiu Pop',
author_email='dancladiupop@gmail.com',
url='https://github.com/danclaudiupop/assertHtmlForm',
license='BSD License',
packages=find_packages(),
include_package_data=True,
install_requires=[
'beautifulsoup4',
],
)
| danclaudiupop/django-test-html-form | setup.py | Python | bsd-3-clause | 546 |
import logging, os, random
from zc.buildout import UserError, easy_install
from zc.recipe.egg import Egg
SETTINGS_TEMPLATE = '''
from %(settings_module)s import *
SECRET_KEY = "%(secret)s"
%(settings_override)s
'''
SCRIPT_TEMPLATES = {
'wsgi': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.wsgi import get_wsgi_application
IS_14_PLUS = True
except ImportError:
from django.core.handlers.wsgi import WSGIHandler
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
def app_factory(global_config, **local_config):
"""This function wraps our simple WSGI app so it
can be used with paste.deploy"""
if IS_14_PLUS:
return get_wsgi_application()
else:
return WSGIHandler()
application = app_factory(%(arguments)s)
''',
'manage': easy_install.script_header + '''
%(relative_paths_setup)s
import sys
sys.path[0:0] = [
%(path)s,
]
%(initialization)s
import os
try:
from django.core.management import execute_from_command_line
IS_14_PLUS = True
except ImportError:
from django.core.management import ManagementUtility
IS_14_PLUS = False
os.environ['DJANGO_SETTINGS_MODULE'] = "%(module_name)s%(attrs)s"
if IS_14_PLUS:
execute_from_command_line(%(arguments)s)
else:
utility = ManagementUtility(%(arguments)s)
utility.execute()
'''
}
class Recipe(object):
wsgi_file = 'wsgi.py'
settings_file = 'settings.py'
sites_default = 'sites'
site_settings_template = '%(name)s_site_config'
secret_cfg = '.secret.cfg'
def __init__(self, buildout, name, options):
self.buildout, self.name, self.options = buildout, name, options
self.logger = logging.getLogger(name)
self.options['location'] = os.path.join(
self.buildout['buildout']['parts-directory'], self.name
)
self.options.setdefault('extra-paths', '')
self.options.setdefault('environment-vars', '')
self.options.setdefault('sites-directory', self.sites_default)
self.options.setdefault('settings-override', '')
self.options.setdefault('settings-file', self.settings_file)
self.options.setdefault('wsgi-file', self.wsgi_file)
self.options.setdefault('manage-py-file', 'django')
self.eggs = [ ]
if 'eggs' in self.buildout['buildout']:
self.eggs.extend(self.buildout['buildout']['eggs'].split())
if 'eggs' in self.options:
self.eggs.extend(self.options['eggs'].split())
self.working_set = None
self.extra_paths = [ self.options['location'] ]
sites_path = os.path.join(
self.buildout['buildout']['directory'],
self.options['sites-directory']
)
if os.path.isdir(sites_path):
self.extra_paths.append(sites_path)
if os.path.isdir(sites_path) and 'settings-module' not in self.options:
# Check if the user has created a module %(name)s_config
settings_module = self.site_settings_template % {
'name': self.name
}
settings_module_path = os.path.join(sites_path, settings_module)
initpy = os.path.join(settings_module_path, '__init__.py')
settingspy = os.path.join(settings_module_path, 'settings.py')
if os.path.isdir(settings_module_path) and \
os.path.isfile(initpy) and os.path.isfile(settingspy):
self.options.setdefault('settings-module',
'%s.settings' % settings_module)
self.extra_paths.extend(self.options['extra-paths'].split())
self.secret_key = None
def setup_working_set(self):
egg = Egg(
self.buildout, 'Django', self.options
)
self.working_set = egg.working_set(self.eggs)
def setup_secret(self):
secret_file = os.path.join(
self.buildout['buildout']['directory'],
self.secret_cfg
)
if os.path.isfile(secret_file):
stream = open(secret_file, 'rb')
data = stream.read().decode('utf-8').strip()
stream.close()
self.logger.debug("Read secret: %s" % data)
else:
stream = open(secret_file, 'wb')
chars = u'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
data = u''.join([random.choice(chars) for __ in range(50)])
stream.write(data.encode('utf-8')+u"\n")
stream.close()
self.logger.debug(
"Generated secret: %s (and written to %s)" % (data, secret_file)
)
self.secret_key = data
return secret_file
def setup_module_file(self, module, name, data):
with open(os.path.join(module, name), 'wb') as stream:
stream.write(data)
def get_settings(self, static_directory=None, media_directory=None):
if 'settings-module' not in self.options:
raise UserError(
("You should specify 'settings-module' in %(name)s "
"or create a module named '"+self.site_settings_template+"' "
"in '%(sites)s' with a 'settings.py' file in it") % {
'name': self.name,
'sites': self.options['sites-directory']
}
)
settings_override = self.options['settings-override']
if static_directory is not None:
settings_override += '\nSTATIC_ROOT = "%s"\n' % (
static_directory,
)
if media_directory is not None:
settings_override += '\nMEDIA_ROOT = "%s"\n' % (
media_directory,
)
return SETTINGS_TEMPLATE % {
'settings_module': self.options['settings-module'],
'secret': self.secret_key,
'settings_override': settings_override
}
def setup_directories(self):
result = []
for directory in [ 'static-directory', 'media-directory' ]:
result.append(None)
if directory in self.options:
path = os.path.join(
self.buildout['buildout']['directory'],
self.options[directory]
)
if not os.path.isdir(path):
os.makedirs(path)
result[-1] = path
return result
def get_initialization(self):
# The initialization code is expressed as a list of lines
initialization = []
# Gets the initialization code: the tricky part here is to preserve
# indentation.
        # Since buildout discards leading whitespace, anyone who wants to
        # preserve indentation must prefix those lines with '>>> ' or '... '
raw_value = self.options.get('initialization', '')
is_indented = False
indentations = ('>>> ', '... ')
for line in raw_value.splitlines():
if line != "":
if len(initialization) == 0:
if line.startswith(indentations[0]):
is_indented = True
else:
if is_indented and not line.startswith(indentations[1]):
raise UserError(
("Line '%s' should be indented "
"properly but is not") % line
)
if is_indented:
line = line[4:]
initialization.append(line)
# Gets the environment-vars option and generates code to set the
# enviroment variables via os.environ
environment_vars = []
for line in self.options.get('environment-vars', '').splitlines():
line = line.strip()
if len(line) > 0:
try:
var_name, raw_value = line.split(' ', 1)
except ValueError:
raise RuntimeError(
"Bad djc.recipe2 environment-vars contents: %s" % line
)
environment_vars.append(
'os.environ["%s"] = r"%s"' % (
var_name,
raw_value.strip()
)
)
if len(environment_vars) > 0:
initialization.append("import os")
initialization.extend(environment_vars)
if len(initialization) > 0:
return "\n"+"\n".join(initialization)+"\n"
return ""
def create_script(self, name, path, settings, template, arguments):
"""Create arbitrary script.
This script will also include the eventual code found in
``initialization`` and will also set (via ``os.environ``) the
environment variables found in ``environment-vars``
"""
self.logger.info(
"Creating script at %s" % (os.path.join(path, name),)
)
settings = settings.rsplit(".", 1)
module = settings[0]
attrs = ""
if len(settings) > 1:
attrs = "." + settings[1]
old_script_template = easy_install.script_template
easy_install.script_template = template
script = easy_install.scripts(
reqs=[(name, module, attrs)],
working_set=self.working_set[1],
executable=self.options['executable'],
dest=path,
extra_paths=self.extra_paths,
initialization=self.get_initialization(),
arguments=str(arguments)
)
easy_install.script_template = old_script_template
return script
def setup_manage_script(self, settings):
arguments = "sys.argv"
return self.create_script(
self.options['manage-py-file'],
self.buildout['buildout']['bin-directory'],
settings,
SCRIPT_TEMPLATES['manage'],
arguments
)
def setup_wsgi_script(self, module_path, settings):
arguments = "global_config={}"
return self.create_script(
self.options['wsgi-file'],
module_path,
settings,
SCRIPT_TEMPLATES['wsgi'],
arguments
)
def setup(self, static_directory=None, media_directory=None):
part_module = '%s_part_site' % self.name
part_module_path = os.path.join(self.options['location'], part_module)
settings_module = "%s.%s" % (
part_module,
os.path.splitext(self.options['settings-file'])[0]
)
if not os.path.exists(part_module_path):
os.makedirs(part_module_path)
self.setup_module_file(part_module_path, '__init__.py', "#\n")
self.setup_module_file(
part_module_path,
self.options['settings-file'],
self.get_settings(static_directory, media_directory)
)
self.setup_wsgi_script(part_module_path, settings_module)
files = [ self.options['location'] ]
files.extend(self.setup_manage_script(settings_module))
return files
def install(self):
files = []
self.setup_working_set()
# The .secret.cfg file is not reported so it doesn't get deleted
self.setup_secret()
static_directory, media_directory = self.setup_directories()
# static and media are not added to files so that updates
# won't delete them, nor reinstallations of parts
files.extend(self.setup(static_directory, media_directory))
return tuple(files)
update = install
| abstract-open-solutions/djc.recipe2 | djc/recipe2/recipe.py | Python | bsd-3-clause | 11,686 |
from datetime import datetime
from django.test import TestCase
from simple_history.signals import (
post_create_historical_record,
pre_create_historical_record,
)
from ..models import Poll
today = datetime(2021, 1, 1, 10, 0)
class PrePostCreateHistoricalRecordSignalTest(TestCase):
def setUp(self):
self.signal_was_called = False
self.signal_instance = None
self.signal_history_instance = None
self.signal_sender = None
def test_pre_create_historical_record_signal(self):
def handler(sender, instance, **kwargs):
self.signal_was_called = True
self.signal_instance = instance
self.signal_history_instance = kwargs["history_instance"]
self.signal_sender = sender
pre_create_historical_record.connect(handler)
p = Poll(question="what's up?", pub_date=today)
p.save()
self.assertTrue(self.signal_was_called)
self.assertEqual(self.signal_instance, p)
self.assertIsNotNone(self.signal_history_instance)
self.assertEqual(self.signal_sender, p.history.first().__class__)
def test_post_create_historical_record_signal(self):
def handler(sender, instance, history_instance, **kwargs):
self.signal_was_called = True
self.signal_instance = instance
self.signal_history_instance = history_instance
self.signal_sender = sender
post_create_historical_record.connect(handler)
p = Poll(question="what's up?", pub_date=today)
p.save()
self.assertTrue(self.signal_was_called)
self.assertEqual(self.signal_instance, p)
self.assertIsNotNone(self.signal_history_instance)
self.assertEqual(self.signal_sender, p.history.first().__class__)
| treyhunner/django-simple-history | simple_history/tests/tests/test_signals.py | Python | bsd-3-clause | 1,806 |
# -*- coding: utf-8 -*-
"""hamming.py: Return the Hamming distance between two integers (bitwise)."""
__author__ = "Russell J. Funk"
__date__ = "February 7, 2013"
__copyright__ = "Copyright (C) 2013"
__reference__ = ["http://wiki.python.org/moin/BitManipulation",
"http://en.wikipedia.org/wiki/Hamming_distance"]
__status__ = "Prototype"
def hamming(a, b):
"""Calculate the Hamming distance between two integers (bitwise).
Args:
a: a list of 1s and 0s
b: a list of 1s and 0s
Returns:
The hamming distance between two integers.
Raises:
Value Error: Inputs must have the same bit length.
"""
if len(a) != len(b):
raise ValueError("Inputs must have same bit length.")
else:
distance = 0
for i in range(len(a)):
if a[i] != b[i]:
distance += 1
return distance
def hamming_ratio(a, b, bits = 384):
"""Calculates the hamming ratio between two integers
represented as a list of bits.
Args:
a and b must be lists of 1s and 0s; the calculation
is relative to the number of bits.
Returns:
The hamming ratio between two integers.
"""
return float((bits - hamming(a,b)))/bits
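# Example usage (values chosen for illustration):
#
#     >>> hamming([1, 0, 1, 1], [1, 1, 1, 0])
#     2
#     >>> hamming_ratio([1, 0, 1, 1], [1, 1, 1, 0], bits=4)
#     0.5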
| jkatzsam/matchtools | matchtools/hamming.py | Python | bsd-3-clause | 1,270 |
#!/usr/bin/env python
"""
Example that shows how to switch between Emacs and Vi input mode.
"""
from prompt_toolkit import prompt
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.keys import Keys
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
def run():
    # Create a set of key bindings for the prompt; the editing mode is
    # toggled at runtime by the key binding below.
manager = KeyBindingManager.for_prompt()
# Add an additional key binding for toggling this flag.
@manager.registry.add_binding(Keys.F4)
def _(event):
" Toggle between Emacs and Vi mode. "
if event.cli.editing_mode == EditingMode.VI:
event.cli.editing_mode = EditingMode.EMACS
else:
event.cli.editing_mode = EditingMode.VI
# Add a bottom toolbar to display the status.
style = style_from_dict({
Token.Toolbar: 'reverse',
})
def get_bottom_toolbar_tokens(cli):
" Display the current input mode. "
text = 'Vi' if cli.editing_mode == EditingMode.VI else 'Emacs'
return [
(Token.Toolbar, ' [F4] %s ' % text)
]
prompt('> ', key_bindings_registry=manager.registry,
get_bottom_toolbar_tokens=get_bottom_toolbar_tokens,
style=style)
if __name__ == '__main__':
run()
| melund/python-prompt-toolkit | examples/switch-between-vi-emacs.py | Python | bsd-3-clause | 1,411 |
import datetime
import numbers
from django.db.models.fields import FieldDoesNotExist
from django.utils import tree
from django.core.exceptions import FieldError
try:
from django.db.models.sql.constants import LOOKUP_SEP
except:
from django.db.models.constants import LOOKUP_SEP
class HStoreConstraint():
value_operators = {'exact': '=', 'iexact': '=', 'in': 'IN', 'lt': '<', 'lte': '<=', 'gt': '>', 'gte': '>='}
def __init__(self, alias, field, value, lookup_type, key=None):
self.lvalue = '%s'
self.alias = alias
self.field = field
self.values = [value]
if lookup_type == 'contains':
if isinstance(value, basestring):
self.operator = '?'
elif isinstance(value, (list, tuple)):
self.operator = '?&'
self.values = [list(value)]
else:
raise ValueError('invalid value %r' % value)
elif lookup_type in self.value_operators:
self.operator = self.value_operators[lookup_type]
if self.operator == 'IN':
test_value = value[0] if len(value) > 0 else ''
self.values = [tuple(value)]
else:
test_value = value
if isinstance(test_value, datetime.datetime):
cast_type = 'timestamp'
elif isinstance(test_value, datetime.date):
cast_type = 'date'
elif isinstance(test_value, datetime.time):
cast_type = 'time'
elif isinstance(test_value, int):
cast_type = 'integer'
elif isinstance(test_value, numbers.Number):
cast_type = 'double precision'
elif isinstance(test_value, basestring):
cast_type = None
else:
raise ValueError('invalid value %r' % test_value)
if cast_type:
self.lvalue = "CAST(NULLIF(%%s->'%s','') AS %s)" % (key, cast_type)
elif lookup_type == 'iexact':
self.lvalue = "lower(%%s->'%s')" % key
self.values = [value.lower()]
elif lookup_type == 'in' and not value:
self.operator = '?'
self.values = [key]
else:
self.lvalue = "%%s->'%s'" % key
else:
raise TypeError('invalid lookup type')
def sql_for_column(self, qn, connection):
if self.alias:
return '%s.%s' % (qn(self.alias), qn(self.field))
else:
return qn(self.field)
def as_sql(self, qn=None, connection=None):
lvalue = self.lvalue % self.sql_for_column(qn, connection)
expr = '%s %s %%s' % (lvalue, self.operator)
return (expr, self.values)
class HQ(tree.Node):
AND = 'AND'
OR = 'OR'
default = AND
query_terms = ['exact', 'iexact', 'lt', 'lte', 'gt', 'gte', 'in', 'contains']
def __init__(self, **kwargs):
super(HQ, self).__init__(children=kwargs.items())
def _combine(self, other, conn):
if not isinstance(other, HQ):
raise TypeError(other)
obj = type(self)()
obj.add(self, conn)
obj.add(other, conn)
return obj
def __or__(self, other):
return self._combine(other, self.OR)
def __and__(self, other):
return self._combine(other, self.AND)
def __invert__(self):
obj = type(self)()
obj.add(self, self.AND)
obj.negate()
return obj
def add_to_query(self, query, used_aliases):
self.add_to_node(query.where, query, used_aliases)
def add_to_node(self, where_node, query, used_aliases):
for child in self.children:
if isinstance(child, HQ):
node = query.where_class()
child.add_to_node(node, query, used_aliases)
where_node.add(node, self.connector)
else:
field, value = child
parts = field.split(LOOKUP_SEP)
if not parts:
raise FieldError("Cannot parse keyword query %r" % field)
lookup_type = self.query_terms[0] # Default lookup type
num_parts = len(parts)
if len(parts) > 1 and parts[-1] in self.query_terms:
# Traverse the lookup query to distinguish related fields from
# lookup types.
lookup_model = query.model
for counter, field_name in enumerate(parts):
try:
lookup_field = lookup_model._meta.get_field(field_name)
except FieldDoesNotExist:
# Not a field. Bail out.
lookup_type = parts.pop()
break
# Unless we're at the end of the list of lookups, let's attempt
# to continue traversing relations.
if (counter + 1) < num_parts:
try:
lookup_model = lookup_field.rel.to
except AttributeError:
# Not a related field. Bail out.
lookup_type = parts.pop()
break
if lookup_type == 'contains':
key = None
else:
key = parts[-1]
parts = parts[:-1]
opts = query.get_meta()
alias = query.get_initial_alias()
field, target, opts, join_list, last, extra = query.setup_joins(parts, opts, alias, True)
col, alias, join_list = query.trim_joins(target, join_list, last, False, False)
where_node.add(HStoreConstraint(alias, col, value, lookup_type, key), self.connector)
if self.negated:
where_node.negate()
def add_hstore(queryset, field, key, name=None):
assert queryset.query.can_filter(), "Cannot change a query once a slice has been taken"
name = name or key
clone = queryset._clone()
clone.query.add_extra({name: "%s -> '%s'" % (field, key)}, None, None, None, None, None)
return clone
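# Illustrative usage (model and hstore field names hypothetical; the filter
# call relies on Django's support for objects implementing add_to_query()):
#
#     # rows whose hstore field has key 'color', or a numeric 'price' > 10
#     qs = Thing.objects.filter(HQ(data__contains='color') |
#                               HQ(data__price__gt=10))
#     # annotate each row with the raw value of key 'color'
#     qs = add_hstore(Thing.objects.all(), 'data', 'color')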
| erussell/hstore-field | hstore_field/query.py | Python | bsd-3-clause | 6,295 |
import pytest
from six.moves.urllib.parse import quote
from ...api import MissingEntry
from .base import BaseTestFormgrade
from .manager import HubAuthNotebookServerUserManager
@pytest.mark.formgrader
@pytest.mark.usefixtures("all_formgraders")
class TestGradebook(BaseTestFormgrade):
def _click_element(self, name):
self.browser.find_element_by_css_selector(name).click()
def test_start(self):
# This is just a fake test, since starting up the browser and formgrader
# can take a little while. So if anything goes wrong there, this test
# will fail, rather than having it fail on some other test.
pass
def test_login(self):
if self.manager.jupyterhub is None:
pytest.skip("JupyterHub is not running")
self._get(self.manager.base_formgrade_url)
self._wait_for_element("username_input")
next_url = self.formgrade_url().replace(self.manager.base_url, "")
self._check_url("{}/hub/login?next={}".format(self.manager.base_url, next_url))
# fill out the form
self.browser.find_element_by_id("username_input").send_keys("foobar")
self.browser.find_element_by_id("login_submit").click()
# check the url
self._wait_for_gradebook_page("")
def test_load_assignment_list(self):
# load the main page and make sure it is the Assignments page
self._get(self.formgrade_url())
self._wait_for_gradebook_page("")
self._check_breadcrumbs("Assignments")
# load the assignments page
self._load_gradebook_page("assignments")
self._check_breadcrumbs("Assignments")
# click on the "Problem Set 1" link
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("assignments/Problem Set 1")
def test_load_assignment_notebook_list(self):
self._load_gradebook_page("assignments/Problem Set 1")
self._check_breadcrumbs("Assignments", "Problem Set 1")
# click the "Assignments" link
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
self.browser.back()
# click on the problem link
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
self._click_link(problem.name)
self._wait_for_gradebook_page("assignments/Problem Set 1/{}".format(problem.name))
self.browser.back()
def test_load_assignment_notebook_submissions_list(self):
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
self._load_gradebook_page("assignments/Problem Set 1/{}".format(problem.name))
self._check_breadcrumbs("Assignments", "Problem Set 1", problem.name)
# click the "Assignments" link
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
self.browser.back()
# click the "Problem Set 1" link
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("assignments/Problem Set 1")
self.browser.back()
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
for i in range(len(submissions)):
# click on the "Submission #i" link
self._click_link("Submission #{}".format(i + 1))
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[i].id))
self.browser.back()
def test_load_student_list(self):
# load the student view
self._load_gradebook_page("students")
self._check_breadcrumbs("Students")
# click on student
for student in self.gradebook.students:
## TODO: they should have a link here, even if they haven't submitted anything!
if len(student.submissions) == 0:
continue
self._click_link("{}, {}".format(student.last_name, student.first_name))
self._wait_for_gradebook_page("students/{}".format(student.id))
self.browser.back()
def test_load_student_assignment_list(self):
for student in self.gradebook.students:
self._load_gradebook_page("students/{}".format(student.id))
self._check_breadcrumbs("Students", student.id)
try:
self.gradebook.find_submission("Problem Set 1", student.id)
except MissingEntry:
## TODO: make sure link doesn't exist
continue
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("students/{}/Problem Set 1".format(student.id))
def test_load_student_assignment_submissions_list(self):
for student in self.gradebook.students:
try:
submission = self.gradebook.find_submission("Problem Set 1", student.id)
except MissingEntry:
## TODO: make sure link doesn't exist
continue
self._load_gradebook_page("students/{}/Problem Set 1".format(student.id))
self._check_breadcrumbs("Students", student.id, "Problem Set 1")
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
submission = self.gradebook.find_submission_notebook(problem.name, "Problem Set 1", student.id)
self._click_link(problem.name)
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
self.browser.back()
self._wait_for_gradebook_page("students/{}/Problem Set 1".format(student.id))
def test_switch_views(self):
# load the main page
self._load_gradebook_page("assignments")
# click the "Change View" button
self._click_link("Change View", partial=True)
# click the "Students" option
self._click_link("Students")
self._wait_for_gradebook_page("students")
# click the "Change View" button
self._click_link("Change View", partial=True)
# click the "Assignments" option
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
def test_formgrade_view_breadcrumbs(self):
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
for i, submission in enumerate(submissions):
self._get(self.formgrade_url("submissions/{}".format(submission.id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# click on the "Assignments" link
self._click_link("Assignments")
self._wait_for_gradebook_page("assignments")
# go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# click on the "Problem Set 1" link
self._click_link("Problem Set 1")
self._wait_for_gradebook_page("assignments/Problem Set 1")
# go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# click on the problem link
self._click_link(problem.name)
self._wait_for_gradebook_page("assignments/Problem Set 1/{}".format(problem.name))
# go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
def test_load_live_notebook(self):
for problem in self.gradebook.find_assignment("Problem Set 1").notebooks:
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
for i, submission in enumerate(submissions):
self._get(self.formgrade_url("submissions/{}".format(submission.id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
# check the live notebook link
self._click_link("Submission #{}".format(i + 1))
self.browser.switch_to_window(self.browser.window_handles[1])
self._wait_for_notebook_page(self.notebook_url("autograded/{}/Problem Set 1/{}.ipynb".format(submission.student.id, problem.name)))
self.browser.close()
self.browser.switch_to_window(self.browser.window_handles[0])
def test_formgrade_images(self):
submissions = self.gradebook.find_notebook("Problem 1", "Problem Set 1").submissions
submissions.sort(key=lambda x: x.id)
for submission in submissions:
self._get(self.formgrade_url("submissions/{}".format(submission.id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submission.id))
images = self.browser.find_elements_by_tag_name("img")
for image in images:
# check that the image is loaded, and that it has a width
assert self.browser.execute_script("return arguments[0].complete", image)
assert self.browser.execute_script("return arguments[0].naturalWidth", image) > 0
def test_next_prev_assignments(self):
problem = self.gradebook.find_notebook("Problem 1", "Problem Set 1")
submissions = problem.submissions
submissions.sort(key=lambda x: x.id)
        # test navigating by clicking the next/previous links
next_functions = [
(self._click_element, ".next a")
]
prev_functions = [
(self._click_element, ".previous a")
]
for n, p in zip(next_functions, prev_functions):
# first element is the function, the other elements are the arguments
# to that function
next_function = lambda: n[0](*n[1:])
prev_function = lambda: p[0](*p[1:])
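            # note: these lambdas close over the loop variables n and p, which
            # is safe here because each is called before the next iteration
            # rebinds them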
# Load the first submission
self._get(self.formgrade_url("submissions/{}".format(submissions[0].id)))
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[0].id))
# Move to the next submission
next_function()
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[1].id))
# Move to the next submission (should return to notebook list)
next_function()
self._wait_for_gradebook_page("assignments/Problem Set 1/Problem 1")
# Go back
self.browser.back()
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[1].id))
# Move to the previous submission
prev_function()
self._wait_for_formgrader("submissions/{}/?index=0".format(submissions[0].id))
# Move to the previous submission (should return to the notebook list)
prev_function()
self._wait_for_gradebook_page("assignments/Problem Set 1/Problem 1")
def test_logout(self):
"""Make sure after we've logged out we can't access any of the formgrader pages."""
if self.manager.jupyterhub is None:
pytest.skip("JupyterHub is not running")
# logout and wait for the login page to appear
self._get("{}/hub".format(self.manager.base_url))
self._wait_for_element("logout")
self._wait_for_visibility_of_element("logout")
element = self.browser.find_element_by_id("logout")
element.click()
self._wait_for_element("username_input")
# try going to a formgrader page
self._get(self.manager.base_formgrade_url)
self._wait_for_element("username_input")
next_url = self.formgrade_url().replace(self.manager.base_url, "")
self._check_url("{}/hub/login?next={}".format(self.manager.base_url, next_url))
# try going to a live notebook page
problem = self.gradebook.find_assignment("Problem Set 1").notebooks[0]
submission = sorted(problem.submissions, key=lambda x: x.id)[0]
url = self.notebook_url("autograded/{}/Problem Set 1/{}.ipynb".format(submission.student.id, problem.name))
self._get(url)
self._wait_for_element("username_input")
self._check_url("{}/hub/login".format(self.manager.base_url))
| EdwardJKim/nbgrader | nbgrader/tests/formgrader/test_gradebook_navigation.py | Python | bsd-3-clause | 12,452 |
# Copyright The IETF Trust 2007, All Rights Reserved
# Django settings for ietf project.
# BASE_DIR and "settings_local" are from
# http://code.djangoproject.com/wiki/SplitSettings
import os
try:
import syslog
syslog.openlog("datatracker", syslog.LOG_PID, syslog.LOG_USER)
except ImportError:
pass
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# a place to put ajax logs if necessary.
LOG_DIR = '/var/log/datatracker'
import sys
sys.path.append(os.path.abspath(BASE_DIR + "/.."))
sys.path.append(os.path.abspath(BASE_DIR + "/../redesign"))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Domain name of the IETF
IETF_DOMAIN = 'ietf.org'
ADMINS = (
('IETF Django Developers', 'django-project@' + IETF_DOMAIN),
('GMail Tracker Archive', 'ietf.tracker.archive+errors@gmail.com'),
('Henrik Levkowetz', 'henrik@levkowetz.com'),
('Robert Sparks', 'rjsparks@nostrum.com'),
('Ole Laursen', 'olau@iola.dk'),
)
# Server name of the tools server
TOOLS_SERVER = 'tools.' + IETF_DOMAIN
# Override this in the settings_local.py file:
SERVER_EMAIL = 'Django Server <django-project@' + TOOLS_SERVER + '>'
DEFAULT_FROM_EMAIL = 'IETF Secretariat <ietf-secretariat-reply@' + IETF_DOMAIN + '>'
MANAGERS = ADMINS
DATABASES = {
'default': {
'NAME': 'ietf_utf8',
'ENGINE': 'django.db.backends.mysql',
'USER': 'ietf',
#'PASSWORD': 'ietf',
#'OPTIONS': {},
},
# 'legacy': {
# 'NAME': 'ietf',
# 'ENGINE': 'django.db.backends.mysql',
# 'USER': 'ietf',
# #'PASSWORD': 'ietf',
# },
}
DATABASE_TEST_OPTIONS = {
# Uncomment this to speed up testing if your database supports InnoDB:
# 'init_command': 'SET storage_engine=InnoDB',
}
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'PST8PDT'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = BASE_DIR + "/../static/"
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
DAJAXICE_MEDIA_PREFIX="dajaxice"
AUTH_PROFILE_MODULE = 'person.Person'
AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.RemoteUserBackend', )
#DATABASE_ROUTERS = ["ietf.legacy_router.LegacyRouter"]
SESSION_COOKIE_AGE = 43200 # 12 hours
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
'ietf.dbtemplate.template.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.http.ConditionalGetMiddleware',
'django.middleware.doc.XViewMiddleware',
'ietf.middleware.SQLLogMiddleware',
'ietf.middleware.SMTPExceptionMiddleware',
'ietf.middleware.RedirectTrailingPeriod',
'django.middleware.transaction.TransactionMiddleware',
'ietf.middleware.UnicodeNfkcNormalization',
'ietf.secr.middleware.secauth.SecAuthMiddleware'
)
ROOT_URLCONF = 'ietf.urls'
TEMPLATE_DIRS = (
BASE_DIR + "/templates",
BASE_DIR + "/secr/templates",
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
'ietf.context_processors.server_mode',
'ietf.context_processors.revision_info',
'ietf.secr.context_processors.secr_revision_info',
'ietf.secr.context_processors.static',
'ietf.context_processors.rfcdiff_prefix',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.sitemaps',
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.humanize',
'django.contrib.messages',
'south',
'workflows',
'permissions',
'ietf.person',
'ietf.name',
'ietf.group',
'ietf.doc',
'ietf.message',
'ietf.announcements',
'ietf.idindex',
'ietf.idtracker',
'ietf.ietfauth',
'ietf.iesg',
'ietf.ipr',
'ietf.liaisons',
'ietf.mailinglists',
'ietf.meeting',
#'ietf.proceedings',
'ietf.redirects',
'ietf.idrfc',
'ietf.wginfo',
'ietf.submit',
'ietf.ietfworkflows',
'ietf.wgchairs',
'ietf.wgcharter',
'ietf.sync',
'ietf.community',
'ietf.release',
# secretariat apps
'form_utils',
'ietf.secr.announcement',
'ietf.secr.areas',
'ietf.secr.drafts',
'ietf.secr.groups',
'ietf.secr.ipradmin',
'ietf.secr.meetings',
'ietf.secr.proceedings',
'ietf.secr.roles',
'ietf.secr.rolodex',
'ietf.secr.telechat',
'ietf.secr.sreq',
'ietf.nomcom',
'ietf.dbtemplate',
'dajaxice',
)
INTERNAL_IPS = (
# AMS servers
'64.170.98.32',
'64.170.98.86',
# local
'127.0.0.1',
'::1',
)
# no slash at end
IDTRACKER_BASE_URL = "http://datatracker.ietf.org"
RFCDIFF_PREFIX = "//www.ietf.org/rfcdiff"
# Valid values:
# 'production', 'test', 'development'
# Override this in settings_local.py if it's not true
SERVER_MODE = 'development'
# The name of the method to use to invoke the test suite
TEST_RUNNER = 'ietf.utils.test_runner.run_tests'
# WG Chair configuration
MAX_WG_DELEGATES = 3
DATE_FORMAT = "Y-m-d"
DATETIME_FORMAT = "Y-m-d H:i"
# Override this in settings_local.py if needed
# *_PATH variables end with a trailing slash.
INTERNET_DRAFT_PATH = '/a/www/ietf-ftp/internet-drafts/'
INTERNET_DRAFT_PDF_PATH = '/a/www/ietf-datatracker/pdf/'
RFC_PATH = '/a/www/ietf-ftp/rfc/'
CHARTER_PATH = '/a/www/ietf-ftp/charter/'
CHARTER_TXT_URL = 'http://www.ietf.org/charter/'
CONFLICT_REVIEW_PATH = '/a/www/ietf-ftp/conflict-reviews'
CONFLICT_REVIEW_TXT_URL = 'http://www.ietf.org/cr/'
STATUS_CHANGE_PATH = '/a/www/ietf-ftp/status-changes'
STATUS_CHANGE_TXT_URL = 'http://www.ietf.org/sc/'
AGENDA_PATH = '/a/www/www6s/proceedings/'
AGENDA_PATH_PATTERN = '/a/www/www6s/proceedings/%(meeting)s/agenda/%(wg)s.%(ext)s'
MINUTES_PATH_PATTERN = '/a/www/www6s/proceedings/%(meeting)s/minutes/%(wg)s.%(ext)s'
SLIDES_PATH_PATTERN = '/a/www/www6s/proceedings/%(meeting)s/slides/%(wg)s-*'
IPR_DOCUMENT_PATH = '/a/www/ietf-ftp/ietf/IPR/'
IETFWG_DESCRIPTIONS_PATH = '/a/www/www6s/wg-descriptions/'
IESG_TASK_FILE = '/a/www/www6/iesg/internal/task.txt'
IESG_ROLL_CALL_FILE = '/a/www/www6/iesg/internal/rollcall.txt'
IESG_MINUTES_FILE = '/a/www/www6/iesg/internal/minutes.txt'
IESG_WG_EVALUATION_DIR = "/a/www/www6/iesg/evaluation"
INTERNET_DRAFT_ARCHIVE_DIR = '/a/www/www6s/draft-archive'
# Ideally, more of these would be local -- but since we don't support
# versions right now, we'll point to external websites
DOC_HREFS = {
"agenda": "/meeting/{meeting}/agenda/{doc.group.acronym}/",
#"charter": "/doc/{doc.name}-{doc.rev}/",
"charter": "http://www.ietf.org/charter/{doc.name}-{doc.rev}.txt",
#"draft": "/doc/{doc.name}-{doc.rev}/",
"draft": "http://tools.ietf.org/html/{doc.name}-{doc.rev}",
# I can't figure out the liaison maze. Hopefully someone
# who understands this better can take care of it.
#"liai-att": None
#"liaison": None
"minutes": "http://www.ietf.org/proceedings/{meeting}/minutes/{doc.name}",
"slides": "http://www.ietf.org/proceedings/{meeting}/slides/{doc.name}",
}
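# Illustrative sketch (an assumption, not part of the original settings): the
# placeholders above suggest these templates are filled via str.format(),
# along the lines of:
#
#   href = DOC_HREFS["draft"].format(doc=doc)                # doc has .name/.rev
#   href = DOC_HREFS["minutes"].format(doc=doc, meeting=meeting)
#
# where 'doc' and 'meeting' are hypothetical objects named after the
# placeholders.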
# Override this in settings_local.py if needed
CACHE_MIDDLEWARE_SECONDS = 300
CACHE_MIDDLEWARE_KEY_PREFIX = ''
if SERVER_MODE == 'production':
CACHE_BACKEND= 'file://'+'/a/www/ietf-datatracker/cache/'
else:
# Default to no caching in development/test, so that every developer
# doesn't have to set CACHE_BACKEND in settings_local
CACHE_BACKEND = 'dummy:///'
# For readonly database operation
# CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
# SESSION_ENGINE = "django.contrib.sessions.backends.cache"
IPR_EMAIL_TO = ['ietf-ipr@ietf.org', ]
DOC_APPROVAL_EMAIL_CC = ["RFC Editor <rfc-editor@rfc-editor.org>", ]
# Put real password in settings_local.py
IANA_SYNC_PASSWORD = "secret"
IANA_SYNC_CHANGES_URL = "https://datatracker.iana.org:4443/data-tracker/changes"
IANA_SYNC_PROTOCOLS_URL = "http://www.iana.org/protocols/"
RFC_EDITOR_SYNC_PASSWORD="secret"
RFC_EDITOR_SYNC_NOTIFICATION_URL = "http://www.rfc-editor.org/parser/parser.php"
RFC_EDITOR_QUEUE_URL = "http://www.rfc-editor.org/queue2.xml"
RFC_EDITOR_INDEX_URL = "http://www.rfc-editor.org/rfc/rfc-index.xml"
# Liaison Statement Tool settings
LIAISON_UNIVERSAL_FROM = 'Liaison Statement Management Tool <lsmt@' + IETF_DOMAIN + '>'
LIAISON_ATTACH_PATH = '/a/www/ietf-datatracker/documents/LIAISON/'
LIAISON_ATTACH_URL = '/documents/LIAISON/'
# ID Submission Tool settings
IDSUBMIT_FROM_EMAIL = 'IETF I-D Submission Tool <idsubmission@ietf.org>'
IDSUBMIT_TO_EMAIL = 'internet-drafts@ietf.org'
IDSUBMIT_ANNOUNCE_FROM_EMAIL = 'internet-drafts@ietf.org'
IDSUBMIT_ANNOUNCE_LIST_EMAIL = 'i-d-announce@ietf.org'
# NomCom Tool settings
ROLODEX_URL = ""
PUBLIC_KEYS_URL = BASE_DIR + '/nomcom/public_keys/'
NOMCOM_FROM_EMAIL = DEFAULT_FROM_EMAIL
NOMCOM_ADMIN_EMAIL = DEFAULT_FROM_EMAIL
OPENSSL_COMMAND = '/usr/bin/openssl'
DAYS_TO_EXPIRE_NOMINATION_LINK = ''
DEFAULT_FEEDBACK_TYPE = 'offtopic'
NOMINEE_FEEDBACK_TYPES = ['comment', 'questio', 'nomina']
# Days from meeting to cut off dates on submit
FIRST_CUTOFF_DAYS = 19
SECOND_CUTOFF_DAYS = 12
CUTOFF_HOUR = 00 # midnight UTC
SUBMISSION_START_DAYS = -90
SUBMISSION_CUTOFF_DAYS = 33
SUBMISSION_CORRECTION_DAYS = 52
INTERNET_DRAFT_DAYS_TO_EXPIRE = 185
IDSUBMIT_REPOSITORY_PATH = INTERNET_DRAFT_PATH
IDSUBMIT_STAGING_PATH = '/a/www/www6s/staging/'
IDSUBMIT_STAGING_URL = 'http://www.ietf.org/staging/'
IDSUBMIT_IDNITS_BINARY = '/a/www/ietf-datatracker/scripts/idnits'
MAX_PLAIN_DRAFT_SIZE = 6291456 # Max size of the txt draft in bytes
# DOS THRESHOLDS PER DAY (Sizes are in MB)
MAX_SAME_DRAFT_NAME = 20
MAX_SAME_DRAFT_NAME_SIZE = 50
MAX_SAME_SUBMITTER = 50
MAX_SAME_SUBMITTER_SIZE = 150
MAX_SAME_WG_DRAFT = 150
MAX_SAME_WG_DRAFT_SIZE = 450
MAX_DAILY_SUBMISSION = 1000
MAX_DAILY_SUBMISSION_SIZE = 2000
# End of ID Submission Tool settings
# Account settings
DAYS_TO_EXPIRE_REGISTRATION_LINK = 3
HTPASSWD_COMMAND = "/usr/bin/htpasswd2"
HTPASSWD_FILE = "/www/htpasswd"
# DB redesign
USE_DB_REDESIGN_PROXY_CLASSES = True
SOUTH_TESTS_MIGRATE = False
# Generation of bibxml files for xml2rfc
BIBXML_BASE_PATH = '/a/www/ietf-ftp/xml2rfc'
# Timezone files for iCalendar
TZDATA_ICS_PATH = '/www/ietf-datatracker/tz/ics/'
CHANGELOG_PATH = '/www/ietf-datatracker/web/changelog'
# Secretariat Tool
# This is a tuple of regular expressions. If the incoming URL matches one of
# these, then non-secretariat access is allowed.
SECR_AUTH_UNRESTRICTED_URLS = (
#(r'^/$'),
(r'^/secr/announcement/'),
(r'^/secr/proceedings/'),
(r'^/secr/sreq/'),
)
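# Sketch of how these patterns are presumably consumed (the actual check lives
# in ietf.secr.middleware.secauth, not shown here; 're' and 'path' are
# assumptions for illustration):
#
#   import re
#   is_unrestricted = any(re.match(p, path) for p in SECR_AUTH_UNRESTRICTED_URLS)
#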
SECR_BLUE_SHEET_PATH = '/a/www/ietf-datatracker/documents/blue_sheet.rtf'
SECR_BLUE_SHEET_URL = 'https://datatracker.ietf.org/documents/blue_sheet.rtf'
SECR_INTERIM_LISTING_DIR = '/a/www/www6/meeting/interim'
SECR_MAX_UPLOAD_SIZE = 40960000
SECR_PROCEEDINGS_DIR = '/a/www/www6s/proceedings/'
SECR_STATIC_URL = '/secr/'
USE_ETAGS=True
# Put SECRET_KEY in here, or any other sensitive or site-specific
# changes. DO NOT commit settings_local.py to svn.
from settings_local import *
| mcr/ietfdb | ietf/settings.py | Python | bsd-3-clause | 12,464 |
#
# This sets up how models are displayed
# in the web admin interface.
#
from django.contrib import admin
from src.comms.models import Channel, Msg, PlayerChannelConnection, ExternalChannelConnection
class MsgAdmin(admin.ModelAdmin):
list_display = ('id', 'db_date_sent', 'db_sender', 'db_receivers', 'db_channels', 'db_message', 'db_lock_storage')
list_display_links = ("id",)
ordering = ["db_date_sent", 'db_sender', 'db_receivers', 'db_channels']
#readonly_fields = ['db_message', 'db_sender', 'db_receivers', 'db_channels']
search_fields = ['id', '^db_date_sent', '^db_message']
save_as = True
save_on_top = True
list_select_related = True
#admin.site.register(Msg, MsgAdmin)
class PlayerChannelConnectionInline(admin.TabularInline):
model = PlayerChannelConnection
fieldsets = (
(None, {
'fields':(('db_player', 'db_channel')),
'classes':('collapse',)}),)
extra = 1
class ExternalChannelConnectionInline(admin.StackedInline):
model = ExternalChannelConnection
fieldsets = (
(None, {
'fields':(('db_is_enabled','db_external_key', 'db_channel'), 'db_external_send_code', 'db_external_config'),
'classes':('collapse',)
}),)
extra = 1
class ChannelAdmin(admin.ModelAdmin):
inlines = (PlayerChannelConnectionInline, ExternalChannelConnectionInline)
list_display = ('id', 'db_key', 'db_desc', 'db_aliases', 'db_keep_log', 'db_lock_storage')
list_display_links = ("id", 'db_key')
ordering = ["db_key"]
search_fields = ['id', 'db_key', 'db_aliases']
save_as = True
save_on_top = True
list_select_related = True
fieldsets = (
(None, {'fields':(('db_key', 'db_aliases', 'db_desc'),'db_lock_storage', 'db_keep_log')}),
)
admin.site.register(Channel, ChannelAdmin)
# class PlayerChannelConnectionAdmin(admin.ModelAdmin):
# list_display = ('db_channel', 'db_player')
# list_display_links = ("db_player", 'db_channel')
# ordering = ["db_channel"]
# search_fields = ['db_channel', 'db_player']
# save_as = True
# save_on_top = True
# list_select_related = True
# admin.site.register(PlayerChannelConnection, PlayerChannelConnectionAdmin)
# class ExternalChannelConnectionAdmin(admin.ModelAdmin):
# list_display = ('db_channel', 'db_external_key', 'db_external_config')
# list_display_links = ("db_channel", 'db_external_key', 'db_external_config')
# ordering = ["db_channel"]
# search_fields = ['db_channel', 'db_external_key']
# save_as = True
# save_on_top = True
# list_select_related = True
# admin.site.register(ExternalChannelConnection, ExternalChannelConnectionAdmin)
| TaliesinSkye/evennia | src/comms/admin.py | Python | bsd-3-clause | 2,737 |
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
"""
LANG_INFO is a dictionary structure to provide meta information about languages.
About name_local: capitalize it as if your language name was appearing
inside a sentence in your language.
The 'fallback' key can be used to specify a special fallback logic which doesn't
follow the traditional 'fr-ca' -> 'fr' fallback logic.
"""
LANG_INFO = {
'af': {
'bidi': False,
'code': 'af',
'name': 'Afrikaans',
'name_local': 'Afrikaans',
},
'ar': {
'bidi': True,
'code': 'ar',
'name': 'Arabic',
'name_local': 'العربيّة',
},
'ast': {
'bidi': False,
'code': 'ast',
'name': 'Asturian',
'name_local': 'asturianu',
},
'az': {
'bidi': True,
'code': 'az',
'name': 'Azerbaijani',
'name_local': 'Azərbaycanca',
},
'be': {
'bidi': False,
'code': 'be',
'name': 'Belarusian',
'name_local': 'беларуская',
},
'bg': {
'bidi': False,
'code': 'bg',
'name': 'Bulgarian',
'name_local': 'български',
},
'bn': {
'bidi': False,
'code': 'bn',
'name': 'Bengali',
'name_local': 'বাংলা',
},
'br': {
'bidi': False,
'code': 'br',
'name': 'Breton',
'name_local': 'brezhoneg',
},
'bs': {
'bidi': False,
'code': 'bs',
'name': 'Bosnian',
'name_local': 'bosanski',
},
'ca': {
'bidi': False,
'code': 'ca',
'name': 'Catalan',
'name_local': 'català',
},
'cs': {
'bidi': False,
'code': 'cs',
'name': 'Czech',
'name_local': 'česky',
},
'cy': {
'bidi': False,
'code': 'cy',
'name': 'Welsh',
'name_local': 'Cymraeg',
},
'da': {
'bidi': False,
'code': 'da',
'name': 'Danish',
'name_local': 'dansk',
},
'de': {
'bidi': False,
'code': 'de',
'name': 'German',
'name_local': 'Deutsch',
},
'el': {
'bidi': False,
'code': 'el',
'name': 'Greek',
'name_local': 'Ελληνικά',
},
'en': {
'bidi': False,
'code': 'en',
'name': 'English',
'name_local': 'English',
},
'en-au': {
'bidi': False,
'code': 'en-au',
'name': 'Australian English',
'name_local': 'Australian English',
},
'en-gb': {
'bidi': False,
'code': 'en-gb',
'name': 'British English',
'name_local': 'British English',
},
'eo': {
'bidi': False,
'code': 'eo',
'name': 'Esperanto',
'name_local': 'Esperanto',
},
'es': {
'bidi': False,
'code': 'es',
'name': 'Spanish',
'name_local': 'español',
},
'es-ar': {
'bidi': False,
'code': 'es-ar',
'name': 'Argentinian Spanish',
'name_local': 'español de Argentina',
},
'es-co': {
'bidi': False,
'code': 'es-co',
'name': 'Colombian Spanish',
'name_local': 'español de Colombia',
},
'es-mx': {
'bidi': False,
'code': 'es-mx',
'name': 'Mexican Spanish',
'name_local': 'español de Mexico',
},
'es-ni': {
'bidi': False,
'code': 'es-ni',
'name': 'Nicaraguan Spanish',
'name_local': 'español de Nicaragua',
},
'es-ve': {
'bidi': False,
'code': 'es-ve',
'name': 'Venezuelan Spanish',
'name_local': 'español de Venezuela',
},
'et': {
'bidi': False,
'code': 'et',
'name': 'Estonian',
'name_local': 'eesti',
},
'eu': {
'bidi': False,
'code': 'eu',
'name': 'Basque',
'name_local': 'Basque',
},
'fa': {
'bidi': True,
'code': 'fa',
'name': 'Persian',
'name_local': 'فارسی',
},
'fi': {
'bidi': False,
'code': 'fi',
'name': 'Finnish',
'name_local': 'suomi',
},
'fr': {
'bidi': False,
'code': 'fr',
'name': 'French',
'name_local': 'français',
},
'fy': {
'bidi': False,
'code': 'fy',
'name': 'Frisian',
'name_local': 'frysk',
},
'ga': {
'bidi': False,
'code': 'ga',
'name': 'Irish',
'name_local': 'Gaeilge',
},
'gd': {
'bidi': False,
'code': 'gd',
'name': 'Scottish Gaelic',
'name_local': 'Gàidhlig',
},
'gl': {
'bidi': False,
'code': 'gl',
'name': 'Galician',
'name_local': 'galego',
},
'he': {
'bidi': True,
'code': 'he',
'name': 'Hebrew',
'name_local': 'עברית',
},
'hi': {
'bidi': False,
'code': 'hi',
'name': 'Hindi',
'name_local': 'Hindi',
},
'hr': {
'bidi': False,
'code': 'hr',
'name': 'Croatian',
'name_local': 'Hrvatski',
},
'hu': {
'bidi': False,
'code': 'hu',
'name': 'Hungarian',
'name_local': 'Magyar',
},
'ia': {
'bidi': False,
'code': 'ia',
'name': 'Interlingua',
'name_local': 'Interlingua',
},
'io': {
'bidi': False,
'code': 'io',
'name': 'Ido',
'name_local': 'ido',
},
'id': {
'bidi': False,
'code': 'id',
'name': 'Indonesian',
'name_local': 'Bahasa Indonesia',
},
'is': {
'bidi': False,
'code': 'is',
'name': 'Icelandic',
'name_local': 'Íslenska',
},
'it': {
'bidi': False,
'code': 'it',
'name': 'Italian',
'name_local': 'italiano',
},
'ja': {
'bidi': False,
'code': 'ja',
'name': 'Japanese',
'name_local': '日本語',
},
'ka': {
'bidi': False,
'code': 'ka',
'name': 'Georgian',
'name_local': 'ქართული',
},
'kk': {
'bidi': False,
'code': 'kk',
'name': 'Kazakh',
'name_local': 'Қазақ',
},
'km': {
'bidi': False,
'code': 'km',
'name': 'Khmer',
'name_local': 'Khmer',
},
'kn': {
'bidi': False,
'code': 'kn',
'name': 'Kannada',
'name_local': 'Kannada',
},
'ko': {
'bidi': False,
'code': 'ko',
'name': 'Korean',
'name_local': '한국어',
},
'lb': {
'bidi': False,
'code': 'lb',
'name': 'Luxembourgish',
'name_local': 'Lëtzebuergesch',
},
'lt': {
'bidi': False,
'code': 'lt',
'name': 'Lithuanian',
'name_local': 'Lietuviškai',
},
'lv': {
'bidi': False,
'code': 'lv',
'name': 'Latvian',
'name_local': 'latviešu',
},
'mk': {
'bidi': False,
'code': 'mk',
'name': 'Macedonian',
'name_local': 'Македонски',
},
'ml': {
'bidi': False,
'code': 'ml',
'name': 'Malayalam',
'name_local': 'Malayalam',
},
'mn': {
'bidi': False,
'code': 'mn',
'name': 'Mongolian',
'name_local': 'Mongolian',
},
'mr': {
'bidi': False,
'code': 'mr',
'name': 'Marathi',
'name_local': 'मराठी',
},
'my': {
'bidi': False,
'code': 'my',
'name': 'Burmese',
'name_local': 'မြန်မာဘာသာ',
},
'nb': {
'bidi': False,
'code': 'nb',
'name': 'Norwegian Bokmal',
'name_local': 'norsk (bokmål)',
},
'ne': {
'bidi': False,
'code': 'ne',
'name': 'Nepali',
'name_local': 'नेपाली',
},
'nl': {
'bidi': False,
'code': 'nl',
'name': 'Dutch',
'name_local': 'Nederlands',
},
'nn': {
'bidi': False,
'code': 'nn',
'name': 'Norwegian Nynorsk',
'name_local': 'norsk (nynorsk)',
},
'no': {
'bidi': False,
'code': 'no',
'name': 'Norwegian',
'name_local': 'norsk',
},
'os': {
'bidi': False,
'code': 'os',
'name': 'Ossetic',
'name_local': 'Ирон',
},
'pa': {
'bidi': False,
'code': 'pa',
'name': 'Punjabi',
'name_local': 'Punjabi',
},
'pl': {
'bidi': False,
'code': 'pl',
'name': 'Polish',
'name_local': 'polski',
},
'pt': {
'bidi': False,
'code': 'pt',
'name': 'Portuguese',
'name_local': 'Português',
},
'pt-br': {
'bidi': False,
'code': 'pt-br',
'name': 'Brazilian Portuguese',
'name_local': 'Português Brasileiro',
},
'ro': {
'bidi': False,
'code': 'ro',
'name': 'Romanian',
'name_local': 'Română',
},
'ru': {
'bidi': False,
'code': 'ru',
'name': 'Russian',
'name_local': 'Русский',
},
'sk': {
'bidi': False,
'code': 'sk',
'name': 'Slovak',
'name_local': 'Slovensky',
},
'sl': {
'bidi': False,
'code': 'sl',
'name': 'Slovenian',
'name_local': 'Slovenščina',
},
'sq': {
'bidi': False,
'code': 'sq',
'name': 'Albanian',
'name_local': 'shqip',
},
'sr': {
'bidi': False,
'code': 'sr',
'name': 'Serbian',
'name_local': 'српски',
},
'sr-latn': {
'bidi': False,
'code': 'sr-latn',
'name': 'Serbian Latin',
'name_local': 'srpski (latinica)',
},
'sv': {
'bidi': False,
'code': 'sv',
'name': 'Swedish',
'name_local': 'svenska',
},
'sw': {
'bidi': False,
'code': 'sw',
'name': 'Swahili',
'name_local': 'Kiswahili',
},
'ta': {
'bidi': False,
'code': 'ta',
'name': 'Tamil',
'name_local': 'தமிழ்',
},
'te': {
'bidi': False,
'code': 'te',
'name': 'Telugu',
'name_local': 'తెలుగు',
},
'th': {
'bidi': False,
'code': 'th',
'name': 'Thai',
'name_local': 'ภาษาไทย',
},
'tr': {
'bidi': False,
'code': 'tr',
'name': 'Turkish',
'name_local': 'Türkçe',
},
'tt': {
'bidi': False,
'code': 'tt',
'name': 'Tatar',
'name_local': 'Татарча',
},
'udm': {
'bidi': False,
'code': 'udm',
'name': 'Udmurt',
'name_local': 'Удмурт',
},
'uk': {
'bidi': False,
'code': 'uk',
'name': 'Ukrainian',
'name_local': 'Українська',
},
'ur': {
'bidi': True,
'code': 'ur',
'name': 'Urdu',
'name_local': 'اردو',
},
'vi': {
'bidi': False,
'code': 'vi',
'name': 'Vietnamese',
'name_local': 'Tiếng Việt',
},
'zh-cn': {
'fallback': ['zh-hans'],
},
'zh-hans': {
'bidi': False,
'code': 'zh-hans',
'name': 'Simplified Chinese',
'name_local': '简体中文',
},
'zh-hant': {
'bidi': False,
'code': 'zh-hant',
'name': 'Traditional Chinese',
'name_local': '繁體中文',
},
'zh-hk': {
'fallback': ['zh-hant'],
},
'zh-mo': {
'fallback': ['zh-hant'],
},
'zh-my': {
'fallback': ['zh-hans'],
},
'zh-sg': {
'fallback': ['zh-hans'],
},
'zh-tw': {
'fallback': ['zh-hant'],
},
}
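# A minimal sketch (not part of Django) of the fallback resolution described
# in the module docstring: entries such as 'zh-cn' carry only a 'fallback'
# list pointing at a canonical entry, so a lookup follows that chain.
def _resolve_lang(code):
    info = LANG_INFO[code]
    while 'fallback' in info and 'name' not in info:
        # follow the first fallback, e.g. 'zh-cn' -> 'zh-hans'
        info = LANG_INFO[info['fallback'][0]]
    return info
# e.g. _resolve_lang('zh-tw')['code'] == 'zh-hant'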
| yephper/django | django/conf/locale/__init__.py | Python | bsd-3-clause | 12,721 |
DEPS = [
'depot_tools',
'gclient',
'gerrit',
'gitiles',
'recipe_engine/buildbucket',
'recipe_engine/context',
'recipe_engine/commit_position',
'recipe_engine/cq',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/runtime',
'recipe_engine/source_manifest',
'recipe_engine/step',
'tryserver',
]
from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single
PROPERTIES = {
# Gerrit patches will have all properties about them prefixed with patch_.
'deps_revision_overrides': Property(default={}),
'fail_patch': Property(default=None, kind=str),
'$depot_tools/bot_update': Property(
help='Properties specific to bot_update module.',
param_name='properties',
kind=ConfigGroup(
# Whether we should do the patching in gclient instead of bot_update
apply_patch_on_gclient=Single(bool),
),
default={},
),
}
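# Illustrative note (an assumption, not taken from this recipe): properties
# declared under the '$depot_tools/bot_update' key are typically supplied in
# the build's input properties as JSON, e.g.
#
#   {"$depot_tools/bot_update": {"apply_patch_on_gclient": true}}
#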
| endlessm/chromium-browser | third_party/depot_tools/recipes/recipe_modules/bot_update/__init__.py | Python | bsd-3-clause | 1,081 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import autocomplete_light
from django.utils.encoding import force_text
from .settings import USER_MODEL
from .utils.module_loading import get_real_model_class
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = [
'^first_name',
'last_name',
'username'
]
model = get_real_model_class(USER_MODEL)
order_by = ['first_name', 'last_name']
# choice_template = 'django_documentos/user_choice_autocomplete.html'
limit_choices = 10
attrs = {
        'data-autocomplete-minimum-characters': 0,
        'placeholder': 'Pessoa que irá assinar',  # i.e. "Person who will sign"
}
# widget_attrs = {'data-widget-maximum-values': 3}
def choice_value(self, choice):
"""
Return the pk of the choice by default.
"""
return choice.pk
def choice_label(self, choice):
"""
Return the textual representation of the choice by default.
"""
# return force_text("{}-{}".format(choice.pk, choice.get_full_name().title()))
return force_text(choice.get_full_name().title())
# def choice_label(self, choice):
# return choice.get_full_name().title()
def choices_for_request(self):
return super(UserAutocomplete, self).choices_for_request()
autocomplete_light.register(UserAutocomplete)
| luzfcb/django_documentos | django_documentos/autocomplete_light_registry.py | Python | bsd-3-clause | 1,391 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from rest_framework import viewsets, serializers
from dbaas_credentials.models import Credential
from .environment import EnvironmentSerializer
from .integration_type import CredentialTypeSerializer
class IntegrationCredentialSerializer(serializers.HyperlinkedModelSerializer):
environments = EnvironmentSerializer(many=True, read_only=True)
integration_type = CredentialTypeSerializer(many=False, read_only=True)
class Meta:
model = Credential
fields = ('user', 'password', 'integration_type', 'token',
'secret', 'endpoint', 'environments', "project", "team")
class IntegrationCredentialAPI(viewsets.ModelViewSet):
"""
    Integration Credential API
"""
serializer_class = IntegrationCredentialSerializer
queryset = Credential.objects.all()
| globocom/database-as-a-service | dbaas/api/integration_credential.py | Python | bsd-3-clause | 892 |
# Copyright (c) 2015, Imperial College London
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the names of the copyright holders nor the names of their
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
#
# This file is part of the libhpc-cf Coordination Forms library that has been
# developed as part of the libhpc projects
# (http://www.imperial.ac.uk/lesc/projects/libhpc).
#
# We gratefully acknowledge the Engineering and Physical Sciences Research
# Council (EPSRC) for their support of the projects:
# - libhpc: Intelligent Component-based Development of HPC Applications
# (EP/I030239/1).
# - libhpc Stage II: A Long-term Solution for the Usability, Maintainability
# and Sustainability of HPC Software (EP/K038788/1).
from libhpc.cf.params import Parameter
from libhpc.cf.component import Component
# COMPONENT PARAMETER DEFINITIONS
fastq_split_input = Parameter('fastq_split_input', 'string', 'input', False)
fastq_split_output1 = Parameter('fastq_split_output1', 'string', 'inout', False)
fastq_split_output2 = Parameter('fastq_split_output2', 'string', 'inout', False)
bwa_index_param1 = Parameter('ref_genome_param', 'string', 'input', False)
bwa_index_output_file = Parameter('ref_genome_param', 'string', 'input', False)
bwa_index_result = Parameter('ref_genome_status', 'int', 'output', True)
bwa_aln_ref_genome = Parameter('ref_genome_param', 'string', 'input', False)
bwa_aln_short_read = Parameter('short_read_file', 'string', 'input', False)
bwa_aln_output_file = Parameter('output_file', 'string', 'inout', False)
bwa_aln_result = Parameter('bwa_aln_status', 'int', 'output', True)
bwa_sampe_param1 = Parameter('ref_genome_param', 'string', 'input', False)
bwa_sampe_param2 = Parameter('short_read_indexes_param', 'list', 'input', False)
bwa_sampe_param3 = Parameter('short_read_file_param', 'list', 'input', False)
bwa_sampe_output_file = Parameter('sam_output_file', 'string', 'output', False)
bwa_sampe_result = Parameter('bwa_sample_status', 'int', 'output', True)
samtools_import_param1 = Parameter('ref_genome_param', 'string', 'input', False)
samtools_import_param2 = Parameter('sam_file_param', 'string', 'input', False)
samtools_import_output = Parameter('bam_file_param', 'string', 'inout', False)
samtools_import_result = Parameter('samtools_import_status', 'int', 'output', True)
samtools_sort_baminput = Parameter('samtools_bam_input_param', 'string', 'input', False)
samtools_sort_sortedoutput = Parameter('samtools_sorted_output_param', 'string', 'output', False)
samtools_sort_result = Parameter('samtools_sort_status', 'int', 'output', True)
samtools_index_input = Parameter('samtools_index_input_param', 'string', 'input', False)
samtools_index_output = Parameter('samtools_index_output_param', 'string', 'output', False)
samtools_index_result = Parameter('samtools_index_status', 'string', 'output', True)
samtools_faidx_input = Parameter('samtools_faidx_input_param', 'string', 'input', False)
samtools_faidx_output = Parameter('samtools_faidx_output_param', 'string', 'output', False)
samtools_faidx_result = Parameter('samtools_faidx_status', 'string', 'output', True)
samtools_mpileup_input = Parameter('samtools_mpileup_input_param', 'string', 'input', False)
samtools_mpileup_output = Parameter('samtools_mpileup_output_param', 'string', 'inout', False)
samtools_mpileup_result = Parameter('samtools_mpileup_status', 'string', 'output', True)
samtools_bcf2vcf_input = Parameter('samtools_bcf2vcf_input_param', 'string', 'input', False)
samtools_bcf2vcf_output = Parameter('samtools_bcf2vcf_output_param', 'string', 'inout', False)
samtools_bcf2vcf_result = Parameter('samtools_bcf2vcf_status', 'string', 'output', True)
picard_remove_duplicates_input = Parameter('picard_remove_duplicates_input', 'string', 'input', False)
picard_remove_duplicates_output = Parameter('picard_remove_duplicates_output', 'string', 'inout', False)
picard_remove_duplicates_metrics = Parameter('picard_remove_duplicates_metrics', 'string', 'inout', False)
picard_remove_duplicates_result = Parameter('picard_remove_duplicates_result', 'int', 'output', True)
picard_add_read_groups_baminput = Parameter('add_read_groups_input_param', 'string', 'input', False)
picard_add_read_groups_rgidinput = Parameter('add_read_groups_rgid_param', 'string', 'input', False)
picard_add_read_groups_rglbinput = Parameter('add_read_groups_rglb_param', 'string', 'input', False)
picard_add_read_groups_rgplinput = Parameter('add_read_groups_rgpl_param', 'string', 'input', False)
picard_add_read_groups_rgpuinput = Parameter('add_read_groups_rgpu_param', 'string', 'input', False)
picard_add_read_groups_rgsminput = Parameter('add_read_groups_rgsm_param', 'string', 'input', False)
#picard_add_read_groups_result = Parameter('add_read_groups_status', 'int', 'output', False)
picard_add_read_groups_output = Parameter('add_read_groups_output_param', 'string', 'output', False)
picard_merge_sam_input = Parameter('merge_sam_input_file_list', 'list', 'input', False)
picard_merge_sam_output = Parameter('merge_sam_output_file', 'string', 'output', False)
picard_merge_sam_status = Parameter('merge_sam_status', 'int', 'output', True)
picard_create_dictionary_ref_input = Parameter('picard_create_dict_ref_input', 'string', 'input', False)
picard_create_dictionary_output = Parameter('picard_create_dict_file', 'string', 'inout', False)
picard_create_dictionary_status = Parameter('picard_create_dict_status', 'int', 'output', True)
picard_build_bam_index_input = Parameter('picard_build_bam_index_input_param', 'string', 'input', False)
picard_build_bam_index_output = Parameter('picard_build_bam_index_output_param', 'string', 'output', False)
picard_build_bam_index_result = Parameter('picard_build_bam_index_status', 'string', 'output', True)
picard_check_reference_input = Parameter('ref_genome_param', 'string', 'input', False)
picard_check_reference_output = Parameter('new_ref_genome_param', 'string', 'inout', False)
gatk_indel_targets_ref_input = Parameter('gatk_indel_targets_ref_input', 'string', 'input', False)
gatk_indel_targets_bam_input = Parameter('gatk_indel_targets_bam_input', 'string', 'input', False)
gatk_indel_targets_output = Parameter('gatk_indel_targets_output', 'string', 'inout', False)
gatk_indel_targets_status = Parameter('gatk_indel_targets_status', 'string', 'output', True)
gatk_indel_realigner_ref_input = Parameter('gatk_realigner_ref_input', 'string', 'input', False)
gatk_indel_realigner_bam_input = Parameter('gatk_realigner_bam_input', 'string', 'input', False)
gatk_indel_realigner_interval_input = Parameter('gatk_realigner_interval_input', 'string', 'input', False)
gatk_indel_realigner_output = Parameter('gatk_realigner_output', 'string', 'inout', False)
gatk_indel_realigner_status = Parameter('gatk_realigner_status', 'string', 'output', True)
gatk_base_recal_ref_genome = Parameter('gatk_recalibrator_ref_input', 'string', 'input', False)
gatk_base_recal_bam_input = Parameter('gatk_recalibrator_bam_input', 'string', 'input', False)
gatk_base_recal_known_sites = Parameter('gatk_recalibrator_known_sites_input', 'string', 'input', False)
gatk_base_recal_output = Parameter('gatk_recalibrator_output', 'string', 'inout', False)
gatk_base_recal_status = Parameter('gatk_recalibrator_status', 'string', 'output', True)
gatk_print_reads_ref_genome = Parameter('gatk_print_reads_ref_input', 'string', 'input', False)
gatk_print_reads_bam_input = Parameter('gatk_print_reads_bam_input', 'string', 'input', False)
gatk_print_reads_recal_table = Parameter('gatk_print_reads_recal_table', 'string', 'input', False)
gatk_print_reads_output = Parameter('gatk_print_reads_output', 'string', 'inout', False)
gatk_print_reads_status = Parameter('gatk_print_reads_status', 'string', 'output', True)
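# Inferred reading (an assumption -- the Parameter class itself lives in
# libhpc.cf.params and is not shown here): the positional arguments appear to
# be Parameter(name, type, direction, is_status_flag), e.g.
#
#   example_status = Parameter('example_status', 'int', 'output', True)
#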
# COMPONENT DEFINITIONS
fastq_split = Component('fastq.splitter', 'Paired FASTQ File Splitter', 'libhpc.wrapper.bio.fastqsplitter.split_fastq', [fastq_split_input, fastq_split_output1, fastq_split_output2], [])
#bwa_index = Component('bwa.index', 'BWA Index', 'bwa.index', [bwa_index_param1], [bwa_index_result])
bwa_index = Component('bwa.index', 'BWA Index', 'libhpc.wrapper.bio.bwa.index', [bwa_index_param1, bwa_index_output_file], [bwa_index_result])
bwa_aln = Component('bwa.align', 'BWA Initial Alignment', 'libhpc.wrapper.bio.bwa.align', [bwa_aln_ref_genome, bwa_aln_short_read, bwa_aln_output_file], [bwa_aln_result])
bwa_sampe = Component('bwa.sampe', 'BWA Paired Alignment', 'libhpc.wrapper.bio.bwa.sampe', [bwa_sampe_param1, bwa_sampe_param2, bwa_sampe_param3, bwa_sampe_output_file], [bwa_sampe_result], 'pre')
samtools_import = Component('samtools.import', 'SAMtools Import', 'libhpc.wrapper.bio.samtools.import_sam', [samtools_import_param1, samtools_import_param2, samtools_import_output], [samtools_import_result], 'pre')
samtools_sort = Component('samtools.sort', 'SAMtools Sort', 'libhpc.wrapper.bio.samtools.sort', [samtools_sort_baminput, samtools_sort_sortedoutput], [samtools_import_result])
samtools_index = Component('samtools.index', 'SAMtools Index', 'libhpc.wrapper.bio.samtools.index', [samtools_index_input, samtools_index_output], [samtools_index_result])
samtools_faidx = Component('samtools.faidx', 'SAMtools faidx', 'libhpc.wrapper.bio.samtools.faidx', [samtools_faidx_input, samtools_faidx_output], [samtools_faidx_result])
samtools_mpileup = Component('samtools.mpileup', 'SAMtools mpileup', 'libhpc.wrapper.bio.samtools.mpileup', [samtools_mpileup_input, samtools_mpileup_output], [samtools_mpileup_result])
samtools_bcf2vcf = Component('samtools.bcf2vcf', 'SAMtools BCF to VCF conversion', 'libhpc.wrapper.bio.samtools.bcf2vcf', [samtools_bcf2vcf_input, samtools_bcf2vcf_output], [samtools_bcf2vcf_result])
picard_add_read_groups = Component('picard.add_read_groups', 'Picard - Add Read Groups', 'libhpc.wrapper.bio.picard.add_read_groups', [picard_add_read_groups_baminput, picard_add_read_groups_rgidinput, picard_add_read_groups_rglbinput, picard_add_read_groups_rgplinput, picard_add_read_groups_rgpuinput, picard_add_read_groups_rgsminput], [picard_add_read_groups_output]) # , picard_add_read_groups_result removed from return values
picard_merge_sam = Component('picard.merge_sam', 'Picard - Merge SAM/BAM files', 'libhpc.wrapper.bio.picard.merge_sam', [picard_merge_sam_input], [picard_merge_sam_output])
picard_remove_duplicates = Component('picard.remove_duplicates', 'Picard - Remove Duplicates', 'libhpc.wrapper.bio.picard.remove_duplicates', [picard_remove_duplicates_input, picard_remove_duplicates_output, picard_remove_duplicates_metrics], [picard_remove_duplicates_result])
picard_create_dictionary = Component('picard.create_dictionary', 'Picard - Create Dictionary', 'libhpc.wrapper.bio.picard.create_dictionary', [picard_create_dictionary_ref_input, picard_create_dictionary_output], [picard_create_dictionary_status])
picard_build_bam_index = Component('picard.build_bam_index', 'Picard - Build BAM Index', 'libhpc.wrapper.bio.picard.build_bam_index', [picard_build_bam_index_input, picard_build_bam_index_output], [picard_build_bam_index_result])
picard_check_reference = Component('picard.check_reference', 'Check reference genome extension', 'libhpc.wrapper.bio.picard.check_rename_reference', [picard_check_reference_input, picard_check_reference_output], [])
gatk_realigner_targets = Component('gatk.realigner_targets', 'Generate realigner targets for indel processing', 'libhpc.wrapper.bio.gatk.create_realigner_targets', [gatk_indel_targets_ref_input, gatk_indel_targets_bam_input, gatk_indel_targets_output], [gatk_indel_targets_status])
gatk_indel_realigner = Component('gatk.indel_realigner', 'Realign BAM based on indels', 'libhpc.wrapper.bio.gatk.indel_realigner', [gatk_indel_realigner_ref_input, gatk_indel_realigner_bam_input, gatk_indel_realigner_interval_input, gatk_indel_realigner_output], [gatk_indel_realigner_status])
gatk_base_recalibrator = Component('gatk.base_recalibrator', 'GATK Base Recalibrator', 'libhpc.wrapper.bio.gatk.base_recalibrator', [gatk_base_recal_ref_genome, gatk_base_recal_bam_input, gatk_base_recal_known_sites, gatk_base_recal_output], [gatk_base_recal_status], 'pre')
gatk_print_reads = Component('gatk.print_reads', 'GATK Print Reads', 'libhpc.wrapper.bio.gatk.print_reads', [gatk_print_reads_ref_genome, gatk_print_reads_bam_input, gatk_print_reads_recal_table, gatk_print_reads_output], [gatk_print_reads_status], 'pre')
| london-escience/libhpc-cf | libhpc/component/bio.py | Python | bsd-3-clause | 13,873 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Worker.ping_response_dts'
db.add_column('job_runner_worker', 'ping_response_dts',
self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Worker.ping_response_dts'
db.delete_column('job_runner_worker', 'ping_response_dts')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'job_runner.job': {
'Meta': {'ordering': "('title',)", 'unique_together': "(('title', 'job_template'),)", 'object_name': 'Job'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'disable_enqueue_after_fails': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'fail_times': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.JobTemplate']"}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['job_runner.Job']"}),
'reschedule_interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'reschedule_interval_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'blank': 'True'}),
'reschedule_type': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '18', 'blank': 'True'}),
'script_content': ('django.db.models.fields.TextField', [], {}),
'script_content_partial': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'job_runner.jobtemplate': {
'Meta': {'ordering': "('title',)", 'object_name': 'JobTemplate'},
'auth_groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'body': ('django.db.models.fields.TextField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'worker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Worker']"})
},
'job_runner.killrequest': {
'Meta': {'object_name': 'KillRequest'},
'enqueue_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'execute_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'run': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Run']"}),
'schedule_dts': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'})
},
'job_runner.project': {
'Meta': {'ordering': "('title',)", 'object_name': 'Project'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'job_runner.rescheduleexclude': {
'Meta': {'object_name': 'RescheduleExclude'},
'end_time': ('django.db.models.fields.TimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Job']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'start_time': ('django.db.models.fields.TimeField', [], {})
},
'job_runner.run': {
'Meta': {'ordering': "('-return_dts', '-start_dts', '-enqueue_dts', 'schedule_dts')", 'object_name': 'Run'},
'enqueue_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manual': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Job']"}),
'pid': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True'}),
'return_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'return_success': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'schedule_children': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'schedule_dts': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'start_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'})
},
'job_runner.runlog': {
'Meta': {'ordering': "('-run',)", 'object_name': 'RunLog'},
'content': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'run': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'run_log'", 'unique': 'True', 'to': "orm['job_runner.Run']"})
},
'job_runner.worker': {
'Meta': {'ordering': "('title',)", 'object_name': 'Worker'},
'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'enqueue_is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_addresses': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'ping_response_dts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['job_runner.Project']"}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
    complete_apps = ['job_runner']
| spilgames/job-runner | job_runner/apps/job_runner/migrations/0013_auto__add_field_worker_ping_response_dts.py | Python | bsd-3-clause | 9,629 |
# -*- coding: utf-8 -*-
"""
eve.io.mongo.geo
~~~~~~~~~~~~~~~~~~~
Geospatial functions and classes for mongo IO layer
:copyright: (c) 2017 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
class GeoJSON(dict):
def __init__(self, json):
try:
self['type'] = json['type']
except KeyError:
raise TypeError("Not compilant to GeoJSON")
self.update(json)
if len(self.keys()) != 2:
raise TypeError("Not compilant to GeoJSON")
def _correct_position(self, position):
return isinstance(position, list) and \
all(isinstance(pos, int) or isinstance(pos, float)
for pos in position)
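        # e.g. [100.0, 0.0] is a valid position; ['100', 0.0] and [[0, 1]] are not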
class Geometry(GeoJSON):
def __init__(self, json):
super(Geometry, self).__init__(json)
try:
if not isinstance(self['coordinates'], list) or \
self['type'] != self.__class__.__name__:
raise TypeError
except (KeyError, TypeError):
raise TypeError("Geometry not compilant to GeoJSON")
class GeometryCollection(GeoJSON):
def __init__(self, json):
super(GeometryCollection, self).__init__(json)
try:
if not isinstance(self['geometries'], list):
raise TypeError
for geometry in self['geometries']:
factory = factories[geometry["type"]]
factory(geometry)
except (KeyError, TypeError, AttributeError):
raise TypeError("Geometry not compilant to GeoJSON")
class Point(Geometry):
def __init__(self, json):
super(Point, self).__init__(json)
if not self._correct_position(self['coordinates']):
raise TypeError
class MultiPoint(GeoJSON):
def __init__(self, json):
super(MultiPoint, self).__init__(json)
for position in self["coordinates"]:
if not self._correct_position(position):
raise TypeError
class LineString(GeoJSON):
def __init__(self, json):
super(LineString, self).__init__(json)
for position in self["coordinates"]:
if not self._correct_position(position):
raise TypeError
class MultiLineString(GeoJSON):
def __init__(self, json):
super(MultiLineString, self).__init__(json)
for linestring in self["coordinates"]:
for position in linestring:
if not self._correct_position(position):
raise TypeError
class Polygon(GeoJSON):
def __init__(self, json):
super(Polygon, self).__init__(json)
for linestring in self["coordinates"]:
for position in linestring:
if not self._correct_position(position):
raise TypeError
class MultiPolygon(GeoJSON):
def __init__(self, json):
super(MultiPolygon, self).__init__(json)
for polygon in self["coordinates"]:
for linestring in polygon:
for position in linestring:
if not self._correct_position(position):
raise TypeError
factories = dict([(_type.__name__, _type)
for _type in
[GeometryCollection, Point, MultiPoint, LineString,
MultiLineString, Polygon, MultiPolygon]])
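# Minimal usage sketch (illustrative only, not part of the module's API):
# validate an incoming GeoJSON dict by dispatching on its 'type' field.
#
#   geom = {'type': 'Point', 'coordinates': [100.0, 0.0]}
#   point = factories[geom['type']](geom)   # raises TypeError if malformed
#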
| bcrochet/eve | eve/io/mongo/geo.py | Python | bsd-3-clause | 3,352 |
from __future__ import with_statement, absolute_import
from django.contrib import admin
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.views.main import ChangeList, SEARCH_VAR, ALL_VAR
from django.contrib.auth.models import User
from django.template import Context, Template
from django.test import TestCase
from django.test.client import RequestFactory
from .admin import (ChildAdmin, QuartetAdmin, BandAdmin, ChordsBandAdmin,
GroupAdmin, ParentAdmin, DynamicListDisplayChildAdmin,
DynamicListDisplayLinksChildAdmin, CustomPaginationAdmin,
FilteredChildAdmin, CustomPaginator, site as custom_site,
SwallowAdmin)
from .models import (Child, Parent, Genre, Band, Musician, Group, Quartet,
Membership, ChordsMusician, ChordsBand, Invitation, Swallow,
UnorderedObject, OrderedObject)
class ChangeListTests(TestCase):
urls = "regressiontests.admin_changelist.urls"
def setUp(self):
self.factory = RequestFactory()
def _create_superuser(self, username):
return User.objects.create(username=username, is_superuser=True)
def _mocked_authenticated_request(self, url, user):
request = self.factory.get(url)
request.user = user
return request
def test_select_related_preserved(self):
"""
Regression test for #10348: ChangeList.get_query_set() shouldn't
overwrite a custom select_related provided by ModelAdmin.queryset().
"""
m = ChildAdmin(Child, admin.site)
request = self.factory.get('/child/')
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
self.assertEqual(cl.query_set.query.select_related, {'parent': {'name': {}}})
def test_result_list_empty_changelist_value(self):
"""
Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored
for relationship fields
"""
new_child = Child.objects.create(name='name', parent=None)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
cl = ChangeList(request, Child, list_display, list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.formset = None
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">(None)</td></tr></tbody>' % new_child.id
self.assertFalse(table_output.find(row_html) == -1,
'Failed to find expected row element: %s' % table_output)
def test_result_list_html(self):
"""
        Verifies that the result_list inclusion tag generates a table with
        default ModelAdmin settings.
"""
new_parent = Parent.objects.create(name='parent')
new_child = Child.objects.create(name='name', parent=new_parent)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
cl = ChangeList(request, Child, list_display, list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.formset = None
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">Parent object</td></tr></tbody>' % new_child.id
self.assertFalse(table_output.find(row_html) == -1,
'Failed to find expected row element: %s' % table_output)
def test_result_list_editable_html(self):
"""
        Regression tests for #11791: the result_list inclusion tag generates a
        table; this checks that the items are correctly nested within the
        table element tags.
Also a regression test for #13599, verifies that hidden fields
when list_editable is enabled are rendered in a div outside the
table.
"""
new_parent = Parent.objects.create(name='parent')
new_child = Child.objects.create(name='name', parent=new_parent)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
# Test with list_editable fields
m.list_display = ['id', 'name', 'parent']
m.list_display_links = ['id']
m.list_editable = ['name']
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
FormSet = m.get_changelist_formset(request)
cl.formset = FormSet(queryset=cl.result_list)
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
# make sure that hidden fields are in the correct place
hiddenfields_div = '<div class="hiddenfields"><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></div>' % new_child.id
self.assertFalse(table_output.find(hiddenfields_div) == -1,
'Failed to find hidden fields in: %s' % table_output)
# make sure that list editable fields are rendered in divs correctly
editable_name_field = '<input name="form-0-name" value="name" class="vTextField" maxlength="30" type="text" id="id_form-0-name" />'
        self.assertFalse(table_output.find('<td>%s</td>' % editable_name_field) == -1,
            'Failed to find "name" list_editable field in: %s' % table_output)
def test_result_list_editable(self):
"""
Regression test for #14312: list_editable with pagination
"""
new_parent = Parent.objects.create(name='parent')
for i in range(200):
new_child = Child.objects.create(name='name %s' % i, parent=new_parent)
request = self.factory.get('/child/', data={'p': -1}) # Anything outside range
m = ChildAdmin(Child, admin.site)
# Test with list_editable fields
m.list_display = ['id', 'name', 'parent']
m.list_display_links = ['id']
m.list_editable = ['name']
self.assertRaises(IncorrectLookupParameters, lambda: \
ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m))
def test_custom_paginator(self):
new_parent = Parent.objects.create(name='parent')
for i in range(200):
new_child = Child.objects.create(name='name %s' % i, parent=new_parent)
request = self.factory.get('/child/')
m = CustomPaginationAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
self.assertIsInstance(cl.paginator, CustomPaginator)
def test_distinct_for_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
        results shouldn't appear more than once. Basic ManyToMany.
"""
blues = Genre.objects.create(name='Blues')
band = Band.objects.create(name='B.B. King Review', nr_of_members=11)
band.genres.add(blues)
band.genres.add(blues)
m = BandAdmin(Band, admin.site)
request = self.factory.get('/band/', data={'genres': blues.pk})
cl = ChangeList(request, Band, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one Group instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_through_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
        results shouldn't appear more than once. With an intermediate model.
"""
lead = Musician.objects.create(name='Vox')
band = Group.objects.create(name='The Hype')
Membership.objects.create(group=band, music=lead, role='lead voice')
Membership.objects.create(group=band, music=lead, role='bass player')
m = GroupAdmin(Group, admin.site)
request = self.factory.get('/group/', data={'members': lead.pk})
cl = ChangeList(request, Group, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one Group instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_inherited_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
        results shouldn't appear more than once. Model managed in the
        admin inherits from the one that defines the relationship.
"""
lead = Musician.objects.create(name='John')
four = Quartet.objects.create(name='The Beatles')
Membership.objects.create(group=four, music=lead, role='lead voice')
Membership.objects.create(group=four, music=lead, role='guitar player')
m = QuartetAdmin(Quartet, admin.site)
request = self.factory.get('/quartet/', data={'members': lead.pk})
cl = ChangeList(request, Quartet, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one Quartet instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_m2m_to_inherited_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
        results shouldn't appear more than once. Target of the relationship
inherits from another.
"""
lead = ChordsMusician.objects.create(name='Player A')
three = ChordsBand.objects.create(name='The Chords Trio')
Invitation.objects.create(band=three, player=lead, instrument='guitar')
Invitation.objects.create(band=three, player=lead, instrument='bass')
m = ChordsBandAdmin(ChordsBand, admin.site)
request = self.factory.get('/chordsband/', data={'members': lead.pk})
cl = ChangeList(request, ChordsBand, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one ChordsBand instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_non_unique_related_object_in_list_filter(self):
"""
        Regression tests for #15819: If a field listed in list_filter
is a non-unique related object, distinct() must be called.
"""
parent = Parent.objects.create(name='Mary')
# Two children with the same name
Child.objects.create(parent=parent, name='Daniel')
Child.objects.create(parent=parent, name='Daniel')
m = ParentAdmin(Parent, admin.site)
request = self.factory.get('/parent/', data={'child__name': 'Daniel'})
cl = ChangeList(request, Parent, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
# Make sure distinct() was called
self.assertEqual(cl.query_set.count(), 1)
def test_distinct_for_non_unique_related_object_in_search_fields(self):
"""
        Regression tests for #15819: If a field listed in search_fields
is a non-unique related object, distinct() must be called.
"""
parent = Parent.objects.create(name='Mary')
Child.objects.create(parent=parent, name='Danielle')
Child.objects.create(parent=parent, name='Daniel')
m = ParentAdmin(Parent, admin.site)
request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'})
cl = ChangeList(request, Parent, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
# Make sure distinct() was called
self.assertEqual(cl.query_set.count(), 1)
def test_pagination(self):
"""
        Regression tests for #12893: Pagination in the admin's changelist
        doesn't use the queryset set by the ModelAdmin.
"""
parent = Parent.objects.create(name='anything')
for i in range(30):
Child.objects.create(name='name %s' % i, parent=parent)
Child.objects.create(name='filtered %s' % i, parent=parent)
request = self.factory.get('/child/')
# Test default queryset
m = ChildAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all,
m.list_editable, m)
self.assertEqual(cl.query_set.count(), 60)
self.assertEqual(cl.paginator.count, 60)
self.assertEqual(cl.paginator.page_range, [1, 2, 3, 4, 5, 6])
# Test custom queryset
m = FilteredChildAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all,
m.list_editable, m)
self.assertEqual(cl.query_set.count(), 30)
self.assertEqual(cl.paginator.count, 30)
self.assertEqual(cl.paginator.page_range, [1, 2, 3])
def test_dynamic_list_display(self):
"""
Regression tests for #14206: dynamic list_display support.
"""
parent = Parent.objects.create(name='parent')
for i in range(10):
Child.objects.create(name='child %s' % i, parent=parent)
user_noparents = self._create_superuser('noparents')
user_parents = self._create_superuser('parents')
# Test with user 'noparents'
m = custom_site._registry[Child]
request = self._mocked_authenticated_request('/child/', user_noparents)
response = m.changelist_view(request)
self.assertNotContains(response, 'Parent object')
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
self.assertEqual(list_display, ['name', 'age'])
self.assertEqual(list_display_links, ['name'])
# Test with user 'parents'
m = DynamicListDisplayChildAdmin(Child, admin.site)
request = self._mocked_authenticated_request('/child/', user_parents)
response = m.changelist_view(request)
self.assertContains(response, 'Parent object')
custom_site.unregister(Child)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
self.assertEqual(list_display, ('parent', 'name', 'age'))
self.assertEqual(list_display_links, ['parent'])
# Test default implementation
custom_site.register(Child, ChildAdmin)
m = custom_site._registry[Child]
request = self._mocked_authenticated_request('/child/', user_noparents)
response = m.changelist_view(request)
self.assertContains(response, 'Parent object')
def test_show_all(self):
parent = Parent.objects.create(name='anything')
for i in range(30):
Child.objects.create(name='name %s' % i, parent=parent)
Child.objects.create(name='filtered %s' % i, parent=parent)
# Add "show all" parameter to request
request = self.factory.get('/child/', data={ALL_VAR: ''})
# Test valid "show all" request (number of total objects is under max)
m = ChildAdmin(Child, admin.site)
# 200 is the max we'll pass to ChangeList
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, 200, m.list_editable, m)
cl.get_results(request)
self.assertEqual(len(cl.result_list), 60)
# Test invalid "show all" request (number of total objects over max)
# falls back to paginated pages
m = ChildAdmin(Child, admin.site)
# 30 is the max we'll pass to ChangeList for this test
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, 30, m.list_editable, m)
cl.get_results(request)
self.assertEqual(len(cl.result_list), 10)
def test_dynamic_list_display_links(self):
"""
Regression tests for #16257: dynamic list_display_links support.
"""
parent = Parent.objects.create(name='parent')
for i in range(1, 10):
Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i)
m = DynamicListDisplayLinksChildAdmin(Child, admin.site)
superuser = self._create_superuser('superuser')
request = self._mocked_authenticated_request('/child/', superuser)
response = m.changelist_view(request)
for i in range(1, 10):
self.assertContains(response, '<a href="%s/">%s</a>' % (i, i))
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
self.assertEqual(list_display, ('parent', 'name', 'age'))
self.assertEqual(list_display_links, ['age'])
def test_tuple_list_display(self):
"""
Regression test for #17128
(ChangeList failing under Python 2.5 after r16319)
"""
swallow = Swallow.objects.create(
origin='Africa', load='12.34', speed='22.2')
model_admin = SwallowAdmin(Swallow, admin.site)
superuser = self._create_superuser('superuser')
request = self._mocked_authenticated_request('/swallow/', superuser)
response = model_admin.changelist_view(request)
# just want to ensure it doesn't blow up during rendering
self.assertContains(response, unicode(swallow.origin))
self.assertContains(response, unicode(swallow.load))
self.assertContains(response, unicode(swallow.speed))
def test_deterministic_order_for_unordered_model(self):
"""
Ensure that the primary key is systematically used in the ordering of
the changelist's results to guarantee a deterministic order, even
when the Model doesn't have any default ordering defined.
Refs #17198.
"""
superuser = self._create_superuser('superuser')
for counter in range(1, 51):
UnorderedObject.objects.create(id=counter, bool=True)
class UnorderedObjectAdmin(admin.ModelAdmin):
list_per_page = 10
def check_results_order(reverse=False):
admin.site.register(UnorderedObject, UnorderedObjectAdmin)
model_admin = UnorderedObjectAdmin(UnorderedObject, admin.site)
counter = 51 if reverse else 0
            for page in range(0, 5):
request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser)
response = model_admin.changelist_view(request)
for result in response.context_data['cl'].result_list:
counter += -1 if reverse else 1
self.assertEqual(result.id, counter)
admin.site.unregister(UnorderedObject)
# When no order is defined at all, everything is ordered by 'pk'.
check_results_order()
# When an order field is defined but multiple records have the same
# value for that field, make sure everything gets ordered by pk as well.
UnorderedObjectAdmin.ordering = ['bool']
check_results_order()
# When order fields are defined, including the pk itself, use them.
UnorderedObjectAdmin.ordering = ['bool', '-pk']
check_results_order(reverse=True)
UnorderedObjectAdmin.ordering = ['bool', 'pk']
check_results_order()
UnorderedObjectAdmin.ordering = ['-id', 'bool']
check_results_order(reverse=True)
UnorderedObjectAdmin.ordering = ['id', 'bool']
check_results_order()
def test_deterministic_order_for_model_ordered_by_its_manager(self):
"""
Ensure that the primary key is systematically used in the ordering of
the changelist's results to guarantee a deterministic order, even
when the Model has a manager that defines a default ordering.
Refs #17198.
"""
superuser = self._create_superuser('superuser')
for counter in range(1, 51):
OrderedObject.objects.create(id=counter, bool=True, number=counter)
class OrderedObjectAdmin(admin.ModelAdmin):
list_per_page = 10
def check_results_order(reverse=False):
admin.site.register(OrderedObject, OrderedObjectAdmin)
model_admin = OrderedObjectAdmin(OrderedObject, admin.site)
counter = 51 if reverse else 0
            for page in range(0, 5):
request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser)
response = model_admin.changelist_view(request)
for result in response.context_data['cl'].result_list:
counter += -1 if reverse else 1
self.assertEqual(result.id, counter)
admin.site.unregister(OrderedObject)
# When no order is defined at all, use the model's default ordering (i.e. '-number')
check_results_order(reverse=True)
# When an order field is defined but multiple records have the same
# value for that field, make sure everything gets ordered by pk as well.
OrderedObjectAdmin.ordering = ['bool']
check_results_order()
# When order fields are defined, including the pk itself, use them.
OrderedObjectAdmin.ordering = ['bool', '-pk']
check_results_order(reverse=True)
OrderedObjectAdmin.ordering = ['bool', 'pk']
check_results_order()
OrderedObjectAdmin.ordering = ['-id', 'bool']
check_results_order(reverse=True)
OrderedObjectAdmin.ordering = ['id', 'bool']
check_results_order() | lisael/pg-django | tests/regressiontests/admin_changelist/tests.py | Python | bsd-3-clause | 24,196 |
"""
Utilities for plotting various figures and animations in EEG101.
"""
# Author: Hubert Banville <hubert@neurotechx.com>
#
# License: TBD
import numpy as np
import matplotlib.pylab as plt
import collections
from scipy import signal
def dot_plot(x, labels, step=1, figsize=(12,8)):
"""
Make a 1D dot plot.
Inputs
x : 1D array containing the points to plot
labels : 1D array containing the label for each point in x
    step : vertical space between two points
    figsize : size of the figure
    """
# Get the histogram for each class
classes = np.unique(labels)
hist = [np.histogram(x[labels==c], density=True) for c in classes]
# Prepare the figure
fig, ax = plt.subplots(figsize=figsize)
for hi, h in enumerate(hist):
bin_centers = (h[1][1:] + h[1][0:-1])/2. # Get bin centers
# Format the data so that each bin has as many points as the histogram bar for that bin
x1 = []
y1 = []
for i, j in zip(np.round(h[0]).astype(int), bin_centers):
y = range(0, i, step)
y1 += y
x1 += [j]*len(y)
# Plot
ax.plot(x1, (-1)**hi*np.array(y1), 'o', markersize=10, label=classes[hi])
ax.legend(scatterpoints=1)
ax.set_xlabel('Alpha power')
ax.set_ylabel('Number of points')
ax.set_yticklabels([])
ax.set_yticks([])
ax.legend()
plt.tight_layout()
def psd_with_bands_plot(f, psd, figsize=(12,8)):
"""
Plot a static PSD.
INPUTS
f : 1D array containing frequencies of the PSD
psd : 1D array containing the power at each frequency in f
figsize : figure size
"""
bands = collections.OrderedDict()
bands[r'$\delta$'] = (0,4)
bands[r'$\theta$'] = (4,8)
bands[r'$\alpha$'] = (8,13)
bands[r'$\beta$'] = (13, 30)
bands[r'$\gamma$'] = (30, 120)
fig, ax = plt.subplots(figsize=figsize)
ax.plot(f, psd)
ax.set_xlabel('Frequency (Hz)')
ax.set_ylabel('Power (dB)')
ylim = ax.get_ylim()
for i, [bkey, bfreq] in enumerate(bands.iteritems()):
ind = (f>=bfreq[0]) & (f<=bfreq[1])
f1 = f[ind]
y1 = psd[ind]
ax.fill_between(f1, y1, ylim[0], facecolor=[(0.7, i/5., 0.7)], alpha=0.5)
ax.text(np.mean(f1), (ylim[0] + ylim[1])/1.22, bkey, fontsize=16, verticalalignment='top', horizontalalignment='center')
ax.set_xlim([min(f), max(f)])
def sinewave(A, f, phi, t):
"""
Return a sine wave with specified parameters at the given time points.
INPUTS
A : Amplitude
f : Frequency (Hz)
phi : Phase (rad)
t : time (in s)
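    Example (illustrative):
    >>> sinewave(1.0, 10.0, 0.0, 0.025)  # quarter period of a 10 Hz sine
    1.0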
"""
return A*np.sin(2*np.pi*f*t + phi)
def animate_signals(nb_signals, incre, fs=256, refresh_rate=30., anim_dur=10., figsize=(12,8)):
"""
Draw and update a figure in real-time representing the summation of many
sine waves, to explain the concept of Fourier decomposition.
INPUTS
nb_signals : number of signals to sum together
incre : increment, in Hz, between each of the signals
fs : sampling frequency
refresh_rate : refresh rate of the animation
anim_dur : approximate duration of the animation, in seconds
"""
# Initialize values that remain constant throughout the animation
A = 1
t = np.linspace(0, 2, fs)
offsets = np.arange(nb_signals+1).reshape((nb_signals+1,1))*(A*(nb_signals+1))
freqs = np.arange(nb_signals)*incre
# Initialize the figure
fig, ax = plt.subplots(figsize=figsize)
ax.hold(True)
plt.xlabel('Time')
ax.yaxis.set_ticks(offsets)
ax.set_yticklabels([str(f)+' Hz' for f in freqs] + ['Sum'])
ax.xaxis.set_ticks([])
# Initialize the Line2D elements for each signal
sines = np.array([sinewave(A, f, 0, t) for f in freqs])
sines = np.vstack((sines, np.sum(sines, axis=0))) + offsets
points = [ax.plot(t, x)[0] for x in sines]
# Animation refresh loop
for i in np.arange(anim_dur*refresh_rate):
# Update time
t = np.linspace(0, 2, fs) + i*fs/refresh_rate
# Update signals
sines = np.array([sinewave(A, f, 0, t) for f in freqs])
sines = np.vstack((sines, np.sum(sines, axis=0))) + offsets
# Update figure
for p, x in zip(points, sines):
p.set_ydata(x)
# Wait before starting another cycle
plt.pause(1./refresh_rate)
if __name__ == '__main__':
# 1) DISTRIBUTION OF TRAINING DATA
# Generate fake data
nb_points = 10*10
relax_data = np.random.normal(0.01, 0.01, size=(nb_points,))
focus_data = np.random.normal(0.03, 0.01, size=(nb_points,))
dot_plot(x=np.concatenate((relax_data, focus_data)),
labels=np.concatenate((np.zeros((nb_points,)), np.ones((nb_points,)))),
step=4)
# 2) PSD PLOT
# Generate fake data
f = np.arange(0, 110, 1) # one-second windows = 1-Hz bins
psd = 10*np.log10(1./f)
psd_with_bands_plot(f, psd)
# 3) FOURIER DECOMPOSITION ANIMATION
animate_signals(4, 2) | NeuroTechX/eeg-101 | python_tools/utilities.py | Python | isc | 5,241 |
import datetime
import uuid
import pytz
import os
import time
from django.contrib.sites.shortcuts import get_current_site
from django.core.servers.basehttp import FileWrapper
from django.views.decorators.csrf import csrf_exempt
from django.core.urlresolvers import reverse, resolve
from django.db.models import FieldDoesNotExist
from django.http import HttpResponse
from django.shortcuts import redirect
from django.views.decorators.http import require_POST
from django.views.generic.base import TemplateView
from django.views.generic import DetailView
from django.views.generic.list import ListView
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.contrib.humanize.templatetags.humanize import naturaltime
from . import cid
from .models import BuildInfo, Project
from settings import MAX_CONCURRENT_BUILDS
def cid_context(request):
"""
Main context processor, adds main menu.
"""
main_menu = []
if request.user.is_authenticated():
if request.user.is_staff:
main_menu = []
else:
main_menu = []
# TODO: add github url here
return {'request': request,
'main_menu': main_menu,
'admin_access': request.user.is_staff,
'messages': []}
class PageWithAjax(TemplateView):
template_name = "ajax_page.jinja"
ajax_url = None
def dispatch(self, request, *args, **kwargs):
self.ajax_url = self.kwargs.pop('ajax_url')
return super(PageWithAjax, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(PageWithAjax, self).get_context_data(**kwargs)
ajax_url = reverse(self.ajax_url, args=self.args, kwargs=self.kwargs)
context['ajax_url'] = ajax_url
response = resolve(ajax_url).func(self.request, *self.args, **self.kwargs)
content = getattr(response, 'rendered_content')
context['initial_content'] = content
return context
page_with_ajax = login_required(PageWithAjax.as_view())
class BuildMixin(object):
status = 200
model = None
link_column = None
columns = []
    live_times = ['time_taken']
def render_to_response(self, context, **response_kwargs):
return super(BuildMixin, self).render_to_response(context, status=self.status, **response_kwargs)
def get_context_data(self, **kwargs):
context = super(BuildMixin, self).get_context_data(**kwargs)
context['columns'] = self.columns
context['link_column'] = self.link_column
context['headings'] = self._headings()
context['get_value'] = self._get_value
context['get_verbose_name'] = self._get_verbose_name
context['live_times'] = self.live_times
return context
def _headings(self):
for attr_name in self.columns:
yield self._get_verbose_name(attr_name)
def _get_verbose_name(self, attr_name):
meta = self.model._meta
try:
field = meta.get_field_by_name(attr_name)[0]
return field.verbose_name
except FieldDoesNotExist:
if hasattr(self.model, attr_name) and hasattr(getattr(self.model, attr_name), 'short_description'):
return getattr(self.model, attr_name).short_description
else:
return attr_name
def _get_value(self, obj, attr_name):
value = getattr(obj, attr_name)
if hasattr(value, '__call__'):
value = value()
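        # The boolean in the returned tuple flags values that are already
        # rendered HTML (the live-time span) so templates can skip escaping.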
if attr_name in self.live_times and isinstance(value, datetime.datetime):
return '<span class="live-time" data-start="%s"></span>' % value.isoformat(), True
if isinstance(value, datetime.datetime):
value = naturaltime(value)
return value, False
class BuildList(BuildMixin, ListView):
"""
List of previous builds
"""
model = BuildInfo
template_name = 'build_list.jinja'
link_column = 'created'
columns = ('created', 'time_taken', 'trigger', 'label', 'author', 'show_coverage', 'successful')
paginate_by = 50
def dispatch(self, request, *args, **kwargs):
if not any_active_builds(self.request):
self.status = 201
return super(BuildList, self).dispatch(request, *args, **kwargs)
build_list_ajax = login_required(BuildList.as_view())
class BuildDetails(BuildMixin, DetailView):
"""
    Details of a build.
"""
model = BuildInfo
template_name = 'build.jinja'
columns = ('created',
'modified',
'time_taken',
'trigger',
'action',
'label',
'on_master',
'fetch_branch',
'commit_url',
'author',
'complete',
'queued',
'test_success',
'test_passed',
'container',)
def get_context_data(self, **kwargs):
self.object = check(self.request, self.object)
if self.object.complete:
self.status = 202
if self.object.process_log:
self.object.process_log = self.object.process_log.replace(self.object.project.github_token,
'<github token>')
return super(BuildDetails, self).get_context_data(**kwargs)
build_details_ajax = login_required(BuildDetails.as_view())
@csrf_exempt
@require_POST
def webhook(request, pk):
project = get_project(pk)
if not project:
return HttpResponse('no project created', status=403)
    # sleep briefly to slow down brute-force attempts at guessing the key
time.sleep(0.2)
build_info = BuildInfo.objects.create(project=project)
response_code, build_info2 = cid.process_github_webhook(request, build_info)
if response_code == 202:
set_site(build_info.project, request)
if _start_queue_build(build_info2):
msg = 'build started, id = %d' % build_info2.id
else:
msg = 'build queued, id = %d' % build_info2.id
response_code = 201
else:
build_info.delete()
msg = str(build_info2)
return HttpResponse(msg, status=response_code)
def status_svg(request, pk):
project = get_project(pk)
svg = project.status_svg if project else 'null.svg'
svg_path = os.path.join(os.path.dirname(__file__), 'static', svg)
response = HttpResponse(FileWrapper(open(svg_path)), content_type='image/svg+xml')
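    # A fresh ETag plus the no-cache headers below force badge consumers
    # (e.g. caching image proxies) to re-fetch the SVG rather than serve a
    # stale status.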
response['Etag'] = '"%s"' % uuid.uuid4()
response['Cache-Control'] = 'no-cache'
response['Expires'] = datetime.datetime.now().replace(tzinfo=pytz.UTC).strftime('%a, %d %b %Y %H:%M:%S %Z')
return response
@login_required
@require_POST
def go_build(request):
project = get_project()
if project:
set_site(project, request)
build_info = BuildInfo.objects.create(trigger='manual',
author=request.user.username,
project=project,
on_master=True)
if not _start_queue_build(build_info):
messages.info(request, 'build queued')
else:
messages.warning(request, 'No project created')
return redirect(reverse('build-list'))
def _start_queue_build(build_info):
"""
Check whether the build can begin immediately or needs to be queued.
If it can start; start it, else set queued to True and save build_info.
:param build_info: BuildInfo instance to queue or start
:returns: True if build started, else False
"""
if BuildInfo.objects.filter(complete=False, queued=False).count() >= MAX_CONCURRENT_BUILDS:
build_info.queued = True
build_info.save()
else:
cid.build(build_info)
return not build_info.queued
def check(request, build_info):
    bi = build_info
    try:
        set_site(bi.project, request)
        bi = cid.check(build_info)
    except cid.KnownError, e:
        messages.error(request, str(e))
        bi = build_info
    return bi
def check_build(request, build_info):
extract = ['sha', 'complete', 'test_success', 'test_passed', 'start', 'finished', 'process_log', 'ci_log']
bi = check(request, build_info)
return {at: getattr(bi, at) for at in extract}
def any_active_builds(r):
return any([not check_build(r, bi)['complete'] for bi in BuildInfo.objects.filter(complete=False)])
def set_site(project, request):
current_site = get_current_site(request)
project.update_url = 'http://' + current_site.domain + '/'
project.save()
def get_project(pk=None):
"""
    Gets the first project; a stop-gap until more than one project is supported.
"""
projects = Project.objects.all()
if pk is not None:
projects = projects.filter(pk=pk)
return projects.first()
| codeadict/kushillu | kushillu/views.py | Python | mit | 8,944 |
"""
Tests related specifically to the FacilityDataset model.
"""
from django.db.utils import IntegrityError
from django.test import TestCase
from ..models import Classroom
from ..models import Facility
from ..models import FacilityDataset
from ..models import FacilityUser
from ..models import LearnerGroup
class FacilityDatasetTestCase(TestCase):
def setUp(self):
self.facility = Facility.objects.create()
self.classroom = Classroom.objects.create(parent=self.facility)
self.learner_group = LearnerGroup.objects.create(parent=self.classroom)
self.facility_user = FacilityUser.objects.create(
username="blah", password="#", facility=self.facility
)
def test_datasets_equal(self):
self.assertTrue(self.facility.dataset is not None)
self.assertEqual(self.facility.dataset, self.classroom.dataset)
self.assertEqual(self.classroom.dataset, self.learner_group.dataset)
self.assertEqual(self.learner_group.dataset, self.facility_user.dataset)
def test_cannot_create_role_across_datasets(self):
facility2 = Facility.objects.create()
with self.assertRaises(IntegrityError):
facility2.add_admin(self.facility_user)
def test_cannot_create_membership_across_datasets(self):
facility2 = Facility.objects.create()
facility_user2 = FacilityUser.objects.create(
username="blah", password="#", facility=facility2
)
with self.assertRaises(IntegrityError):
self.learner_group.add_learner(facility_user2)
def test_cannot_pass_inappropriate_dataset(self):
facility2 = Facility.objects.create()
with self.assertRaises(IntegrityError):
FacilityUser.objects.create(
facility=self.facility, dataset=facility2.dataset
)
def test_cannot_change_dataset(self):
facility2 = Facility.objects.create()
self.facility_user.dataset = facility2.dataset
with self.assertRaises(IntegrityError):
self.facility_user.save()
def test_cannot_change_facility(self):
facility2 = Facility.objects.create()
self.facility_user.facility = facility2
with self.assertRaises(IntegrityError):
self.facility_user.save()
def test_manually_passing_dataset_for_new_facility(self):
dataset = FacilityDataset.objects.create()
facility = Facility(name="blah", dataset=dataset)
facility.full_clean()
facility.save()
self.assertEqual(dataset, facility.dataset)
def test_dataset_representation(self):
self.assertEqual(
str(self.facility.dataset),
"FacilityDataset for {}".format(self.facility.name),
)
new_dataset = FacilityDataset.objects.create()
self.assertEqual(str(new_dataset), "FacilityDataset (no associated Facility)")
| lyw07/kolibri | kolibri/core/auth/test/test_datasets.py | Python | mit | 2,905 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/loot/loot_schematic/shared_spear_rack_schematic.iff"
result.attribute_template_id = -1
result.stfName("craft_item_ingredients_n","spear_rack")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/tangible/loot/loot_schematic/shared_spear_rack_schematic.py | Python | mit | 479 |
class BinaryIterator(object):
def __init__(self, content):
self.body = content.raw_body
self._stop = False
@classmethod
    def open_file(cls, path):
return open(path, 'rb')
    def __iter__(self):
        return self
    def __next__(self):
        if self._stop:
            raise StopIteration
        else:
            self._stop = True
            return self.body
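# Illustrative sketch (not part of the original module): the iterator yields
# the raw body exactly once. _Content is a hypothetical stand-in for the
# content objects akagi normally passes in.
if __name__ == '__main__':
    class _Content(object):
        raw_body = b'payload'
    assert list(BinaryIterator(_Content())) == [b'payload']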
| ayemos/osho | akagi/iterators/binary_iterator.py | Python | mit | 361 |
from flask.ext import uploads
allowed = uploads.IMAGES
_config = uploads.UploadConfiguration(
'./Application/static/uploads/',
base_url='/static/uploads/',
allow=allowed,
deny=())
images = uploads.UploadSet('images', allowed)
images._config = _config | nickw444/ShowCSE | Application/uploads.py | Python | mit | 267 |
#
# Example file for working with date information
# (For Python 3.x, be sure to use the ExampleSnippets3.txt file)
from datetime import date
from datetime import time
from datetime import datetime
def main():
## DATE OBJECTS
# Get today's date from the simple today() method from the date class
today = date.today()
print "Today's date is ", today
# print out the date's individual components
print "Date Components: ", today.day, today.month, today.year
# retrieve today's weekday (0=Monday, 6=Sunday)
print "Today's Weekday #: ", today.weekday()
## DATETIME OBJECTS
# Get today's date from the datetime class
today = datetime.now()
print "The current date and time is ", today
# Get the current time
t = datetime.time(datetime.now())
print "The current time is ", t
# weekday returns 0 (monday) through 6 (sunday)
wd = date.weekday(today)
# Days start at 0 for Monday
days = ["monday","tuesday","wednesday","thursday","friday","saturday","sunday"]
print "Today is day number %d" % wd
print "Which is a " + days[wd]
if __name__ == "__main__":
    main()
| thatguyandy27/python-sandbox | learning-python/Ch3/dates_finished.py | Python | mit | 1,126 |
import socket
import poormanslogging as log
LISTENPORT = 6666
class Grunt(object):
def __init__(self):
try:
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.bind(('', LISTENPORT))
self.s.listen(1)
log.info('Waiting for orders on port {}'.format(LISTENPORT))
(c, a) = self.s.accept()
self._receive_orders(c)
finally:
log.info('Shutting down')
self.s.close()
    def _receive_orders(self, sock):
        chunks = []
        while True:
            try:
                data = sock.recv(1024)
            except OSError:
                break
            if not data:
                # an empty read means the peer closed the connection
                break
            chunks.append(data)
        msg = b''.join(chunks)
        print("Message:")
        print(msg)
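# Illustrative client sketch (assumption: a Grunt instance is already listening
# on localhost:6666). Sending one order and closing the socket ends
# _receive_orders cleanly.
if __name__ == '__main__':
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.connect(('127.0.0.1', LISTENPORT))
    client.sendall(b'attack at dawn')
    client.close()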
| vyral/bombardier | networking/grunt.py | Python | mit | 598 |
"""Landlab component that simulates relative incidence shortwave radiation
on sloped surface.
Landlab component that computes 1D and 2D total incident shortwave
radiation. This code also computes relative incidence shortwave radiation
compared to a flat surface. Ref: Bras, Rafael L. Hydrology: an introduction
to hydrologic science. Addison Wesley Publishing Company, 1990.
.. codeauthor:: Sai Nudurupati & Erkan Istanbulluoglu
Examples
--------
>>> import numpy as np
>>> from landlab import RasterModelGrid
>>> from landlab.components import Radiation
Create a grid on which to calculate incident shortwave radiation
>>> grid = RasterModelGrid((5, 4), spacing=(0.2, 0.2))
The grid will need some input data. To check the names of the fields
that provide the input to this component, use the *input_var_names*
class property.
>>> Radiation.input_var_names
('topographic__elevation',)
Check the units for the fields.
>>> Radiation.var_units('topographic__elevation')
'm'
Create the input fields.
>>> grid['node']['topographic__elevation'] = np.array([
... 0., 0., 0., 0.,
... 1., 1., 1., 1.,
... 2., 2., 2., 2.,
... 3., 4., 4., 3.,
... 4., 4., 4., 4.])
If you are not sure about one of the input or output variables, you can
get help for specific variables.
>>> Radiation.var_help('topographic__elevation')
name: topographic__elevation
description:
elevation of the ground surface relative to some datum
units: m
at: node
intent: in
Check the output variable names
>>> sorted(Radiation.output_var_names) # doctest: +NORMALIZE_WHITESPACE
['radiation__incoming_shortwave_flux',
'radiation__net_shortwave_flux',
'radiation__ratio_to_flat_surface']
Instantiate the 'Radiation' component to work on this grid, and run it.
>>> rad = Radiation(grid)
Run the *update* method to update output variables with current time
>>> current_time = 0.5
>>> rad.update(current_time)
>>> rad.grid.at_cell['radiation__ratio_to_flat_surface']
array([ 0.38488566, 0.38488566, 0.33309785, 0.33309785, 0.37381705,
0.37381705])
>>> rad.grid.at_cell['radiation__incoming_shortwave_flux']
array([ 398.33664988, 398.33664988, 344.73895668, 344.73895668,
386.88120966, 386.88120966])
"""
from .radiation import Radiation
__all__ = ['Radiation', ]
| laijingtao/landlab | landlab/components/radiation/__init__.py | Python | mit | 2,298 |
"""
Script that trains graph-conv models on ChEMBL dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
from chembl_datasets import load_chembl
# Load ChEMBL dataset
chembl_tasks, datasets, transformers = load_chembl(
shard_size=2000, featurizer="GraphConv", set="5thresh", split="random")
train_dataset, valid_dataset, test_dataset = datasets
# Fit models
metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean)
# Do setup required for tf/keras models
# Number of features on conv-mols
n_feat = 75
# Batch size of models
batch_size = 128
graph_model = dc.nn.SequentialGraph(n_feat)
graph_model.add(dc.nn.GraphConv(128, n_feat, activation='relu'))
graph_model.add(dc.nn.BatchNormalization(epsilon=1e-5, mode=1))
graph_model.add(dc.nn.GraphPool())
graph_model.add(dc.nn.GraphConv(128, 128, activation='relu'))
graph_model.add(dc.nn.BatchNormalization(epsilon=1e-5, mode=1))
graph_model.add(dc.nn.GraphPool())
# Gather Projection
graph_model.add(dc.nn.Dense(256, 128, activation='relu'))
graph_model.add(dc.nn.BatchNormalization(epsilon=1e-5, mode=1))
graph_model.add(dc.nn.GraphGather(batch_size, activation="tanh"))
model = dc.models.MultitaskGraphRegressor(
graph_model,
len(chembl_tasks),
n_feat,
batch_size=batch_size,
learning_rate=1e-3,
learning_rate_decay_time=1000,
optimizer_type="adam",
beta1=.9,
beta2=.999)
# Fit trained model
model.fit(train_dataset, nb_epoch=20)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
test_scores = model.evaluate(test_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
print("Test scores")
print(test_scores)
| joegomes/deepchem | examples/chembl/chembl_graph_conv.py | Python | mit | 1,974 |
import logging
import json
import time
from collections import defaultdict
from flotilla.model import FlotillaServiceRevision, FlotillaUnit, \
GLOBAL_ASSIGNMENT, GLOBAL_ASSIGNMENT_SHARDS
from Crypto.Cipher import AES
logger = logging.getLogger('flotilla')
class FlotillaAgentDynamo(object):
"""Database interaction for worker/agent component.
Required table permissions:
    status:
    - PutItem
assignments:
- BatchGetItem
revisions:
- BatchGetItem
units:
- BatchGetItem
"""
def __init__(self, instance_id, service_name, status_table,
assignments_table, revisions_table, units_table, kms):
self._id = instance_id
global_shard = hash(instance_id) % GLOBAL_ASSIGNMENT_SHARDS
self._global_id = '%s_%d' % (GLOBAL_ASSIGNMENT, global_shard)
self._service = service_name
self._status = status_table
self._assignments = assignments_table
self._revisions = revisions_table
self._units = units_table
self._kms = kms
def store_status(self, unit_status):
"""Store unit status.
:param unit_status Unit statuses.
"""
logger.debug('Storing status as %s...', self._id)
data = dict(unit_status)
data['service'] = self._service
data['instance_id'] = self._id
data['status_time'] = time.time()
self._status.put_item(data=data, overwrite=True)
logger.info('Stored status of %s units as %s.', len(unit_status),
self._id)
def get_assignments(self):
assignments = self._assignments.batch_get([
{'instance_id': self._id}, {'instance_id': self._global_id}])
assigned_revisions = [assignment['assignment'] for assignment in
assignments]
return sorted(assigned_revisions)
def get_units(self, assigned_revisions):
"""
Get currently assigned FlotillaUnits.
        :param assigned_revisions: Assigned revisions
        :return: list of FlotillaUnit objects.
"""
# Fetch every revision and index units:
revisions = {}
unit_revisions = defaultdict(list)
revision_keys = [{'rev_hash': assigned_revision}
for assigned_revision in set(assigned_revisions)]
for revision_item in self._revisions.batch_get(revision_keys):
rev_hash = revision_item['rev_hash']
revision = FlotillaServiceRevision(label=revision_item['label'])
revisions[rev_hash] = revision
for unit in revision_item['units']:
unit_revisions[unit].append(rev_hash)
# Fetch every unit:
units = []
unit_keys = [{'unit_hash': unit_hash}
for unit_hash in sorted(unit_revisions.keys())]
logger.debug('Fetching %d units for %d/%d revisions.', len(unit_keys),
len(revisions), len(assigned_revisions))
for unit_item in self._units.batch_get(unit_keys):
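            # Envelope decryption: KMS decrypts the per-unit data key, which is
            # then used to decrypt the environment payload locally via AES-CBC.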
env_key = unit_item.get('environment_key')
if env_key:
decrypted_key = self._kms.decrypt(env_key.decode('base64'))
iv = unit_item['environment_iv'].decode('base64')
aes = AES.new(decrypted_key['Plaintext'], AES.MODE_CBC, iv)
ciphertext = unit_item['environment_data'].decode('base64')
plaintext = aes.decrypt(ciphertext)
unit_environment = json.loads(plaintext)
else:
unit_environment = unit_item['environment']
unit_file = unit_item['unit_file']
unit = FlotillaUnit(unit_item['name'], unit_file, unit_environment)
unit_hash = unit.unit_hash
if unit_hash != unit_item['unit_hash']:
logger.warn('Unit hash %s expected %s', unit_hash,
unit_item['unit_hash'])
unit_hash = unit_item['unit_hash']
for revision in unit_revisions[unit_hash]:
            rev_unit = FlotillaUnit(unit_item['name'], unit_file,
                                    unit_environment, revision)
units.append(rev_unit)
revisions[revision].units.append(rev_unit)
# Verify each revision matches expected hash:
for expected_hash, revision in revisions.items():
revision_hash = revision.revision_hash
if revision_hash != expected_hash:
# FIXME: enforce?
logger.warn('Revision hash %s expected %s', revision_hash,
expected_hash)
return units
| pebble/flotilla | src/flotilla/agent/db.py | Python | mit | 4,650 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
class User(object):
def __init__(self, id, email, passwd, session, session_expire_time, ctime, rtime):
self.id = str(id)
self.email = email
self.passwd = passwd
self.session = session
self.session_expire_time = session_expire_time
        self.ctime = ctime
        self.rtime = rtime
@classmethod
def get(cls, id):
pass
@classmethod
def get_by_email(cls, email):
pass
@classmethod
def register(cls, email, passwd):
pass
def login_user(email, passwd):
    pass
| liangsun/me | webapp/models/user.py | Python | mit | 573 |
from keras import backend as K
from keras import initializations
from keras.backend.common import _EPSILON
from keras.engine.topology import Layer
from keras.engine import InputSpec
from theano.tensor.nnet import h_softmax
import theano.tensor as T
class HierarchicalSoftmax(Layer):
def __init__(self, output_dim, init='glorot_uniform', **kwargs):
self.init = initializations.get(init)
self.output_dim = output_dim
def hshape(n):
from math import sqrt, ceil
l1 = ceil(sqrt(n))
l2 = ceil(n / l1)
return int(l1), int(l2)
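        # e.g. hshape(10000) == (100, 100): the output layer is factored into
        # a two-level tree of ~sqrt(n) classes, each with ~sqrt(n) outputs.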
self.n_classes, self.n_outputs_per_class = hshape(output_dim)
super(HierarchicalSoftmax, self).__init__(**kwargs)
def build(self, input_shape):
self.input_spec = [InputSpec(shape=shape) for shape in input_shape]
input_dim = self.input_spec[0].shape[-1]
self.W1 = self.init((input_dim, self.n_classes), name='{}_W1'.format(self.name))
self.b1 = K.zeros((self.n_classes,), name='{}_b1'.format(self.name))
self.W2 = self.init((self.n_classes, input_dim, self.n_outputs_per_class), name='{}_W2'.format(self.name))
self.b2 = K.zeros((self.n_classes, self.n_outputs_per_class), name='{}_b2'.format(self.name))
self.trainable_weights = [self.W1, self.b1, self.W2, self.b2]
def get_output_shape_for(self, input_shape):
return (input_shape[0][0], input_shape[0][1], None)
def call(self, X, mask=None):
input_shape = self.input_spec[0].shape
x = K.reshape(X[0], (-1, input_shape[2]))
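        # At training time a flattened target is passed to h_softmax so only the
        # probability of each example's true class is computed; at prediction
        # time target is None and the full distribution is returned.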
target = X[1].flatten() if self.trainable else None
Y = h_softmax(x, K.shape(x)[0], self.output_dim,
self.n_classes, self.n_outputs_per_class,
self.W1, self.b1, self.W2, self.b2, target)
output_dim = 1 if self.trainable else self.output_dim
input_length = K.shape(X[0])[1]
y = K.reshape(Y, (-1, input_length, output_dim))
return y
def get_config(self):
config = {'output_dim': self.output_dim,
'init': self.init.__name__}
base_config = super(HierarchicalSoftmax, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def hs_categorical_crossentropy(y_true, y_pred):
y_pred = T.clip(y_pred, _EPSILON, 1.0 - _EPSILON)
return T.nnet.categorical_crossentropy(y_pred, y_true)
| jstarc/nli_generation | hierarchical_softmax.py | Python | mit | 2,566 |
import os
import sys
import argparse
import ConfigParser
import testcase_service
from bantorra.util import define
from bantorra.util.log import LOG as L
class TestCase_Base(testcase_service.TestCaseUnit):
config = {}
"""
TestCase_Base.
- Parse Command Line Argument.
- Create Service's Instance.
- Read Config File and get value.
"""
def __init__(self, *args, **kwargs):
super(TestCase_Base, self).__init__(*args, **kwargs)
self.parse()
self.get_config()
self.service_check()
self.get_service()
@classmethod
def set(cls, name, value):
cls.config[name] = value
@classmethod
def get(cls, name):
return cls.config[name]
def parse(self):
"""
Parse Command Line Arguments.
"""
return None
@classmethod
def get_service(cls):
"""
        Get services.
        In the wifi branch, these are the services that are used.
"""
cls.core = cls.service["core"].get()
cls.picture = cls.service["picture"].get()
@classmethod
def get_config(cls, conf=""):
"""
Get Config File.
:arg string conf: config file path.
"""
cls.config = {}
if conf == "":
conf = os.path.join(define.APP_SCRIPT, "config.ini")
try:
config = ConfigParser.ConfigParser()
config.read(conf)
for section in config.sections():
for option in config.options(section):
cls.config["%s.%s" % (section, option)] = config.get(section, option)
except Exception as e:
L.warning('error: could not read config file: %s' % e)
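    # Illustrative example (hypothetical section/option names): a config.ini
    # containing
    #   [device]
    #   serial = 0123456789ABCDEF
    # is exposed as cls.config["device.serial"] == "0123456789ABCDEF".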
| TE-ToshiakiTanaka/bantorra.old | script/testcase_base.py | Python | mit | 1,751 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('robocrm', '0006_auto_20141005_1800'),
]
operations = [
migrations.AddField(
model_name='robouser',
name='magnetic',
field=models.CharField(max_length=9, null=True, blank=True),
preserve_default=True,
),
]
| CMU-Robotics-Club/roboticsclub.org | robocrm/migrations/0007_robouser_magnetic.py | Python | mit | 461 |
"""The tests for the Recorder component."""
# pylint: disable=too-many-public-methods,protected-access
import unittest
import json
from datetime import datetime, timedelta
from unittest.mock import patch
from homeassistant.const import MATCH_ALL
from homeassistant.components import recorder
from tests.common import get_test_home_assistant
class TestRecorder(unittest.TestCase):
"""Test the recorder module."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
db_uri = 'sqlite://'
with patch('homeassistant.core.Config.path', return_value=db_uri):
recorder.setup(self.hass, config={
"recorder": {
"db_url": db_uri}})
self.hass.start()
recorder._INSTANCE.block_till_db_ready()
self.session = recorder.Session()
recorder._INSTANCE.block_till_done()
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
recorder._INSTANCE.block_till_done()
def _add_test_states(self):
"""Add multiple states to the db for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5)
attributes = {'test_attr': 5, 'test_attr_10': 'nice'}
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
for event_id in range(5):
if event_id < 3:
timestamp = five_days_ago
state = 'purgeme'
else:
timestamp = now
state = 'dontpurgeme'
self.session.add(recorder.get_model('States')(
entity_id='test.recorder2',
domain='sensor',
state=state,
attributes=json.dumps(attributes),
last_changed=timestamp,
last_updated=timestamp,
created=timestamp,
event_id=event_id + 1000
))
self.session.commit()
def _add_test_events(self):
"""Add a few events for testing."""
now = datetime.now()
five_days_ago = now - timedelta(days=5)
event_data = {'test_attr': 5, 'test_attr_10': 'nice'}
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
for event_id in range(5):
if event_id < 2:
timestamp = five_days_ago
event_type = 'EVENT_TEST_PURGE'
else:
timestamp = now
event_type = 'EVENT_TEST'
self.session.add(recorder.get_model('Events')(
event_type=event_type,
event_data=json.dumps(event_data),
origin='LOCAL',
created=timestamp,
time_fired=timestamp,
))
def test_saving_state(self):
"""Test saving and restoring a state."""
entity_id = 'test.recorder'
state = 'restoring_from_db'
attributes = {'test_attr': 5, 'test_attr_10': 'nice'}
self.hass.states.set(entity_id, state, attributes)
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
states = recorder.execute(
recorder.query('States'))
self.assertEqual(1, len(states))
self.assertEqual(self.hass.states.get(entity_id), states[0])
def test_saving_event(self):
"""Test saving and restoring an event."""
event_type = 'EVENT_TEST'
event_data = {'test_attr': 5, 'test_attr_10': 'nice'}
events = []
def event_listener(event):
"""Record events from eventbus."""
if event.event_type == event_type:
events.append(event)
self.hass.bus.listen(MATCH_ALL, event_listener)
self.hass.bus.fire(event_type, event_data)
self.hass.pool.block_till_done()
recorder._INSTANCE.block_till_done()
db_events = recorder.execute(
recorder.query('Events').filter_by(
event_type=event_type))
assert len(events) == 1
assert len(db_events) == 1
event = events[0]
db_event = db_events[0]
assert event.event_type == db_event.event_type
assert event.data == db_event.data
assert event.origin == db_event.origin
# Recorder uses SQLite and stores datetimes as integer unix timestamps
assert event.time_fired.replace(microsecond=0) == \
db_event.time_fired.replace(microsecond=0)
def test_purge_old_states(self):
"""Test deleting old states."""
self._add_test_states()
# make sure we start with 5 states
states = recorder.query('States')
self.assertEqual(states.count(), 5)
# run purge_old_data()
recorder._INSTANCE.purge_days = 4
recorder._INSTANCE._purge_old_data()
# we should only have 2 states left after purging
self.assertEqual(states.count(), 2)
def test_purge_old_events(self):
"""Test deleting old events."""
self._add_test_events()
events = recorder.query('Events').filter(
recorder.get_model('Events').event_type.like("EVENT_TEST%"))
self.assertEqual(events.count(), 5)
# run purge_old_data()
recorder._INSTANCE.purge_days = 4
recorder._INSTANCE._purge_old_data()
# now we should only have 3 events left
self.assertEqual(events.count(), 3)
def test_purge_disabled(self):
"""Test leaving purge_days disabled."""
self._add_test_states()
self._add_test_events()
# make sure we start with 5 states and events
states = recorder.query('States')
events = recorder.query('Events').filter(
recorder.get_model('Events').event_type.like("EVENT_TEST%"))
self.assertEqual(states.count(), 5)
self.assertEqual(events.count(), 5)
# run purge_old_data()
recorder._INSTANCE.purge_days = None
recorder._INSTANCE._purge_old_data()
# we should have all of our states still
self.assertEqual(states.count(), 5)
self.assertEqual(events.count(), 5)
| emilhetty/home-assistant | tests/components/test_recorder.py | Python | mit | 6,293 |
# Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from rest_framework import permissions
from promort.settings import DEFAULT_GROUPS
class CanEnterGodMode(permissions.BasePermission):
"""
    Only users that belong to the ODIN_MEMBERS group will be allowed
    to perform queries using the Odin toolkit.
"""
RESTRICTED_METHODS = ['GET']
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated()):
return False
else:
if request.method in self.RESTRICTED_METHODS:
if request.user.groups.filter(
name__in=[DEFAULT_GROUPS['odin_members']['name']]
).exists():
return True
else:
return False
else:
return False
| lucalianas/ProMort | promort/odin/permissions.py | Python | mit | 1,894 |
# x = [4, 6, 1, 3, 5, 7, 25]
# def stars (a):
# i = 0
# while (i < len(a)):
# print '*' * a[i]
# i += 1
# stars(x)
x = [4, "Tom", 1, "Michael", 5, 7, "Jimmy Smith"]
def stars(a):
    i = 0
    while (i < len(a)):
        if type(a[i]) is int:
            # integers: print that many asterisks
            print '*' * a[i]
        else:
            # strings: print the lowercased first letter once per character
            temp = a[i].lower()
            print len(a[i]) * temp[0]
        i += 1
stars(x)
| jiobert/python | Paracha_Junaid/Assignments/Python/Web_fund/stars.py | Python | mit | 368 |
from distutils.core import setup, Extension, Command
from distutils.command.build import build
from distutils.command.build_ext import build_ext
from distutils.command.config import config
from distutils.msvccompiler import MSVCCompiler
from distutils import sysconfig
import string
import sys
mkobjs = ['column', 'custom', 'derived', 'fileio', 'field',
'format', 'handler', 'persist', 'remap', 'std',
'store', 'string', 'table', 'univ', 'view', 'viewx']
class config_mk(config):
def run(self):
# work around bug in Python 2.2-supplied check_header, fixed
# in Python 2.3; body needs to be a valid, non-zero-length string
if self.try_cpp(body="/* body */", headers=['unicodeobject.h'],
include_dirs=[sysconfig.get_python_inc()]):
build = self.distribution.reinitialize_command('build_ext')
build.define = 'HAVE_UNICODEOBJECT_H'
# trust that mk4.h provides the correct HAVE_LONG_LONG value,
# since Mk4py doesn't #include "config.h"
class build_mk(build):
def initialize_options(self):
# build in builds directory by default, unless specified otherwise
build.initialize_options(self)
self.build_base = '../builds'
class build_mkext(build_ext):
def finalize_options(self):
self.run_command('config')
# force use of C++ compiler (helps on some platforms)
import os
cc = os.environ.get('CXX', sysconfig.get_config_var('CXX'))
if not cc:
cc = sysconfig.get_config_var('CCC') # Python 1.5.2
if cc:
os.environ['CC'] = cc
build_ext.finalize_options(self)
def build_extension(self, ext):
# work around linker problem with MacPython 2.3
if sys.platform == 'darwin':
try:
self.compiler.linker_so.remove("-Wl,-x")
except: pass
# work around linker problem with Linux, Python 2.2 and earlier:
# despite setting $CC above, still uses Python compiler
if sys.platform == 'linux2':
try:
ext.libraries.append("stdc++")
except: pass
if ext.name == "Mk4py":
if isinstance(self.compiler, MSVCCompiler):
suffix = '.obj'
if self.debug:
prefix = '../builds/msvc60/mklib/Debug/'
else:
prefix = '../builds/msvc60/mklib/Release/'
else:
suffix = '.o'
prefix = '../builds/'
for i in range(len(ext.extra_objects)):
nm = ext.extra_objects[i]
if nm in mkobjs:
if string.find(nm, '.') == -1:
nm = nm + suffix
nm = prefix + nm
ext.extra_objects[i] = nm
build_ext.build_extension(self, ext)
class test_regrtest(Command):
# Original version of this class posted
# by Berthold Hoellmann to distutils-sig@python.org
description = "test the distribution prior to install"
user_options = [
('build-base=', 'b',
"base build directory (default: 'build.build-base')"),
('build-purelib=', None,
"build directory for platform-neutral distributions"),
('build-platlib=', None,
"build directory for platform-specific distributions"),
('build-lib=', None,
"build directory for all distribution (defaults to either " +
"build-purelib or build-platlib"),
('test-dir=', None,
"directory that contains the test definitions"),
('test-options=', None,
"command-line options to pass to test.regrtest")
]
def initialize_options(self):
self.build_base = None
# these are decided only after 'build_base' has its final value
# (unless overridden by the user or client)
self.build_purelib = None
self.build_platlib = None
self.test_dir = 'test'
self.test_options = None
def finalize_options(self):
build = self.distribution.get_command_obj('build')
build_options = ('build_base', 'build_purelib', 'build_platlib')
for option in build_options:
val = getattr(self, option)
if val:
setattr(build, option, getattr(self, option))
build.ensure_finalized()
for option in build_options:
setattr(self, option, getattr(build, option))
def run(self):
# Invoke the 'build' command to "build" pure Python modules
# (ie. copy 'em into the build tree)
self.run_command('build')
# remember old sys.path to restore it afterwards
old_path = sys.path[:]
# extend sys.path
sys.path.insert(0, self.build_purelib)
sys.path.insert(0, self.build_platlib)
sys.path.insert(0, self.test_dir)
# Use test.regrtest, unlike the original version of this class
import test.regrtest
# jcw 2004-04-26 - why do I need to add these here to find the tests?
#import leaktest - not very portable
import test_inttypes
import test_stringtype
#import test_hash - doesn't work
# jcw end
test.regrtest.STDTESTS = []
test.regrtest.NOTTESTS = []
if self.test_options:
sys.argv[1:] = string.split(self.test_options, ' ')
else:
del sys.argv[1:]
# remove stale modules
del sys.modules['metakit']
try:
del sys.modules['Mk4py']
except:
pass
self.announce("running tests")
test.regrtest.main(testdir=self.test_dir)
# restore sys.path
sys.path = old_path[:]
#try:
# import metakit
#except:
# metakit = sys.modules['metakit']
setup(name = "metakit",
version = "2.4.9.7",
description = "Python bindings to the Metakit database library",
#long_description = metakit.__doc__,
author = "Gordon McMillan / Jean-Claude Wippler",
author_email = "jcw@equi4.com",
url = "http://www.equi4.com/metakit/python.html",
maintainer = "Jean-Claude Wippler",
maintainer_email = "jcw@equi4.com",
license = "X/MIT style, see: http://www.equi4.com/mklicense.html",
keywords = ['database'],
py_modules = ['metakit'],
cmdclass = {'build': build_mk, 'build_ext': build_mkext,
'test': test_regrtest, 'config': config_mk},
ext_modules = [Extension("Mk4py",
sources=["PyProperty.cpp",
"PyRowRef.cpp",
"PyStorage.cpp",
"PyView.cpp",
"scxx/PWOImp.cpp",
],
include_dirs=["scxx",
"../include"],
extra_objects=mkobjs,
)]
)
## Local Variables:
## compile-command: "python setup.py build -b ../builds"
## End:
| electric-cloud/metakit | python/setup.py | Python | mit | 7,317 |
from django.contrib import admin
from attachments.admin import AttachmentInlines
from tasks.models import Task
class TaskOptions(admin.ModelAdmin):
inlines = [AttachmentInlines]
admin.site.register(Task, TaskOptions) | alex/pinax | pinax/apps/tasks/admin.py | Python | mit | 226 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# fakelcs - Waqas Bhatti (wbhatti@astro.princeton.edu) - Oct 2017
# License: MIT. See the LICENSE file for more details.
'''This contains various modules that run variable star classification and
characterize its reliability and completeness by simulating LCs:
- :py:mod:`astrobase.fakelcs.generation`: fake light curve generation and
injection of variability.
- :py:mod:`astrobase.fakelcs.recovery`: recovery of fake light curve variability
and periodic variable stars.
'''
| lgbouma/astrobase | astrobase/fakelcs/__init__.py | Python | mit | 530 |
from random import random
def ran_pi():
dim=[2,3]
out=[]
for i in range(dim[0]):
temp=[]
for j in range(dim[1]):
temp.append(random())
sm=sum(temp)
out.append([temp[i]/sm for i in range(dim[1])])
return out
pi={i:[[1.0,0.0],[0.0,1.0]] for i in range(5)}
temp={i:[[0.0,1.0],[1.0,0.0]] for i in range(5,10)}
pi.update(temp)
| CitizenScienceInAstronomyWorkshop/pyIBCC | attic/create_pi.py | Python | mit | 409 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import logging, os.path
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
import tornado.gen
class Application(tornado.web.Application):
def __init__(self):
base_dir = os.path.dirname(__file__)
app_settings = {
"debug": True,
'static_path': os.path.join(base_dir, "static"),
}
tornado.web.Application.__init__(self, [
tornado.web.url(r"/", MainHandler, name="main"),
tornado.web.url(r"/live", WebSocketHandler, name="websocket"),
], **app_settings)
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.render('index.html')
class WebSocketHandler(tornado.websocket.WebSocketHandler):
listenners = []
def check_origin(self, origin):
return True
@tornado.gen.engine
def open(self):
WebSocketHandler.listenners.append(self)
def on_close(self):
if self in WebSocketHandler.listenners:
WebSocketHandler.listenners.remove(self)
@tornado.gen.engine
def on_message(self, wsdata):
for listenner in WebSocketHandler.listenners:
listenner.write_message(wsdata)
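# A minimal client-side sketch (hypothetical, using tornado's own client API):
#
#   from tornado import gen, websocket
#
#   @gen.coroutine
#   def demo():
#       conn = yield websocket.websocket_connect("ws://localhost:8888/live")
#       conn.write_message("hello")          # fanned out to every listener
#       reply = yield conn.read_message()
#
# on_message() above broadcasts each incoming frame to all open connections.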
@tornado.gen.coroutine
def main():
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(Application())
http_server.listen(8888)
logging.info("application running on http://localhost:8888")
if __name__ == "__main__":
tornado.ioloop.IOLoop.current().run_sync(main)
tornado.ioloop.IOLoop.current().start()
| mehmetkose/react-websocket | example/server.py | Python | mit | 1,627 |
numbers = [7, 9, 12, 54, 99]
print "This list contains: "
for num in numbers:
print num
# Add your loop below!
for num in numbers:
print num ** 2
| vpstudios/Codecademy-Exercise-Answers | Language Skills/Python/Unit 8/1-Loops/For Loops/13-For your lists.py | Python | mit | 158 |
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
#
# Program Parameters
#
import os
import sys
import subprocess
from os import access, getenv, X_OK
jar_file = 'hops0.31.jar'
default_jvm_mem_opts = ['-Xms1g', '-Xmx2g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.dirname(os.path.realpath(path))
def java_executable():
"""Return the executable name of the Java interpreter."""
java_home = getenv('JAVA_HOME')
java_bin = os.path.join('bin', 'java')
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
return 'java'
def jvm_opts(argv):
"""Construct list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
The return value is a 3-tuple lists of strings of the form:
(memory_options, prop_options, passthrough_options)
"""
mem_opts = []
prop_opts = []
pass_args = []
for arg in argv:
if arg.startswith('-D'):
prop_opts.append(arg)
elif arg.startswith('-XX'):
prop_opts.append(arg)
elif arg.startswith('-Xm'):
mem_opts.append(arg)
else:
pass_args.append(arg)
# In the original shell script the test coded below read:
# if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
# To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
# in the second condition, so a null envar value counts as True!
if mem_opts == [] and getenv('_JAVA_OPTIONS') == None:
mem_opts = default_jvm_mem_opts
return (mem_opts, prop_opts, pass_args)
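# A worked example of the split (hypothetical argv, not from a real run):
#   jvm_opts(['-Xmx4g', '-Dsample=1', '-XX:+UseG1GC', 'input.bam'])
#   -> (['-Xmx4g'], ['-Dsample=1', '-XX:+UseG1GC'], ['input.bam'])
# Since an -Xm* option is present, default_jvm_mem_opts is left unused.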
def main():
java = java_executable()
jar_dir = real_dirname(sys.argv[0])
(mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java]+ mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
if '--jar_dir' in sys.argv[1:]:
print(jar_path)
else:
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
| jfallmann/bioconda-recipes | recipes/hops/hops.py | Python | mit | 2,649 |
def getitem(list, index):
return list[index]
def entry_point(i):
return getitem([i, 2, 3, 4], 2) + getitem(None, i)
def target(*args):
return entry_point, [int]
def get_llinterp_args():
return [1]
# _____ Run translated _____
def run(c_entry_point):
c_entry_point(0)
| oblique-labs/pyVM | rpython/translator/goal/targetsegfault.py | Python | mit | 291 |
import unittest
import pqkmeans
import numpy
import collections
import pickle
class TestPQKMeans(unittest.TestCase):
def data_source(self, n: int):
for i in range(n):
yield [i * 100] * 6
def setUp(self):
# Train PQ encoder
self.encoder = pqkmeans.encoder.PQEncoder(num_subdim=3, Ks=20)
self.encoder.fit(numpy.array(list(self.data_source(200))))
def test_just_construction(self):
pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=5, iteration=10, verbose=False)
def test_fit_and_predict(self):
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=2, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
predicted = engine.fit_predict(codes)
count = collections.defaultdict(int)
for cluster in predicted:
count[cluster] += 1
# roughly balanced clusters
self.assertGreaterEqual(min(count.values()), max(count.values()) * 0.7)
a = engine.predict(codes[0:1, :])
b = engine.predict(codes[0:1, :])
self.assertEqual(a, b)
def test_cluster_centers_are_really_nearest(self):
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=2, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
fit_predicted = engine.fit_predict(codes)
cluster_centers = numpy.array(engine.cluster_centers_, dtype=numpy.uint8)
predicted = engine.predict(codes)
self.assertTrue((fit_predicted == predicted).all())
# Reconstruct the original vectors
codes_decoded = self.encoder.inverse_transform(codes)
cluster_centers_decoded = self.encoder.inverse_transform(cluster_centers)
for cluster, code_decoded in zip(predicted, codes_decoded):
other_cluster = (cluster + 1) % max(predicted)
self.assertLessEqual(
numpy.linalg.norm(cluster_centers_decoded[cluster] - code_decoded),
numpy.linalg.norm(cluster_centers_decoded[other_cluster] - code_decoded)
)
def test_constructor_with_cluster_center(self):
# Run pqkmeans first.
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=5, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
fit_predicted = engine.fit_predict(codes)
cluster_centers = numpy.array(engine.cluster_centers_, dtype=numpy.uint8)
predicted = engine.predict(codes)
# save current engine and recover from savedata
engine_savedata = pickle.dumps(engine)
engine_recovered = pickle.loads(engine_savedata)
fit_predicted_from_recovered_obj = engine_recovered.predict(codes)
numpy.testing.assert_array_equal(predicted, fit_predicted_from_recovered_obj)
| kogaki/pqkmeans | test/clustering/test_pqkmeans.py | Python | mit | 2,918 |
#!/usr/bin/env python
import os
import time
import django
from userreport import maint
os.environ['DJANGO_SETTINGS_MODULE'] = 'userreport.settings'
django.setup()
start_time = time.time()
remove_time, get_time, save_time = maint.refresh_data()
total_time = time.time() - start_time
print("--- Remove Time: {:>5.2f} seconds, {:>5.2%} ---".format(remove_time, remove_time / total_time))
print("--- Get Time: {:>5.2f} seconds, {:>5.2%} ---".format(get_time, get_time / total_time))
print("--- Save Time: {:>5.2f} seconds, {:>5.2%} ---".format(save_time, save_time / total_time))
print("--- Total Time: {:>5.2f} seconds ---".format(total_time))
| leyyin/stk-stats | maint_graphics.py | Python | mit | 651 |
"""Caching Library using redis."""
import logging
from functools import wraps
from flask import current_app
from walrus import Walrus
import api
import hashlib
import pickle
from api import PicoException
log = logging.getLogger(__name__)
__redis = {
"walrus": None,
"cache": None,
"zsets": {"scores": None},
}
def get_conn():
"""Get a redis connection, reusing one if it exists."""
global __redis
if __redis.get("walrus") is None:
conf = current_app.config
try:
__redis["walrus"] = Walrus(
host=conf["REDIS_ADDR"],
port=conf["REDIS_PORT"],
password=conf["REDIS_PW"],
db=conf["REDIS_DB_NUMBER"],
)
except Exception as error:
raise PicoException(
"Internal server error. " + "Please contact a system administrator.",
data={"original_error": error},
)
return __redis["walrus"]
def get_cache():
"""Get a walrus cache, reusing one if it exists."""
global __redis
if __redis.get("cache") is None:
__redis["cache"] = get_conn().cache(default_timeout=0)
return __redis["cache"]
def get_score_cache():
global __redis
if __redis["zsets"].get("scores") is None:
__redis["zsets"]["scores"] = get_conn().ZSet("scores")
return __redis["zsets"]["scores"]
def get_scoreboard_cache(**kwargs):
global __redis
scoreboard_name = "scoreboard:{}".format(_hash_key((), kwargs))
if __redis["zsets"].get(scoreboard_name) is None:
__redis["zsets"][scoreboard_name] = get_conn().ZSet(scoreboard_name)
return __redis["zsets"][scoreboard_name]
def clear():
global __redis
if __redis.get("walrus") is not None:
__redis["walrus"].flushdb()
def __insert_cache(f, *args, **kwargs):
"""
Directly upserting without first invalidating, thus keeping a memoized
value available without lapse
"""
if f == api.stats.get_score:
raise PicoException("Error: Do not manually reset_cache get_score")
else:
key = "%s:%s" % (f.__name__, _hash_key(args, kwargs))
value = f(*args, **kwargs)
get_cache().set(key, value)
return value
def memoize(_f=None, **cached_kwargs):
"""walrus.Cache.cached wrapper that reuses shared cache."""
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
if kwargs.get("reset_cache", False):
kwargs.pop("reset_cache", None)
return __insert_cache(f, *args, **kwargs)
else:
return get_cache().cached(**cached_kwargs)(f)(*args, **kwargs)
return wrapper
if _f is None:
return decorator
else:
return decorator(_f)
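# A minimal usage sketch (hypothetical function, assuming a reachable redis):
#
#   @memoize(timeout=60)
#   def expensive_lookup(uid):
#       return compute(uid)                      # compute() is hypothetical
#
#   expensive_lookup("abc")                      # computed, then cached
#   expensive_lookup("abc")                      # served from the walrus cache
#   expensive_lookup("abc", reset_cache=True)    # recomputed and upserted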
def _hash_key(a, k):
return hashlib.md5(pickle.dumps((a, k))).hexdigest()
def get_scoreboard_key(team):
# For lack of better idea of delimiter, use '>' illegal team name char
return "{}>{}>{}".format(team["team_name"], team["affiliation"], team["tid"])
def decode_scoreboard_item(item, with_weight=False, include_key=False):
"""
:param item: tuple of ZSet (key, score)
:param with_weight: keep decimal weighting of score, or return as int
:param include_key: whether to include to raw key
:return: dict of scoreboard item
"""
key = item[0].decode("utf-8")
data = key.split(">")
score = item[1]
if not with_weight:
score = int(score)
output = {"name": data[0], "affiliation": data[1], "tid": data[2], "score": score}
if include_key:
output["key"] = key
return output
def search_scoreboard_cache(scoreboard, pattern):
"""
:param scoreboard: scoreboard cache ZSet
:param pattern: text pattern to search team names and affiliations,
not including wildcards
:return: sorted list of scoreboard entries
"""
# Trailing '*>' avoids search on last token, tid
results = [
decode_scoreboard_item(item, with_weight=True, include_key=True)
for item in list(scoreboard.search("*{}*>*".format(pattern)))
]
return sorted(results, key=lambda item: item["score"], reverse=True)
def invalidate(f, *args, **kwargs):
"""
Clunky way to replicate busting behavior due to awkward wrapping of walrus
cached decorator
"""
if f == api.stats.get_score:
key = args[0]
get_score_cache().remove(key)
else:
key = "%s:%s" % (f.__name__, _hash_key(args, kwargs))
get_cache().delete(key)
| royragsdale/picoCTF | picoCTF-web/api/cache.py | Python | mit | 4,558 |
'''
MLAB calls MATLAB functions and looks like a normal Python library.
authors:
Yauhen Yakimovich <eugeny.yakimovitch@gmail.com>
Module wrapping borrowed from `sh` project by Andrew Moffat.
'''
import os
import sys
from types import ModuleType
from mlabwrap import (MlabWrap, choose_release, find_available_releases,
MatlabReleaseNotFound)
import traceback
# TODO: work with ENV
#os.getenv("MLABRAW_CMD_STR", "")
def get_available_releases():
return dict(find_available_releases())
def get_latest_release(available_releases=None):
if not available_releases:
available_releases = dict(find_available_releases())
versions = available_releases.keys()
latest_release_version = sorted(versions)[-1]
return latest_release_version
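# For example (hypothetical install paths):
#   get_latest_release({'R2012b': '/opt/MATLAB/R2012b',
#                       'R2014a': '/opt/MATLAB/R2014a'}) -> 'R2014a'
# since release names sort lexicographically by year and letter suffix.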
class MatlabVersions(dict):
def __init__(self, globs):
self.globs = globs
self.__selected_instance = None
self._available_releases = dict(find_available_releases())
def __setitem__(self, k, v):
self.globs[k] = v
def __getitem__(self, k):
try: return self.globs[k]
except KeyError: pass
# the only way we'd get to here is if we've tried to
# import * from a repl. so, raise an exception, since
# that's really the only sensible thing to do
if k == "__all__":
raise ImportError('Cannot import * from mlab. Please import mlab '
'or import versions individually.')
if k.startswith("__") and k.endswith("__"):
raise AttributeError
        # how about returning an environment variable instead?
# try: return os.environ[k]
# except KeyError: pass
# is it a "release version"?
if k.startswith('R') and k in self._available_releases:
self[k] = self.get_mlab_instance(k)
return self[k]
if k == 'latest_release':
matlab_release = self.pick_latest_release()
instance = self.get_mlab_instance(matlab_release)
self[k] = instance
self[matlab_release] = instance
return instance
if self.__selected_instance is not None:
instance = self[self.__selected_instance]
try:
return getattr(instance, k)
except AttributeError:
traceback.print_exc(file=sys.stdout)
else:
raise ImportError('Import failed, no MATLAB instance selected. Try import mlab.latest_release')
raise ImportError('Failed to import anything for: %s' % k)
def get_mlab_instance(self, matlab_release):
choose_release(matlab_release)
instance = MlabWrap()
# Make it a module
sys.modules['mlab.releases.' + matlab_release] = instance
sys.modules['matlab'] = instance
return instance
def pick_latest_release(self):
return get_latest_release(self._available_releases)
# this is a thin wrapper around THIS module (we patch sys.modules[__name__]).
# this is in the case that the user does a "from sh import whatever"
# in other words, they only want to import certain programs, not the whole
# system PATH worth of commands. in this case, we just proxy the
# import lookup to our MatlabVersions class
class SelfWrapper(ModuleType):
def __init__(self, self_module):
# this is super ugly to have to copy attributes like this,
# but it seems to be the only way to make reload() behave
# nicely. if i make these attributes dynamic lookups in
# __getattr__, reload sometimes chokes in weird ways...
for attr in ["__builtins__", "__doc__", "__name__", "__package__"]:
setattr(self, attr, getattr(self_module, attr, None))
# python 3.2 (2.7 and 3.3 work fine) breaks on osx (not ubuntu)
# if we set this to None. and 3.3 needs a value for __path__
self.__path__ = []
self.module = self_module
self.instances = MatlabVersions(globals())
def __setattr__(self, name, value):
#if hasattr(self, "instances"): self.instances[name] = value
ModuleType.__setattr__(self, name, value)
def __getattr__(self, name):
if name == "instances": raise AttributeError
if name in dir(self.module):
return getattr(self.module, name)
return self.instances[name]
    # accept special keyword arguments to define defaults for all operations
    # processed by the returned SelfWrapper
    def __call__(self, **kwargs):
        # note: the wrapped module lives on self.module (self.self_module was
        # never set), and SelfWrapper.__init__ takes no kwargs, so they are
        # accepted but currently unused
        return SelfWrapper(self.module)
# we're being imported from somewhere
if __name__ != '__main__':
self = sys.modules[__name__]
sys.modules[__name__] = SelfWrapper(self)
| codypiersall/mlab | src/mlab/releases.py | Python | mit | 4,746 |
#!/usr/bin/python
#
# Copyright (C) 2010, University of Oxford
#
# Licensed under the MIT License. You may obtain a copy of the License at:
#
# http://www.opensource.org/licenses/mit-license.php
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $Id: $
"""
Support functions for creating, reading, writing and updating manifest RDF file.
"""
__author__ = "Bhavana Ananda"
__version__ = "0.1"
import logging, os, rdflib
from os.path import isdir
from rdflib import URIRef, Namespace, BNode
from rdflib.namespace import RDF
from rdflib.graph import Graph
from rdflib.plugins.memory import Memory
from rdflib import Literal
Logger = logging.getLogger("MaifestRDFUtils")
oxds = URIRef("http://vocab.ox.ac.uk/dataset/schema#")
oxdsGroupingUri = URIRef(oxds+"Grouping")
def bindNamespaces(rdfGraph, namespaceDict):
# Bind namespaces
for key in namespaceDict:
keyValue = namespaceDict[key]
Logger.debug(key+":"+keyValue)
rdfGraph.bind( key, keyValue , override=True)
# rdfGraph.bind("dcterms", dcterms, override=True)
# rdfGraph.bind("oxds", oxds, override=True)
# URIRef(dcterms+elementList[index])
return rdfGraph
def readManifestFile(manifestPath):
"""
Read from the manifest file.
manifestPath manifest file path
"""
# Read from the manifest.rdf file into an RDF Graph
rdfGraph = Graph()
rdfGraph.parse(manifestPath)
return rdfGraph
def writeToManifestFile(manifestPath, namespaceDict, elementUriList,elementValueList):
"""
Write to the manifest file.
manifestPath manifest file path
elementUriList Element Uri List to be written into the manifest files
elementValueList Element Values List to be written into the manifest files
"""
# Create an empty RDF Graph
rdfGraph = Graph()
subject = BNode()
rdfGraph = bindNamespaces(rdfGraph, namespaceDict)
# Write to the RDF Graph
rdfGraph.add((subject, RDF.type, oxdsGroupingUri))
for index in range(len(elementUriList)):
rdfGraph.add((subject,elementUriList[index], Literal(elementValueList[index])))
# Serialise it to a manifest.rdf file
saveToManifestFile(rdfGraph, manifestPath)
return rdfGraph
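# A minimal usage sketch (hypothetical URIs and values, not part of this module):
#
#   dcterms = URIRef("http://purl.org/dc/terms/")
#   writeToManifestFile("manifest.rdf",
#                       {"dcterms": dcterms, "oxds": oxds},
#                       [URIRef(dcterms + "title"), URIRef(dcterms + "description")],
#                       ["My dataset", "A short description"])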
def updateManifestFile(manifestPath, elementUriList, elementValueList):
"""
Update the manifest file.
manifestPath manifest file path
elementUriList Element Uri List whose values need to be to be updated in the manifest files
elementValueList Element Values List to be updated into the manifest files
"""
# Read the manifest file and update the title and the description
rdfGraph = readManifestFile(manifestPath)
subject = rdfGraph.value(None,RDF.type, oxdsGroupingUri)
if subject == None :
subject = BNode()
for index in range(len(elementUriList)):
rdfGraph.set((subject, elementUriList[index], Literal(elementValueList[index])))
saveToManifestFile(rdfGraph,manifestPath)
return rdfGraph
def saveToManifestFile(rdfGraph, manifestPath):
"""
Save the RDF Graph into a manifest file.
rdfGraph RDF Graph to be serialised into the manifest file
manifestPath manifest file path
"""
# Serialise the RDf Graph into manifest.rdf file
rdfGraph.serialize(destination=manifestPath, format='pretty-xml')
return
def compareRDFGraphs(graphA, graphB, elementUriListToCompare=[]):
"""
    Compare two RDF graphs
    graphA RDF Graph of Graph A
    graphB RDF Graph of Graph B
    graphsEqual True if the two graphs are equal, False otherwise
"""
def graphContains(graph, statement):
(s,p,o) = statement
if isinstance(s, BNode): s = None
if isinstance(p, BNode): p = None
if isinstance(o, BNode): o = None
return (s,p,o) in graph
graphsEqual = True
for statement in graphA:
if not graphContains(graphB, statement) : return False
for statement in graphB:
if not graphContains(graphA, statement) : return False
subjectA = graphA.value(None,RDF.type, oxdsGroupingUri)
subjectB = graphB.value(None,RDF.type, oxdsGroupingUri)
for elementUri in elementUriListToCompare :
if graphA.value(subjectA,elementUri,None)!=graphB.value(subjectB,elementUri,None) :
graphsEqual = False
return graphsEqual
def getElementValuesFromManifest(rdfGraph,elementUriList):
"""
Get element values of the element list supplied from the RDF graph
rdfGraph RDF Graph
elementUriList Element Uri List whose values need to be to be extracted from the manifest files
"""
elementValueList = []
subject = rdfGraph.value(None, RDF.type, oxdsGroupingUri)
for elementUri in elementUriList:
elementValueList.append(rdfGraph.value(subject,elementUri,None))
Logger.debug("Element Uri List =" + repr(elementUriList))
Logger.debug("Element Value List =" + repr(elementValueList))
return elementValueList
def getDictionaryFromManifest(manifestPath, elementUriList):
"""
Gets the dictionary of Field-Values from the manifest RDF
manifestPath path of the manifest file
elementList Element Names List whose values need to be to be updated in the manifest files
"""
file = None
elementValueList = []
elementList = []
dict = {}
json = ""
Logger.debug(manifestPath)
if manifestPath != None and ifFileExists(manifestPath):
rdfGraph = readManifestFile(manifestPath)
elementValueList = getElementValuesFromManifest(rdfGraph, elementUriList)
# Logger.debug("Element URi List =" + repr(elementUriList))
# Logger.debug("Element Value List =" + repr(elementValueList))
for index in range(len(elementUriList)):
Logger.debug("Index = " + repr(index))
elementUri = elementUriList[index]
position = elementUri.rfind("/") +1
elementList.append(elementUri[position:])
Logger.debug("substring = " + elementUri[position:])
if elementValueList!=[]:
dict = createDictionary(elementList, elementValueList)
return dict
def ifFileExists(filePath):
"""
    Checks if the file exists; returns True/False
filePath File Path
"""
return os.path.isfile(filePath)
def createDictionary(keyList, valueList):
"""
Creates and returns a dictionary from the keyList and valueList supplied
keyUriList List of key uris
valueList List of values
"""
dict = {}
for index in range(len(keyList)):
dict[keyList[index]] = valueList[index]
# Logger.debug(" Key Uri List = "+ repr(keyUriList))
# Logger.debug(" Key value list = "+ repr(valueList))
return dict
| bhavanaananda/DataStage | src/SubmitDatasetHandler/ManifestRDFUtils.py | Python | mit | 7,221 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('imager_images', '0005_auto_20150728_0129'),
]
operations = [
migrations.AlterField(
model_name='album',
name='privacy',
field=models.CharField(max_length=64, choices=[(b'Private', b'Private'), (b'Shared', b'Shared'), (b'Public', b'Public')]),
),
migrations.AlterField(
model_name='photo',
name='privacy',
field=models.CharField(max_length=64, choices=[(b'Private', b'Private'), (b'Shared', b'Shared'), (b'Public', b'Public')]),
),
]
| tpeek/django-imager | imagersite/imager_images/migrations/0006_auto_20150729_1539.py | Python | mit | 729 |
#!/usr/bin/env python
#encoding:utf-8
import os
import sys
import requests
import MySQLdb
from bs4 import BeautifulSoup
from bs4 import SoupStrainer
if len(sys.argv) != 4:
print 'Invalid parameters!'
exit(1)
print '=' * 60
print 'start:', sys.argv
aim_category_id = int(sys.argv[1])
start_point = (int(sys.argv[2]), int(sys.argv[3]))
immediate_download = False
base_url = 'http://www.3che.com'
session = requests.Session()
username = ''
password = ''
record = {
'category': '',
'detail_category': '',
'post_url': '',
'filename': '',
'url': ''
}
sql_cnt = 0
connection = None
cursor = None
def record_to_mysql():
global sql_cnt, connection, cursor
if sql_cnt % 20 == 0:
if connection:
connection.commit()
connection.close()
cursor.close()
connection = MySQLdb.connect(host='', user='', passwd='', db='', port=3306, charset='utf8')
cursor = connection.cursor()
sql_cnt += 1
cursor.execute('insert into san_che(`category`, `detail_category`, `post_url`, `filename`, `url`) values (%s, %s, %s, %s, %s)',
(record['category'], record['detail_category'], record['post_url'], record['filename'], record['url']))
def login():
login_path = '/member.php?mod=logging&action=login&loginsubmit=yes&infloat=yes&lssubmit=yes&inajax=1'
session.post(base_url + login_path, {'username': username, 'password': password})
def enter_directory(name):
if immediate_download:
if not os.path.exists(name):
os.mkdir(name)
os.chdir(name)
def get_soup(url, parse_only=None):
text = session.get(url).text
return BeautifulSoup(text, 'lxml', parse_only=parse_only)
def download_file(url, filename):
print 'Downloading:', filename, '=>', url
record['url'] = url
record['filename'] = filename
if immediate_download:
with open(filename, 'w') as fp:
res = requests.get(url)
fp.write(res.content)
fp.close()
else:
record_to_mysql()
def crawl_file(url, filename):
try:
soup = get_soup(url, SoupStrainer(id='attachpayform'))
attach_form = soup.find('form', id='attachpayform')
link = attach_form.table.find_all('a')[-1]
except Exception as e:
print 'Error! file url:', url
else:
download_file(link['href'], filename)
# Crawl detail data of one post.
def crawl_detail(detail_category, title, detail_url):
print '-' * 100
print 'Crawling Post:', detail_category, title, '=>', detail_url
record['detail_category'] = detail_category
record['post_url'] = detail_url
# Enter detail directory.
enter_directory(detail_category)
prefix = detail_url.rsplit('/', 1)[-1].split('.', 1)[0]
enter_directory(prefix + title)
soup = get_soup(detail_url, SoupStrainer('p', {'class': 'attnm'}))
attnms = soup.find_all('p', {'class': 'attnm'})
for attnm in attnms:
url = '{0}/{1}'.format(base_url, attnm.a['href'])
crawl_file(url, attnm.a.text.strip(u'[下载]'))
# Leave detail directory.
if immediate_download:
os.chdir('../..')
# Crawl data of one category.
def crawl_category(category, list_url):
print '=' * 100
print 'Crawling category:', category, '=>', list_url
record['category'] = category
# Create corresponding directory and enter.
enter_directory(category)
cur_page_id = 0
url = list_url
while url is not None:
cur_page_id += 1
print 'Crawling page url:', url
soup = get_soup(url, SoupStrainer('span'))
xsts = soup.find_all('span', {'class': 'xst'})
if cur_page_id >= start_point[0]:
cur_in_page_id = 0
for xst in xsts:
cur_in_page_id += 1
detail = xst.find('a', {'class': 'xst'})
if cur_page_id > start_point[0] or cur_in_page_id >= start_point[1]:
crawl_detail(xst.em and xst.em.a.text or '', detail.text, detail['href'])
page_footer = soup.find('span', id='fd_page_top')
next_link = page_footer.label.next_sibling
if next_link is not None:
url = next_link['href']
else:
url = None
# Leave the directory.
if immediate_download:
os.chdir('..')
if __name__ == '__main__':
login()
# Extract categories from home page.
soup = get_soup(base_url, SoupStrainer(id='nv'))
category_lis = soup.find('div', id='nv').ul.find_all('li')
categories = map(lambda x: (x.a.text, x.a['href']), category_lis)
categories = filter(lambda x: x[1] != '/', categories)
crawl_category(categories[aim_category_id][0], categories[aim_category_id][1])
# for category in categories:
# crawl_category(category[0], category[1])
| JayvicWen/Crawler | 3che/crawler.py | Python | mit | 4,816 |
#!/usr/bin/env python3
import traceback
from telethon_examples.interactive_telegram_client \
import InteractiveTelegramClient
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
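# The settings file holds plain "key = value" lines; purely numeric values are
# coerced to int. A hypothetical api/settings might read:
#
#   session_name = anonymous
#   user_phone = +10000000000
#   api_id = 12345
#   api_hash = 0123456789abcdef0123456789abcdef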
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
kwargs = {}
if settings.get('socks_proxy'):
import socks # $ pip install pysocks
host, port = settings['socks_proxy'].split(':')
kwargs = dict(proxy=(socks.SOCKS5, host, int(port)))
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']),
**kwargs)
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
client.disconnect()
print('Thanks for trying the interactive example! Exiting...')
| kyasabu/Telethon | try_telethon.py | Python | mit | 1,492 |
# Copyright (c) 2010-2017 openpyxl
| 171121130/SWI | venv/Lib/site-packages/openpyxl/reader/__init__.py | Python | mit | 35 |
#Calculate the area of a rectangle (aligned with the x and y axes) given its coordinates x1, x2, y1, y2.
from math import pi
print 'Exercise 6'
print '-'*60
x1 = float(raw_input('Enter x1: '))
x2 = float(raw_input('Enter x2: '))
y1 = float(raw_input('Enter y1: '))
y2 = float(raw_input('Enter y2: '))
base = x2-x1
altura = y2-y1
print 'The area of the rectangle is: ', base * altura
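# For example, x1=1, x2=4, y1=2, y2=5 gives base 3, height 3 and area 9.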
raw_input('Press the Enter key to finish')
| txtbits/daw-python | primeros ejercicios/Ejercicios entradasalida/ejercicio6.py | Python | mit | 445 |
"""
Tests that deepchem models make deterministic predictions.
"""
__author__ = "Bharath Ramsundar"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "MIT"
import os
import tempfile
import numpy as np
import unittest
import sklearn
import shutil
import deepchem as dc
try:
import tensorflow as tf
from tensorflow.python.framework import test_util
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import RandomForestRegressor
has_tensorflow = True
except:
has_tensorflow = False
class TestPredict(unittest.TestCase):
"""
Test that models make deterministic predictions
These tests guard against failures like having dropout turned on at
test time.
"""
def setUp(self):
super(TestPredict, self).setUp()
self.current_dir = os.path.dirname(os.path.abspath(__file__))
'''
def test_tf_progressive_regression_predict(self):
"""Test tf progressive multitask makes deterministic predictions."""
np.random.seed(123)
n_tasks = 9
n_samples = 10
n_features = 3
n_classes = 2
# Generate dummy dataset
ids = np.arange(n_samples)
X = np.random.rand(n_samples, n_features)
y = np.zeros((n_samples, n_tasks))
w = np.ones((n_samples, n_tasks))
dataset = dc.data.NumpyDataset(X, y, w, ids)
regression_metric = dc.metrics.Metric(
dc.metrics.mean_squared_error, task_averager=np.mean)
model = dc.models.ProgressiveMultitaskRegressor(
n_tasks,
n_features,
layer_sizes=[50],
bypass_layer_sizes=[10],
dropouts=[.25],
learning_rate=0.003,
weight_init_stddevs=[.1],
alpha_init_stddevs=[.02],
batch_size=n_samples)
# Fit trained model
model.fit(dataset, nb_epoch=25)
model.save()
# Check same predictions are made.
y_pred_first = model.predict(dataset)
y_pred_second = model.predict(dataset)
np.testing.assert_allclose(y_pred_first, y_pred_second)
'''
| peastman/deepchem | deepchem/models/tests/test_predict.py | Python | mit | 1,984 |
# -*- coding: utf-8 -*-
import logging
from speaklater import make_lazy_string
from quokka.modules.accounts.models import User
logger = logging.getLogger()
def lazy_str_setting(key, default=None):
from flask import current_app
return make_lazy_string(
lambda: current_app.config.get(key, default)
)
def get_current_user():
from flask.ext.security import current_user
try:
if not current_user.is_authenticated():
return None
except RuntimeError:
# Flask-Testing will fail
pass
try:
return User.objects.get(id=current_user.id)
except Exception as e:
logger.warning("No user found: %s" % e.message)
return None
| maurobaraldi/quokka | quokka/utils/__init__.py | Python | mit | 714 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._policy_tracked_resources_operations import PolicyTrackedResourcesOperations
from ._remediations_operations import RemediationsOperations
from ._policy_events_operations import PolicyEventsOperations
from ._policy_states_operations import PolicyStatesOperations
from ._operations import Operations
from ._policy_metadata_operations import PolicyMetadataOperations
from ._policy_restrictions_operations import PolicyRestrictionsOperations
from ._attestations_operations import AttestationsOperations
__all__ = [
'PolicyTrackedResourcesOperations',
'RemediationsOperations',
'PolicyEventsOperations',
'PolicyStatesOperations',
'Operations',
'PolicyMetadataOperations',
'PolicyRestrictionsOperations',
'AttestationsOperations',
]
| Azure/azure-sdk-for-python | sdk/policyinsights/azure-mgmt-policyinsights/azure/mgmt/policyinsights/operations/__init__.py | Python | mit | 1,234 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_corellia_cec_officer.iff"
result.attribute_template_id = 9
result.stfName("npc_name","human_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/mobile/shared_dressed_corellia_cec_officer.py | Python | mit | 453 |
from __future__ import absolute_import, unicode_literals
import pytest
from virtualenv.seed.wheels.embed import MAX, get_embed_wheel
from virtualenv.seed.wheels.util import Wheel
def test_wheel_support_no_python_requires(mocker):
wheel = get_embed_wheel("setuptools", for_py_version=None)
zip_mock = mocker.MagicMock()
mocker.patch("virtualenv.seed.wheels.util.ZipFile", new=zip_mock)
zip_mock.return_value.__enter__.return_value.read = lambda name: b""
supports = wheel.support_py("3.8")
assert supports is True
def test_bad_as_version_tuple():
with pytest.raises(ValueError, match="bad"):
Wheel.as_version_tuple("bad")
def test_wheel_not_support():
wheel = get_embed_wheel("setuptools", MAX)
assert wheel.support_py("3.3") is False
def test_wheel_repr():
wheel = get_embed_wheel("setuptools", MAX)
assert str(wheel.path) in repr(wheel)
| pypa/virtualenv | tests/unit/seed/wheels/test_wheels_util.py | Python | mit | 901 |
#!/usr/bin/env python
from iris_sdk.models.maps.base_map import BaseMap
class RolesListMap(BaseMap):
role = None | scottbarstow/iris-python | iris_sdk/models/maps/roles_list.py | Python | mit | 119 |
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" A collator for Mojo Application Manifests """
import argparse
import json
import shutil
import sys
import urlparse
def ParseJSONFile(filename):
with open(filename) as json_file:
try:
return json.load(json_file)
except ValueError:
print "%s is not a valid JSON document" % filename
return None
def main():
parser = argparse.ArgumentParser(
description="Collate Mojo application manifests.")
parser.add_argument("--parent")
parser.add_argument("--output")
parser.add_argument("--application-name")
args, children = parser.parse_known_args()
parent = ParseJSONFile(args.parent)
if parent == None:
return 1
app_path = parent['name'].split(':')[1]
if app_path.startswith('//'):
raise ValueError("Application name path component '%s' must not start " \
"with //" % app_path)
if args.application_name != app_path:
raise ValueError("Application name '%s' specified in build file does not " \
"match application name '%s' specified in manifest." %
(args.application_name, app_path))
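  # For example, a manifest with "name": "mojo:network_service" yields the
  # app_path "network_service", which must equal --application-name.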
applications = []
for child in children:
application = ParseJSONFile(child)
if application == None:
return 1
applications.append(application)
if len(applications) > 0:
parent['applications'] = applications
with open(args.output, 'w') as output_file:
json.dump(parent, output_file)
return 0
if __name__ == "__main__":
sys.exit(main())
| junhuac/MQUIC | src/mojo/public/tools/manifest/manifest_collator.py | Python | mit | 1,675 |
import polyglot
import yaml
class Heuristics (object):
""" The Heuristics class receives a path to the file that is trying to be
identified and an array of possible languages for that file. The disambiguate
method will find the array with unambiguous strings of syntax for each language
and check the file in question for those strings. If a match occurrs then the file is
unquestionably written in the language that the string belongs to. If no match
is found then None is returned and the file wasn't determined to be of a
particular language."""
def __init__(self, path, possibleLanguages):
self.syntaxFile = polyglot.Polyglot.tryOpenFile ('syntax.yml')
# {language1: [bits_of_syntax1, bits_of_syntax2], language2: [bits_of_syntax3, bits_of_syntax4]}
self.syntaxBits = yaml.safe_load (self.syntaxFile)
        # keep the result so callers can read the detected language back
        self.language = self.disambiguate(path, possibleLanguages)
    def disambiguate(self, path, possibleLanguages):
        # checks the syntax strings of every possible language until it finds a match
        with open (path) as sourceCode:
            # read the file once up front: calling read() inside the loop would
            # exhaust the file after the first syntax string, so every later
            # membership test would silently run against an empty string
            contents = sourceCode.read()
        for lang in possibleLanguages:
            if lang not in self.syntaxBits.keys():
                continue #there are no stored syntax strings for that language
            for string in self.syntaxBits [lang]:
                if string in contents:
                    return lang
        return None
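# A minimal usage sketch (hypothetical path and candidate list; relies on the
# language attribute stored in __init__ above):
#
#   h = Heuristics('scripts/build', ['Python', 'Shell'])
#   print(h.language)   # one of the candidates, or None if nothing matched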
| jestlabs/polyglot | polyglot/heuristics.py | Python | mit | 1,381 |
from rest_framework.permissions import BasePermission
class HasValidProjectToken(BasePermission):
"""
Return True if the request has a valid project token.
"""
def has_permission(self, request, view):
return bool(request.auth)
| PersonalGenomesOrg/open-humans | private_sharing/api_permissions.py | Python | mit | 254 |
from xboxdrv_parser import Controller
from time import sleep
import os
def main ():
# Get input from the two analog sticks as yaw, throttle, roll, and pitch. Take the (0 - 255) input value and
# map it to a (-1 - 1) range.
controller = Controller (["X1", "Y1", "X2", "Y2", "L2", "R2", "X", "/\\", "[]"], ["yaw", "throttle", "roll", "pitch", "descend", "ascend", "takeover", "takeoff", "land"], (0, 255), (-1, 1))
#controller = Controller (["X1", "Y1", "X2", "Y2"])
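    # The (0, 255) -> (-1, 1) conversion is presumably a plain linear rescale:
    #   mapped = (raw / 255.0) * 2 - 1
    # so raw 0 -> -1.0, raw 255 -> 1.0, and the stick centre (~127.5) -> ~0.0.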
while True:
control_packet = controller.get_values ()
os.system("clear")
for i in control_packet:
print i, ": ", control_packet[i]
        # Update at roughly 100 messages a second (0.01s sleep per loop)
sleep (.01)
if __name__ == '__main__':
main()
| WSU-RoboticsClub/Tutorials | xbox_controller/xboxdrv/quadcopter_controller_example.py | Python | mit | 734 |
import django_filters
from rest_framework import filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(
view_name='customer-detail', field_name='customer__uuid'
)
customer_uuid = django_filters.UUIDFilter(field_name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
start_date = django_filters.DateFilter(field_name='created', lookup_expr='gt')
end_date = django_filters.DateFilter(field_name='created', lookup_expr='lt')
o = django_filters.OrderingFilter(fields=('created', 'year', 'month'))
class Meta:
model = models.Invoice
fields = ['created', 'year', 'month']
class PaymentProfileFilter(django_filters.FilterSet):
organization = core_filters.URLFilter(
view_name='customer-detail', field_name='organization__uuid'
)
organization_uuid = django_filters.UUIDFilter(field_name='organization__uuid')
payment_type = django_filters.MultipleChoiceFilter(
choices=models.PaymentType.CHOICES
)
o = django_filters.OrderingFilter(fields=('name', 'payment_type', 'is_active'))
class Meta:
model = models.PaymentProfile
fields = []
class PaymentProfileFilterBackend(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
if request.user.is_staff or request.user.is_support:
return queryset
return queryset.filter(is_active=True)
class PaymentFilter(django_filters.FilterSet):
profile = core_filters.URLFilter(
view_name='payment-profile-detail', field_name='profile__uuid'
)
profile_uuid = django_filters.UUIDFilter(field_name='profile__uuid')
class Meta:
model = models.Payment
fields = ['date_of_payment']
| opennode/nodeconductor-assembly-waldur | src/waldur_mastermind/invoices/filters.py | Python | mit | 1,887 |
import pytest
import os
import sys
from polyglotdb.io import inspect_ilg
from polyglotdb.io.helper import guess_type
from polyglotdb.exceptions import DelimiterError, ILGWordMismatchError
from polyglotdb import CorpusContext
def test_inspect_ilg(ilg_test_dir):
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
assert (len(parser.annotation_tiers) == 2)
assert (parser.annotation_tiers[1].trans_delimiter == '.')
def test_inspect_ilg_directory(ilg_test_dir):
parser = inspect_ilg(ilg_test_dir)
assert (len(parser.annotation_tiers) == 2)
@pytest.mark.xfail
def test_export_ilg(graph_db, export_test_dir):
export_path = os.path.join(export_test_dir, 'export_ilg.txt')
with CorpusContext('untimed', **graph_db) as c:
export_discourse_ilg(c, 'test', export_path,
annotations=['label', 'transcription'], words_per_line=3)
expected_lines = ['cats are cute',
'k.ae.t.s aa.r k.uw.t',
'dogs are too',
'd.aa.g.z aa.r t.uw',
'i guess',
'ay g.eh.s']
with open(export_path, 'r') as f:
for i, line in enumerate(f):
assert (line.strip() == expected_lines[i])
def test_ilg_basic(graph_db, ilg_test_dir):
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
with CorpusContext('basic_ilg', **graph_db) as c:
c.reset()
c.load(parser, basic_path)
# assert(c.lexicon['a'].frequency == 2)
def test_ilg_mismatched(graph_db, ilg_test_dir):
mismatched_path = os.path.join(ilg_test_dir, 'mismatched.txt')
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
with CorpusContext('mismatch', **graph_db) as c:
c.reset()
with pytest.raises(ILGWordMismatchError):
c.load(parser, mismatched_path)
| PhonologicalCorpusTools/PolyglotDB | tests/test_io_ilg.py | Python | mit | 1,972 |
import itertools
import json
import re
import flask
from flask import request
from web.cache import cache
import rethinkdb as r
import web.api.api_util as api_util
import db
import util
api = flask.Blueprint("api", __name__, url_prefix="/api")
r_conn = db.util.r_conn
def _should_skip_get_plugins_cache():
"""Whether the current request to /api/plugins should not be cached."""
page = int(request.args.get('page', 1))
search = request.args.get('query', '')
# Only cache empty searches for now.
# TODO(david): Also cache simple category and tag searches. May also want
# to actually use a proper cache backend like Redis so we can
# arbitrarily cache (right now we use an in-memory cache).
should_cache = search == '' and (1 <= page <= 10)
return not should_cache
def _make_get_plugins_cache_key():
"""Get a cache key for the /api/plugins route.
By default this is just request.path which ignores query params.
"""
page = int(request.args.get('page', 1))
search = request.args.get('query', '')
return '%s_%s_%s' % (request.path, page, search)
# TODO(david): Consider saving categories just as special tags. Would make
# search implementation simpler but determining which category a plugin
# belongs to harder. See discussion on
# http://phabricator.benalpert.com/D171
def _apply_category_filters(results, tokens):
"""Consumes and applies category filters (e.g. "cat:other") to results.
Arguments:
results: List of search result plugins.
tokens: Remaining search text tokens that have not been consumed.
Returns:
(results, tokens): Results that match the given category, and tokens
that have not been consumed.
"""
category_filter = lambda t: t.startswith('cat:')
category_tokens = filter(category_filter, tokens)
tokens = list(itertools.ifilterfalse(category_filter, tokens))
if category_tokens:
category_ids = set(t[len('cat:'):] for t in category_tokens)
results = filter(lambda plugin:
plugin['category'] in category_ids, results)
return results, tokens
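# For example, tokens ['cat:other', 'neat'] consume the category filter and
# return tokens == ['neat'], keeping only plugins whose category is "other".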
def _apply_tag_filters(results, tokens):
"""Consumes and applies tag filters (e.g. "tag:python") to search results.
Arguments:
results: List of search result plugins.
tokens: Remaining search text tokens that have not been consumed.
Returns:
(results, tokens): Results that match the given tag, and tokens
that have not been consumed.
"""
tag_filter = lambda t: t.startswith('tag:')
tag_tokens = filter(tag_filter, tokens)
tokens = list(itertools.ifilterfalse(tag_filter, tokens))
if tag_tokens:
required_tags = set(t[len('tag:'):] for t in tag_tokens)
results = filter(lambda plugin:
required_tags <= set(plugin['tags']), results)
return results, tokens
def _apply_keyword_filters(results, tokens):
"""Filters results that match the given keywords (tokens).
Arguments:
results: List of search result plugins.
tokens: Keywords to filter results on.
Returns:
List of plugins that match the given keywords.
"""
if tokens:
# Create a regex that matches a string S iff for each keyword K in
# `search` there is a corresponding word in S that begins with K.
tokens_regex = (r'\b%s' % re.escape(t) for t in tokens)
search_regex = re.compile('.*'.join(tokens_regex))
# Surprisingly, regex matching like this is slightly faster than
# prefix-matching two sorted lists of tokens.
results = filter(lambda plugin:
search_regex.search(plugin['keywords']), results)
return results
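# For example, tokens ['ja', 'py'] compile to the pattern r'\bja.*\bpy', so a
# plugin whose keywords contain "javascript python" matches: each token must
# prefix-match the start of some word, in sorted token order.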
@api.route('/plugins', methods=['GET'])
@cache.cached(timeout=60 * 60 * 25, key_prefix=_make_get_plugins_cache_key,
unless=_should_skip_get_plugins_cache)
def get_plugins():
RESULTS_PER_PAGE = 20
page = int(request.args.get('page', 1))
search = request.args.get('query', '')
results = get_search_index_cached()
if search:
tokens = [t.lower() for t in sorted(search.split())]
results, tokens = _apply_category_filters(results, tokens)
results, tokens = _apply_tag_filters(results, tokens)
results = _apply_keyword_filters(results, tokens)
count = len(results)
total_pages = (count + RESULTS_PER_PAGE - 1) / RESULTS_PER_PAGE # ceil
results = results[((page - 1) * RESULTS_PER_PAGE):
(page * RESULTS_PER_PAGE)]
return api_util.jsonify({
'plugins': results,
'total_pages': total_pages,
'total_results': count,
'results_per_page': RESULTS_PER_PAGE,
})
@api.route('/plugins/<slug>', methods=['GET'])
def get_plugin(slug):
plugin = r.table('plugins').get(slug).run(r_conn())
if plugin:
return api_util.jsonify(db.plugins.to_json(plugin))
else:
return api_util.api_not_found('No plugin with slug %s' % slug)
# TODO(david): Make it not so easy for an attacker to completely obliterate all
# of our tags, or at least be able to recover from it.
@api.route('/plugins/<slug>/tags', methods=['POST', 'PUT'])
def update_plugin_tags(slug):
data = json.loads(flask.request.data)
plugin = r.table('plugins').get(slug).run(r_conn())
if not plugin:
return api_util.api_not_found('No plugin with slug %s' % slug)
db.plugins.update_tags(plugin, data['tags'])
r.table('plugins').update(plugin).run(r_conn())
return api_util.jsonify({
'tags': plugin['tags']
})
@api.route('/tags', methods=['GET'])
@cache.cached(timeout=60 * 60)
def get_tags():
tags = r.table('tags').filter({}).run(r_conn())
return api_util.jsonify(list(tags))
@api.route('/categories', methods=['GET'])
@cache.cached(timeout=60 * 60)
def get_categories():
return api_util.jsonify(get_all_categories_cached())
@api.route('/plugins/<slug>/category/<category>', methods=['PUT'])
def update_plugin_category(slug, category):
plugin = r.table('plugins').get(slug).run(r_conn())
if not plugin:
return api_util.api_not_found('No plugin with slug %s' % slug)
if category not in (c['id'] for c in get_all_categories_cached()):
return api_util.api_bad_request('No such category %s' % category)
# TODO(david): Also update search index (stale cache)
plugin['category'] = category
r.table('plugins').update(plugin).run(r_conn())
return api_util.jsonify({
'category': plugin['category']
})
@api.route('/submit', methods=['POST'])
def submit_plugin():
plugin_data = flask.request.form.to_dict()
plugin_data['tags'] = json.loads(plugin_data['tags'])
db.submitted_plugins.insert(plugin_data)
plugin_markdown = "```\n%s\n```" % json.dumps(plugin_data, indent=4)
util.log_to_gitter("Someone just submitted a plugin!\n%s" % plugin_markdown)
return flask.redirect('/thanks-for-submitting')
@cache.cached(timeout=60 * 60 * 26, key_prefix='search_index')
def get_search_index_cached():
return db.plugins.get_search_index()
@cache.cached(timeout=60 * 60 * 27, key_prefix='all_categories')
def get_all_categories_cached():
return db.categories.get_all()
| vim-awesome/vim-awesome | web/api/api.py | Python | mit | 7,261 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-11 19:21
from __future__ import unicode_literals
from django.db import migrations, models
import prosody.utils
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Prosody',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('host', models.TextField(default=prosody.utils.getProsodyDomain)),
('user', models.TextField(db_index=True)),
('store', models.TextField(db_index=True)),
('key', models.TextField(db_index=True)),
('type', models.TextField(default='string')),
('value', models.TextField()),
],
options={
'db_table': 'prosody',
},
),
migrations.AlterUniqueTogether(
name='prosody',
unique_together=set([('user', 'store', 'key')]),
),
]
| Kromey/fbxnano | prosody/migrations/0001_initial.py | Python | mit | 1,099 |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
# -- Project information -----------------------------------------------------
project = 'pynYNAB'
copyright = '2018, rienafairefr'
author = 'rienafairefr'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'sphinxarg.ext'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'pynYNABdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pynYNAB.tex', 'pynYNAB Documentation',
'rienafairefr', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pynynab', 'pynYNAB Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pynYNAB', 'pynYNAB Documentation',
author, 'pynYNAB', 'One line description of project.',
'Miscellaneous'),
]
# -- Extension configuration ------------------------------------------------- | rienafairefr/pynYNAB | docs/conf.py | Python | mit | 4,876 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri May 3 15:48:55 2019
@author: epnevmatikakis
"""
from caiman.paths import caiman_datadir
from caiman.utils.utils import load_graph
import os
import numpy as np
try:
os.environ["KERAS_BACKEND"] = "tensorflow"
from tensorflow.keras.models import model_from_json
use_keras = True
except ModuleNotFoundError:
import tensorflow as tf
use_keras = False
def test_tf():
os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
try:
model_name = os.path.join(caiman_datadir(), 'model', 'cnn_model')
if use_keras:
model_file = model_name + ".json"
with open(model_file, 'r') as json_file:
print('USING MODEL:' + model_file)
loaded_model_json = json_file.read()
loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights(model_name + '.h5')
loaded_model.compile('sgd', 'mse')
else:
model_file = model_name + ".h5.pb"
loaded_model = load_graph(model_file)
except:
raise Exception('NN model could not be loaded. use_keras = ' + str(use_keras))
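# Smoke-test input: a batch of 10 random 50x50 single-channel patches,
# matching the (batch, height, width, channels) shape fed to the model below.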
A = np.random.randn(10, 50, 50, 1)
try:
if use_keras:
predictions = loaded_model.predict(A, batch_size=32)
else:
tf_in = loaded_model.get_tensor_by_name('prefix/conv2d_20_input:0')
tf_out = loaded_model.get_tensor_by_name('prefix/output_node0:0')
with tf.Session(graph=loaded_model) as sess:
predictions = sess.run(tf_out, feed_dict={tf_in: A})
except:
raise Exception('NN model could not be deployed. use_keras = ' + str(use_keras))
| simonsfoundation/CaImAn | caiman/tests/test_tensorflow.py | Python | gpl-2.0 | 1,733 |
from __future__ import division, absolute_import, unicode_literals
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from . import cmds
from . import gitcmds
from . import hotkeys
from . import icons
from . import qtutils
from . import utils
from .i18n import N_
from .widgets import completion
from .widgets import defs
from .widgets import filetree
from .widgets import standard
def diff_commits(parent, a, b, context=None):
"""Show a dialog for diffing two commits"""
dlg = Difftool(parent, a=a, b=b, context=context)
dlg.show()
dlg.raise_()
return dlg.exec_() == QtWidgets.QDialog.Accepted
def diff_expression(parent, expr,
create_widget=False, hide_expr=False,
focus_tree=False, context=None):
"""Show a diff dialog for diff expressions"""
dlg = Difftool(parent,
expr=expr, hide_expr=hide_expr,
focus_tree=focus_tree, context=context)
if create_widget:
return dlg
dlg.show()
dlg.raise_()
return dlg.exec_() == QtWidgets.QDialog.Accepted
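# A minimal usage sketch (hypothetical caller; both helpers block until the
# dialog is dismissed and return True when it was accepted):
#
#   if diff_commits(parent_widget, 'HEAD~1', 'HEAD'):
#       pass  # user accepted the comparison dialog
#   diff_expression(parent_widget, 'HEAD~3..HEAD', focus_tree=True)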
class Difftool(standard.Dialog):
def __init__(self, parent, a=None, b=None, expr=None, title=None,
hide_expr=False, focus_tree=False, context=None):
"""Show files with differences and launch difftool"""
standard.Dialog.__init__(self, parent=parent)
self.a = a
self.b = b
self.diff_expr = expr
self.context = context
if title is None:
title = N_('git-cola diff')
self.setWindowTitle(title)
self.setWindowModality(Qt.WindowModal)
self.expr = completion.GitRefLineEdit(parent=self)
if expr is not None:
self.expr.setText(expr)
if expr is None or hide_expr:
self.expr.hide()
self.tree = filetree.FileTree(parent=self)
self.diff_button = qtutils.create_button(text=N_('Compare'),
icon=icons.diff(),
enabled=False,
default=True)
self.diff_button.setShortcut(hotkeys.DIFF)
self.diff_all_button = qtutils.create_button(text=N_('Compare All'),
icon=icons.diff())
self.edit_button = qtutils.edit_button()
self.edit_button.setShortcut(hotkeys.EDIT)
self.close_button = qtutils.close_button()
self.button_layout = qtutils.hbox(defs.no_margin, defs.spacing,
self.close_button,
qtutils.STRETCH,
self.edit_button,
self.diff_all_button,
self.diff_button)
self.main_layout = qtutils.vbox(defs.margin, defs.spacing,
self.expr, self.tree,
self.button_layout)
self.setLayout(self.main_layout)
self.tree.itemSelectionChanged.connect(self.tree_selection_changed)
self.tree.itemDoubleClicked.connect(self.tree_double_clicked)
self.tree.up.connect(self.focus_input)
self.expr.textChanged.connect(self.text_changed)
self.expr.activated.connect(self.focus_tree)
self.expr.down.connect(self.focus_tree)
self.expr.enter.connect(self.focus_tree)
qtutils.connect_button(self.diff_button, self.diff)
qtutils.connect_button(self.diff_all_button,
lambda: self.diff(dir_diff=True))
qtutils.connect_button(self.edit_button, self.edit)
qtutils.connect_button(self.close_button, self.close)
qtutils.add_action(self, 'Focus Input', self.focus_input, hotkeys.FOCUS)
qtutils.add_action(self, 'Diff All', lambda: self.diff(dir_diff=True),
hotkeys.CTRL_ENTER, hotkeys.CTRL_RETURN)
qtutils.add_close_action(self)
self.init_state(None, self.resize_widget, parent)
self.refresh()
if focus_tree:
self.focus_tree()
def resize_widget(self, parent):
"""Set the initial size of the widget"""
width, height = qtutils.default_size(parent, 720, 420)
self.resize(width, height)
def focus_tree(self):
"""Focus the files tree"""
self.tree.setFocus()
def focus_input(self):
"""Focus the expression input"""
self.expr.setFocus()
def text_changed(self, txt):
self.diff_expr = txt
self.refresh()
def refresh(self):
"""Redo the diff when the expression changes"""
if self.diff_expr is not None:
self.diff_arg = utils.shell_split(self.diff_expr)
elif self.b is None:
self.diff_arg = [self.a]
else:
self.diff_arg = [self.a, self.b]
self.refresh_filenames()
def refresh_filenames(self):
if self.a and self.b is None:
filenames = gitcmds.diff_index_filenames(self.a)
else:
filenames = gitcmds.diff(self.diff_arg)
self.tree.set_filenames(filenames, select=True)
def tree_selection_changed(self):
has_selection = self.tree.has_selection()
self.diff_button.setEnabled(has_selection)
self.diff_all_button.setEnabled(has_selection)
def tree_double_clicked(self, item, column):
path = self.tree.filename_from_item(item)
left, right = self._left_right_args()
cmds.difftool_launch(left=left, right=right, paths=[path],
context=self.context)
def diff(self, dir_diff=False):
paths = self.tree.selected_filenames()
left, right = self._left_right_args()
cmds.difftool_launch(left=left, right=right, paths=paths,
dir_diff=dir_diff, context=self.context)
def _left_right_args(self):
if self.diff_arg:
left = self.diff_arg[0]
else:
left = None
if len(self.diff_arg) > 1:
right = self.diff_arg[1]
else:
right = None
return (left, right)
def edit(self):
paths = self.tree.selected_filenames()
cmds.do(cmds.Edit, paths)
| Vdragon/git-cola | cola/difftool.py | Python | gpl-2.0 | 6,355 |
# Copyright (C) 2012 Intel Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for cmake.py."""
import unittest2 as unittest
from cmake import CMakeChecker
class CMakeCheckerTest(unittest.TestCase):
"""Tests CMakeChecker class."""
def test_init(self):
"""Test __init__() method."""
def _mock_handle_style_error(self):
pass
checker = CMakeChecker("foo.cmake", _mock_handle_style_error)
self.assertEqual(checker._handle_style_error, _mock_handle_style_error)
def test_check(self):
"""Test check() method."""
errors = []
def _mock_handle_style_error(line_number, category, confidence,
message):
error = (line_number, category, confidence, message)
errors.append(error)
checker = CMakeChecker("foo.cmake", _mock_handle_style_error)
lines = [
'# This file is sample input for cmake_unittest.py and includes below problems:\n',
'IF ()',
'\tmessage("Error line with Tab")\n',
' message("Error line with endding spaces") \n',
' message( "Error line with space after (")\n',
' message("Error line with space before (" )\n',
' MESSAGE("Error line with upper case non-condtional command")\n',
' MESSage("Error line with upper case non-condtional command")\n',
' message("correct message line")\n',
'ENDif ()\n',
'\n',
'if()\n',
'endif ()\n',
'\n',
'macro ()\n',
'ENDMacro()\n',
'\n',
'function ()\n',
'endfunction()\n',
'\n',
'set(name a)\n',
'set(name a b c)\n',
'set(name a\n',
'b)\n',
'set(name',
'abc\n',
')\n',
'list(APPEND name a)\n',
'list(APPEND name\n',
'a\n',
'a\n',
')\n',
'list(APPEND name\n',
'b\n',
'a\n',
'\n',
'c/a.a\n',
'\n',
'c/b/a.a\n',
'${aVariable}\n',
'\n',
'c/c.c\n',
'\n',
'c/b/a.a\n',
')\n',
'list(REMOVE_ITEM name a)\n',
'list(REMOVE_ITEM name\n',
'a\n',
'\n',
'b\n',
')\n',
'list(REMOVE_ITEM name\n',
'a/a.a\n',
'a/b.b\n',
'b/a.a\n',
'\n',
'\n',
'c/a.a\n',
')\n',
]
checker.check(lines)
self.maxDiff = None
self.assertEqual(errors, [
(3, 'whitespace/tab', 5, 'Line contains tab character.'),
(2, 'command/lowercase', 5, 'Use lowercase command "if"'),
(4, 'whitespace/trailing', 5, 'No trailing spaces'),
(5, 'whitespace/parentheses', 5, 'No space after "("'),
(6, 'whitespace/parentheses', 5, 'No space before ")"'),
(7, 'command/lowercase', 5, 'Use lowercase command "message"'),
(8, 'command/lowercase', 5, 'Use lowercase command "message"'),
(10, 'command/lowercase', 5, 'Use lowercase command "endif"'),
(12, 'whitespace/parentheses', 5, 'One space between command "if" and its parentheses, should be "if ("'),
(15, 'whitespace/parentheses', 5, 'No space between command "macro" and its parentheses, should be "macro("'),
(16, 'command/lowercase', 5, 'Use lowercase command "endmacro"'),
(18, 'whitespace/parentheses', 5, 'No space between command "function" and its parentheses, should be "function("'),
(23, 'list/parentheses', 5, 'First listitem "a" should be in a new line.'),
(24, 'list/parentheses', 5, 'The parentheses after the last listitem "b" should be in a new line.'),
(31, 'list/duplicate', 5, 'The item "a" should be added only once to the list.'),
(35, 'list/order', 5, 'Alphabetical sorting problem. "a" should be before "b".'),
(41, 'list/order', 5, 'Alphabetical sorting problem. "c/c.c" should be before "c/b/a.a".'),
(49, 'list/emptyline', 5, 'There should be no empty line between "a" and "b".'),
(54, 'list/emptyline', 5, 'There should be exactly one empty line instead of 0 between "a/b.b" and "b/a.a".'),
(57, 'list/emptyline', 5, 'There should be exactly one empty line instead of 2 between "b/a.a" and "c/a.a".'),
])
| loveyoupeng/rt | modules/web/src/main/native/Tools/Scripts/webkitpy/style/checkers/cmake_unittest.py | Python | gpl-2.0 | 5,918 |
#!/usr/bin/python
# -*- coding: ISO-8859-15 -*-
import rlib
myreport = rlib.Rlib()
print rlib.version
myreport.add_datasource_xml("local_xml")
myreport.add_query_as("local_xml", "data.xml", "data")
myreport.add_report("array.xml")
myreport.set_output_format_from_text("pdf")
myreport.execute()
print myreport.get_content_type_as_text()
open('xml.pdf','wb').write(myreport.get_output())
| SICOM/rlib | src/examples/python/xml.py | Python | gpl-2.0 | 388 |
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigNumber, ConfigSelection, ConfigSelectionNumber, getConfigListEntry
from Plugins.Plugin import PluginDescriptor
from enigma import setAnimation_current, setAnimation_speed, setAnimation_current_listbox
g_animation_paused = False
g_orig_show = None
g_orig_doClose = None
config.misc.window_animation_default = ConfigNumber(default = 6)
config.misc.window_animation_speed = ConfigSelectionNumber(1, 30, 1, default = 20)
config.misc.listbox_animation_default = ConfigSelection(default = "0", choices = [("0", _("Disable")), ("1", _("Enable")), ("2", _("Same behavior as current animation"))])
class AnimationSetupConfig(ConfigListScreen, Screen):
skin = """
<screen position="center,center" size="600,140" title="Animation Settings">
<widget name="config" position="0,0" size="600,100" scrollbarMode="showOnDemand" />
<ePixmap pixmap="skin_default/buttons/red.png" position="0,100" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,100" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,100" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,100" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
<widget source="key_green" render="Label" position="140,100" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
<widget source="key_yellow" render="Label" position="280,100" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
</screen>
"""
def __init__(self, session):
self.session = session
self.entrylist = []
Screen.__init__(self, session)
ConfigListScreen.__init__(self, self.entrylist)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions",], {
"ok" : self.keyGreen,
"green" : self.keyGreen,
"yellow" : self.keyYellow,
"red" : self.keyRed,
"cancel" : self.keyRed,
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText(_("Default"))
self.makeConfigList()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(_('Animation Setup'))
def keyGreen(self):
config.misc.window_animation_speed.save()
setAnimation_speed(int(config.misc.window_animation_speed.value))
config.misc.listbox_animation_default.save()
setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
self.close()
def keyRed(self):
config.misc.window_animation_speed.cancel()
config.misc.listbox_animation_default.cancel()
self.close()
def keyYellow(self):
config.misc.window_animation_speed.value = 20
config.misc.listbox_animation_default.value = "0"
self.makeConfigList()
def keyLeft(self):
ConfigListScreen.keyLeft(self)
def keyRight(self):
ConfigListScreen.keyRight(self)
def makeConfigList(self):
self.entrylist = []
self.entrylist.append(getConfigListEntry(_("Animation Speed"), config.misc.window_animation_speed))
self.entrylist.append(getConfigListEntry(_("Enable Focus Animation"), config.misc.listbox_animation_default))
self["config"].list = self.entrylist
self["config"].l.setList(self.entrylist)
class AnimationSetupScreen(Screen):
animationSetupItems = [
{"idx":0, "name":_("Disable Animations")},
{"idx":1, "name":_("Simple fade")},
{"idx":2, "name":_("Grow drop")},
{"idx":3, "name":_("Grow from left")},
{"idx":4, "name":_("Popup")},
{"idx":5, "name":_("Slide drop")},
{"idx":6, "name":_("Slide left to right")},
{"idx":7, "name":_("Slide top to bottom")},
{"idx":8, "name":_("Stripes")},
]
skin = """
<screen name="AnimationSetup" position="center,center" size="580,400" title="Animation Setup">
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
<widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" \
font="Regular;20" halign="center" valign="center" transparent="1" />
<widget name="list" position="10,60" size="560,364" scrollbarMode="showOnDemand" />
<widget source="introduction" render="Label" position="0,370" size="560,40" \
font="Regular;20" valign="center" transparent="1" />
</screen>
"""
def __init__(self, session):
self.skin = AnimationSetupScreen.skin
Screen.__init__(self, session)
self.animationList = []
self["introduction"] = StaticText(_("* current animation"))
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText(_("Settings"))
self["key_blue"] = StaticText(_("Preview"))
self["actions"] = ActionMap(["SetupActions", "ColorActions"],
{
"cancel": self.keyclose,
"save": self.ok,
"ok" : self.ok,
"yellow": self.config,
"blue": self.preview
}, -3)
self["list"] = MenuList(self.animationList)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
l = []
for x in self.animationSetupItems:
key = x.get("idx", 0)
name = x.get("name", "??")
if key == config.misc.window_animation_default.value:
name = "* %s" % (name)
l.append( (name, key) )
self["list"].setList(l)
def ok(self):
current = self["list"].getCurrent()
if current:
key = current[1]
config.misc.window_animation_default.value = key
config.misc.window_animation_default.save()
setAnimation_current(key)
setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
self.close()
def keyclose(self):
setAnimation_current(config.misc.window_animation_default.value)
setAnimation_speed(int(config.misc.window_animation_speed.value))
setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
self.close()
def config(self):
self.session.open(AnimationSetupConfig)
def preview(self):
current = self["list"].getCurrent()
if current:
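# Temporarily lift the global pause flag so the chosen animation actually
# plays during the preview, then restore the previous pause state afterwards.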
global g_animation_paused
tmp = g_animation_paused
g_animation_paused = False
setAnimation_current(current[1])
self.session.open(MessageBox, current[0], MessageBox.TYPE_INFO, timeout=3)
g_animation_paused = tmp
def checkAttrib(self, paused):
if g_animation_paused is paused:
try:
for (attr, value) in self.skinAttributes:
if attr == "animationPaused" and value in ("1", "on"):
return True
except:
pass
return False
def screen_show(self):
global g_animation_paused
if g_animation_paused:
setAnimation_current(0)
g_orig_show(self)
if checkAttrib(self, False):
g_animation_paused = True
def screen_doClose(self):
global g_animation_paused
if checkAttrib(self, True):
g_animation_paused = False
setAnimation_current(config.misc.window_animation_default.value)
g_orig_doClose(self)
def animationSetupMain(session, **kwargs):
session.open(AnimationSetupScreen)
def startAnimationSetup(menuid):
if menuid == "system":
return [( _("Animations"), animationSetupMain, "animation_setup", None)]
return []
def sessionAnimationSetup(session, reason, **kwargs):
setAnimation_current(config.misc.window_animation_default.value)
setAnimation_speed(int(config.misc.window_animation_speed.value))
setAnimation_current_listbox(int(config.misc.listbox_animation_default.value))
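# Monkey-patch Screen.show/doClose (saving the originals exactly once) so
# that screens whose skin sets animationPaused="1"/"on" can suspend window
# animations for as long as they are open (see checkAttrib above).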
global g_orig_show, g_orig_doClose
if g_orig_show is None:
g_orig_show = Screen.show
if g_orig_doClose is None:
g_orig_doClose = Screen.doClose
Screen.show = screen_show
Screen.doClose = screen_doClose
def Plugins(**kwargs):
return [
PluginDescriptor(
name = "Animations",
description = "Setup UI animations",
where = PluginDescriptor.WHERE_MENU,
needsRestart = False,
fnc = startAnimationSetup),
PluginDescriptor(
where = PluginDescriptor.WHERE_SESSIONSTART,
needsRestart = False,
fnc = sessionAnimationSetup),
]
| Taapat/enigma2-openpli-vuplus | lib/python/Plugins/SystemPlugins/AnimationSetup/plugin.py | Python | gpl-2.0 | 9,065 |
# -*- coding: utf-8 -*-
'''
zen Add-on
Copyright (C) 2016 zen
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,json,urlparse,base64,random
from resources.lib.modules import client
from resources.lib.modules import control
class trailer:
def __init__(self):
self.base_link = 'http://www.youtube.com'
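# Two YouTube Data API keys stored base64-encoded (light obfuscation); one is
# chosen at random per instance, presumably to spread quota across the keys.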
self.key_link = random.choice(['QUl6YVN5RDd2aFpDLTYta2habTVuYlVyLTZ0Q0JRQnZWcnFkeHNz', 'QUl6YVN5Q2RiNEFNenZpVG0yaHJhSFY3MXo2Nl9HNXBhM2ZvVXd3'])
self.key_link = '&key=%s' % base64.urlsafe_b64decode(self.key_link)
self.search_link = 'https://www.googleapis.com/youtube/v3/search?part=snippet&type=video&maxResults=5&q=%s'
self.youtube_search = 'https://www.googleapis.com/youtube/v3/search?q='
self.youtube_watch = 'http://www.youtube.com/watch?v=%s'
def play(self, name, url=None):
try:
url = self.worker(name, url)
if url == None: return
title = control.infoLabel('listitem.title')
if title == '': title = control.infoLabel('listitem.label')
icon = control.infoLabel('listitem.icon')
item = control.item(path=url, iconImage=icon, thumbnailImage=icon)
try: item.setArt({'icon': icon})
except: pass
item.setInfo(type='Video', infoLabels = {'title': title})
control.player.play(url, item)
except:
pass
def worker(self, name, url):
try:
if url.startswith(self.base_link):
url = self.resolve(url)
if url == None: raise Exception()
return url
elif not url.startswith('http://'):
url = self.youtube_watch % url
url = self.resolve(url)
if url == None: raise Exception()
return url
else:
raise Exception()
except:
query = name + ' trailer'
query = self.youtube_search + query
url = self.search(query)
if url == None: return
return url
def search(self, url):
try:
query = urlparse.parse_qs(urlparse.urlparse(url).query)['q'][0]
url = self.search_link % urllib.quote_plus(query) + self.key_link
result = client.request(url)
items = json.loads(result)['items']
items = [(i['id']['videoId']) for i in items]
for url in items:
url = self.resolve(url)
if not url is None: return url
except:
return
def resolve(self, url):
try:
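# Accept full watch URLs, short youtu.be-style links or bare video ids by
# stripping path, query string and extra parameters down to the id itself.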
id = url.split('?v=')[-1].split('/')[-1].split('?')[0].split('&')[0]
result = client.request('http://www.youtube.com/watch?v=%s' % id)
message = client.parseDOM(result, 'div', attrs = {'id': 'unavailable-submessage'})
message = ''.join(message)
alert = client.parseDOM(result, 'div', attrs = {'id': 'watch7-notification-area'})
if len(alert) > 0: raise Exception()
if re.search('[a-zA-Z]', message): raise Exception()
url = 'plugin://plugin.video.youtube/play/?video_id=%s' % id
return url
except:
return
| Coelhon/MasterRepo.repository | plugin.video.zen/resources/lib/modules/trailer.py | Python | gpl-2.0 | 3,897 |
# -*- python -*-
#
# wireshark_gen.py (part of idl2wrs)
#
# Author : Frank Singleton (frank.singleton@ericsson.com)
#
# Copyright (C) 2001 Frank Singleton, Ericsson Inc.
#
# This file is a backend to "omniidl", used to generate "Wireshark"
# dissectors from CORBA IDL descriptions. The output language generated
# is "C". It will generate code to use the GIOP/IIOP get_CDR_XXX API.
#
# Please see packet-giop.h in Wireshark distro for API description.
# Wireshark is available at https://www.wireshark.org/
#
# Omniidl is part of the OmniOrb distribution, and is available at
# http://omniorb.sourceforge.net
#
# SPDX-License-Identifier: GPL-2.0-or-later
# Description:
#
# Omniidl Back-end which parses an IDL list of "Operation" nodes
# passed from wireshark_be2.py and generates "C" code for compiling
# as a dissector for Wireshark.
#
#
# Strategy (sneaky but ...)
#
# problem: I don't know what variables to declare until AFTER the helper functions
# have been built, so ...
#
# There are 2 passes through genHelpers, the first one is there just to
# make sure the fn_hash data struct is populated properly.
# The second pass is the real thing, generating code and declaring
# variables (from the 1st pass) properly.
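#
# Illustrative shape of fn_hash after the first pass (operation names are
# hypothetical; the declaration strings come from the c_* templates on the
# class below):
#
# fn_hash = {
#     "Penguin_Echo_echoString": ["guint32 u_octet4;", "const gchar *seq = NULL;"],
#     "Penguin_Echo_get_width": ["guint32 u_octet4;"],
# }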
"""Wireshark IDL compiler back-end."""
from __future__ import print_function
import collections
import tempfile
from omniidl import idlast, idltype, idlutil, output
# Output class, generates "C" src code for the sub-dissector
#
# in:
#
#
# self - me
# st - output stream
# node - a reference to an Operations object.
# name - scoped name (Module::Module::Interface:: .. ::Operation)
# TODO -- FS
#
# 1. generate hf[] data for searchable fields (but what is searchable?) [done, could be improved]
# 2. add item instead of add_text() [done]
# 3. sequence handling [done]
# 4. User Exceptions [done]
# 5. Fix arrays, and structs containing arrays [done]
# 6. Handle pragmas.
# 7. Exceptions can be common to many operations, so handle them outside the
# operation helper functions [done]
# 8. Automatic variable declaration [done, improve, still get some collisions. Add variable delegator function]
# For example, multidimensional arrays.
# 9. wchar and wstring handling [giop API needs improving]
# 10. Support Fixed [done]
# 11. Support attributes (get/set) [started, needs language mapping option, perhaps wireshark GUI option
# to set the attribute function prefix or suffix ? ] For now the prefix is "_get" and "_set"
# eg: attribute string apple => _get_apple and _set_apple
#
# 12. Implement IDL "union" code [done]
# 13. Implement support for plugins [done]
# 14. Don't generate code for empty operations (cf: exceptions without members)
# 15. Generate code to display Enums numerically and symbolically [done]
# 16. Place structs/unions in subtrees
# 17. Recursive struct and union handling [done]
# 18. Improve variable naming for display (eg: structs, unions etc) [done]
#
# Also test, Test, TEST
# Strategy:
# For every operation and attribute do
# For return val and all parameters do
# find basic IDL type for each parameter
# output get_CDR_xxx
# output exception handling code
# output attribute handling code
class wireshark_gen_C:
# Some string constants for our templates
c_u_octet8 = "guint64 u_octet8;"
c_s_octet8 = "gint64 s_octet8;"
c_u_octet4 = "guint32 u_octet4;"
c_s_octet4 = "gint32 s_octet4;"
c_u_octet2 = "guint16 u_octet2;"
c_s_octet2 = "gint16 s_octet2;"
c_u_octet1 = "guint8 u_octet1;"
c_s_octet1 = "gint8 s_octet1;"
c_float = "gfloat my_float;"
c_double = "gdouble my_double;"
c_seq = "const gchar *seq = NULL;" # pointer to buffer of gchars
c_i = "guint32 i_" # loop index
c_i_lim = "guint32 u_octet4_loop_" # loop limit
c_u_disc = "guint32 disc_u_" # unsigned int union discriminant variable name (enum)
c_s_disc = "gint32 disc_s_" # signed int union discriminant variable name (other cases, except Enum)
def __init__(self, st, protocol_name, dissector_name, description, debug=False, aggressive=False):
self.DEBUG = debug
self.AGGRESSIVE = aggressive
self.st = output.Stream(tempfile.TemporaryFile(mode="w"), 4) # for first pass only
self.st_save = st # where 2nd pass should go
self.protoname = protocol_name # Protocol Name (eg: ECHO)
self.dissname = dissector_name # Dissector name (eg: echo)
self.description = description # Detailed Protocol description (eg: Echo IDL Example)
self.exlist = [] # list of exceptions used in operations.
#self.curr_sname # scoped name of current opnode or exnode I am visiting, used for generating "C" var declares
self.fn_hash = {} # top level hash to contain key = function/exception and val = list of variable declarations
# ie a hash of lists
self.fn_hash_built = 0 # flag to indicate the 1st pass is complete, and the fn_hash is correctly
# populated with operations/vars and exceptions/vars
def genCode(self, oplist, atlist, enlist, stlist, unlist): # operation, attribute, enums, struct and union lists
"""Main entry point, controls sequence of generated code."""
# sneaky .. call it now, to populate the fn_hash
# so when I come to that exception later, I have the variables to
# declare already.
self.genHelpers(oplist, stlist, unlist)
self.genExceptionHelpers(oplist)
self.genAttributeHelpers(atlist)
self.fn_hash_built = 1 # DONE, so now I know, see genOperation()
self.st = self.st_save
self.genHeader() # initial dissector comments
self.genWrsCopyright()
self.genGPL()
self.genIncludes()
self.genPrototype()
self.genProtocol()
self.genDeclares(oplist, atlist, enlist, stlist, unlist)
if len(atlist) > 0:
self.genAtList(atlist) # string constant declares for Attributes
if len(enlist) > 0:
self.genEnList(enlist) # string constant declares for Enums
self.genExceptionHelpers(oplist) # helper function to decode user exceptions that have members
self.genExceptionDelegator(oplist) # finds the helper function to decode a user exception
if len(atlist) > 0:
self.genAttributeHelpers(atlist) # helper function to decode "attributes"
self.genHelpers(oplist, stlist, unlist) # operation, struct and union decode helper functions
self.genMainEntryStart(oplist)
self.genOpDelegator(oplist)
self.genAtDelegator(atlist)
self.genMainEntryEnd()
self.gen_proto_register(oplist, atlist, stlist, unlist)
self.gen_proto_reg_handoff(oplist)
# All the dissectors are now built-in
#self.gen_plugin_register()
#self.dumpvars() # debug
self.genModelines()
def genHeader(self):
"""Generate Standard Wireshark Header Comments"""
self.st.out(self.template_Header, dissector_name=self.dissname)
if self.DEBUG:
print("//XXX genHeader")
def genWrsCopyright(self):
if self.DEBUG:
print("//XXX genWrsCopyright")
self.st.out(self.template_wireshark_copyright)
def genModelines(self):
if self.DEBUG:
print("//XXX genModelines")
self.st.out(self.template_Modelines)
def genGPL(self):
if self.DEBUG:
print("//XXX genGPL")
self.st.out(self.template_GPL)
def genIncludes(self):
if self.DEBUG:
print("//XXX genIncludes")
self.st.out(self.template_Includes)
def genOpDeclares(self, op):
"""" Generate hf variables for operation filters
in: opnode ( an operation node)
"""
if self.DEBUG:
print("//XXX genOpDeclares")
print("//XXX return type = ", op.returnType().kind())
sname = self.namespace(op, "_")
rt = op.returnType()
if rt.kind() != idltype.tk_void:
if rt.kind() == idltype.tk_alias: # a typdef return val possibly ?
#self.get_CDR_alias(rt, rt.name())
if rt.unalias().kind() == idltype.tk_sequence:
self.st.out(self.template_hf, name=sname + "_return_loop")
if self.isSeqNativeType(rt.unalias().seqType()) or self.AGGRESSIVE:
self.st.out(self.template_hf, name=sname + "_return")
elif (rt.unalias().kind() != idltype.tk_struct and
rt.unalias().kind() != idltype.tk_objref and
rt.unalias().kind() != idltype.tk_any):
self.st.out(self.template_hf, name=sname + "_return")
elif (rt.kind() != idltype.tk_struct and
rt.kind() != idltype.tk_objref and
rt.kind() != idltype.tk_union and
rt.kind() != idltype.tk_any):
self.st.out(self.template_hf, name=sname + "_return")
for p in op.parameters():
if p.paramType().unalias().kind() == idltype.tk_sequence:
self.st.out(self.template_hf, name=sname + "_" + p.identifier() + "_loop")
if (self.isSeqNativeType(p.paramType().unalias().seqType())) or self.AGGRESSIVE:
self.st.out(self.template_hf, name=sname + "_" + p.identifier())
elif (p.paramType().unalias().kind() != idltype.tk_any and
p.paramType().unalias().kind() != idltype.tk_struct and
p.paramType().unalias().kind() != idltype.tk_objref and
p.paramType().unalias().kind() != idltype.tk_union):
if p.paramType().unalias().kind() == idltype.tk_wchar:
self.st.out(self.template_hf, name=sname + "_" + p.identifier() + "_len")
self.st.out(self.template_hf, name=sname + "_" + p.identifier())
def genAtDeclares(self, at):
"""Generate hf variables for attributes
in: at ( an attribute)
"""
if self.DEBUG:
print("//XXX genAtDeclares")
for decl in at.declarators():
sname = self.namespace(decl, "_")
self.st.out(self.template_hf, name="get" + "_" + sname + "_" + decl.identifier())
if self.AGGRESSIVE:
self.st.out(self.template_hf, name="get" + "_" + sname + "_" + decl.identifier()+"_loop")
if not at.readonly():
self.st.out(self.template_hf, name="set" + "_" + sname + "_" + decl.identifier())
if self.AGGRESSIVE:
self.st.out(self.template_hf, name="set" + "_" + sname + "_" + decl.identifier()+"_loop")
def genStDeclares(self, st):
"""Generate hf variables for structs
in: st ( a struct)
"""
if self.DEBUG:
print("//XXX genStDeclares")
sname = self.namespace(st, "_")
for m in st.members():
if (self.isSeqNativeType(m.memberType())
or m.memberType().unalias().kind() == idltype.tk_sequence
or m.memberType().unalias().kind() == idltype.tk_alias):
for decl in m.declarators():
if m.memberType().unalias().kind() == idltype.tk_sequence:
self.st.out(self.template_hf, name=sname + "_" + decl.identifier() + "_loop")
if (self.isSeqNativeType(m.memberType().unalias().seqType())) or self.AGGRESSIVE:
self.st.out(self.template_hf, name=sname + "_" + decl.identifier())
else:
if m.memberType().unalias().kind() == idltype.tk_wchar:
self.st.out(self.template_hf, name=sname + "_" + decl.identifier() + "_len")
self.st.out(self.template_hf, name=sname + "_" + decl.identifier())
def genExDeclares(self, ex):
"""Generate hf variables for user exception filters
in: exnode ( an exception node)
"""
if self.DEBUG:
print("//XXX genExDeclares")
sname = self.namespace(ex, "_")
for m in ex.members():
for decl in m.declarators():
if m.memberType().unalias().kind() == idltype.tk_sequence:
if self.isSeqNativeType(m.memberType().unalias().seqType()):
self.st.out(self.template_hf, name=sname + "_" + decl.identifier())
self.st.out(self.template_hf, name=sname + "_" + decl.identifier() + "_loop")
elif m.memberType().unalias().kind() != idltype.tk_struct:
self.st.out(self.template_hf, name=sname + "_" + decl.identifier())
def genUnionDeclares(self, un):
"""Generate hf variables for union filters
in: un ( an union)
"""
if self.DEBUG:
print("//XXX genUnionDeclares")
sname = self.namespace(un, "_")
self.st.out(self.template_hf, name=sname + "_" + un.identifier())
for uc in un.cases(): # for all UnionCase objects in this union
# TODO: Is this loop necessary? cl is not used
for cl in uc.labels(): # for all Caselabel objects in this UnionCase
if uc.caseType().unalias().kind() == idltype.tk_sequence:
self.st.out(self.template_hf, name=sname + "_" + uc.declarator().identifier() + "_loop")
if self.isSeqNativeType(uc.caseType().unalias().seqType()):
self.st.out(self.template_hf, name=sname + "_" + uc.declarator().identifier())
elif self.isSeqNativeType(uc.caseType()):
if uc.caseType().unalias().kind() == idltype.tk_wchar:
self.st.out(self.template_hf, name=sname + "_" + uc.declarator().identifier() + "_len")
self.st.out(self.template_hf, name=sname + "_" + uc.declarator().identifier())
def genExpertInfoDeclares(self):
"""Generate ei variables for expert info filters"""
if self.DEBUG:
print("//XXX genExpertInfoDeclares")
self.st.out(self.template_proto_register_ei_filters, dissector_name=self.dissname)
def genDeclares(self, oplist, atlist, enlist, stlist, unlist):
"""generate function prototypes if required
Currently this is used for struct and union helper function declarations.
"""
if self.DEBUG:
print("//XXX genDeclares")
# prototype for operation filters
self.st.out(self.template_hf_operations)
# operation specific filters
if len(oplist) > 0:
self.st.out(self.template_proto_register_op_filter_comment)
for op in oplist:
self.genOpDeclares(op)
# attribute filters
if len(atlist) > 0:
self.st.out(self.template_proto_register_at_filter_comment)
for at in atlist:
self.genAtDeclares(at)
# struct filters
if len(stlist) > 0:
self.st.out(self.template_proto_register_st_filter_comment)
for st in stlist:
self.genStDeclares(st)
# exception List filters
exlist = self.get_exceptionList(oplist) # grab list of exception nodes
if len(exlist) > 0:
self.st.out(self.template_proto_register_ex_filter_comment)
for ex in exlist:
if ex.members(): # only if has members
self.genExDeclares(ex)
# union filters
if len(unlist) > 0:
self.st.out(self.template_proto_register_un_filter_comment)
for un in unlist:
self.genUnionDeclares(un)
# expert info filters
self.genExpertInfoDeclares()
# prototype for start_dissecting()
self.st.out(self.template_prototype_start_dissecting)
# struct prototypes
if len(stlist):
self.st.out(self.template_prototype_struct_start)
for st in stlist:
#print st.repoId()
sname = self.namespace(st, "_")
self.st.out(self.template_prototype_struct_body, stname=st.repoId(), name=sname)
self.st.out(self.template_prototype_struct_end)
# union prototypes
if len(unlist):
self.st.out(self.template_prototype_union_start)
for un in unlist:
sname = self.namespace(un, "_")
self.st.out(self.template_prototype_union_body, unname=un.repoId(), name=sname)
self.st.out(self.template_prototype_union_end)
def genPrototype(self):
self.st.out(self.template_prototype, dissector_name=self.dissname)
def genProtocol(self):
self.st.out(self.template_protocol, dissector_name=self.dissname)
self.st.out(self.template_init_boundary)
def genMainEntryStart(self, oplist):
self.st.out(self.template_main_dissector_start, dissname=self.dissname, disprot=self.protoname)
self.st.inc_indent()
self.st.out(self.template_main_dissector_switch_msgtype_start)
self.st.out(self.template_main_dissector_switch_msgtype_start_request_reply)
self.st.inc_indent()
def genMainEntryEnd(self):
self.st.out(self.template_main_dissector_switch_msgtype_end_request_reply)
self.st.dec_indent()
self.st.out(self.template_main_dissector_switch_msgtype_all_other_msgtype)
self.st.dec_indent()
self.st.out(self.template_main_dissector_end)
# NOTE: Mapping of attributes to operation(function) names is tricky.
#
# The actual accessor function names are language-mapping specific. The attribute name
# is subject to OMG IDL's name scoping rules; the accessor function names are
# guaranteed not to collide with any legal operation names specifiable in OMG IDL.
#
# eg:
#
# static const char get_Penguin_Echo_get_width_at[] = "get_width" ;
# static const char set_Penguin_Echo_set_width_at[] = "set_width" ;
#
# or:
#
# static const char get_Penguin_Echo_get_width_at[] = "_get_width" ;
# static const char set_Penguin_Echo_set_width_at[] = "_set_width" ;
#
# TODO: Implement some language dependent templates to handle naming conventions
# language <=> attribute. for C, C++. Java etc
#
# OR, just add a runtime GUI option to select language binding for attributes -- FS
def genAtList(self, atlist):
"""in: atlist
out: C code for IDL attribute declarations.
ie: def genAtlist(self,atlist,language)
"""
self.st.out(self.template_comment_attributes_start)
for n in atlist:
for i in n.declarators(): #
sname = self.namespace(i, "_")
atname = i.identifier()
self.st.out(self.template_attributes_declare_Java_get, sname=sname, atname=atname)
if not n.readonly():
self.st.out(self.template_attributes_declare_Java_set, sname=sname, atname=atname)
self.st.out(self.template_comment_attributes_end)
def genEnList(self, enlist):
"""in: enlist
out: C code for IDL Enum declarations using "static const value_string" template
"""
self.st.out(self.template_comment_enums_start)
for enum in enlist:
sname = self.namespace(enum, "_")
self.st.out(self.template_comment_enum_comment, ename=enum.repoId())
self.st.out(self.template_value_string_start, valstringname=sname)
for enumerator in enum.enumerators():
self.st.out(self.template_value_string_entry,
intval=str(self.valFromEnum(enum, enumerator)),
description=enumerator.identifier())
#atname = n.identifier()
self.st.out(self.template_value_string_end, valstringname=sname)
self.st.out(self.template_comment_enums_end)
def genExceptionDelegator(self, oplist):
"""in: oplist
out: C code for User exception delegator
"""
self.st.out(self.template_main_exception_delegator_start)
self.st.inc_indent()
exlist = self.get_exceptionList(oplist) # grab list of ALL UNIQUE exception nodes
for ex in exlist:
if self.DEBUG:
print("//XXX Exception ", ex.repoId())
print("//XXX Exception Identifier", ex.identifier())
print("//XXX Exception Scoped Name", ex.scopedName())
if ex.members(): # only if has members
sname = self.namespace(ex, "_")
self.st.out(self.template_ex_delegate_code, sname=sname, exname=ex.repoId())
self.st.dec_indent()
self.st.out(self.template_main_exception_delegator_end)
def genAttributeHelpers(self, atlist):
"""Generate private helper functions to decode Attributes.
in: atlist
For readonly attribute - generate get_xxx()
If NOT readonly attribute - also generate set_xxx()
"""
if self.DEBUG:
print("//XXX genAttributeHelpers: atlist = ", atlist)
self.st.out(self.template_attribute_helpers_start)
for attrib in atlist:
for decl in attrib.declarators():
self.genAtHelper(attrib, decl, "get") # get accessor
if not attrib.readonly():
self.genAtHelper(attrib, decl, "set") # set accessor
self.st.out(self.template_attribute_helpers_end)
def genAtHelper(self, attrib, decl, order):
"""Generate private helper functions to decode an attribute
in: at - attribute node
in: decl - declarator belonging to this attribute
in: order - to generate a "get" or "set" helper
"""
if self.DEBUG:
print("//XXX genAtHelper")
sname = order + "_" + self.namespace(decl, "_") # must use set or get prefix to avoid collision
self.curr_sname = sname # update current opnode/exnode scoped name
if not self.fn_hash_built:
self.fn_hash[sname] = [] # init empty list as val for this sname key
# but only if the fn_hash is not already built
self.st.out(self.template_attribute_helper_function_start, sname=sname, atname=decl.repoId())
self.st.inc_indent()
if len(self.fn_hash[sname]) > 0:
self.st.out(self.template_helper_function_vars_start)
self.dumpCvars(sname)
self.st.out(self.template_helper_function_vars_end_item)
self.getCDR(attrib.attrType(), sname + "_" + decl.identifier())
self.st.dec_indent()
self.st.out(self.template_attribute_helper_function_end)
def genExceptionHelpers(self, oplist):
"""Generate private helper functions to decode Exceptions used
within operations
in: oplist
"""
exlist = self.get_exceptionList(oplist) # grab list of exception nodes
if self.DEBUG:
print("//XXX genExceptionHelpers: exlist = ", exlist)
self.st.out(self.template_exception_helpers_start)
for ex in exlist:
if ex.members(): # only if has members
#print("//XXX Exception = " + ex.identifier())
self.genExHelper(ex)
self.st.out(self.template_exception_helpers_end)
def genExHelper(self, ex):
"""Generate private helper functions to decode User Exceptions
in: exnode ( an exception node)
"""
if self.DEBUG:
print("//XXX genExHelper")
# check to see if we need an item
need_item = False
for m in ex.members():
if self.isItemVarType(m.memberType()):
need_item = True
break
sname = self.namespace(ex, "_")
self.curr_sname = sname # update current opnode/exnode scoped name
if not self.fn_hash_built:
self.fn_hash[sname] = [] # init empty list as val for this sname key
# but only if the fn_hash is not already built
if need_item:
self.st.out(self.template_exception_helper_function_start_item, sname=sname, exname=ex.repoId())
else:
self.st.out(self.template_exception_helper_function_start_no_item, sname=sname, exname=ex.repoId())
self.st.inc_indent()
if len(self.fn_hash[sname]) > 0:
self.st.out(self.template_helper_function_vars_start)
self.dumpCvars(sname)
if need_item:
self.st.out(self.template_helper_function_vars_end_item)
else:
self.st.out(self.template_helper_function_vars_end)
for m in ex.members():
if self.DEBUG:
print("//XXX genExhelper, member = ", m, "member type = ", m.memberType())
for decl in m.declarators():
if self.DEBUG:
print("//XXX genExhelper, d = ", decl)
if decl.sizes(): # an array
indices = self.get_indices_from_sizes(decl.sizes())
string_indices = '%i ' % indices # convert int to string
self.st.out(self.template_get_CDR_array_comment, aname=decl.identifier(), asize=string_indices)
self.st.out(self.template_get_CDR_array_start, aname=decl.identifier(), aval=string_indices)
self.addvar(self.c_i + decl.identifier() + ";")
self.st.inc_indent()
self.getCDR(m.memberType(), sname + "_" + decl.identifier())
self.st.dec_indent()
self.st.out(self.template_get_CDR_array_end)
else:
self.getCDR(m.memberType(), sname + "_" + decl.identifier())
self.st.dec_indent()
self.st.out(self.template_exception_helper_function_end)
def genHelpers(self, oplist, stlist, unlist):
"""Generate private helper functions
Generate private helper functions for each IDL operation.
Generate private helper functions for each IDL struct.
Generate private helper functions for each IDL union.
in: oplist, stlist, unlist
"""
for op in oplist:
self.genOperation(op)
for st in stlist:
self.genStructHelper(st)
for un in unlist:
self.genUnionHelper(un)
def genOperation(self, opnode):
"""Generate private helper functions for a specific IDL operation.
in: opnode
"""
if self.DEBUG:
print("//XXX genOperation called")
sname = self.namespace(opnode, "_")
if not self.fn_hash_built:
self.fn_hash[sname] = [] # init empty list as val for this sname key
# but only if the fn_hash is not already built
self.curr_sname = sname # update current opnode's scoped name
opname = opnode.identifier()
self.st.out(self.template_helper_function_comment, repoid=opnode.repoId())
self.st.out(self.template_helper_function_start, sname=sname)
self.st.inc_indent()
if len(self.fn_hash[sname]) > 0:
self.st.out(self.template_helper_function_vars_start)
self.dumpCvars(sname)
self.st.out(self.template_helper_function_vars_end_item)
self.st.out(self.template_helper_switch_msgtype_start)
self.st.out(self.template_helper_switch_msgtype_request_start)
self.st.inc_indent()
self.genOperationRequest(opnode)
self.st.out(self.template_helper_switch_msgtype_request_end)
self.st.dec_indent()
self.st.out(self.template_helper_switch_msgtype_reply_start)
self.st.inc_indent()
self.st.out(self.template_helper_switch_rep_status_start)
self.st.out(self.template_helper_switch_msgtype_reply_no_exception_start)
self.st.inc_indent()
self.genOperationReply(opnode)
self.st.out(self.template_helper_switch_msgtype_reply_no_exception_end)
self.st.dec_indent()
self.st.out(self.template_helper_switch_msgtype_reply_user_exception_start)
self.st.inc_indent()
self.genOpExceptions(opnode)
self.st.out(self.template_helper_switch_msgtype_reply_user_exception_end)
self.st.dec_indent()
self.st.out(self.template_helper_switch_msgtype_reply_default_start, dissector_name=self.dissname)
self.st.out(self.template_helper_switch_msgtype_reply_default_end)
self.st.out(self.template_helper_switch_rep_status_end)
self.st.dec_indent()
self.st.out(self.template_helper_switch_msgtype_default_start, dissector_name=self.dissname)
self.st.out(self.template_helper_switch_msgtype_default_end)
self.st.out(self.template_helper_switch_msgtype_end)
self.st.dec_indent()
self.st.out(self.template_helper_function_end, sname=sname)
def genOperationRequest(self, opnode):
"""Decode function parameters for a GIOP request message"""
for p in opnode.parameters():
if p.is_in():
if self.DEBUG:
print("//XXX parameter = ", p)
print("//XXX parameter type = ", p.paramType())
print("//XXX parameter type kind = ", p.paramType().kind())
self.getCDR(p.paramType(), self.curr_sname + "_" + p.identifier())
def genOperationReply(self, opnode):
"""Decode function parameters for a GIOP reply message"""
rt = opnode.returnType() # get return type
if self.DEBUG:
print("//XXX genOperationReply")
print("//XXX opnode = ", opnode)
print("//XXX return type = ", rt)
print("//XXX return type.unalias = ", rt.unalias())
print("//XXX return type.kind() = ", rt.kind())
sname = self.namespace(opnode, "_")
if rt.kind() == idltype.tk_alias: # a typdef return val possibly ?
#self.getCDR(rt.decl().alias().aliasType(),"dummy") # return value maybe a typedef
self.get_CDR_alias(rt, sname + "_return")
#self.get_CDR_alias(rt, rt.name())
else:
self.getCDR(rt, sname + "_return") # return value is NOT an alias
for p in opnode.parameters():
if p.is_out(): # out or inout
self.getCDR(p.paramType(), self.curr_sname + "_" + p.identifier())
#self.st.dec_indent()
# TODO: this method seems unnecessary
def genOpExceptions(self, opnode):
for ex in opnode.raises():
if ex.members():
#print ex.members()
for m in ex.members():
t = 0
#print m.memberType(), m.memberType().kind()
def genOpDelegator(self, oplist):
"""Delegator for Operations"""
for op in oplist:
iname = "/".join(op.scopedName()[:-1])
opname = op.identifier()
sname = self.namespace(op, "_")
self.st.out(self.template_op_delegate_code, interface=iname, sname=sname, opname=opname)
def genAtDelegator(self, atlist):
"""Delegator for Attributes"""
for a in atlist:
for i in a.declarators():
sname = self.namespace(i, "_")
self.st.out(self.template_at_delegate_code_get, sname=sname)
if not a.readonly():
self.st.out(self.template_at_delegate_code_set, sname=sname)
def addvar(self, var):
"""Add a variable declaration to the hash of list"""
if var not in self.fn_hash[self.curr_sname]:
self.fn_hash[self.curr_sname].append(var)
def dumpvars(self):
"""Print the variable declaration from the hash of list"""
for fn in self.fn_hash.keys():
print("FN = " + fn)
for v in self.fn_hash[fn]:
print("-> " + v)
def dumpCvars(self, sname):
"""Print the "C" variable declaration from the hash of list
for a given scoped operation name (eg: tux_penguin_eat)"""
for v in self.fn_hash[sname]:
self.st.out(v)
def valFromEnum(self, enumNode, enumeratorNode):
"""Given an enum node, and a enumerator node, return the enumerator's numerical value.
eg: enum Color {red,green,blue} should return
val = 1 for green
"""
if self.DEBUG:
print("//XXX valFromEnum, enumNode = ", enumNode, " from ", enumNode.repoId())
print("//XXX valFromEnum, enumeratorNode = ", enumeratorNode, " from ", enumeratorNode.repoId())
if isinstance(enumeratorNode, idlast.Enumerator):
value = enumNode.enumerators().index(enumeratorNode)
return value
# tk_null = 0
# tk_void = 1
# tk_short = 2
# tk_long = 3
# tk_ushort = 4
# tk_ulong = 5
# tk_float = 6
# tk_double = 7
# tk_boolean = 8
# tk_char = 9
# tk_octet = 10
# tk_any = 11
# tk_TypeCode = 12
# tk_Principal = 13
# tk_objref = 14
# tk_struct = 15
# tk_union = 16
# tk_enum = 17
# tk_string = 18
# tk_sequence = 19
# tk_array = 20
# tk_alias = 21
# tk_except = 22
# tk_longlong = 23
# tk_ulonglong = 24
# tk_longdouble = 25
# tk_wchar = 26
# tk_wstring = 27
# tk_fixed = 28
# tk_value = 29
# tk_value_box = 30
# tk_native = 31
# tk_abstract_interface = 32
def isSeqNativeType(self, type):
"""Return true for "native" datatypes that will generate a direct proto_tree_add_xxx
call for a sequence. Used to determine if a separate hf variable is needed for
the loop over the sequence"""
pt = type.unalias().kind() # param CDR type
if self.DEBUG:
print("//XXX isSeqNativeType: kind = ", pt)
if pt == idltype.tk_ulong:
return 1
elif pt == idltype.tk_longlong:
return 1
elif pt == idltype.tk_ulonglong:
return 1
elif pt == idltype.tk_short:
return 1
elif pt == idltype.tk_long:
return 1
elif pt == idltype.tk_ushort:
return 1
elif pt == idltype.tk_float:
return 1
elif pt == idltype.tk_double:
return 1
elif pt == idltype.tk_boolean:
return 1
elif pt == idltype.tk_octet:
return 1
elif pt == idltype.tk_enum:
return 1
elif pt == idltype.tk_string:
return 1
elif pt == idltype.tk_wstring:
return 1
elif pt == idltype.tk_wchar:
return 1
elif pt == idltype.tk_char:
return 1
else:
return 0
def isItemVarType(self, type):
pt = type.unalias().kind() # param CDR type
if self.DEBUG:
print("//XXX isItemVarType: kind = ", pt)
if pt in [idltype.tk_fixed, idltype.tk_struct, idltype.tk_any, idltype.tk_sequence]:
return 1
return 0
def getCDR(self, type, name="fred"):
"""This is the main "iterator" function. It takes a node, and tries to output
a get_CDR_XXX accessor method(s). It can call itself multiple times
if it finds nested structures etc."""
pt = type.unalias().kind() # param CDR type
pn = name # param name
if self.DEBUG:
print("//XXX getCDR: kind = ", pt)
print("//XXX getCDR: name = ", pn)
if pt == idltype.tk_ulong:
self.get_CDR_ulong(pn)
elif pt == idltype.tk_longlong:
self.get_CDR_longlong(pn)
elif pt == idltype.tk_ulonglong:
self.get_CDR_ulonglong(pn)
elif pt == idltype.tk_void:
self.get_CDR_void(pn)
elif pt == idltype.tk_short:
self.get_CDR_short(pn)
elif pt == idltype.tk_long:
self.get_CDR_long(pn)
elif pt == idltype.tk_ushort:
self.get_CDR_ushort(pn)
elif pt == idltype.tk_float:
self.get_CDR_float(pn)
elif pt == idltype.tk_double:
self.get_CDR_double(pn)
elif pt == idltype.tk_fixed:
self.get_CDR_fixed(type.unalias(), pn)
elif pt == idltype.tk_boolean:
self.get_CDR_boolean(pn)
elif pt == idltype.tk_char:
self.get_CDR_char(pn)
elif pt == idltype.tk_octet:
self.get_CDR_octet(pn)
elif pt == idltype.tk_any:
self.get_CDR_any(pn)
elif pt == idltype.tk_string:
self.get_CDR_string(pn)
elif pt == idltype.tk_wstring:
self.get_CDR_wstring(pn)
elif pt == idltype.tk_wchar:
self.get_CDR_wchar(pn)
elif pt == idltype.tk_enum:
            self.get_CDR_enum(pn, type)
elif pt == idltype.tk_struct:
self.get_CDR_struct(type, pn)
elif pt == idltype.tk_TypeCode: # will I ever get here ?
self.get_CDR_TypeCode(pn)
elif pt == idltype.tk_sequence:
if type.unalias().seqType().kind() == idltype.tk_octet:
self.get_CDR_sequence_octet(type, pn)
else:
self.get_CDR_sequence(type, pn)
elif pt == idltype.tk_objref:
self.get_CDR_objref(type, pn)
elif pt == idltype.tk_array:
pass # Supported elsewhere
elif pt == idltype.tk_union:
self.get_CDR_union(type, pn)
elif pt == idltype.tk_alias:
if self.DEBUG:
print("//XXXXX Alias type XXXXX ", type)
self.get_CDR_alias(type, pn)
else:
self.genWARNING("Unknown typecode = " + '%i ' % pt) # put comment in source code
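    # For example, an IDL "string" member reaches getCDR with
    # pt == idltype.tk_string and emits template_get_CDR_string
    # (giop_add_CDR_string) via get_CDR_string below.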
def get_CDR_ulong(self, pn):
self.st.out(self.template_get_CDR_ulong, hfname=pn)
def get_CDR_short(self, pn):
self.st.out(self.template_get_CDR_short, hfname=pn)
def get_CDR_void(self, pn):
self.st.out(self.template_get_CDR_void, hfname=pn)
def get_CDR_long(self, pn):
self.st.out(self.template_get_CDR_long, hfname=pn)
def get_CDR_ushort(self, pn):
self.st.out(self.template_get_CDR_ushort, hfname=pn)
def get_CDR_float(self, pn):
self.st.out(self.template_get_CDR_float, hfname=pn)
def get_CDR_double(self, pn):
self.st.out(self.template_get_CDR_double, hfname=pn)
def get_CDR_longlong(self, pn):
self.st.out(self.template_get_CDR_longlong, hfname=pn)
def get_CDR_ulonglong(self, pn):
self.st.out(self.template_get_CDR_ulonglong, hfname=pn)
def get_CDR_boolean(self, pn):
self.st.out(self.template_get_CDR_boolean, hfname=pn)
def get_CDR_fixed(self, type, pn):
if self.DEBUG:
print("//XXXX calling get_CDR_fixed, type = ", type)
print("//XXXX calling get_CDR_fixed, type.digits() = ", type.digits())
print("//XXXX calling get_CDR_fixed, type.scale() = ", type.scale())
string_digits = '%i ' % type.digits() # convert int to string
string_scale = '%i ' % type.scale() # convert int to string
        string_length = '%i ' % self.dig_to_len(type.digits()) # how many octets to highlight for a number of digits
self.st.out(self.template_get_CDR_fixed, hfname=pn, digits=string_digits, scale=string_scale, length=string_length)
self.addvar(self.c_seq)
def get_CDR_char(self, pn):
self.st.out(self.template_get_CDR_char, hfname=pn)
def get_CDR_octet(self, pn):
self.st.out(self.template_get_CDR_octet, hfname=pn)
def get_CDR_any(self, pn):
self.st.out(self.template_get_CDR_any, varname=pn)
def get_CDR_enum(self, pn, type):
sname = self.namespace(type.unalias(), "_")
self.st.out(self.template_get_CDR_enum_symbolic, valstringarray=sname, hfname=pn)
self.addvar(self.c_u_octet4)
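    # u_octet4 receives the raw enum ordinal; the matching hf entry renders it
    # symbolically through VALS(@valstringarray@) -- see
    # template_get_CDR_enum_symbolic_hf below.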
def get_CDR_string(self, pn):
self.st.out(self.template_get_CDR_string, hfname=pn)
def get_CDR_wstring(self, pn):
self.st.out(self.template_get_CDR_wstring, hfname=pn)
self.addvar(self.c_u_octet4)
self.addvar(self.c_seq)
def get_CDR_wchar(self, pn):
self.st.out(self.template_get_CDR_wchar, hfname=pn)
self.addvar(self.c_s_octet1)
self.addvar(self.c_seq)
def get_CDR_TypeCode(self, pn):
self.st.out(self.template_get_CDR_TypeCode, varname=pn)
self.addvar(self.c_u_octet4)
def get_CDR_objref(self, type, pn):
self.st.out(self.template_get_CDR_object)
def get_CDR_union(self, type, pn):
if self.DEBUG:
print("//XXX Union type =", type, " pn = ", pn)
print("//XXX Union type.decl()", type.decl())
print("//XXX Union Scoped Name", type.scopedName())
# If I am a typedef union {..}; node then find the union node
if isinstance(type.decl(), idlast.Declarator):
ntype = type.decl().alias().aliasType().decl()
else:
ntype = type.decl() # I am a union node
if self.DEBUG:
print("//XXX Union ntype =", ntype)
sname = self.namespace(ntype, "_")
self.st.out(self.template_union_start, name=sname)
# Output a call to the union helper function so I can handle recursive union also.
self.st.out(self.template_decode_union, name=sname)
self.st.out(self.template_union_end, name=sname)
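    # The emitted C brackets the union and delegates to its helper, e.g.:
    #   /* Begin union "Foo_Bar" */
    #   decode_Foo_Bar_un(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian);
    #   /* End union "Foo_Bar" */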
def getCDR_hf(self, type, desc, filter, hf_name="fred"):
"""This takes a node, and tries to output the appropriate item for the
hf array."""
pt = type.unalias().kind() # param CDR type
pn = hf_name # param name
if self.DEBUG:
print("//XXX getCDR_hf: kind = ", pt)
print("//XXX getCDR_hf: name = ", pn)
if pt == idltype.tk_ulong:
self.get_CDR_ulong_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_longlong:
self.get_CDR_longlong_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_ulonglong:
self.get_CDR_ulonglong_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_void:
pass # no hf_ variables needed
elif pt == idltype.tk_short:
self.get_CDR_short_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_long:
self.get_CDR_long_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_ushort:
self.get_CDR_ushort_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_float:
self.get_CDR_float_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_double:
self.get_CDR_double_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_fixed:
self.get_CDR_fixed_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_boolean:
self.get_CDR_boolean_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_char:
self.get_CDR_char_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_octet:
self.get_CDR_octet_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_any:
pass # no hf_ variables needed
elif pt == idltype.tk_string:
self.get_CDR_string_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_wstring:
self.get_CDR_wstring_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_wchar:
self.get_CDR_wchar_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_enum:
self.get_CDR_enum_hf(pn, type, desc, filter, self.dissname)
elif pt == idltype.tk_struct:
pass # no hf_ variables needed (should be already contained in struct members)
elif pt == idltype.tk_TypeCode: # will I ever get here ?
self.get_CDR_TypeCode_hf(pn, desc, filter, self.dissname)
elif pt == idltype.tk_sequence:
if type.unalias().seqType().kind() == idltype.tk_octet:
self.get_CDR_sequence_octet_hf(type, pn, desc, filter, self.dissname)
else:
self.get_CDR_sequence_hf(type, pn, desc, filter, self.dissname)
elif pt == idltype.tk_objref:
pass # no object specific hf_ variables used, use generic ones from giop dissector
elif pt == idltype.tk_array:
pass # Supported elsewhere
elif pt == idltype.tk_union:
pass # no hf_ variables needed (should be already contained in union members)
elif pt == idltype.tk_alias:
if self.DEBUG:
print("//XXXXX Alias type hf //XXXXX ", type)
self.get_CDR_alias_hf(type, desc, filter, pn)
else:
self.genWARNING("Unknown typecode = " + '%i ' % pt) # put comment in source code
def get_CDR_ulong_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_ulong_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_short_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_short_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_long_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_long_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_ushort_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_ushort_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_float_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_float_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_double_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_double_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_fixed_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_fixed_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_longlong_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_longlong_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_ulonglong_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_ulonglong_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_boolean_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_boolean_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_char_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_char_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_octet_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_octet_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_enum_hf(self, pn, type, desc, filter, diss):
sname = self.namespace(type.unalias(), "_")
self.st.out(self.template_get_CDR_enum_symbolic_hf, valstringarray=sname, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_string_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_string_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_wstring_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_wstring_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
# self.addvar(self.c_u_octet4)
# self.addvar(self.c_seq)
def get_CDR_wchar_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_wchar_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
# self.addvar(self.c_s_octet1)
# self.addvar(self.c_seq)
def get_CDR_TypeCode_hf(self, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_TypeCode_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
def get_CDR_sequence_octet_hf(self, type, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_sequence_octet_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
    def get_CDR_sequence_hf(self, type, pn, desc, filter, diss):
self.st.out(self.template_get_CDR_sequence_hf, hfname=pn, dissector_name=diss, descname=desc, filtername=filter)
if self.isSeqNativeType(type.unalias().seqType()):
self.getCDR_hf(type.unalias().seqType(), desc, filter, pn)
def get_CDR_alias_hf(self, type, desc, filter, pn):
if self.DEBUG:
print("//XXX get_CDR_alias_hf, type = ", type, " pn = ", pn)
print("//XXX get_CDR_alias_hf, type.decl() = ", type.decl())
print("//XXX get_CDR_alias_hf, type.decl().alias() = ", type.decl().alias())
decl = type.decl() # get declarator object
        if decl.sizes(): # a typedef array
            # hf entries are declared once per field, so the per-element array
            # loop emitted by get_CDR_alias is not needed here.
            self.getCDR_hf(type.decl().alias().aliasType(), desc, filter, pn)
        else: # a simple typedef
            if self.DEBUG:
                print("//XXX get_CDR_alias_hf, type = ", type, " pn = ", pn)
                print("//XXX get_CDR_alias_hf, type.decl() = ", type.decl())
            self.getCDR_hf(type.unalias(), desc, filter, pn)
def genUnionHelper(self, un):
"""Code to generate Union Helper functions
in: un - a union node
"""
if self.DEBUG:
print("//XXX genUnionHelper called")
print("//XXX Union type =", un)
print("//XXX Union type.switchType()", un.switchType())
print("//XXX Union Scoped Name", un.scopedName())
print("//XXX Union switchType.unalias", un.switchType().unalias())
print("//XXX Union switchType.unalias.kind", un.switchType().unalias().kind())
# check to see if we need an item
un_need_item = False
if un.switchType().unalias().kind() == idltype.tk_enum:
for uc in un.cases(): # for all UnionCase objects in this union
if self.DEBUG:
print("//XXX checking", uc)
if self.isItemVarType(uc.caseType()):
if uc.caseType().unalias().kind() == idltype.tk_sequence:
if uc.caseType().unalias().seqType().kind() == idltype.tk_struct:
un_need_item = True
else:
un_need_item = True
if self.AGGRESSIVE:
un_need_item = True
if self.DEBUG:
print("//XXX need_item =", un_need_item)
sname = self.namespace(un, "_")
self.curr_sname = sname # update current opnode/exnode/stnode/unnode scoped name
if not self.fn_hash_built:
self.fn_hash[sname] = [] # init empty list as val for this sname key
# but only if the fn_hash is not already built
if un_need_item:
self.st.out(self.template_union_helper_function_start_with_item, sname=sname, unname=un.repoId())
else:
self.st.out(self.template_union_helper_function_start, sname=sname, unname=un.repoId())
self.st.inc_indent()
if len(self.fn_hash[sname]) > 0:
self.st.out(self.template_helper_function_vars_start)
self.dumpCvars(sname)
self.st.out(self.template_helper_function_vars_end_item)
st = un.switchType().unalias() # may be typedef switch type, so find real type
self.st.out(self.template_comment_union_code_start, uname=un.repoId())
self.getCDR(st, sname + "_" + un.identifier())
        # Depending on what kind of discriminant I come across (enum, integer,
        # char, short, boolean), make sure I cast the return value of the
        # get_XXX accessor to an appropriate value. The omniidl
        # idlast.CaseLabel.value() accessor will return an integer, or an
        # Enumerator object that is then converted to its integer equivalent.
#
#
# NOTE - May be able to skip some of this stuff, but leave it in for now -- FS
#
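        # e.g. a "switch (boolean)" discriminant is read into u_octet1 and
        # saved as a gint32 by template_union_code_save_discriminant_boolean
        # further below.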
if st.kind() == idltype.tk_enum:
std = st.decl()
self.st.out(self.template_comment_union_code_discriminant, uname=std.repoId())
# count the number of cases to ensure variable is needed
num = 0
num_defaults = 0
for uc in un.cases(): # for all UnionCase objects in this union
num += len(uc.labels())
for cl in uc.labels():
if cl.default():
num_defaults += 1
if num != 1 or num_defaults != 1:
self.st.out(self.template_union_code_save_discriminant_enum, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
elif st.kind() == idltype.tk_long:
self.st.out(self.template_union_code_save_discriminant_long, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
elif st.kind() == idltype.tk_ulong:
self.st.out(self.template_union_code_save_discriminant_ulong, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
elif st.kind() == idltype.tk_short:
self.st.out(self.template_union_code_save_discriminant_short, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
elif st.kind() == idltype.tk_ushort:
self.st.out(self.template_union_code_save_discriminant_ushort, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
elif st.kind() == idltype.tk_boolean:
self.st.out(self.template_union_code_save_discriminant_boolean, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
elif st.kind() == idltype.tk_char:
self.st.out(self.template_union_code_save_discriminant_char, discname=un.identifier())
self.addvar(self.c_s_disc + un.identifier() + ";")
else:
print("//XXX Unknown st.kind() = ", st.kind())
# Loop over all cases in this union
for uc in un.cases(): # for all UnionCase objects in this union
for cl in uc.labels(): # for all Caselabel objects in this UnionCase
# get integer value, even if discriminant is
# an Enumerator node
if isinstance(cl.value(), idlast.Enumerator):
if self.DEBUG:
print("//XXX clv.identifier()", cl.value().identifier())
print("//XXX clv.repoId()", cl.value().repoId())
print("//XXX clv.scopedName()", cl.value().scopedName())
# find index of enumerator in enum declaration
# eg: RED is index 0 in enum Colors { RED, BLUE, GREEN }
clv = self.valFromEnum(std, cl.value())
else:
clv = cl.value()
#print "//XXX clv = ",clv
                # if char, don't convert to int, but put inside single quotes so that it is understood by C.
# eg: if (disc == 'b')..
#
# TODO : handle \xxx chars generically from a function or table lookup rather than
# a whole bunch of "if" statements. -- FS
if st.kind() == idltype.tk_char:
if clv == '\n':
string_clv = "'\\n'"
elif clv == '\t':
string_clv = "'\\t'"
else:
string_clv = "'" + clv + "'"
else:
string_clv = '%i ' % clv
                # If default case, then skip comparison with discriminator
if not cl.default():
self.st.out(self.template_comment_union_code_label_compare_start,
discname=un.identifier(), labelval=string_clv)
self.st.inc_indent()
else:
self.st.out(self.template_comment_union_code_label_default_start)
self.getCDR(uc.caseType(), sname + "_" + uc.declarator().identifier())
if not cl.default():
self.st.dec_indent()
self.st.out(self.template_comment_union_code_label_compare_end)
else:
self.st.out(self.template_comment_union_code_label_default_end)
self.st.dec_indent()
self.st.out(self.template_union_helper_function_end)
def get_CDR_alias(self, type, pn):
"""Currently, get_CDR_alias is geared to finding typedef"""
if self.DEBUG:
print("//XXX get_CDR_alias, type = ", type, " pn = ", pn)
print("//XXX get_CDR_alias, type.decl() = ", type.decl())
print("//XXX get_CDR_alias, type.decl().alias() = ", type.decl().alias())
decl = type.decl() # get declarator object
if decl.sizes(): # a typedef array
indices = self.get_indices_from_sizes(decl.sizes())
string_indices = '%i ' % indices # convert int to string
self.st.out(self.template_get_CDR_array_comment, aname=pn, asize=string_indices)
self.st.out(self.template_get_CDR_array_start, aname=pn, aval=string_indices)
self.addvar(self.c_i + pn + ";")
self.st.inc_indent()
self.getCDR(type.decl().alias().aliasType(), pn)
self.st.dec_indent()
self.st.out(self.template_get_CDR_array_end)
        else: # a simple typedef
if self.DEBUG:
print("//XXX type", type.__dict__)
print("//XXX type.unalias()", type.unalias().__dict__)
print("//XXX type.unalias().kind()", type.unalias().kind())
print("//XXX type.decl()", type.decl().__dict__)
self.getCDR(type.unalias(), pn)
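    # e.g. "typedef long Grid[4][3];" wraps the element accessor in a
    # 12-iteration loop, while a plain "typedef long Metre;" simply recurses
    # into getCDR with the unaliased type.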
def get_CDR_struct(self, type, pn):
"""Handle structs, including recursive"""
# If I am a typedef struct {..}; node then find the struct node
if isinstance(type.decl(), idlast.Declarator):
ntype = type.decl().alias().aliasType().decl()
else:
ntype = type.decl() # I am a struct node
sname = self.namespace(ntype, "_")
self.st.out(self.template_structure_start, name=sname)
# Output a call to the struct helper function so I can handle recursive structs also.
self.st.out(self.template_decode_struct, name=sname)
self.st.out(self.template_structure_end, name=sname)
def genStructHelper(self, st):
"""Generate private helper functions to decode a struct
in: stnode ( a struct node)
"""
if self.DEBUG:
print("//XXX genStructHelper")
sname = self.namespace(st, "_")
self.curr_sname = sname # update current opnode/exnode/stnode scoped name
if not self.fn_hash_built:
self.fn_hash[sname] = [] # init empty list as val for this sname key
# but only if the fn_hash is not already built
self.st.out(self.template_struct_helper_function_start, sname=sname, stname=st.repoId())
self.st.inc_indent()
if len(self.fn_hash[sname]) > 0:
self.st.out(self.template_helper_function_vars_start)
self.dumpCvars(sname)
self.st.out(self.template_helper_function_vars_end_item)
for m in st.members():
for decl in m.declarators():
if decl.sizes(): # an array
indices = self.get_indices_from_sizes(decl.sizes())
string_indices = '%i ' % indices # convert int to string
self.st.out(self.template_get_CDR_array_comment, aname=decl.identifier(), asize=string_indices)
self.st.out(self.template_get_CDR_array_start, aname=decl.identifier(), aval=string_indices)
self.addvar(self.c_i + decl.identifier() + ";")
self.st.inc_indent()
self.getCDR(m.memberType(), sname + "_" + decl.identifier())
self.st.dec_indent()
self.st.out(self.template_get_CDR_array_end)
else:
self.getCDR(m.memberType(), sname + "_" + decl.identifier())
self.st.dec_indent()
self.st.out(self.template_struct_helper_function_end)
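    # Note: generation appears to be two-pass -- while fn_hash_built is False
    # the addvar() calls collect each helper's variable declarations, and the
    # second pass replays them via dumpCvars() at the top of the helper body.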
    def get_CDR_sequence(self, type, pn):
"""Generate code to access a sequence of a type"""
if self.DEBUG:
print("//XXX get_CDR_sequence")
self.st.out(self.template_get_CDR_sequence_length, seqname=pn)
self.st.out(self.template_get_CDR_sequence_loop_start, seqname=pn)
self.addvar(self.c_i_lim + pn + ";")
self.addvar(self.c_i + pn + ";")
self.st.inc_indent()
self.getCDR(type.unalias().seqType(), pn) # and start all over with the type
self.st.dec_indent()
self.st.out(self.template_get_CDR_sequence_loop_end)
def get_CDR_sequence_octet(self, type, pn):
"""Generate code to access a sequence of octet"""
if self.DEBUG:
print("//XXX get_CDR_sequence_octet")
self.st.out(self.template_get_CDR_sequence_length, seqname=pn)
self.st.out(self.template_get_CDR_sequence_octet, seqname=pn)
self.addvar(self.c_i_lim + pn + ";")
self.addvar("const guint8 * binary_seq_" + pn + ";")
self.addvar("gchar * text_seq_" + pn + ";")
@staticmethod
def namespace(node, sep):
"""in - op node
out - scoped operation name, using sep character instead of "::"
eg: Penguin::Echo::echoWString => Penguin_Echo_echoWString if sep = "_"
"""
sname = idlutil.ccolonName(node.scopedName()).replace('::', sep)
#print("//XXX namespace: sname = " + sname)
return sname
def gen_plugin_register(self):
"""generate code for plugin initialisation"""
self.st.out(self.template_plugin_register, description=self.description,
protocol_name=self.protoname, dissector_name=self.dissname)
# TODO - make this a command line option
#
# -e explicit
# -h heuristic
def gen_proto_reg_handoff(self, oplist):
"""generate register_giop_user_module code, and register only
unique interfaces that contain operations. Also output
a heuristic register in case we want to use that."""
self.st.out(self.template_proto_reg_handoff_start, dissector_name=self.dissname)
self.st.inc_indent()
for iname in self.get_intlist(oplist):
self.st.out(self.template_proto_reg_handoff_body, dissector_name=self.dissname,
protocol_name=self.protoname, interface=iname)
self.st.out(self.template_proto_reg_handoff_heuristic, dissector_name=self.dissname,
protocol_name=self.protoname)
self.st.dec_indent()
self.st.out(self.template_proto_reg_handoff_end)
def genOp_hf(self, op):
"""generate hf_ array element for operation, attribute, enums, struct and union lists"""
sname = self.namespace(op, "_")
opname = sname[sname.find("_")+1:]
opname = opname[:opname.find("_")]
rt = op.returnType()
if rt.kind() != idltype.tk_void:
            if rt.kind() == idltype.tk_alias: # a typedef return value, possibly?
self.getCDR_hf(rt, rt.name(),
opname + "." + op.identifier() + ".return", sname + "_return")
else:
self.getCDR_hf(rt, "Return value",
opname + "." + op.identifier() + ".return", sname + "_return")
for p in op.parameters():
self.getCDR_hf(p.paramType(),
p.identifier(),
opname + "." + op.identifier() + "." + p.identifier(),
sname + "_" + p.identifier())
def genAt_hf(self, at):
for decl in at.declarators():
sname = self.namespace(decl, "_")
atname = sname[sname.find("_")+1:]
atname = atname[:atname.find("_")]
self.getCDR_hf(at.attrType(), decl.identifier(),
atname + "." + decl.identifier() + ".get", "get" + "_" + sname + "_" + decl.identifier())
if not at.readonly():
self.getCDR_hf(at.attrType(), decl.identifier(),
atname + "." + decl.identifier() + ".set", "set" + "_" + sname + "_" + decl.identifier())
def genSt_hf(self, st):
sname = self.namespace(st, "_")
stname = sname[sname.find("_")+1:]
stname = stname[:stname.find("_")]
for m in st.members():
for decl in m.declarators():
self.getCDR_hf(m.memberType(), st.identifier() + "_" + decl.identifier(),
st.identifier() + "." + decl.identifier(), sname + "_" + decl.identifier())
def genEx_hf(self, ex):
sname = self.namespace(ex, "_")
exname = sname[sname.find("_")+1:]
exname = exname[:exname.find("_")]
for m in ex.members():
for decl in m.declarators():
self.getCDR_hf(m.memberType(), ex.identifier() + "_" + decl.identifier(),
exname + "." + ex.identifier() + "_" + decl.identifier(), sname + "_" + decl.identifier())
def genUnion_hf(self, un):
sname = self.namespace(un, "_")
unname = sname[:sname.rfind("_")]
unname = unname.replace("_", ".")
self.getCDR_hf(un.switchType().unalias(), un.identifier(),
unname + "." + un.identifier(), sname + "_" + un.identifier())
for uc in un.cases(): # for all UnionCase objects in this union
# TODO: is this loop necessary?
for cl in uc.labels(): # for all Caselabel objects in this UnionCase
self.getCDR_hf(uc.caseType(), un.identifier() + "_" + uc.declarator().identifier(),
unname + "." + un.identifier() + "." + uc.declarator().identifier(),
sname + "_" + uc.declarator().identifier())
def gen_proto_register(self, oplist, atlist, stlist, unlist):
"""generate proto_register_<protoname> code,
        in - oplist[], atlist[], stlist[], unlist[]
"""
self.st.out(self.template_proto_register_start, dissector_name=self.dissname)
# operation specific filters
self.st.out(self.template_proto_register_op_filter_comment)
for op in oplist:
self.genOp_hf(op)
# attribute filters
self.st.out(self.template_proto_register_at_filter_comment)
for at in atlist:
self.genAt_hf(at)
# struct filters
self.st.out(self.template_proto_register_st_filter_comment)
for st in stlist:
if st.members(): # only if has members
self.genSt_hf(st)
# exception List filters
exlist = self.get_exceptionList(oplist) # grab list of exception nodes
self.st.out(self.template_proto_register_ex_filter_comment)
for ex in exlist:
if ex.members(): # only if has members
self.genEx_hf(ex)
# Union filters
self.st.out(self.template_proto_register_un_filter_comment)
for un in unlist:
self.genUnion_hf(un)
self.st.out(self.template_proto_register_end, description=self.description,
protocol_name=self.protoname, dissector_name=self.dissname)
@staticmethod
def get_intlist(oplist):
"""in - oplist[]
out - a list of unique interface names. This will be used in
register_giop_user_module(dissect_giop_auto, "TEST IDL", "Penguin/Echo" ); so the operation
name must be removed from the scope. And we also only want unique interfaces.
"""
int_hash = {} # holds a hash of unique interfaces
for op in oplist:
sc = op.scopedName() # eg: penguin,tux,bite
sc1 = sc[:-1]
sn = idlutil.slashName(sc1) # penguin/tux
if sn not in int_hash:
int_hash[sn] = 0 # dummy val, but at least key is unique
ret = list(int_hash.keys())
ret.sort()
return ret
def get_exceptionList(self, oplist):
"""in - oplist[]
out - a list of exception nodes (unique). This will be used in
to generate dissect_exception_XXX functions.
"""
ex_hash = collections.OrderedDict() # holds a hash of unique exceptions.
for op in oplist:
for ex in op.raises():
if ex not in ex_hash:
ex_hash[ex] = 0 # dummy val, but at least key is unique
if self.DEBUG:
print("//XXX Exception = " + ex.identifier())
ret = list(ex_hash.keys())
return ret
@staticmethod
def get_indices_from_sizes(sizelist):
"""Simple function to take a list of array sizes and find the total number of elements
eg: temp[4][3] = 12 elements
"""
val = 1
for i in sizelist:
val = val * i
return val
@staticmethod
    def dig_to_len(dignum):
        """Determine how many octets contain the requested number
        of digits for a "fixed" IDL type "on the wire" """
        return (dignum // 2) + 1
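    # e.g. fixed<5,2> has 5 digits -> (5 // 2) + 1 = 3 octets, since CDR packs
    # two decimal digits per octet and the sign occupies the final half-octet.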
def genTODO(self, message):
self.st.out(self.template_debug_TODO, message=message)
def genWARNING(self, message):
self.st.out(self.template_debug_WARNING, message=message)
# Templates for C code
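    # Placeholders of the form @name@ are filled in by the keyword arguments
    # passed to self.st.out(template, ...) in the methods above.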
template_helper_function_comment = """\
/*
* @repoid@
*/"""
template_helper_function_vars_start = """\
/* Operation specific Variable declarations Begin */"""
template_helper_function_vars_end = """\
/* Operation specific Variable declarations End */
"""
template_helper_function_vars_end_item = """\
/* Operation specific Variable declarations End */
"""
template_helper_function_start = """\
static void
decode_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{"""
template_helper_function_end = """\
}
"""
template_proto_reg_handoff_start = """\
/* register me as handler for these interfaces */
void proto_reg_handoff_giop_@dissector_name@(void)
{"""
template_proto_reg_handoff_body = """\
/* Register for Explicit Dissection */
register_giop_user_module(dissect_@dissector_name@, \"@protocol_name@\", \"@interface@\", proto_@dissector_name@ ); /* explicit dissector */
"""
template_proto_reg_handoff_heuristic = """\
/* Register for Heuristic Dissection */
register_giop_user(dissect_@dissector_name@, \"@protocol_name@\" ,proto_@dissector_name@); /* heuristic dissector */
"""
template_proto_reg_handoff_end = """\
}
"""
template_prototype = """
void proto_register_giop_@dissector_name@(void);
void proto_reg_handoff_giop_@dissector_name@(void);"""
# Initialize the protocol
template_protocol = """
/* Initialise the protocol and subtree pointers */
static int proto_@dissector_name@ = -1;
static gint ett_@dissector_name@ = -1;
"""
template_init_boundary = """
/* Initialise the initial Alignment */
static guint32 boundary = GIOP_HEADER_SIZE; /* initial value */"""
# plugin_register and plugin_reg_handoff templates
template_plugin_register = """
#if 0
WS_DLL_PUBLIC_DEF void
plugin_register(void)
{
if (proto_@dissector_name@ == -1) {
proto_register_giop_@dissector_name@();
}
}
WS_DLL_PUBLIC_DEF void
plugin_reg_handoff(void){
proto_register_handoff_giop_@dissector_name@();
}
#endif
"""
template_proto_register_start = """
/* Register the protocol with Wireshark */
void proto_register_giop_@dissector_name@(void)
{
/* setup list of header fields */
static hf_register_info hf[] = {
/* field that indicates the currently ongoing request/reply exchange */
{&hf_operationrequest, {"Request_Operation","giop-@dissector_name@.Request_Operation",FT_STRING,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_proto_register_end = """
};
static ei_register_info ei[] = {
{ &ei_@dissector_name@_unknown_giop_msg, { "giop-@dissector_name@.unknown_giop_msg", PI_PROTOCOL, PI_WARN, "Unknown GIOP message", EXPFILL }},
{ &ei_@dissector_name@_unknown_exception, { "giop-@dissector_name@.unknown_exception", PI_PROTOCOL, PI_WARN, "Unknown exception", EXPFILL }},
{ &ei_@dissector_name@_unknown_reply_status, { "giop-@dissector_name@.unknown_reply_status", PI_PROTOCOL, PI_WARN, "Unknown reply status", EXPFILL }},
};
/* setup protocol subtree array */
static gint *ett[] = {
&ett_@dissector_name@,
};
expert_module_t* expert_@dissector_name@;
/* Register the protocol name and description */
proto_@dissector_name@ = proto_register_protocol(\"@description@\" , \"@protocol_name@\", \"giop-@dissector_name@\" );
proto_register_field_array(proto_@dissector_name@, hf, array_length(hf));
proto_register_subtree_array(ett, array_length(ett));
expert_@dissector_name@ = expert_register_protocol(proto_@dissector_name@);
expert_register_field_array(expert_@dissector_name@, ei, array_length(ei));
}
"""
template_proto_register_op_filter_comment = """\
/* Operation filters */"""
template_proto_register_at_filter_comment = """\
/* Attribute filters */"""
template_proto_register_st_filter_comment = """\
/* Struct filters */"""
template_proto_register_ex_filter_comment = """\
/* User exception filters */"""
template_proto_register_un_filter_comment = """\
/* Union filters */"""
template_proto_register_ei_filters = """\
/* Expert info filters */
static expert_field ei_@dissector_name@_unknown_giop_msg = EI_INIT;
static expert_field ei_@dissector_name@_unknown_exception = EI_INIT;
static expert_field ei_@dissector_name@_unknown_reply_status = EI_INIT;
"""
# template for delegation code
template_op_delegate_code = """\
if (strcmp(operation, "@opname@") == 0
&& (!idlname || strcmp(idlname, \"@interface@\") == 0)) {
item = process_RequestOperation(tvb, pinfo, ptree, header, operation); /* fill-up Request_Operation field & info column */
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_@sname@(tvb, pinfo, tree, item, offset, header, operation, stream_is_big_endian);
return TRUE;
}
"""
# Templates for the helper functions
template_helper_switch_msgtype_start = """\
switch(header->message_type) {"""
template_helper_switch_msgtype_default_start = """\
default:
/* Unknown GIOP Message */
expert_add_info_format(pinfo, item, &ei_@dissector_name@_unknown_giop_msg, "Unknown GIOP message %d", header->message_type);"""
template_helper_switch_msgtype_default_end = """\
break;"""
template_helper_switch_msgtype_end = """\
} /* switch(header->message_type) */"""
template_helper_switch_msgtype_request_start = """\
case Request:"""
template_helper_switch_msgtype_request_end = """\
break;"""
template_helper_switch_msgtype_reply_start = """\
case Reply:"""
template_helper_switch_msgtype_reply_no_exception_start = """\
case NO_EXCEPTION:"""
template_helper_switch_msgtype_reply_no_exception_end = """\
break;"""
template_helper_switch_msgtype_reply_user_exception_start = """\
case USER_EXCEPTION:"""
template_helper_switch_msgtype_reply_user_exception_end = """\
break;"""
template_helper_switch_msgtype_reply_default_start = """\
default:
/* Unknown Exception */
expert_add_info_format(pinfo, item, &ei_@dissector_name@_unknown_exception, "Unknown exception %d", header->rep_status);"""
template_helper_switch_msgtype_reply_default_end = """\
break;"""
template_helper_switch_msgtype_reply_end = """\
break;"""
template_helper_switch_rep_status_start = """\
switch(header->rep_status) {"""
template_helper_switch_rep_status_default_start = """\
default:
/* Unknown Reply Status */
expert_add_info_format(pinfo, item, &ei_@dissector_name@_unknown_reply_status, "Unknown reply status %d", header->rep_status);"""
template_helper_switch_rep_status_default_end = """\
break;"""
template_helper_switch_rep_status_end = """\
} /* switch(header->rep_status) */
break;"""
# Templates for get_CDR_xxx accessors
template_get_CDR_ulong = """\
proto_tree_add_uint(tree, hf_@hfname@, tvb, *offset-4, 4, get_CDR_ulong(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_short = """\
proto_tree_add_int(tree, hf_@hfname@, tvb, *offset-2, 2, get_CDR_short(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_void = """\
/* Function returns void */
"""
template_get_CDR_long = """\
proto_tree_add_int(tree, hf_@hfname@, tvb, *offset-4, 4, get_CDR_long(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_ushort = """\
proto_tree_add_uint(tree, hf_@hfname@, tvb, *offset-2, 2, get_CDR_ushort(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_float = """\
proto_tree_add_float(tree, hf_@hfname@, tvb, *offset-4, 4, get_CDR_float(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_double = """\
proto_tree_add_double(tree, hf_@hfname@, tvb, *offset-8, 8, get_CDR_double(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_longlong = """\
proto_tree_add_int64(tree, hf_@hfname@, tvb, *offset-8, 8, get_CDR_long_long(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_ulonglong = """\
proto_tree_add_uint64(tree, hf_@hfname@, tvb, *offset-8, 8, get_CDR_ulong_long(tvb,offset,stream_is_big_endian, boundary));
"""
template_get_CDR_boolean = """\
proto_tree_add_boolean(tree, hf_@hfname@, tvb, *offset-1, 1, get_CDR_boolean(tvb,offset));
"""
template_get_CDR_char = """\
proto_tree_add_uint(tree, hf_@hfname@, tvb, *offset-1, 1, get_CDR_char(tvb,offset));
"""
template_get_CDR_octet = """\
proto_tree_add_uint(tree, hf_@hfname@, tvb, *offset-1, 1, get_CDR_octet(tvb,offset));
"""
template_get_CDR_any = """\
get_CDR_any(tvb, pinfo, tree, item, offset, stream_is_big_endian, boundary, header);
"""
template_get_CDR_fixed = """\
get_CDR_fixed(tvb, pinfo, item, &seq, offset, @digits@, @scale@);
proto_tree_add_string_format_value(tree, hf_@hfname@, tvb, *offset-@length@, @length@, seq, "< @digits@, @scale@> = %s", seq);
"""
template_get_CDR_enum_symbolic = """\
u_octet4 = get_CDR_enum(tvb,offset,stream_is_big_endian, boundary);
proto_tree_add_uint(tree, hf_@hfname@, tvb, *offset-4, 4, u_octet4);
"""
template_get_CDR_string = """\
giop_add_CDR_string(tree, tvb, offset, stream_is_big_endian, boundary, hf_@hfname@);
"""
template_get_CDR_wstring = """\
u_octet4 = get_CDR_wstring(tvb, &seq, offset, stream_is_big_endian, boundary, header);
proto_tree_add_string(tree, hf_@hfname@, tvb, *offset-u_octet4, u_octet4, (u_octet4 > 0) ? seq : \"\");
"""
template_get_CDR_wchar = """\
s_octet1 = get_CDR_wchar(tvb, &seq, offset, header);
if (tree) {
if (s_octet1 > 0)
proto_tree_add_uint(tree, hf_@hfname@_len, tvb, *offset-1-s_octet1, 1, s_octet1);
if (s_octet1 < 0)
s_octet1 = -s_octet1;
if (s_octet1 > 0)
proto_tree_add_string(tree, hf_@hfname@, tvb, *offset-s_octet1, s_octet1, seq);
}
"""
template_get_CDR_TypeCode = """\
u_octet4 = get_CDR_typeCode(tvb, pinfo, tree, offset, stream_is_big_endian, boundary, header);
"""
template_get_CDR_object = """\
get_CDR_object(tvb, pinfo, tree, offset, stream_is_big_endian, boundary);
"""
template_get_CDR_sequence_length = """\
u_octet4_loop_@seqname@ = get_CDR_ulong(tvb, offset, stream_is_big_endian, boundary);
proto_tree_add_uint(tree, hf_@seqname@_loop, tvb,*offset-4, 4, u_octet4_loop_@seqname@);
"""
template_get_CDR_sequence_length_item = """\
u_octet4_loop_@seqname@ = get_CDR_ulong(tvb, offset, stream_is_big_endian, boundary);
item = proto_tree_add_uint(tree, hf_@seqname@_loop, tvb,*offset-4, 4, u_octet4_loop_@seqname@);
"""
template_get_CDR_sequence_loop_start = """\
for (i_@seqname@=0; i_@seqname@ < u_octet4_loop_@seqname@; i_@seqname@++) {
"""
template_get_CDR_sequence_loop_end = """\
}
"""
template_get_CDR_sequence_octet = """\
if (u_octet4_loop_@seqname@ > 0 && tree) {
get_CDR_octet_seq(tvb, &binary_seq_@seqname@, offset,
u_octet4_loop_@seqname@);
text_seq_@seqname@ = make_printable_string(binary_seq_@seqname@,
u_octet4_loop_@seqname@);
proto_tree_add_bytes_format_value(tree, hf_@seqname@, tvb, *offset - u_octet4_loop_@seqname@,
u_octet4_loop_@seqname@, binary_seq_@seqname@, \"%s\", text_seq_@seqname@);
}
"""
template_get_CDR_array_start = """\
for (i_@aname@=0; i_@aname@ < @aval@; i_@aname@++) {
"""
template_get_CDR_array_end = """\
}
"""
template_get_CDR_array_comment = """\
/* Array: @aname@[ @asize@] */
"""
template_structure_start = """\
/* Begin struct \"@name@\" */"""
template_structure_end = """\
/* End struct \"@name@\" */"""
template_union_start = """\
/* Begin union \"@name@\" */"""
template_union_end = """\
/* End union \"@name@\" */"""
# Templates for get_CDR_xxx_hf accessors
template_get_CDR_ulong_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT32,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_short_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_INT16,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_long_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_INT32,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_ushort_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT16,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_float_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_FLOAT,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_double_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_DOUBLE,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_fixed_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_STRING,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_longlong_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_INT64,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_ulonglong_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT64,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_boolean_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_BOOLEAN,8,NULL,0x01,NULL,HFILL}},"""
template_get_CDR_char_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT8,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_octet_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT8,BASE_HEX,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_enum_symbolic_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT32,BASE_DEC,VALS(@valstringarray@),0x0,NULL,HFILL}},"""
template_get_CDR_string_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_STRING,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_wstring_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_STRING,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_wchar_hf = """\
{&hf_@hfname@_len, {"@descname@ Length","giop-@dissector_name@.@filtername@.len",FT_UINT8,BASE_DEC,NULL,0x0,NULL,HFILL}},
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_STRING,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_TypeCode_hf = """\
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_UINT32,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_sequence_hf = """\
{&hf_@hfname@_loop, {"Seq length of @descname@","giop-@dissector_name@.@filtername@.size",FT_UINT32,BASE_DEC,NULL,0x0,NULL,HFILL}},"""
template_get_CDR_sequence_octet_hf = """\
{&hf_@hfname@_loop, {"Seq length of @descname@","giop-@dissector_name@.@filtername@.size",FT_UINT32,BASE_DEC,NULL,0x0,NULL,HFILL}},
{&hf_@hfname@, {"@descname@","giop-@dissector_name@.@filtername@",FT_BYTES,BASE_NONE,NULL,0x0,NULL,HFILL}},"""
template_Header = """\
/* packet-@dissector_name@.c
*
* Routines for IDL dissection
*
* Autogenerated from idl2wrs
* Copyright 2001 Frank Singleton <frank.singleton@@ericsson.com>
*/
"""
template_wireshark_copyright = """\
/*
* Wireshark - Network traffic analyzer
* By Gerald Combs <gerald@@wireshark.org>
* Copyright 1998 Gerald Combs
*/
"""
template_GPL = """\
/*
* SPDX-License-Identifier: GPL-2.0-or-later
*/
"""
template_Modelines = """\
/*
* Editor modelines - https://www.wireshark.org/tools/modelines.html
*
* Local Variables:
* c-basic-offset: 4
* tab-width: 8
* indent-tabs-mode: nil
* End:
*
* ex: set shiftwidth=4 tabstop=8 expandtab:
* :indentSize=4:tabSize=8:noTabs=true:
*/"""
template_Includes = """\
#include "config.h"
#include <string.h>
#include <epan/packet.h>
#include <epan/proto.h>
#include <epan/dissectors/packet-giop.h>
#include <epan/expert.h>
#include "ws_diag_control.h"
#include "ws_compiler_tests.h"
#ifdef _MSC_VER
/* disable warning: "unreference local variable" */
#pragma warning(disable:4101)
#endif
/* XXX this should be autogenerated, or the warnings fixed in the generator */
DIAG_OFF(unused-function)
DIAG_OFF(unused-variable)
#if WS_IS_AT_LEAST_GNUC_VERSION(6,0)
DIAG_OFF(unused-const-variable)
#endif"""
template_main_dissector_start = """\
/*
* Called once we accept the packet as being for us; it sets the
* Protocol and Info columns and creates the top-level protocol
* tree item.
*/
static proto_tree *
start_dissecting(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offset)
{
proto_item *ti = NULL;
proto_tree *tree = NULL; /* init later, inside if(tree) */
col_set_str(pinfo->cinfo, COL_PROTOCOL, \"@disprot@\");
/*
* Do not clear COL_INFO, as nothing is being written there by
* this dissector yet. So leave it as is from the GIOP dissector.
* TODO: add something useful to COL_INFO
* col_clear(pinfo->cinfo, COL_INFO);
*/
if (ptree) {
ti = proto_tree_add_item(ptree, proto_@dissname@, tvb, *offset, tvb_reported_length_remaining(tvb, *offset), ENC_NA);
tree = proto_item_add_subtree(ti, ett_@dissname@);
}
return tree;
}
static proto_item*
process_RequestOperation(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, MessageHeader *header, const gchar *operation)
{
proto_item *pi;
if(header->message_type == Reply) {
/* fill-up info column */
col_append_fstr(pinfo->cinfo, COL_INFO, " op = %s",operation);
}
/* fill-up the field */
pi=proto_tree_add_string(ptree, hf_operationrequest, tvb, 0, 0, operation);
proto_item_set_generated(pi);
return pi;
}
static gboolean
dissect_@dissname@(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offset, MessageHeader *header, const gchar *operation, gchar *idlname)
{
proto_item *item _U_;
proto_tree *tree _U_;
    gboolean stream_is_big_endian = is_big_endian(header); /* get endianness */
/* If we have a USER Exception, then decode it and return */
if ((header->message_type == Reply) && (header->rep_status == USER_EXCEPTION)) {
return decode_user_exception(tvb, pinfo, ptree, offset, header, operation, stream_is_big_endian);
}
"""
template_main_dissector_switch_msgtype_start = """\
switch(header->message_type) {
"""
template_main_dissector_switch_msgtype_start_request_reply = """\
case Request:
case Reply:
"""
template_main_dissector_switch_msgtype_end_request_reply = """\
break;
"""
template_main_dissector_switch_msgtype_all_other_msgtype = """\
case CancelRequest:
case LocateRequest:
case LocateReply:
case CloseConnection:
case MessageError:
case Fragment:
return FALSE; /* not handled yet */
default:
return FALSE; /* not handled yet */
} /* switch */
"""
template_main_dissector_end = """\
return FALSE;
} /* End of main dissector */
"""
#-------------------------------------------------------------#
# Exception handling templates #
#-------------------------------------------------------------#
template_exception_helpers_start = """\
/* Begin Exception Helper Functions */
"""
template_exception_helpers_end = """\
/* End Exception Helper Functions */
"""
template_main_exception_delegator_start = """\
/*
* Main delegator for exception handling
*
*/
static gboolean
decode_user_exception(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *ptree _U_, int *offset _U_, MessageHeader *header, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
proto_tree *tree _U_;
if (!header->exception_id)
return FALSE;
"""
template_ex_delegate_code = """\
if (strcmp(header->exception_id, "@exname@") == 0) {
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_ex_@sname@(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian); /* @exname@ */
return TRUE;
}
"""
template_main_exception_delegator_end = """
return FALSE; /* user exception not found */
}
"""
template_exception_helper_function_start_no_item = """\
/* Exception = @exname@ */
static void
decode_ex_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
proto_item *item _U_;
"""
template_exception_helper_function_start_item = """\
/* Exception = @exname@ */
static void
decode_ex_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
proto_item *item = NULL;
"""
template_exception_helper_function_end = """\
}
"""
template_struct_helper_function_start = """\
/* Struct = @stname@ */
static void
decode_@sname@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
"""
template_struct_helper_function_end = """\
}
"""
template_union_helper_function_start = """\
/* Union = @unname@ */
static void
decode_@sname@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
"""
template_union_helper_function_start_with_item = """\
/* Union = @unname@ */
static void
decode_@sname@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
proto_item* item = NULL;
"""
template_union_helper_function_end = """\
}
"""
#-------------------------------------------------------------#
# Value string templates #
#-------------------------------------------------------------#
template_value_string_start = """\
static const value_string @valstringname@[] = {
"""
template_value_string_entry = """\
{ @intval@, \"@description@\" },"""
template_value_string_end = """\
{ 0, NULL },
};
"""
#-------------------------------------------------------------#
# Enum handling templates #
#-------------------------------------------------------------#
template_comment_enums_start = """\
/*
* IDL Enums Start
*/
"""
template_comment_enums_end = """\
/*
* IDL Enums End
*/
"""
template_comment_enum_comment = """\
/*
* Enum = @ename@
*/"""
#-------------------------------------------------------------#
# Attribute handling templates #
#-------------------------------------------------------------#
template_comment_attributes_start = """\
/*
* IDL Attributes Start
*/
"""
# get/set accessor method names are language mapping dependent.
template_attributes_declare_Java_get = """static const char get_@sname@_at[] = \"_get_@atname@\" ;"""
template_attributes_declare_Java_set = """static const char set_@sname@_at[] = \"_set_@atname@\" ;"""
template_comment_attributes_end = """
/*
* IDL Attributes End
*/
"""
# template for Attribute delegation code
#
# Note: _get_xxx() should only be called for Reply with NO_EXCEPTION
# Note: _set_xxx() should only be called for Request
template_at_delegate_code_get = """\
if (strcmp(operation, get_@sname@_at) == 0 && (header->message_type == Reply) && (header->rep_status == NO_EXCEPTION) ) {
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_get_@sname@_at(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian);
return TRUE;
}
"""
template_at_delegate_code_set = """\
if (strcmp(operation, set_@sname@_at) == 0 && (header->message_type == Request) ) {
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_set_@sname@_at(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian);
return TRUE;
}
"""
template_attribute_helpers_start = """\
/* Begin Attribute Helper Functions */
"""
template_attribute_helpers_end = """\
/* End Attribute Helper Functions */
"""
template_attribute_helper_function_start = """\
/* Attribute = @atname@ */
static void
decode_@sname@_at(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
{
proto_item* item _U_;
"""
template_attribute_helper_function_end = """\
}
"""
#-------------------------------------------------------------#
# Debugging templates #
#-------------------------------------------------------------#
# Template for outputting TODO "C" comments
# so user know I need to improve something.
template_debug_TODO = """\
/* TODO - @message@ */
"""
# Template for outputting WARNING "C" comments
# so user know if I have found a problem.
template_debug_WARNING = """\
/* WARNING - @message@ */
"""
#-------------------------------------------------------------#
# IDL Union templates #
#-------------------------------------------------------------#
template_comment_union_code_start = """\
/*
* IDL Union Start - @uname@
*/
"""
template_comment_union_code_end = """
/*
 * IDL Union End - @uname@
*/
"""
template_comment_union_code_discriminant = """\
/*
* IDL Union - Discriminant - @uname@
*/
"""
    # Cast union discriminant types to something appropriate.
    # Every discriminant is saved as a gint32, since the omniidl accessor
    # returns either an integer or an Enumerator.
template_union_code_save_discriminant_enum = """\
disc_s_@discname@ = (gint32) u_octet4; /* save Enum Value discriminant and cast to gint32 */
"""
template_union_code_save_discriminant_long = """\
disc_s_@discname@ = (gint32) s_octet4; /* save gint32 discriminant and cast to gint32 */
"""
template_union_code_save_discriminant_ulong = """\
disc_s_@discname@ = (gint32) u_octet4; /* save guint32 discriminant and cast to gint32 */
"""
template_union_code_save_discriminant_short = """\
disc_s_@discname@ = (gint32) s_octet2; /* save gint16 discriminant and cast to gint32 */
"""
template_union_code_save_discriminant_ushort = """\
disc_s_@discname@ = (gint32) u_octet2; /* save guint16 discriminant and cast to gint32 */
"""
template_union_code_save_discriminant_char = """\
    disc_s_@discname@ = (gint32) u_octet1; /* save guint8 discriminant and cast to gint32 */
"""
template_union_code_save_discriminant_boolean = """\
    disc_s_@discname@ = (gint32) u_octet1; /* save guint8 discriminant and cast to gint32 */
"""
template_comment_union_code_label_compare_start = """\
if (disc_s_@discname@ == @labelval@) {
"""
template_comment_union_code_label_compare_end = """\
return; /* End Compare for this discriminant type */
}
"""
template_comment_union_code_label_default_start = """
/* Default Union Case Start */
"""
template_comment_union_code_label_default_end = """\
/* Default Union Case End */
"""
# Templates for function prototypes.
# This is used in genDeclares() for declaring function prototypes
# for structs and union helper functions.
template_hf_operations = """
static int hf_operationrequest = -1;/* Request_Operation field */
"""
template_hf = """\
static int hf_@name@ = -1;"""
template_prototype_start_dissecting = """
static proto_tree *start_dissecting(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offset);
"""
template_prototype_struct_start = """\
/* Struct prototype declaration Start */
"""
template_prototype_struct_end = """\
/* Struct prototype declaration End */
"""
template_prototype_struct_body = """\
/* Struct = @stname@ */
static void decode_@name@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_);
"""
template_decode_struct = """\
decode_@name@_st(tvb, pinfo, tree, item, offset, header, operation, stream_is_big_endian);"""
template_prototype_union_start = """\
/* Union prototype declaration Start */"""
template_prototype_union_end = """\
/* Union prototype declaration End */"""
template_prototype_union_body = """
/* Union = @unname@ */
static void decode_@name@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_);
"""
template_decode_union = """\
decode_@name@_un(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian);
"""
#
# Editor modelines - https://www.wireshark.org/tools/modelines.html
#
# Local variables:
# c-basic-offset: 4
# indent-tabs-mode: nil
# End:
#
# vi: set shiftwidth=4 expandtab:
# :indentSize=4:noTabs=true:
#
| zzqcn/wireshark | tools/wireshark_gen.py | Python | gpl-2.0 | 100,934 |
# Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
from xl.metadata._base import BaseFormat
import os
try:
    import ctypes

    modplug = ctypes.cdll.LoadLibrary("libmodplug.so.0")
    # Declare signatures explicitly so ctypes marshals the opaque
    # ModPlugFile* handle as a pointer (avoids truncation on 64-bit).
    modplug.ModPlug_Load.restype = ctypes.c_void_p
    modplug.ModPlug_Load.argtypes = (ctypes.c_void_p, ctypes.c_int)
    modplug.ModPlug_GetName.restype = ctypes.c_char_p
    modplug.ModPlug_GetName.argtypes = (ctypes.c_void_p,)
    modplug.ModPlug_GetLength.restype = ctypes.c_int
    modplug.ModPlug_GetLength.argtypes = (ctypes.c_void_p,)
    modplug.ModPlug_Unload.restype = None
    modplug.ModPlug_Unload.argtypes = (ctypes.c_void_p,)
except (ImportError, OSError):
    modplug = None
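# With modplug left as None, ModFormat.load() below degrades gracefully to
# empty metadata instead of raising.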
class ModFormat(BaseFormat):
writable = False
    def load(self):
        if modplug:
            with open(self.loc, "rb") as mod_file:
                data = mod_file.read()
            # ModPlug_Load parses the module from memory and returns an
            # opaque handle (NULL on failure).
            f = modplug.ModPlug_Load(data, len(data))
            if f:
                name = modplug.ModPlug_GetName(f) or os.path.split(self.loc)[-1]
                # ModPlug_GetLength reports milliseconds; fall back to -1
                # for a zero/unknown length.
                length = modplug.ModPlug_GetLength(f) / 1000.0 or -1
                modplug.ModPlug_Unload(f)  # free the native handle
                self.mutagen = {'title': name, '__length': length}
                return
        # libmodplug unavailable or the file could not be parsed.
        self.mutagen = {}
def get_length(self):
try:
return self.mutagen['__length']
except KeyError:
return -1
def get_bitrate(self):
return -1
# vim: et sts=4 sw=4
| sjohannes/exaile | xl/metadata/mod.py | Python | gpl-2.0 | 2,441 |
from news_spider import news_spider
from video_spider import video_spider
if __name__ == "__main__":
    print u'Start crawling game news...'
    # Storage directory for news articles; the value must end with \\ and
    # may be an absolute or a relative path, e.g.:
    #   c:\资讯\\ -> .txt files under C:\资讯, images under c:\资讯\图片
    #   资讯\    -> .txt files under the 资讯 folder next to this script,
    #              images under its 图片 subfolder
    news_path = u'c:\资讯\\'
    # Storage directory for videos; same rules as above.
    video_path = u'c:\视频\\'
    print u'Game news storage path: ' + news_path
    a = news_spider(path=news_path)
    a.run()
    print u'Start crawling video info...'
    print u'Video info storage path: ' + video_path
    a = video_spider(path=video_path)
    a.run()
| softtyphoon/tz | tools/17173/ymxk/复件 game_spider.py | Python | gpl-2.0 | 839 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
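# Coefficient specification for a land cover change model (LCCM) estimation.
# The outer keys appear to be the from_id land cover types; "equation_ids"
# lists the to_id alternatives (per the inline notes below), and each
# variable maps to a tuple of coefficient names aligned with those
# equations, with 0 marking equations where the variable is excluded.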
specification = {
2:
{
"equation_ids":(1,2),
"constant":(0, "act_2_2"),
"blmz":("blmz_2_1", 0),
"cd1":("cd1_2_1", 0),
"dag":("dag_2_1", 0),
"dprd":("dprd_2_1", 0),
"hai":("hai_2_1", 0),
# "pcd":("pcd_2_1", 0), - Jeff removed all PCD on 21Feb 2006
"phu":("phu_2_1", 0),
"pmu":("pmu_2_1", 0),
"tiv":("tiv_2_1", 0),
},
3:
{
"equation_ids":(1,2,3),
"constant":(0,0,"act_3_3"),
"blmz":("blmz_3_1",0, 0),
"c750":("c750_3_1","c750_3_2", 0),
"dloc":("dloc_3_1","dloc_3_2", 0),
"dnlr":(0,"dnlr_3_2", 0),
"dprd":("dprd_3_1",0, 0),
"dpub":(0,"dpub_3_2", 0),
"dres":("dres_3_1","dres_3_2", 0),
"dtim":("dtim_3_1","dtim_3_2", 0),
"gmps":("gmps_3_1",0, 0),
"h450":(0,"h450_3_2", 0),
"mmps":("mmps_3_1",0, 0),
# "pcd":("pcd_3_1","pcd_3_2", 0),
"pfld":("pfld_3_1",0, 0),
"phu":("phu_3_1","phu_3_2", 0),
"pmf":("pmf_3_1","pmf_3_2", 0),
"pmu":("pmu_3_1","pmu_3_2", 0),
"pslp":("pslp_3_1","pslp_3_2", 0),
"pwa":("pwa_3_1",0, 0),
"shei":("shei_3_1",0, 0),
"tiv":("tiv_3_1",0, 0),
"ugl":("ugl_3_1",0, 0),
},
#4:
# {
# "equation_ids":(1,2,3), # note: this is the to_id's
# "constant":(0, "act_4_2", "act_4_3"), #there is no constant term in the equation for to_id 1
# "aai":(0, "aai_4_2","aai_4_3"),
# "amps":(0, "amps_4_2","amps_4_3"),
# "blmz":(0, "blmz_4_2","blmz_4_3"),
# "c450":(0, "c450_4_2","c450_4_3"),
# "c750":(0, "c750_4_2","c750_4_3"),
# "cd1":(0, "cd1_4_2","cd1_4_3"),
# "crit":(0, "crit_4_2","crit_4_3"),
# "dag":(0, "dag_4_2","dag_4_3"),
# "dc":(0, "dc_4_2","dc_4_3"),
# "dcbd":(0, "dcbd_4_2","dcbd_4_3"),
# "dcri":(0, "dcri_4_2","dcri_4_3"),
# "ddt1":(0, "ddt1_4_2","ddt1_4_3"),
# "de":(0, "de_4_2","de_4_3"),
# "dfre":(0, "dfre_4_2","dfre_4_3"),
# "di":(0, "di_4_2","di_4_3"),
# "dloc":(0, "dloc_4_2","dloc_4_3"),
# "dmu":(0, "dmu_4_2","dmu_4_3"),
# "dnlr":(0, "dnlr_4_2","dnlr_4_3"),
# "dos":(0, "dos_4_2","dos_4_3"),
# "dprd":(0, "dprd_4_2","dprd_4_3"),
# "dpub":(0, "dpub_4_2","dpub_4_3"),
# "dres":(0, "dres_4_2","dres_4_3"),
# "dt1":(0, "dt1_4_2","dt1_4_3"),
# "dtim":(0, "dtim_4_2","dtim_4_3"),
# "dwat":(0, "dwat_4_2","dwat_4_3"),
# "dwet":(0, "dwet_4_2","dwet_4_3"),
# "fai":(0, "fai_4_2","fai_4_3"),
# "fmps":(0, "fmps_4_2","fmps_4_3"),
# "gai":(0, "gai_4_2","gai_4_3"),
# "gmps":(0, "gmps_4_2","gmps_4_3"),
# "h450":(0, "h450_4_2","h450_4_3"),
# "h750":(0, "h750_4_2","h750_4_3"),
# "hai":(0, "hai_4_2","hai_4_3"),
# "hd1":(0, "hd1_4_2","hd1_4_3"),
# "hmps":(0, "hmps_4_2","hmps_4_3"),
# "mai":(0, "mai_4_2","mai_4_3"),
# "mmps":(0, "mmps_4_2","mmps_4_3"),
# "pag":(0, "pag_4_2","pag_4_3"),
# "pcc":(0, "pcc_4_2","pcc_4_3"),
# "pcd":(0, "pcd_4_2","pcd_4_3"),
# "pcf":(0, "pcf_4_2","pcf_4_3"),
# "pcri":(0, "pcri_4_2","pcri_4_3"),
# "pes":(0, "pes_4_2","pes_4_3"),
# "pfld":(0, "pfld_4_2","pfld_4_3"),
# "pgr":(0, "pgr_4_2","pgr_4_3"),
# "phu":(0, "phu_4_2","phu_4_3"),
# "plu":(0, "plu_4_2","plu_4_3"),
# "pmf":(0, "pmf_4_2","pmf_4_3"),
# "pmu":(0, "pmu_4_2","pmu_4_3"),
# "psg":(0, "psg_4_2","psg_4_3"),
# "pslp":(0, "pslp_4_2","pslp_4_3"),
# "pstr":(0, "pstr_4_2","pstr_4_3"),
# "pub":(0, "pub_4_2","pub_4_3"),
# "pwa":(0, "pwa_4_2","pwa_4_3"),
# "pwet":(0, "pwet_4_2","pwet_4_3"),
# "shei":(0, "shei_4_2","shei_4_3"),
# "sslp":(0, "sslp_4_2","sslp_4_3"),
# "tbl":(0, "tbl_4_2","tbl_4_3"),
# "tiv":(0, "tiv_4_2","tiv_4_3"),
# "ugl":(0, "ugl_4_2","ugl_4_3"),
# },
5:
{
"equation_ids":(1,2,3,5,6,7), # note: this is the to_id's
"constant":("act_5_1","act_5_2","act_5_3","act_5_5",0,0),
"aai":(0,"aai_5_2","aai_5_3", 0,"aai_5_6","aai_5_7"),
"amps":("amps_5_1","amps_5_2",0, 0,0,"amps_5_7"),
# # "blmz":(0,0,0, 0,"blmz_5_6",0),
# # "c750":("c750_5_1",0,0, 0,0,0),
"cd1":("cd1_5_1",0,"cd1_5_3", 0,0,"cd1_5_7"),
"dag":("dag_5_1",0,0, 0,"dag_5_6","dag_5_7"),
# # "dc":(0,0,0, 0,0,"dc_5_7"),
# # "dcbd":("dcbd_5_1",0,0, 0,0,0),
"dcri":("dcri_5_1",0,0, 0,0,0),
"de":("de_5_1","de_5_2","de_5_3", 0,0,0),
"dloc":("dloc_5_1","dloc_5_2","dloc_5_3", 0,0,0),
"dnlr":(0,"dnlr_5_2","dnlr_5_3", 0,0,0),
"dos":("dos_5_1",0,0, 0,0,0),
"dprd":("dprd_5_1",0,0, 0,0,"dprd_5_7"),
"dpub":("dpub_5_1","dpub_5_2",0, 0,0,0),
"dres":(0,0,"dres_5_3", 0,0,0),
"dtim":("dtim_5_1","dtim_5_2","dtim_5_3", 0,0,0),
"dwat":("dwat_5_1",0,0, 0,0,"dwat_5_7"),
"dwet":(0,0,"dwet_5_3", 0,0,0),
"fmps":("fmps_5_1","fmps_5_2",0, 0,0,"fmps_5_7"),
"h450":(0,0,0, 0,0,"h450_5_7"),
"h750":("h750_5_1","h750_5_2",0, 0,0,"h750_5_7"),
"hai":(0,0,0, 0,"hai_5_6",0),
# "pcd":("pcd_5_1","pcd_5_2",0, 0,0,0),
"pcf":(0,0,0, 0,0,"pcf_5_7"),
"pcri":("pcri_5_1",0,0, 0,0,0),
"pes":("pes_5_1",0,0, 0,"pes_5_6","pes_5_7"),
"phu":("phu_5_1",0,"phu_5_3", 0,"phu_5_6","phu_5_7"),
"plu":(0,0,"plu_5_3", 0,"plu_5_6",0),
"pmu":("pmu_5_1","pmu_5_2",0, 0,"pmu_5_6","pmu_5_7"),
"pstr":(0,"pstr_5_2","pstr_5_3", 0,"pstr_5_6",0),
"pub":("pub_5_1",0,0, 0,0,0),
"pwa":(0,0,0, 0,"pwa_5_6",0),
"pwet":(0,0,0, 0,0,"pwet_5_7"),
"shei":("shei_5_1","shei_5_2",0, 0,"shei_5_6",0),
"sslp":("sslp_5_1","sslp_5_2","sslp_5_3", 0,"sslp_5_6",0),
"tiv":("tiv_5_1",0,0, 0,0,0),
"ugl":("ugl_5_1",0,0, 0,0,0),
},
6:
{
"equation_ids":(1,2,3,5,6), # note: this is the to_id's
"constant":("act_6_1","act_6_2","act_6_3",0,"act_6_6"),
"aai":(0,"aai_6_2","aai_6_3",0,0),
"blmz":("blmz_6_1",0,0,0,0),
"c750":("c750_6_1",0,0,0,0),
"dcri":("dcri_6_1","dcri_6_2","dcri_6_3",0, 0),
"di":("di_6_1",0,0,0,0),
"dloc":("dloc_6_1","dloc_6_2","dloc_6_3",0, 0),
"dnlr":(0,"dnlr_6_2","dnlr_6_3",0, 0),
"dos":("dos_6_1",0,0,0,0),
"dprd":("dprd_6_1",0,0,0, 0),
"dres":("dres_6_1","dres_6_2",0,0, 0),
"dtim":("dtim_6_1","dtim_6_2","dtim_6_3",0, 0),
"fmps":("fmps_6_1",0,0,0, 0),
"gai":(0,0,"gai_6_3","gai_6_5", 0),
"h750":("h750_6_1","h750_6_2",0,0, 0),
"hmps":("hmps_6_1",0,0,0,0),
# "pcd":("pcd_6_1","pcd_6_2","pcd_6_3","pcd_6_5", 0),
"pcf":("pcf_6_1","pcf_6_2",0,0,0),
"pes":("pes_6_1",0,"pes_6_3",0, 0),
"pgr":("pgr_6_1","pgr_6_2","pgr_6_3","pgr_6_5", 0),
"phu":("phu_6_1","phu_6_2",0,0, 0),
"plu":(0,0,"plu_6_3","plu_6_5", 0),
"pmu":("pmu_6_1","pmu_6_2","pmu_6_3",0, 0),
"pslp":("pslp_6_1","pslp_6_2","pslp_6_3","pslp_6_5", 0),
"pstr":("pstr_6_1","pstr_6_2",0,0, 0),
"pub":("pub_6_1",0,0,0, 0),
"pwa":(0,0,"pwa_6_3",0, 0),
"pwet":("pwet_6_1","pwet_6_2","pwet_6_3",0, 0),
"shei":("shei_6_1",0,"shei_6_3","shei_6_5", 0),
"tiv":("tiv_6_1",0,0,0, 0),
"ugl":("ugl_6_1","ugl_6_2","ugl_6_3",0, 0),
},
7:
{
"equation_ids":(1,2,3,5,7), # note: this is the to_id's
"constant":("act_7_1","act_7_2","act_7_3",0,"act_7_7"),
"aai":(0,0,"aai_7_3",0, 0),
"blmz":(0,"blmz_7_2","blmz_7_3","blmz_7_5", 0),
"crit":(0,"crit_7_2",0,0, 0),
"dc":("dc_7_1",0,0,0,0),
"dcri":("dcri_7_1","dcri_7_2","dcri_7_3",0, 0),
"ddt1":(0,0,"ddt1_7_3","ddt1_7_5", 0),
"dloc":("dloc_7_1","dloc_7_2","dloc_7_3",0, 0),
"dos":("dos_7_1",0,"dos_7_3",0, 0),
"dprd":("dprd_7_1","dprd_7_2",0,0, 0),
"dpub":(0,"dpub_7_2",0,"dpub_7_5", 0),
"dres":("dres_7_1","dres_7_2",0,0, 0),
"dwat":("dwat_7_1","dwat_7_2",0,0, 0),
"fmps":("fmps_7_1","fmps_7_2","fmps_7_3",0, 0),
"gai":(0,0,0,"gai_7_5", 0),
"h750":("h750_7_1","h750_7_2",0,0, 0),
# "pcd":("pcd_7_1","pcd_7_2","pcd_7_3","pcd_7_5", 0),
"pcf":(0,0,0,"pcf_7_5", 0),
"pes":(0,0,0,"pes_7_5", 0),
"pgr":("pgr_7_1","pgr_7_2",0,"pgr_7_5", 0),
"phu":("phu_7_1","phu_7_2",0,0, 0),
"plu":("plu_7_1",0,"plu_7_3",0, 0),
"pmf":("pmf_7_1","pmf_7_2",0,"pmf_7_5", 0),
"pmu":(0,"pmu_7_2",0,0, 0),
"psg":(0,"psg_7_2",0,0, 0),
"pslp":("pslp_7_1","pslp_7_2",0,0, 0),
"pstr":("pstr_7_1","pstr_7_2",0,0, 0),
"pub":("pub_7_1",0,0,"pub_7_5", 0),
"pwa":(0,0,"pwa_7_3",0, 0),
"shei":(0,0,0,"shei_7_5", 0),
"sslp":(0,0,"sslp_7_3",0, 0),
"tiv":(0,"tiv_7_2","tiv_7_3",0, 0),
"ugl":(0,"ugl_7_2",0,0, 0),
},
10:
{
"equation_ids":(1,2,3,10), # note: this is the to_id's
"constant":("constant_10_1","constant_10_2",0, "constant_10_10"),
"blmz":("blmz_10_1","blmz_10_2","blmz_10_3",0),
"c750":("c750_10_1",0,0,0),
"cd1":(0,"cd1_10_2",0,0),
"crit":("crit_10_1","crit_10_2","crit_10_3",0),
"dag":("dag_10_1","dag_10_2","dag_10_3",0),
"dcbd":("dcbd_10_1","dcbd_10_2","dcbd_10_3",0),
"dcri":("dcri_10_1","dcri_10_2","dcri_10_3",0),
"ddt1":("ddt1_10_1",0,0,0),
"de":(0,"de_10_2",0,0),
"dfre":("dfre_10_1",0,0,0),
"dloc":("dloc_10_1",0,0,0),
"dnlr":("dnlr_10_1",0,"dnlr_10_3",0),
"dprd":("dprd_10_1","dprd_10_2",0,0),
"dres":("dres_10_1",0,0,0),
"dtim":(0,0,"dtim_10_3",0),
"gmps":("gmps_10_1","gmps_10_2","gmps_10_3",0),
"h450":("h450_10_1","h450_10_2",0,0),
"h750":("h750_10_1","h750_10_2","h750_10_3",0),
"mmps":("mmps_10_1",0,0,0),
"pag":(0,0,"pag_10_3",0),
# "pcd":("pcd_10_1","pcd_10_2","pcd_10_3",0),
"pes":(0,0,"pes_10_3",0),
"pfld":("pfld_10_1","pfld_10_2","pfld_10_3",0),
"phu":("phu_10_1",0,0,0),
"plu":(0,"plu_10_2","plu_10_3",0),
"pmu":(0,"pmu_10_2",0,0),
"psg":(0,"psg_10_2",0,0),
"pslp":(0,"pslp_10_2",0,0),
"pstr":("pstr_10_1","pstr_10_2",0,0),
"pwet":("pwet_10_1","pwet_10_2",0,0),
"tiv":("tiv_10_1",0,"tiv_10_3",0),
"ugl":("ugl_10_1","ugl_10_2",0,0),
},
8:
{
"equation_ids": (9,), # note the comma after "9"
"constant":("act_8_9",),
},
11:
{
"equation_ids": (11,), # note the comma after "9"
"constant":("act_11_11",),
},
12:
{
"equation_ids": (12,), # note the comma after "9"
"constant":("act_12_12",),
},
13:
{
"equation_ids": (13,), # note the comma after "9"
"constant":("act_13_13",),
},
14:
{
"equation_ids": (14,), # note the comma after "9"
"constant":("act_14_14",),
}
}
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/biocomplexity/examples/estimation_lccm_specification_ub91to95.py | Python | gpl-2.0 | 10,640 |
# coding=utf-8
import sys
sys.path.append("bindings/python/")
from unittestpy import TeamcityTestRunner
from elliptics_proxy import *
import unittest
r = remote('localhost', 1025)
c = config()
c.groups.append(1)
c.groups.append(2)
c.log_path = 'log'
c.remotes.append(r)
proxy = elliptics_proxy_t(c)
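# Two groups (1 and 2) are configured above, so every write is expected to be
# replicated and to return one lookup result per group; hence the
# len(lr) == 2 assertions in the tests below.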
class TestTeamcityMessages(unittest.TestCase):
def test_sync(self):
data = 'test data of test_sync'
key = 'test_sync_key'
lr = proxy.write(key, data)
assert len(lr) == 2
dc2 = proxy.read(key)
assert dc2.data == data
proxy.remove(key)
def test_async(self):
data = 'test data for test_async'
key = 'test_async_key'
dc = data_container_t(data)
dc.timestamp = timespec(123, 456789)
awr = proxy.write_async(key, dc)
lr = awr.get()
assert len(lr) == 2
        arr = proxy.read_async(key, embeded=True, groups=[1])
ldc = arr.get()
dc = ldc[0]
assert dc.data == data
ts2 = dc.timestamp
assert ts2.tv_sec == 123
assert ts2.tv_nsec == 456789
        arr = proxy.read_async(key, embeded=True, groups=[2])
ldc = arr.get()
dc = ldc[0]
assert dc.data == data
ts2 = dc.timestamp
assert ts2.tv_sec == 123
assert ts2.tv_nsec == 456789
        arm = proxy.remove_async(key)
arm.wait()
if __name__ == '__main__':
runner = TeamcityTestRunner()
unittest.main(testRunner=runner)
| yandex/libmastermind | tests/bindings/python/test.py | Python | gpl-2.0 | 1,327 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
import xbmc
from . import kodigui
from .. import utils, variables as v
class OptionsDialog(kodigui.BaseDialog):
xmlFile = 'script-plex-options_dialog.xml'
path = v.ADDON_PATH
theme = 'Main'
res = '1080i'
width = 1920
height = 1080
GROUP_ID = 100
BUTTON_IDS = (1001, 1002, 1003)
def __init__(self, *args, **kwargs):
kodigui.BaseDialog.__init__(self, *args, **kwargs)
self.header = kwargs.get('header')
self.info = kwargs.get('info')
self.button0 = kwargs.get('button0')
self.button1 = kwargs.get('button1')
self.button2 = kwargs.get('button2')
self.buttonChoice = None
def onFirstInit(self):
self.setProperty('header', self.header)
self.setProperty('info', self.info)
if self.button2:
self.setProperty('button.2', self.button2)
if self.button1:
self.setProperty('button.1', self.button1)
if self.button0:
self.setProperty('button.0', self.button0)
self.setBoolProperty('initialized', True)
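        # Brief wait so the skin can pick up the window properties before
        # focus is set (this appears to be a timing workaround).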
xbmc.Monitor().waitForAbort(0.1)
self.setFocusId(self.BUTTON_IDS[0])
def onClick(self, controlID):
if controlID in self.BUTTON_IDS:
self.buttonChoice = self.BUTTON_IDS.index(controlID)
self.doClose()
def show(header, info, button0=None, button1=None, button2=None):
w = OptionsDialog.open(header=header, info=info, button0=button0, button1=button1, button2=button2)
choice = w.buttonChoice
del w
utils.garbageCollect()
return choice
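# Example usage (illustrative strings only):
#   choice = show('Delete item?', 'This cannot be undone.', 'Delete', 'Cancel')
#   # -> 0 or 1 for the chosen button, or None if the dialog was dismissed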
| croneter/PlexKodiConnect | resources/lib/windows/optionsdialog.py | Python | gpl-2.0 | 1,700 |
#! /usr/bin/env python
from genomon_pipeline.stage_task import *
class Markduplicates(Stage_task):
task_name = "markduplicates"
script_template = """
#!/bin/bash
#
# Set SGE
#
#$ -S /bin/bash # set shell in UGE
#$ -cwd # execute at the submitted dir
pwd # print current working directory
hostname # print hostname
date # print date
set -xv
set -o pipefail
{biobambam}/bammarkduplicates M={out_prefix}.metrics tmpfile={out_prefix}.tmp markthreads=2 rewritebam=1 rewritebamlevel=1 index=1 md5=1 {input_bam_files} O={out_bam}
"""
def __init__(self, qsub_option, script_dir):
super(Markduplicates, self).__init__(qsub_option, script_dir)
| eigoshimizu/Genomon | scripts/genomon_pipeline/dna_resource/markduplicates.py | Python | gpl-2.0 | 742 |