| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, ⌀ = some nulls) |
|---|---|---|---|---|
redebian/documentation | refs/heads/master | django/contrib/gis/admin/__init__.py | 637 |
# Getting the normal admin routines, classes, and `site` instance.
from django.contrib.admin import autodiscover, site, AdminSite, ModelAdmin, StackedInline, TabularInline, HORIZONTAL, VERTICAL
# Geographic admin options classes and widgets.
from django.contrib.gis.admin.options import GeoModelAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
try:
    from django.contrib.gis.admin.options import OSMGeoAdmin
    HAS_OSM = True
except ImportError:
    HAS_OSM = False
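# A minimal sketch (not part of the original file) of how downstream code can
# consume the HAS_OSM flag; `BaseGeoAdmin` is a hypothetical name. The
# conditional only evaluates the branch it takes, so OSMGeoAdmin is never
# touched when its import failed above.
BaseGeoAdmin = OSMGeoAdmin if HAS_OSM else GeoModelAdmin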
|
dga4654dan/UTM-Demo | refs/heads/master | V_1_0_2_1/UtmDemo_Sfs_2.9.0/UtmDemo_Sfs_2.9.0_Server/lib/Lib/test/test_string.py | 7 |
from test_support import *
import string, sys
print_test('string (test_string.py)', 1)
names = {}
def test(name, input, output, *args):
    if not names.has_key(name):
        print_test(name, 2)
        names[name] = 1
    f = getattr(string, name)
    try:
        value = apply(f, (input,) + args)
    except:
        value = sys.exc_type
    if verbose and value <> output:
        print 'value=', value, ', output=', output
    assert value == output
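# Convention of the helper above: `output` may also be an exception class
# (e.g. ValueError); the bare except turns a raised error into sys.exc_type,
# so the same equality check covers both normal results and failures.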
test('atoi', " 1 ", 1)
test('atoi', " 1x", ValueError)
test('atoi', " x1 ", ValueError)
test('atol', " 1 ", 1L)
test('atol', " 1x ", ValueError)
test('atol', " x1 ", ValueError)
test('atof', " 1 ", 1.0)
#test('atof', " 1x ", ValueError)
test('atof', " x1 ", ValueError)
test('capitalize', ' hello ', ' hello ')
test('capitalize', 'hello ', 'Hello ')
test('find', 'abcdefghiabc', 0, 'abc')
test('find', 'abcdefghiabc', 9, 'abc', 1)
test('find', 'abcdefghiabc', -1, 'def', 4)
test('rfind', 'abcdefghiabc', 9, 'abc')
test('lower', 'HeLLo', 'hello')
test('upper', 'HeLLo', 'HELLO')
test('split', 'this is the split function',
['this', 'is', 'the', 'split', 'function'])
test('split', 'a|b|c|d', ['a', 'b', 'c', 'd'], '|')
test('split', 'a|b|c|d', ['a', 'b', 'c|d'], '|', 2)
test('split', 'a b c d', ['a', 'b c d'], None, 1)
test('split', 'a b c d', ['a', 'b', 'c d'], None, 2)
test('split', 'a b c d', ['a', 'b', 'c', 'd'], None, 3)
test('split', 'a b c d', ['a', 'b', 'c', 'd'], None, 4)
test('split', 'a b c d', ['a b c d'], None, 0)
test('split', 'a b c d', ['a', 'b', 'c', 'd'], None, -1)
test('split', 'a b c d', ['a', 'b', 'c d'], None, 2)
# join now works with any sequence type
class Sequence:
    def __init__(self): self.seq = 'wxyz'
    def __len__(self): return len(self.seq)
    def __getitem__(self, i): return self.seq[i]
test('join', ['a', 'b', 'c', 'd'], 'a b c d')
test('join', ('a', 'b', 'c', 'd'), 'abcd', '')
test('join', Sequence(), 'w x y z')
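# Quick cross-check (not in the original test, but valid under the same
# Python 2 semantics): the str method form of join accepts the identical
# duck-typed sequence.
assert ' '.join(Sequence()) == 'w x y z'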
# try a few long ones
longstring = "\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:x\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
x:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx:x\
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\
xxxxxxxxxxxxxxxxxxx"
assert string.join(['x' * 100] * 100, ':') == longstring
assert string.join(('x' * 100,) * 100, ':') == longstring
test('strip', ' hello ', 'hello')
test('lstrip', ' hello ', 'hello ')
test('rstrip', ' hello ', ' hello')
test('swapcase', 'HeLLo cOmpUteRs', 'hEllO CoMPuTErS')
test('replace', 'one!two!three!', 'one@two!three!', '!', '@', 1)
test('replace', 'one!two!three!', 'one@two@three!', '!', '@', 2)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@', 3)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@', 4)
test('replace', 'one!two!three!', 'one!two!three!', '!', '@', 0)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@', -1)
test('replace', 'one!two!three!', 'one@two@three@', '!', '@')
test('replace', 'one!two!three!', 'one!two!three!', 'x', '@')
test('replace', 'one!two!three!', 'one!two!three!', 'x', '@', 2)
test('count', 'aaa', 3, 'a')
test('count', 'aaa', 2, 'a', 0, 2)
test('count', 'ababab', 3, 'ab', 0)
test('count', 'ababab', 2, 'ab', 0, 5)
test('count', 'ababab', 2, 'ab', 1)
test('count', 'ababab', 2, 'ab', 1, 6)
test('count', 'ababab', 0, 'abc')
test('count', 'ababab', 7, '')
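# The bare attribute accesses below presumably just check that these
# module-level constants exist; a missing one would raise AttributeError.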
string.whitespace
string.lowercase
string.uppercase
|
zedr/django | refs/heads/master | tests/modeladmin/tests.py | 6 |
from __future__ import unicode_literals
from datetime import date
import warnings
from django import forms
from django.contrib.admin.options import (ModelAdmin, TabularInline,
HORIZONTAL, VERTICAL)
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.widgets import AdminDateWidget, AdminRadioSelect
from django.contrib.admin.validation import ModelAdminValidator
from django.contrib.admin import (SimpleListFilter,
BooleanFieldListFilter)
from django.core.checks import Error
from django.core.exceptions import ImproperlyConfigured
from django.forms.models import BaseModelFormSet
from django.forms.widgets import Select
from django.test import TestCase
from .models import Band, Concert, ValidationTestModel, ValidationTestInlineModel
class MockRequest(object):
pass
class MockSuperUser(object):
def has_perm(self, perm):
return True
request = MockRequest()
request.user = MockSuperUser()
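# Hedged note on the stubs above: the admin code paths exercised in these
# tests at most ask request.user.has_perm(...), so a bare object standing in
# for an HttpRequest with a permissive user is sufficient.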
class ModelAdminTests(TestCase):
def setUp(self):
self.band = Band.objects.create(
name='The Doors',
bio='',
sign_date=date(1965, 1, 1),
)
self.site = AdminSite()
# form/fields/fieldsets interaction ##############################
def test_default_fields(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['name', 'bio', 'sign_date'])
self.assertEqual(list(ma.get_fields(request)),
['name', 'bio', 'sign_date'])
self.assertEqual(list(ma.get_fields(request, self.band)),
['name', 'bio', 'sign_date'])
def test_default_fieldsets(self):
# fieldsets_add and fieldsets_change should return a special data structure that
# is used in the templates. They should generate the "right thing" whether we
# have specified a custom form, the fields argument, or nothing at all.
#
# Here's the default case. There are no custom form_add/form_change methods,
# no fields argument, and no fieldsets argument.
ma = ModelAdmin(Band, self.site)
self.assertEqual(ma.get_fieldsets(request),
[(None, {'fields': ['name', 'bio', 'sign_date']})])
self.assertEqual(ma.get_fieldsets(request, self.band),
[(None, {'fields': ['name', 'bio', 'sign_date']})])
def test_get_fieldsets(self):
# Test that get_fieldsets is called when figuring out form fields.
# Refs #18681.
class BandAdmin(ModelAdmin):
def get_fieldsets(self, request, obj=None):
return [(None, {'fields': ['name', 'bio']})]
ma = BandAdmin(Band, self.site)
form = ma.get_form(None)
self.assertEqual(form._meta.fields, ['name', 'bio'])
class InlineBandAdmin(TabularInline):
model = Concert
fk_name = 'main_band'
can_delete = False
def get_fieldsets(self, request, obj=None):
return [(None, {'fields': ['day', 'transport']})]
ma = InlineBandAdmin(Band, self.site)
form = ma.get_formset(None).form
self.assertEqual(form._meta.fields, ['day', 'transport'])
def test_lookup_allowed_allows_nonexistent_lookup(self):
"""
Ensure that a lookup_allowed allows a parameter
whose field lookup doesn't exist.
Refs #21129.
"""
class BandAdmin(ModelAdmin):
fields = ['name']
ma = BandAdmin(Band, self.site)
self.assertTrue(ma.lookup_allowed('name__nonexistent', 'test_value'))
def test_field_arguments(self):
# If we specify the fields argument, fieldsets_add and fieldsets_change should
# just stick the fields into a fieldsets structure and return it.
class BandAdmin(ModelAdmin):
fields = ['name']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_fields(request)), ['name'])
self.assertEqual(list(ma.get_fields(request, self.band)), ['name'])
self.assertEqual(ma.get_fieldsets(request),
[(None, {'fields': ['name']})])
self.assertEqual(ma.get_fieldsets(request, self.band),
[(None, {'fields': ['name']})])
def test_field_arguments_restricted_on_form(self):
# If we specify fields or fieldsets, it should restrict the fields on the Form
# class to the ones specified. This may cause errors to be raised in the db layer
# if required model fields aren't in fields/fieldsets, but that's preferable to
# ghost errors where you have a field in your Form class that isn't being
# displayed because you forgot to add it to fields/fieldsets.
# Using `fields`.
class BandAdmin(ModelAdmin):
fields = ['name']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name'])
self.assertEqual(list(ma.get_form(request, self.band).base_fields),
['name'])
# Using `fieldsets`.
class BandAdmin(ModelAdmin):
fieldsets = [(None, {'fields': ['name']})]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name'])
self.assertEqual(list(ma.get_form(request, self.band).base_fields),
['name'])
# Using `exclude`.
class BandAdmin(ModelAdmin):
exclude = ['bio']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['name', 'sign_date'])
# You can also pass a tuple to `exclude`.
class BandAdmin(ModelAdmin):
exclude = ('bio',)
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['name', 'sign_date'])
# Using `fields` and `exclude`.
class BandAdmin(ModelAdmin):
fields = ['name', 'bio']
exclude = ['bio']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['name'])
def test_custom_form_meta_exclude_with_readonly(self):
"""
Ensure that the custom ModelForm's `Meta.exclude` is respected when
used in conjunction with `ModelAdmin.readonly_fields` and when no
`ModelAdmin.exclude` is defined.
Refs #14496.
"""
# First, with `ModelAdmin` -----------------------
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ['bio']
class BandAdmin(ModelAdmin):
readonly_fields = ['name']
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['sign_date'])
# Then, with `InlineModelAdmin` -----------------
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
readonly_fields = ['transport']
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
class BandAdmin(ModelAdmin):
inlines = [
ConcertInline
]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'opening_band', 'id', 'DELETE'])
def test_custom_form_meta_exclude(self):
"""
Ensure that the custom ModelForm's `Meta.exclude` is overridden if
`ModelAdmin.exclude` or `InlineModelAdmin.exclude` are defined.
Refs #14496.
"""
# First, with `ModelAdmin` -----------------------
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ['bio']
class BandAdmin(ModelAdmin):
exclude = ['name']
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['bio', 'sign_date'])
# Then, with `InlineModelAdmin` -----------------
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
exclude = ['transport']
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
class BandAdmin(ModelAdmin):
inlines = [
ConcertInline
]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'opening_band', 'day', 'id', 'DELETE'])
def test_custom_form_validation(self):
# If we specify a form, it should use it, allowing custom validation to work
# properly. This won't, however, break any of the admin widgets or media.
class AdminBandForm(forms.ModelForm):
delete = forms.BooleanField()
class BandAdmin(ModelAdmin):
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['name', 'bio', 'sign_date', 'delete'])
self.assertEqual(
type(ma.get_form(request).base_fields['sign_date'].widget),
AdminDateWidget)
def test_form_exclude_kwarg_override(self):
"""
Ensure that the `exclude` kwarg passed to `ModelAdmin.get_form()`
overrides all other declarations. Refs #8999.
"""
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ['name']
class BandAdmin(ModelAdmin):
exclude = ['sign_date']
form = AdminBandForm
def get_form(self, request, obj=None, **kwargs):
kwargs['exclude'] = ['bio']
return super(BandAdmin, self).get_form(request, obj, **kwargs)
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['name', 'sign_date'])
def test_formset_exclude_kwarg_override(self):
"""
Ensure that the `exclude` kwarg passed to `InlineModelAdmin.get_formset()`
overrides all other declarations. Refs #8999.
"""
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
exclude = ['transport']
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
def get_formset(self, request, obj=None, **kwargs):
kwargs['exclude'] = ['opening_band']
return super(ConcertInline, self).get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [
ConcertInline
]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'day', 'transport', 'id', 'DELETE'])
def test_queryset_override(self):
# If we need to override the queryset of a ModelChoiceField in our custom form,
# make sure that RelatedFieldWidgetWrapper doesn't mess that up.
band2 = Band(name='The Beatles', bio='', sign_date=date(1962, 1, 1))
band2.save()
class ConcertAdmin(ModelAdmin):
pass
ma = ConcertAdmin(Concert, self.site)
form = ma.get_form(request)()
self.assertHTMLEqual(str(form["main_band"]),
'<select name="main_band" id="id_main_band">\n'
'<option value="" selected="selected">---------</option>\n'
'<option value="%d">The Beatles</option>\n'
'<option value="%d">The Doors</option>\n'
'</select>' % (band2.id, self.band.id))
class AdminConcertForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(AdminConcertForm, self).__init__(*args, **kwargs)
self.fields["main_band"].queryset = Band.objects.filter(name='The Doors')
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
form = ma.get_form(request)()
self.assertHTMLEqual(str(form["main_band"]),
'<select name="main_band" id="id_main_band">\n'
'<option value="" selected="selected">---------</option>\n'
'<option value="%d">The Doors</option>\n'
'</select>' % self.band.id)
def test_regression_for_ticket_15820(self):
"""
Ensure that `obj` is passed from `InlineModelAdmin.get_fieldsets()` to
`InlineModelAdmin.get_formset()`.
"""
class CustomConcertForm(forms.ModelForm):
class Meta:
model = Concert
fields = ['day']
class ConcertInline(TabularInline):
model = Concert
fk_name = 'main_band'
def get_formset(self, request, obj=None, **kwargs):
if obj:
kwargs['form'] = CustomConcertForm
return super(ConcertInline, self).get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [
ConcertInline
]
Concert.objects.create(main_band=self.band, opening_band=self.band, day=1)
ma = BandAdmin(Band, self.site)
inline_instances = ma.get_inline_instances(request)
fieldsets = list(inline_instances[0].get_fieldsets(request))
self.assertEqual(fieldsets[0][1]['fields'], ['main_band', 'opening_band', 'day', 'transport'])
fieldsets = list(inline_instances[0].get_fieldsets(request, inline_instances[0].model))
self.assertEqual(fieldsets[0][1]['fields'], ['day'])
# radio_fields behavior ###########################################
def test_default_foreign_key_widget(self):
# First, without any radio_fields specified, the widgets for ForeignKey
# and fields with choices specified ought to be basic Select widgets.
# ForeignKey widgets in the admin are wrapped with RelatedFieldWidgetWrapper,
# so they need to be handled properly when type checking. For Select fields,
# all of the choices lists have a first entry of dashes.
cma = ModelAdmin(Concert, self.site)
cmafa = cma.get_form(request)
self.assertEqual(type(cmafa.base_fields['main_band'].widget.widget),
Select)
self.assertEqual(
list(cmafa.base_fields['main_band'].widget.choices),
[('', '---------'), (self.band.id, 'The Doors')])
self.assertEqual(
type(cmafa.base_fields['opening_band'].widget.widget), Select)
self.assertEqual(
list(cmafa.base_fields['opening_band'].widget.choices),
[('', '---------'), (self.band.id, 'The Doors')])
self.assertEqual(type(cmafa.base_fields['day'].widget), Select)
self.assertEqual(list(cmafa.base_fields['day'].widget.choices),
[('', '---------'), (1, 'Fri'), (2, 'Sat')])
self.assertEqual(type(cmafa.base_fields['transport'].widget),
Select)
self.assertEqual(
list(cmafa.base_fields['transport'].widget.choices),
[('', '---------'), (1, 'Plane'), (2, 'Train'), (3, 'Bus')])
def test_foreign_key_as_radio_field(self):
# Now specify all the fields as radio_fields. Widgets should now be
# RadioSelect, and the choices list should have a first entry of 'None' if
# blank=True for the model field. Finally, the widget should have the
# 'radiolist' attr, and 'inline' as well if the field is specified as HORIZONTAL.
class ConcertAdmin(ModelAdmin):
radio_fields = {
'main_band': HORIZONTAL,
'opening_band': VERTICAL,
'day': VERTICAL,
'transport': HORIZONTAL,
}
cma = ConcertAdmin(Concert, self.site)
cmafa = cma.get_form(request)
self.assertEqual(type(cmafa.base_fields['main_band'].widget.widget),
AdminRadioSelect)
self.assertEqual(cmafa.base_fields['main_band'].widget.attrs,
{'class': 'radiolist inline'})
self.assertEqual(list(cmafa.base_fields['main_band'].widget.choices),
[(self.band.id, 'The Doors')])
self.assertEqual(
type(cmafa.base_fields['opening_band'].widget.widget),
AdminRadioSelect)
self.assertEqual(cmafa.base_fields['opening_band'].widget.attrs,
{'class': 'radiolist'})
self.assertEqual(
list(cmafa.base_fields['opening_band'].widget.choices),
[('', 'None'), (self.band.id, 'The Doors')])
self.assertEqual(type(cmafa.base_fields['day'].widget),
AdminRadioSelect)
self.assertEqual(cmafa.base_fields['day'].widget.attrs,
{'class': 'radiolist'})
self.assertEqual(list(cmafa.base_fields['day'].widget.choices),
[(1, 'Fri'), (2, 'Sat')])
self.assertEqual(type(cmafa.base_fields['transport'].widget),
AdminRadioSelect)
self.assertEqual(cmafa.base_fields['transport'].widget.attrs,
{'class': 'radiolist inline'})
self.assertEqual(list(cmafa.base_fields['transport'].widget.choices),
[('', 'None'), (1, 'Plane'), (2, 'Train'), (3, 'Bus')])
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ('transport',)
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['main_band', 'opening_band', 'day'])
class AdminConcertForm(forms.ModelForm):
extra = forms.CharField()
class Meta:
model = Concert
fields = ['extra', 'transport']
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
self.assertEqual(list(ma.get_form(request).base_fields),
['extra', 'transport'])
class ConcertInline(TabularInline):
form = AdminConcertForm
model = Concert
fk_name = 'main_band'
can_delete = True
class BandAdmin(ModelAdmin):
inlines = [
ConcertInline
]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['extra', 'transport', 'id', 'DELETE', 'main_band'])
class CheckTestCase(TestCase):
def assertIsInvalid(self, model_admin, model, msg,
id=None, hint=None, invalid_obj=None):
invalid_obj = invalid_obj or model_admin
errors = model_admin.check(model=model)
expected = [
Error(
msg,
hint=hint,
obj=invalid_obj,
id=id,
)
]
self.assertEqual(errors, expected)
def assertIsValid(self, model_admin, model):
errors = model_admin.check(model=model)
expected = []
self.assertEqual(errors, expected)
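# Hedged note: the plain assertEqual in the helpers above works because the
# checks framework's messages (Error and friends) compare by value, not identity.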
class RawIdCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
raw_id_fields = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"raw_id_fields" must be a list or tuple.',
'admin.E001')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
raw_id_fields = ('non_existent_field',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"raw_id_fields[0]" refers to field "non_existent_field", '
'which is missing from model modeladmin.ValidationTestModel.'),
'admin.E002')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
raw_id_fields = ('name',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"raw_id_fields[0]" must be a ForeignKey or ManyToManyField.',
'admin.E003')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
raw_id_fields = ('users',)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class FieldsetsCheckTests(CheckTestCase):
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = (("General", {"fields": ("name",)}),)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"fieldsets" must be a list or tuple.',
'admin.E007')
def test_non_iterable_item(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = ({},)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"fieldsets[0]" must be a list or tuple.',
'admin.E008')
def test_item_not_a_pair(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = ((),)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"fieldsets[0]" must be a pair.',
'admin.E009')
def test_second_element_of_item_not_a_dict(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = (("General", ()),)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"fieldsets[0][1]" must be a dictionary.',
'admin.E010')
def test_missing_fields_key(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = (("General", {}),)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"fieldsets[0][1]" must contain "fields" key.',
'admin.E011')
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = (("General", {"fields": ("name",)}),)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
def test_specified_both_fields_and_fieldsets(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = (("General", {"fields": ("name",)}),)
fields = ["name"]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'Both "fieldsets" and "fields" are specified.',
'admin.E005')
def test_duplicate_fields(self):
class ValidationTestModelAdmin(ModelAdmin):
fieldsets = [(None, {'fields': ['name', 'name']})]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'There are duplicate field(s) in "fieldsets[0][1]".',
'admin.E012')
def test_fieldsets_with_custom_form_validation(self):
class BandAdmin(ModelAdmin):
fieldsets = (
('Band', {
'fields': ('name',)
}),
)
self.assertIsValid(BandAdmin, Band)
class FieldsCheckTests(CheckTestCase):
def test_duplicate_fields_in_fields(self):
class ValidationTestModelAdmin(ModelAdmin):
fields = ["name", "name"]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'There are duplicate field(s) in "fields".',
'admin.E006')
def test_inline(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
fields = 10
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"fields" must be a list or tuple.',
'admin.E004',
invalid_obj=ValidationTestInline)
class FormCheckTests(CheckTestCase):
def test_invalid_type(self):
class FakeForm(object):
pass
class ValidationTestModelAdmin(ModelAdmin):
form = FakeForm
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"form" must inherit from BaseModelForm.',
'admin.E016')
def test_fieldsets_with_custom_form_validation(self):
class BandAdmin(ModelAdmin):
fieldsets = (
('Band', {
'fields': ('name',)
}),
)
self.assertIsValid(BandAdmin, Band)
def test_valid_case(self):
class AdminBandForm(forms.ModelForm):
delete = forms.BooleanField()
class BandAdmin(ModelAdmin):
form = AdminBandForm
fieldsets = (
('Band', {
'fields': ('name', 'bio', 'sign_date', 'delete')
}),
)
self.assertIsValid(BandAdmin, Band)
class FilterVerticalCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_vertical = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"filter_vertical" must be a list or tuple.',
'admin.E017')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_vertical = ("non_existent_field",)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"filter_vertical[0]" refers to field "non_existent_field", '
'which is missing from model modeladmin.ValidationTestModel.'),
'admin.E019')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_vertical = ("name",)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"filter_vertical[0]" must be a ManyToManyField.',
'admin.E020')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_vertical = ("users",)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class FilterHorizontalCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_horizontal = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"filter_horizontal" must be a list or tuple.',
'admin.E018')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_horizontal = ("non_existent_field",)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"filter_horizontal[0]" refers to field "non_existent_field", '
'which is missing from model modeladmin.ValidationTestModel.'),
'admin.E019')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_horizontal = ("name",)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"filter_horizontal[0]" must be a ManyToManyField.',
'admin.E020')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
filter_horizontal = ("users",)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class RadioFieldsCheckTests(CheckTestCase):
def test_not_dictionary(self):
class ValidationTestModelAdmin(ModelAdmin):
radio_fields = ()
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"radio_fields" must be a dictionary.',
'admin.E021')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
radio_fields = {"non_existent_field": VERTICAL}
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"radio_fields" refers to field "non_existent_field", '
'which is missing from model modeladmin.ValidationTestModel.'),
'admin.E022')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
radio_fields = {"name": VERTICAL}
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"radio_fields" refers to "name", which is neither an instance '
'of ForeignKey nor does have choices set.'),
'admin.E023')
def test_invalid_value(self):
class ValidationTestModelAdmin(ModelAdmin):
radio_fields = {"state": None}
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"radio_fields[\'state\']" is neither admin.HORIZONTAL nor admin.VERTICAL.',
'admin.E024')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
radio_fields = {"state": VERTICAL}
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class PrepopulatedFieldsCheckTests(CheckTestCase):
def test_not_dictionary(self):
class ValidationTestModelAdmin(ModelAdmin):
prepopulated_fields = ()
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"prepopulated_fields" must be a dictionary.',
'admin.E026')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
prepopulated_fields = {"non_existent_field": ("slug",)}
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"prepopulated_fields" refers to field "non_existent_field", '
'which is missing from model modeladmin.ValidationTestModel.'),
'admin.E027')
def test_missing_field_again(self):
class ValidationTestModelAdmin(ModelAdmin):
prepopulated_fields = {"slug": ("non_existent_field",)}
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"prepopulated_fields[\'slug\'][0]" refers to field "non_existent_field", '
'which is missing from model modeladmin.ValidationTestModel.'),
'admin.E030')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
prepopulated_fields = {"users": ("name",)}
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"prepopulated_fields" refers to "users", which must not be '
'a DateTimeField, ForeignKey or ManyToManyField.'),
'admin.E028')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ListDisplayTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_display" must be a list or tuple.',
'admin.E107')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display = ('non_existent_field',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"list_display[0]" is neither a callable nor an attribute '
'of "ValidationTestModelAdmin" nor found in model modeladmin.ValidationTestModel.'),
'admin.E110')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display = ('users',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_display[0]" must not be a ManyToManyField.',
'admin.E109')
def test_valid_case(self):
def a_callable(obj):
pass
class ValidationTestModelAdmin(ModelAdmin):
def a_method(self, obj):
pass
list_display = ('name', 'decade_published_in', 'a_method', a_callable)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ListDisplayLinksCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display_links = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_display_links" must be a list or tuple or None.',
'admin.E111')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display_links = ('non_existent_field',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_display_links[0]" refers to "non_existent_field", which is not defined in "list_display".',
'admin.E112')
def test_missing_in_list_display(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display_links = ('name',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_display_links[0]" refers to "name", which is not defined in "list_display".',
'admin.E112')
def test_valid_case(self):
def a_callable(obj):
pass
class ValidationTestModelAdmin(ModelAdmin):
def a_method(self, obj):
pass
list_display = ('name', 'decade_published_in', 'a_method', a_callable)
list_display_links = ('name', 'decade_published_in', 'a_method', a_callable)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
def test_None_is_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
list_display_links = None
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ListFilterTests(CheckTestCase):
def test_list_filter_validation(self):
class ValidationTestModelAdmin(ModelAdmin):
list_filter = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_filter" must be a list or tuple.',
'admin.E113')
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
list_filter = ('non_existent_field',)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_filter[0]" refers to "non_existent_field", which does not refer to a Field.',
'admin.E117')
def test_not_filter(self):
class RandomClass(object):
pass
class ValidationTestModelAdmin(ModelAdmin):
list_filter = (RandomClass,)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_filter[0]" must inherit from ListFilter.',
'admin.E114')
def test_not_filter_again(self):
class RandomClass(object):
pass
class ValidationTestModelAdmin(ModelAdmin):
list_filter = (('is_active', RandomClass),)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_filter[0][1]" must inherit from FieldListFilter.',
'admin.E116')
def test_not_filter_again_again(self):
class AwesomeFilter(SimpleListFilter):
def get_title(self):
return 'awesomeness'
def get_choices(self, request):
return (('bit', 'A bit awesome'), ('very', 'Very awesome'), )
def get_queryset(self, cl, qs):
return qs
class ValidationTestModelAdmin(ModelAdmin):
list_filter = (('is_active', AwesomeFilter),)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_filter[0][1]" must inherit from FieldListFilter.',
'admin.E116')
def test_not_associated_with_field_name(self):
class ValidationTestModelAdmin(ModelAdmin):
list_filter = (BooleanFieldListFilter,)
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_filter[0]" must not inherit from FieldListFilter.',
'admin.E115')
def test_valid_case(self):
class AwesomeFilter(SimpleListFilter):
def get_title(self):
return 'awesomeness'
def get_choices(self, request):
return (('bit', 'A bit awesome'), ('very', 'Very awesome'), )
def get_queryset(self, cl, qs):
return qs
class ValidationTestModelAdmin(ModelAdmin):
list_filter = ('is_active', AwesomeFilter, ('is_active', BooleanFieldListFilter), 'no')
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ListPerPageCheckTests(CheckTestCase):
def test_not_integer(self):
class ValidationTestModelAdmin(ModelAdmin):
list_per_page = 'hello'
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_per_page" must be an integer.',
'admin.E119')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
list_per_page = 100
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ListMaxShowAllCheckTests(CheckTestCase):
def test_not_integer(self):
class ValidationTestModelAdmin(ModelAdmin):
list_max_show_all = 'hello'
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"list_max_show_all" must be an integer.',
'admin.E120')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
list_max_show_all = 200
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class SearchFieldsCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
search_fields = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"search_fields" must be a list or tuple.',
'admin.E127')
class DateHierarchyCheckTests(CheckTestCase):
def test_missing_field(self):
class ValidationTestModelAdmin(ModelAdmin):
date_hierarchy = 'non_existent_field'
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"date_hierarchy" refers to field "non_existent_field", which '
'is missing from model modeladmin.ValidationTestModel.'),
'admin.E128')
def test_invalid_field_type(self):
class ValidationTestModelAdmin(ModelAdmin):
date_hierarchy = 'name'
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"date_hierarchy" must be a DateField or DateTimeField.',
'admin.E129')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
date_hierarchy = 'pub_date'
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class OrderingCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
ordering = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"ordering" must be a list or tuple.',
'admin.E031')
class ValidationTestModelAdmin(ModelAdmin):
ordering = ('non_existent_field',)
self.assertIsInvalid(
ValidationTestModelAdmin,
ValidationTestModel,
'"ordering[0]" refers to field "non_existent_field", which is missing from model modeladmin.ValidationTestModel.',
'admin.E033',
)
def test_random_marker_not_alone(self):
class ValidationTestModelAdmin(ModelAdmin):
ordering = ('?', 'name')
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
('"ordering" has the random ordering marker "?", but contains '
'other fields as well.'),
'admin.E032',
hint='Either remove the "?", or remove the other fields.')
def test_valid_random_marker_case(self):
class ValidationTestModelAdmin(ModelAdmin):
ordering = ('?',)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
def test_valid_complex_case(self):
class ValidationTestModelAdmin(ModelAdmin):
ordering = ('band__name',)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
ordering = ('name',)
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ListSelectRelatedCheckTests(CheckTestCase):
def test_invalid_type(self):
class ValidationTestModelAdmin(ModelAdmin):
list_select_related = 1
self.assertIsInvalid(ValidationTestModelAdmin, ValidationTestModel,
'"list_select_related" must be a boolean, tuple or list.',
'admin.E118')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
list_select_related = False
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class SaveAsCheckTests(CheckTestCase):
def test_not_boolean(self):
class ValidationTestModelAdmin(ModelAdmin):
save_as = 1
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"save_as" must be a boolean.',
'admin.E101')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
save_as = True
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class SaveOnTopCheckTests(CheckTestCase):
def test_not_boolean(self):
class ValidationTestModelAdmin(ModelAdmin):
save_on_top = 1
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"save_on_top" must be a boolean.',
'admin.E102')
def test_valid_case(self):
class ValidationTestModelAdmin(ModelAdmin):
save_on_top = True
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class InlinesCheckTests(CheckTestCase):
def test_not_iterable(self):
class ValidationTestModelAdmin(ModelAdmin):
inlines = 10
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"inlines" must be a list or tuple.',
'admin.E103')
def test_not_model_admin(self):
class ValidationTestInline(object):
pass
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"inlines[0]" must inherit from BaseModelAdmin.',
'admin.E104')
def test_missing_model_field(self):
class ValidationTestInline(TabularInline):
pass
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"model" is a required attribute of "inlines[0]".',
'admin.E105')
def test_invalid_model_type(self):
""" Test if `model` attribute on inline model admin is a models.Model.
"""
class SomethingBad(object):
pass
class ValidationTestInline(TabularInline):
model = SomethingBad
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"inlines[0].model" must be a Model.',
'admin.E106')
def test_valid_case(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class FkNameCheckTests(CheckTestCase):
def test_missing_field(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
fk_name = "non_existent_field"
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
"'modeladmin.ValidationTestInlineModel' has no field named 'non_existent_field'.",
'admin.E202',
invalid_obj=ValidationTestInline)
def test_valid_case(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
fk_name = "parent"
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class ExtraCheckTests(CheckTestCase):
def test_not_integer(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
extra = "hello"
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"extra" must be an integer.',
'admin.E203',
invalid_obj=ValidationTestInline)
def test_valid_case(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
extra = 2
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class MaxNumCheckTests(CheckTestCase):
def test_not_integer(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
max_num = "hello"
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"max_num" must be an integer.',
'admin.E204',
invalid_obj=ValidationTestInline)
def test_valid_case(self):
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
max_num = 2
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class FormsetCheckTests(CheckTestCase):
def test_invalid_type(self):
class FakeFormSet(object):
pass
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
formset = FakeFormSet
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsInvalid(
ValidationTestModelAdmin, ValidationTestModel,
'"formset" must inherit from BaseModelFormSet.',
'admin.E205',
invalid_obj=ValidationTestInline)
def test_valid_case(self):
class RealModelFormSet(BaseModelFormSet):
pass
class ValidationTestInline(TabularInline):
model = ValidationTestInlineModel
formset = RealModelFormSet
class ValidationTestModelAdmin(ModelAdmin):
inlines = [ValidationTestInline]
self.assertIsValid(ValidationTestModelAdmin, ValidationTestModel)
class CustomModelAdminTests(CheckTestCase):
def test_deprecation(self):
"Deprecated Custom Validator definitions still work with the check framework."
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=PendingDeprecationWarning)
class CustomValidator(ModelAdminValidator):
def validate_me(self, model_admin, model):
raise ImproperlyConfigured('error!')
class CustomModelAdmin(ModelAdmin):
validator_class = CustomValidator
self.assertIsInvalid(CustomModelAdmin, ValidationTestModel, 'error!')
|
saurabh6790/ON-RISLIB | refs/heads/master | website/doctype/web_page/templates/generators/web_page.py | 36 |
doctype = "Web Page"
condition_field = "published"
|
bigfootproject/sahara | refs/heads/spark-plugin | sahara/tests/unit/service/test_direct_engine.py | 7 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.service import direct_engine
from sahara.tests.unit import base
from sahara.utils import general as g
class TestDirectEngine(base.SaharaWithDbTestCase):
    @mock.patch('sahara.utils.openstack.nova.client')
    def test_delete_auto_security_group(self, nova_client):
        engine = direct_engine.DirectEngine()
        ng = mock.Mock(id="16fd2706-8baf-433b-82eb-8c7fada847da",
                       auto_security_group=True)
        ng.name = "ngname"
        ng.cluster.name = "cluster"
        auto_name = g.generate_auto_security_group_name(ng)
        ng.security_groups = [auto_name]
        client = mock.Mock()
        nova_client.return_value = client
        client.security_groups.get.side_effect = lambda x: SecurityGroup(x)
        engine._delete_auto_security_group(ng)
        client.security_groups.delete.assert_called_once_with(auto_name)

    @mock.patch('sahara.utils.openstack.nova.client')
    def test_delete_auto_security_group_other_groups(self, nova_client):
        engine = direct_engine.DirectEngine()
        ng = mock.Mock(id="16fd2706-8baf-433b-82eb-8c7fada847da",
                       auto_security_group=True)
        ng.name = "ngname"
        ng.cluster.name = "cluster"
        auto_name = g.generate_auto_security_group_name(ng)
        ng.security_groups = ['1', '2', auto_name]
        client = mock.Mock()
        nova_client.return_value = client
        client.security_groups.get.side_effect = lambda x: SecurityGroup(x)
        engine._delete_auto_security_group(ng)
        client.security_groups.delete.assert_called_once_with(auto_name)

    @mock.patch('sahara.utils.openstack.nova.client')
    def test_delete_auto_security_group_no_groups(self, nova_client):
        engine = direct_engine.DirectEngine()
        ng = mock.Mock(id="16fd2706-8baf-433b-82eb-8c7fada847da",
                       auto_security_group=True)
        ng.name = "ngname"
        ng.cluster.name = "cluster"
        ng.security_groups = []
        client = mock.Mock()
        nova_client.return_value = client
        client.security_groups.get.side_effect = lambda x: SecurityGroup(x)
        engine._delete_auto_security_group(ng)
        self.assertEqual(0, client.security_groups.delete.call_count)

    @mock.patch('sahara.utils.openstack.nova.client')
    def test_delete_auto_security_group_wrong_group(self, nova_client):
        engine = direct_engine.DirectEngine()
        ng = mock.Mock(id="16fd2706-8baf-433b-82eb-8c7fada847da",
                       auto_security_group=True)
        ng.name = "ngname"
        ng.cluster.name = "cluster"
        ng.security_groups = ['1', '2']
        client = mock.Mock()
        nova_client.return_value = client
        client.security_groups.get.side_effect = lambda x: SecurityGroup(x)
        engine._delete_auto_security_group(ng)
        self.assertEqual(0, client.security_groups.delete.call_count)


class SecurityGroup(object):
    def __init__(self, name):
        super(SecurityGroup, self).__init__()
        self.name = name
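# A hedged reading of the mocks above: the stub only needs a `name` attribute,
# since _delete_auto_security_group resolves each configured group through
# security_groups.get() and matches it against the generated auto-group name
# before deleting.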
|
AtScaleInc/Impala | refs/heads/master | tests/verifiers/__init__.py | 186 |
# This file is needed to make the files in this directory a Python package
|
LevinJ/CodeSamples
|
refs/heads/master
|
python/Installation_logs_analysis/calfailureCategory.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os.path
import sys
import shutil
import os
import glob
import re
import rebootstatistics
import os, os.path
from optparse import OptionParser
from openpyxl import Workbook
def parse_arguments():
parser = OptionParser()
parser.description = \
"This program takes a detections.data_sequence created by ./objects_detection and converts it into the Caltech dataset evaluation format"
parser.add_option("-i", "--input", dest="input_path",
metavar="FILE", type="string",
help="path to the installation result file")
parser.add_option("-d", "--directory", dest="input_directory",
metavar="Directory", type="string",
help="directory to the installation result files")
(options, args) = parser.parse_args()
# print (options, args)
if options.input_path:
if not os.path.exists(options.input_path):
parser.error("Could not find the input file")
elif options.input_directory:
if not os.path.exists(options.input_directory):
parser.error("Could not find the input directory")
else:
parser.error("'input' or 'diretory' option is required to run this program")
return options
current_state="searchStart"
current_loopStr=""
wb = Workbook()
ws = wb.active
ws.append(["NO", "RealFailure","Category","Loop", "SessionID","USB issue times","Failure details", "LZMA_log_path"])
current_numId=0
current_failureLine=""
current_errorcategory=""
current_usbfailedtimes=0
current_sessionid = ''
reboottimestatistic = rebootstatistics.rebootstatisticsProxy()
def getErrorCategory(failedinstallation,line):
if 'FAILED: Target: DEC/RebootKernel installation failed, error: eInstallTimeout' in line:
return "USB"
if 'exception: ' in line:
return "PRC_LZMA"
if 'eInstallTimeout' in line:
return "eInstallTimeout"
if 'eUnknownError' in line:
return "eUnknownError"
if 'eConfigError' in line:
return "eConfigError"
if 'install operation failed due to exception!' in line:
return "PRC"
if not failedinstallation:
return "PRC"
return "Others"
def getLoopNumber(line):
m = re.search('this is loop (.*) out of', line)
if m:
loopnum = m.group(1)
return loopnum
return "NOT FOUND"
def markSessionId(line):
global current_sessionid
if not 'ProductionScripts::Prc::Common::Session: created token session' in line:
return
#only process the first sessionid, used to handle the USB enumeration issue
if current_sessionid !='':
return
m = re.search('(.*)INFO -- ProductionScripts::Prc::Common::Session: created token session (.*)-(.*)-(.*)', line)
if m:
current_sessionid = m.group(4)
return
current_sessionid = "NOT FOUND"
def markUSBfailedTimes(line):
global current_usbfailedtimes
if 'ProductionScripts::Prc::Process::Packages::Verify ' in line:
current_usbfailedtimes = current_usbfailedtimes + 1
def processtheline(line, file_path):
global current_state
global current_loopStr
global current_numId
global current_failureLine
global current_errorcategory
global ws
global current_usbfailedtimes
global reboottimestatistic
global current_sessionid
markUSBfailedTimes(line)
markSessionId(line)
reboottimestatistic.processLine(line)
if(current_state=="searchStart"):
if 'this is loop' in line:
current_state="searchFailureorEnd"
current_loopStr = line
current_usbfailedtimes = 0
current_sessionid = ''
# print(line)
#exit the search start state
return
if(current_state=="searchRealFailureOrEnd"):
#find a false failure, log it and then start the next iteration
if '## total=' in line:
current_state="searchStart"
#This issue has been resolved, so we no longer log it
# ws.append([current_numId, "FALSE",getErrorCategory(False,current_failureLine),getLoopNumber(current_loopStr), current_sessionid,current_usbfailedtimes,current_failureLine])
#return here since we've reached the end of this iteration
return
#find a real failure, log it and then start next iteration
if 'Error: Installation failed at loop' in line:
current_state="searchStart"
ws.append([current_numId, "TRUE",getErrorCategory(True,current_failureLine),getLoopNumber(current_loopStr),current_sessionid, current_usbfailedtimes,current_failureLine, file_path])
#return here since we've found the first error occurrence
return
return
#Here we can be sure we are in the searchFailureorEnd state now
if(current_state=="searchFailureorEnd"):
if '## total=' in line:
current_state="searchStart"
#return here since we've reached the end of this iteration
return
#find the first occurrence of a failure, log it and then start the next iteration
if ('failed' in line) or ('exception: ' in line):
current_state="searchRealFailureOrEnd"
current_failureLine=line
print(getLoopNumber(current_loopStr))
print(line)
current_numId += 1
#return here since we've found the first error occurrence
return
return
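# State-machine summary (illustrative, derived from the handlers above;
# not part of the original script). Each log line drives one transition:
#
#   searchStart ------------- 'this is loop' ------------------------> searchFailureorEnd
#   searchFailureorEnd ------ 'failed' / 'exception: ' ---------------> searchRealFailureOrEnd
#   searchFailureorEnd ------ '## total=' ----------------------------> searchStart
#   searchRealFailureOrEnd -- 'Error: Installation failed at loop' ---> searchStart (row logged as TRUE)
#   searchRealFailureOrEnd -- '## total=' ----------------------------> searchStart (false failure, not logged)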
def process_one_file(file_path):
with open(file_path) as f:
for line in f:
processtheline(line, file_path)
return
def process_directory(dir):
all_files = []
for path, subdirs, files in os.walk(dir):
for name in files:
if not name.endswith('.log'):
continue
cur_file = os.path.join(path, name)
all_files.append(cur_file)
for f in all_files:
print("process file {}".format(f))
process_one_file(f)
return
def main():
global wb
global reboottimestatistic
options = parse_arguments()
print (options)
if options.input_path:
process_one_file(options.input_path)
filename_prefix = options.input_path
else:
path,folder_name = os.path.split(options.input_directory)
filename_prefix = os.path.join(options.input_directory, folder_name + "_summary")
process_directory(options.input_directory)
# with open(options.input_path) as f:
# for line in f:
# wb.save("sample.xlsx")
wb.save(filename_prefix+".xlsx")
# reboottimestatistic.getFinalResult(options.input_path)
return
main()
def test():
# wb = Workbook()
# ws = wb.active
# ws.append(["NO", "Loop", "Failure details"])
# ws.append([4, 5, 6])
# wb.save("sample.xlsx")
# print("ok")
global current_sessionid
line = r'I, [2016-01-21T20:34:02.605865 #9160] INFO -- ProductionScripts::Prc::Common::Session: created token session 947409-589761-207319'
markSessionId(line)
print(current_sessionid)
return
# test()
|
HyperBaton/ansible
|
refs/heads/devel
|
lib/ansible/modules/windows/win_chocolatey_facts.py
|
18
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Simon Baerlocher <s.baerlocher@sbaerlocher.ch>
# Copyright: (c) 2018, ITIGO AG <opensource@itigo.ch>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_chocolatey_facts
version_added: '2.8'
short_description: Create a facts collection for Chocolatey
description:
- This module shows information from Chocolatey, such as installed packages, configuration, features and sources.
notes:
- Chocolatey must be installed beforehand, use M(win_chocolatey) to do this.
seealso:
- module: win_chocolatey
- module: win_chocolatey_config
- module: win_chocolatey_feature
- module: win_chocolatey_source
author:
- Simon Bärlocher (@sbaerlocher)
- ITIGO AG (@itigoag)
'''
EXAMPLES = r'''
- name: Gather facts from chocolatey
win_chocolatey_facts:
- name: Displays the Configuration
debug:
var: ansible_chocolatey.config
- name: Displays the Feature
debug:
var: ansible_chocolatey.feature
- name: Displays the Sources
debug:
var: ansible_chocolatey.sources
- name: Displays the Packages
debug:
var: ansible_chocolatey.packages
'''
RETURN = r'''
ansible_facts:
description: Detailed information about the Chocolatey installation
returned: always
type: complex
contains:
ansible_chocolatey:
description: Detailed information about the Chocolatey installation
returned: always
type: complex
contains:
config:
description: Detailed information about the stored configurations
returned: always
type: dict
sample:
commandExecutionTimeoutSeconds: 2700
containsLegacyPackageInstalls: true
feature:
description: Detailed information about enabled and disabled features
returned: always
type: dict
sample:
allowEmptyChecksums: false
autoUninstaller: true
failOnAutoUninstaller: false
sources:
description: List of Chocolatey sources
returned: always
type: complex
contains:
admin_only:
description: Is the source visible to Administrators only
returned: always
type: bool
sample: false
allow_self_service:
description: Is the source allowed to be used with self-service
returned: always
type: bool
sample: false
bypass_proxy:
description: Can the source explicitly bypass configured proxies
returned: always
type: bool
sample: true
certificate:
description: Path to a PFX certificate for X509 authenticated feeds
returned: always
type: str
sample: C:\chocolatey\cert.pfx
disabled:
description: Is the source disabled
returned: always
type: bool
sample: false
name:
description: Name of the source
returned: always
type: str
sample: chocolatey
priority:
description: The priority order of this source, lower is better, 0 is no priority
returned: always
type: int
sample: 0
source:
description: The source, can be a folder/file or a URL
returned: always
type: str
sample: https://chocolatey.org/api/v2/
source_username:
description: Username used to access authenticated feeds
returned: always
type: str
sample: username
packages:
description: List of installed Packages
returned: always
type: complex
contains:
package:
description: Name of the package
returned: always
type: str
sample: vscode
version:
description: Version of the package
returned: always
type: str
sample: '1.27.2'
'''
|
rikadederika/pychess
|
refs/heads/master
|
lib/pychess/Players/ICPlayer.py
|
20
|
from collections import defaultdict
from pychess.compat import Queue
from pychess.Players.Player import Player, PlayerIsDead, TurnInterrupt
from pychess.Utils.Move import parseSAN, toAN
from pychess.Utils.lutils.lmove import ParsingError
from pychess.Utils.Offer import Offer
from pychess.Utils.const import *
from pychess.System.Log import log
class ICPlayer (Player):
__type__ = REMOTE
def __init__ (self, gamemodel, ichandle, gameno, color, name, icrating=None):
Player.__init__(self)
self.offers = {}
self.queue = Queue()
self.okqueue = Queue()
self.setName(name)
self.ichandle = ichandle
self.icrating = icrating
self.color = color
self.gameno = gameno
self.gamemodel = gamemodel
# If at some later time FICS creates another game with the same wplayer, bplayer and gameno,
# this will change to False and boardUpdate messages will be ignored
self.current = True
self.connection = connection = self.gamemodel.connection
self.connections = connections = defaultdict(list)
connections[connection.bm].append(connection.bm.connect_after("boardUpdate", self.__boardUpdate))
connections[connection.bm].append(connection.bm.connect_after("playGameCreated", self.__playGameCreated))
connections[connection.bm].append(connection.bm.connect_after("obsGameCreated", self.__obsGameCreated))
connections[connection.om].append(connection.om.connect("onOfferAdd", self.__onOfferAdd))
connections[connection.om].append(connection.om.connect("onOfferRemove", self.__onOfferRemove))
connections[connection.om].append(connection.om.connect("onOfferDeclined", self.__onOfferDeclined))
connections[connection.cm].append(connection.cm.connect("privateMessage", self.__onPrivateMessage))
def getICHandle (self):
return self.name
@property
def time (self):
return self.gamemodel.timemodel.getPlayerTime(self.color)
#===========================================================================
# Handle signals from the connection
#===========================================================================
def __playGameCreated (self, bm, ficsgame):
if self.gamemodel.ficsplayers[0] == ficsgame.wplayer and \
self.gamemodel.ficsplayers[1] == ficsgame.bplayer and \
self.gameno == ficsgame.gameno:
log.debug("ICPlayer.__playGameCreated: gameno reappeared: gameno=%s white=%s black=%s" % \
(ficsgame.gameno, ficsgame.wplayer.name, ficsgame.bplayer.name))
self.current = False
def __obsGameCreated (self, bm, ficsgame):
if self.gamemodel.ficsplayers[0] == ficsgame.wplayer and \
self.gamemodel.ficsplayers[1] == ficsgame.bplayer and \
self.gameno == ficsgame.gameno:
log.debug("ICPlayer.__obsGameCreated: gameno reappeared: gameno=%s white=%s black=%s" % \
(ficsgame.gameno, ficsgame.wplayer.name, ficsgame.bplayer.name))
self.current = False
def __onOfferAdd (self, om, offer):
if self.gamemodel.status in UNFINISHED_STATES and not self.gamemodel.isObservationGame():
log.debug("ICPlayer.__onOfferAdd: emitting offer: self.gameno=%s self.name=%s %s" % \
(self.gameno, self.name, offer))
self.offers[offer.index] = offer
self.emit ("offer", offer)
def __onOfferDeclined (self, om, offer):
for offer_ in self.gamemodel.offers.keys():
if offer.type == offer_.type:
offer.param = offer_.param
log.debug("ICPlayer.__onOfferDeclined: emitting decline for %s" % offer)
self.emit("decline", offer)
def __onOfferRemove (self, om, offer):
if offer.index in self.offers:
log.debug("ICPlayer.__onOfferRemove: emitting withdraw: self.gameno=%s self.name=%s %s" % \
(self.gameno, self.name, offer))
self.emit ("withdraw", self.offers[offer.index])
del self.offers[offer.index]
def __onPrivateMessage (self, cm, name, title, isadmin, text):
if name == self.ichandle:
self.emit("offer", Offer(CHAT_ACTION, param=text))
def __boardUpdate (self, bm, gameno, ply, curcol, lastmove, fen, wname, bname, wms, bms):
log.debug("ICPlayer.__boardUpdate: id(self)=%d self=%s %s %s %s %d %d %s %s %d %d" % \
(id(self), self, gameno, wname, bname, ply, curcol, lastmove, fen, wms, bms))
if gameno == self.gameno and len(self.gamemodel.players) >= 2 \
and wname == self.gamemodel.players[0].ichandle \
and bname == self.gamemodel.players[1].ichandle \
and self.current:
log.debug("ICPlayer.__boardUpdate: id=%d self=%s gameno=%s: this is my move" % \
(id(self), self, gameno))
# In some cases (like lost on time) the last move is resent
if ply <= self.gamemodel.ply:
return
if 1-curcol == self.color:
log.debug("ICPlayer.__boardUpdate: id=%d self=%s ply=%d: putting move=%s in queue" % \
(id(self), self, ply, lastmove))
self.queue.put((ply, lastmove))
# Ensure the fics thread doesn't continue parsing before the
# game/player thread has received the move.
# Specifically this ensures that we aren't killed due to end of
# game before our last move is received
self.okqueue.get(block=True)
#===========================================================================
# Ending the game
#===========================================================================
def __disconnect (self):
if self.connections is None: return
for obj in self.connections:
for handler_id in self.connections[obj]:
if obj.handler_is_connected(handler_id):
obj.disconnect(handler_id)
self.connections = None
def end (self, status, reason):
self.__disconnect()
self.queue.put("del")
def kill (self, reason):
self.__disconnect()
self.queue.put("del")
#===========================================================================
# Send the player move updates
#===========================================================================
def makeMove (self, board1, move, board2):
log.debug("ICPlayer.makemove: id(self)=%d self=%s move=%s board1=%s board2=%s" % \
(id(self), self, move, board1, board2))
if board2 and not self.gamemodel.isObservationGame():
# TODO: Will this work if we just always use CASTLE_SAN?
cn = CASTLE_KK
if board2.variant == FISCHERRANDOMCHESS:
cn = CASTLE_SAN
self.connection.bm.sendMove (toAN (board2, move, castleNotation=cn))
item = self.queue.get(block=True)
try:
if item == "del":
raise PlayerIsDead
if item == "int":
raise TurnInterrupt
ply, sanmove = item
if ply < board1.ply:
# This should only happen in an observed game
board1 = self.gamemodel.getBoardAtPly(max(ply-1, 0))
log.debug("ICPlayer.makemove: id(self)=%d self=%s from queue got: ply=%d sanmove=%s" % \
(id(self), self, ply, sanmove))
try:
move = parseSAN (board1, sanmove)
log.debug("ICPlayer.makemove: id(self)=%d self=%s parsed move=%s" % \
(id(self), self, move))
except ParsingError:
raise
return move
finally:
log.debug("ICPlayer.makemove: id(self)=%d self=%s returning move=%s" % \
(id(self), self, move))
self.okqueue.put("ok")
#===========================================================================
# Interacting with the player
#===========================================================================
def pause (self):
pass
def resume (self):
pass
def setBoard (self, fen):
# setBoard will currently only be called for ServerPlayer when starting
# to observe some game. In this case FICS already knows how the board
# should look, and we don't need to set anything
pass
def playerUndoMoves (self, movecount, gamemodel):
log.debug("ICPlayer.playerUndoMoves: id(self)=%d self=%s, undoing movecount=%d" % \
(id(self), self, movecount))
# If the current player has changed so that it is no longer our turn to move,
# we raise TurnInterrupt in order to let GameModel continue the game
if movecount % 2 == 1 and gamemodel.curplayer != self:
self.queue.put("int")
def putMessage (self, text):
self.connection.cm.tellPlayer (self.name, text)
#===========================================================================
# Offer handling
#===========================================================================
def offerRematch (self):
if self.gamemodel.timed:
min = int(self.gamemodel.timemodel.intervals[0][0])/60
inc = self.gamemodel.timemodel.gain
else:
min = 0
inc = 0
self.connection.om.challenge(self.ichandle,
self.gamemodel.ficsgame.game_type, min, inc,
self.gamemodel.ficsgame.rated)
def offer (self, offer):
log.debug("ICPlayer.offer: self=%s %s" % (repr(self), offer))
if offer.type == TAKEBACK_OFFER:
# only 1 outstanding takeback offer allowed on FICS, so remove any of ours
indexes = self.offers.keys()
for index in indexes:
if self.offers[index].type == TAKEBACK_OFFER:
log.debug("ICPlayer.offer: del self.offers[%s] %s" % (index, offer))
del self.offers[index]
self.connection.om.offer(offer, self.gamemodel.ply)
def offerDeclined (self, offer):
log.debug("ICPlayer.offerDeclined: sending decline for %s" % offer)
self.connection.om.decline(offer)
def offerWithdrawn (self, offer):
pass
def offerError (self, offer, error):
pass
def observe (self):
self.connection.client.run_command("observe %s" % self.ichandle)
|
AndroidOpenDevelopment/android_external_chromium_org
|
refs/heads/lp
|
build/android/pylib/device/device_list.py
|
114
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to keep track of devices across builds."""
import os
LAST_DEVICES_FILENAME = '.last_devices'
LAST_MISSING_DEVICES_FILENAME = '.last_missing'
def GetPersistentDeviceList(file_name):
"""Returns a list of devices.
Args:
file_name: the file name containing a list of devices.
Returns: List of device serial numbers that were on the bot.
"""
with open(file_name) as f:
return f.read().splitlines()
def WritePersistentDeviceList(file_name, device_list):
path = os.path.dirname(file_name)
if not os.path.exists(path):
os.makedirs(path)
with open(file_name, 'w') as f:
f.write('\n'.join(set(device_list)))
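# Illustrative round-trip (a sketch, not part of the original module;
# the path and serial numbers below are made up):
#
#   WritePersistentDeviceList('/tmp/bot/.last_devices',
#                             ['0123456789abcdef', 'fedcba9876543210'])
#   serials = GetPersistentDeviceList('/tmp/bot/.last_devices')
#   # Note: order is not preserved, since the writer de-duplicates via set().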
|
hchen1202/django-react
|
refs/heads/master
|
virtualenv/lib/python3.6/site-packages/django/db/models/functions/base.py
|
75
|
"""
Classes that represent database functions.
"""
from django.db.models import Func, Transform, Value, fields
class Cast(Func):
"""
Coerce an expression to a new field type.
"""
function = 'CAST'
template = '%(function)s(%(expressions)s AS %(db_type)s)'
mysql_types = {
fields.CharField: 'char',
fields.IntegerField: 'signed integer',
fields.FloatField: 'signed',
}
def __init__(self, expression, output_field):
super(Cast, self).__init__(expression, output_field=output_field)
def as_sql(self, compiler, connection, **extra_context):
if 'db_type' not in extra_context:
extra_context['db_type'] = self._output_field.db_type(connection)
return super(Cast, self).as_sql(compiler, connection, **extra_context)
def as_mysql(self, compiler, connection):
extra_context = {}
output_field_class = type(self._output_field)
if output_field_class in self.mysql_types:
extra_context['db_type'] = self.mysql_types[output_field_class]
return self.as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection):
# CAST would be valid too, but the :: shortcut syntax is more readable.
return self.as_sql(compiler, connection, template='%(expressions)s::%(db_type)s')
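# Illustrative usage of Cast (a sketch, not part of this module; the
# `Author` model and its `age` field are hypothetical):
#
#   from django.db.models import fields
#   from django.db.models.functions import Cast
#   Author.objects.annotate(age_as_float=Cast('age', fields.FloatField()))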
class Coalesce(Func):
"""
Chooses, from left to right, the first non-null expression and returns it.
"""
function = 'COALESCE'
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Coalesce must take at least two expressions')
super(Coalesce, self).__init__(*expressions, **extra)
def as_oracle(self, compiler, connection):
# we can't mix TextField (NCLOB) and CharField (NVARCHAR), so convert
# all fields to NCLOB when we expect NCLOB
if self.output_field.get_internal_type() == 'TextField':
class ToNCLOB(Func):
function = 'TO_NCLOB'
expressions = [
ToNCLOB(expression) for expression in self.get_source_expressions()]
clone = self.copy()
clone.set_source_expressions(expressions)
return super(Coalesce, clone).as_sql(compiler, connection)
return self.as_sql(compiler, connection)
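# Illustrative usage of Coalesce (a sketch, not part of this module;
# the `Author` model and its fields are hypothetical):
#
#   Author.objects.annotate(display=Coalesce('nickname', 'name', Value('anonymous')))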
class ConcatPair(Func):
"""
A helper class that concatenates two arguments together. This is used
by `Concat` because not all backend databases support more than two
arguments.
"""
function = 'CONCAT'
def __init__(self, left, right, **extra):
super(ConcatPair, self).__init__(left, right, **extra)
def as_sqlite(self, compiler, connection):
coalesced = self.coalesce()
return super(ConcatPair, coalesced).as_sql(
compiler, connection, template='%(expressions)s', arg_joiner=' || '
)
def as_mysql(self, compiler, connection):
# Use CONCAT_WS with an empty separator so that NULLs are ignored.
return super(ConcatPair, self).as_sql(
compiler, connection, function='CONCAT_WS', template="%(function)s('', %(expressions)s)"
)
def coalesce(self):
# null on either side results in null for expression, wrap with coalesce
c = self.copy()
expressions = [
Coalesce(expression, Value('')) for expression in c.get_source_expressions()
]
c.set_source_expressions(expressions)
return c
class Concat(Func):
"""
Concatenates text fields together. Backends that result in an entire
null expression when any arguments are null will wrap each argument in
coalesce functions to ensure we always get a non-null result.
"""
function = None
template = "%(expressions)s"
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Concat must take at least two expressions')
paired = self._paired(expressions)
super(Concat, self).__init__(paired, **extra)
def _paired(self, expressions):
# wrap pairs of expressions in successive concat functions
# exp = [a, b, c, d]
# -> ConcatPair(a, ConcatPair(b, ConcatPair(c, d)))
if len(expressions) == 2:
return ConcatPair(*expressions)
return ConcatPair(expressions[0], self._paired(expressions[1:]))
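# Illustrative usage of Concat (a sketch, not part of this module;
# the `Author` model and its fields are hypothetical):
#
#   Author.objects.annotate(label=Concat('name', Value(' <'), 'email', Value('>')))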
class Greatest(Func):
"""
Chooses the maximum expression and returns it.
If any expression is null the return value is database-specific:
On Postgres, the maximum not-null expression is returned.
On MySQL, Oracle, and SQLite, if any expression is null, null is returned.
"""
function = 'GREATEST'
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Greatest must take at least two expressions')
super(Greatest, self).__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection):
"""Use the MAX function on SQLite."""
return super(Greatest, self).as_sql(compiler, connection, function='MAX')
class Least(Func):
"""
Chooses the minimum expression and returns it.
If any expression is null the return value is database-specific:
On Postgres, the minimum not-null expression is returned.
On MySQL, Oracle, and SQLite, if any expression is null, null is returned.
"""
function = 'LEAST'
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError('Least must take at least two expressions')
super(Least, self).__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection):
"""Use the MIN function on SQLite."""
return super(Least, self).as_sql(compiler, connection, function='MIN')
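# Illustrative usage of Greatest/Least (a sketch, not part of this
# module; the `Comment` model and its fields are hypothetical):
#
#   Comment.objects.annotate(last_touched=Greatest('created', 'modified'))
#   Comment.objects.annotate(first_touched=Least('created', 'modified'))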
class Length(Transform):
"""Returns the number of characters in the expression"""
function = 'LENGTH'
lookup_name = 'length'
def __init__(self, expression, **extra):
output_field = extra.pop('output_field', fields.IntegerField())
super(Length, self).__init__(expression, output_field=output_field, **extra)
def as_mysql(self, compiler, connection):
return super(Length, self).as_sql(compiler, connection, function='CHAR_LENGTH')
class Lower(Transform):
function = 'LOWER'
lookup_name = 'lower'
class Now(Func):
template = 'CURRENT_TIMESTAMP'
def __init__(self, output_field=None, **extra):
if output_field is None:
output_field = fields.DateTimeField()
super(Now, self).__init__(output_field=output_field, **extra)
def as_postgresql(self, compiler, connection):
# Postgres' CURRENT_TIMESTAMP means "the time at the start of the
# transaction". We use STATEMENT_TIMESTAMP to be cross-compatible with
# other databases.
return self.as_sql(compiler, connection, template='STATEMENT_TIMESTAMP()')
class Substr(Func):
function = 'SUBSTRING'
def __init__(self, expression, pos, length=None, **extra):
"""
expression: the name of a field, or an expression returning a string
pos: an integer > 0, or an expression returning an integer
length: an optional number of characters to return
"""
if not hasattr(pos, 'resolve_expression'):
if pos < 1:
raise ValueError("'pos' must be greater than 0")
pos = Value(pos)
expressions = [expression, pos]
if length is not None:
if not hasattr(length, 'resolve_expression'):
length = Value(length)
expressions.append(length)
super(Substr, self).__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection):
return super(Substr, self).as_sql(compiler, connection, function='SUBSTR')
def as_oracle(self, compiler, connection):
return super(Substr, self).as_sql(compiler, connection, function='SUBSTR')
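# Illustrative usage of Substr (a sketch, not part of this module; the
# `Author` model and its `name` field are hypothetical; positions are
# 1-based, matching SQL SUBSTRING):
#
#   Author.objects.annotate(initial=Substr('name', 1, 1))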
class Upper(Transform):
function = 'UPPER'
lookup_name = 'upper'
|
sipak/MaidSafe
|
refs/heads/master
|
src/third_party_libs/googlemock/gtest/test/gtest_shuffle_test.py
|
3023
|
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
A list of test cases from 'tests', in their original order.
Duplicates are removed.
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
if not test_case in test_cases:
test_cases.append(test_case)
return test_cases
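# Illustrative example (not part of the original file): the membership
# check above removes every repeat, not just consecutive ones, so
#
#   GetTestCases(['A.x', 'A.y', 'B.z', 'A.w'])  ==>  ['A', 'B']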
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
self.assertEqual(1, test_cases.count(test_case),
'Test case %s is not grouped together in %s' %
(test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
|
Rewardcoin/p2pool
|
refs/heads/master
|
wstools/WSDLTools.py
|
292
|
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
ident = "$Id$"
import weakref
from cStringIO import StringIO
from Namespaces import OASIS, XMLNS, WSA, WSA_LIST, WSAW_LIST, WSRF_V1_2, WSRF
from Utility import Collection, CollectionNS, DOM, ElementProxy, basejoin
from XMLSchema import XMLSchema, SchemaReader, WSDLToolsAdapter
class WSDLReader:
"""A WSDLReader creates WSDL instances from urls and xml data."""
# Custom subclasses of WSDLReader may wish to implement a caching
# strategy or other optimizations. Because application needs vary
# so widely, we don't try to provide any caching by default.
def loadFromStream(self, stream, name=None):
"""Return a WSDL instance loaded from a stream object."""
document = DOM.loadDocument(stream)
wsdl = WSDL()
if name:
wsdl.location = name
elif hasattr(stream, 'name'):
wsdl.location = stream.name
wsdl.load(document)
return wsdl
def loadFromURL(self, url):
"""Return a WSDL instance loaded from the given url."""
document = DOM.loadFromURL(url)
wsdl = WSDL()
wsdl.location = url
wsdl.load(document)
return wsdl
def loadFromString(self, data):
"""Return a WSDL instance loaded from an xml string."""
return self.loadFromStream(StringIO(data))
def loadFromFile(self, filename):
"""Return a WSDL instance loaded from the given file."""
file = open(filename, 'rb')
try:
wsdl = self.loadFromStream(file)
finally:
file.close()
return wsdl
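# Illustrative usage (a sketch, not part of the original module; the
# URL below is hypothetical):
#
#   reader = WSDLReader()
#   wsdl = reader.loadFromURL('http://example.org/stockquote?wsdl')
#   for service in wsdl.services:
#       print service.name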
class WSDL:
"""A WSDL object models a WSDL service description. WSDL objects
may be created manually or loaded from an xml representation
using a WSDLReader instance."""
def __init__(self, targetNamespace=None, strict=1):
self.targetNamespace = targetNamespace or 'urn:this-document.wsdl'
self.documentation = ''
self.location = None
self.document = None
self.name = None
self.services = CollectionNS(self)
self.messages = CollectionNS(self)
self.portTypes = CollectionNS(self)
self.bindings = CollectionNS(self)
self.imports = Collection(self)
self.types = Types(self)
self.extensions = []
self.strict = strict
def __del__(self):
if self.document is not None:
self.document.unlink()
version = '1.1'
def addService(self, name, documentation='', targetNamespace=None):
if self.services.has_key(name):
raise WSDLError(
'Duplicate service element: %s' % name
)
item = Service(name, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.services[name] = item
return item
def addMessage(self, name, documentation='', targetNamespace=None):
if self.messages.has_key(name):
raise WSDLError(
'Duplicate message element: %s.' % name
)
item = Message(name, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.messages[name] = item
return item
def addPortType(self, name, documentation='', targetNamespace=None):
if self.portTypes.has_key(name):
raise WSDLError(
'Duplicate portType element: %s' % name
)
item = PortType(name, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.portTypes[name] = item
return item
def addBinding(self, name, type, documentation='', targetNamespace=None):
if self.bindings.has_key(name):
raise WSDLError(
'Duplicate binding element: %s' % name
)
item = Binding(name, type, documentation)
if targetNamespace:
item.targetNamespace = targetNamespace
self.bindings[name] = item
return item
def addImport(self, namespace, location):
item = ImportElement(namespace, location)
self.imports[namespace] = item
return item
def toDom(self):
""" Generate a DOM representation of the WSDL instance.
This does not generate XML Schema; thus the targetNamespace
of any XML Schema elements or types used by WSDL message parts
must be specified via import information items.
"""
namespaceURI = DOM.GetWSDLUri(self.version)
self.document = DOM.createDocument(namespaceURI ,'wsdl:definitions')
# Set up a couple prefixes for easy reading.
child = DOM.getElement(self.document, None)
child.setAttributeNS(None, 'targetNamespace', self.targetNamespace)
child.setAttributeNS(XMLNS.BASE, 'xmlns:wsdl', namespaceURI)
child.setAttributeNS(XMLNS.BASE, 'xmlns:xsd', 'http://www.w3.org/1999/XMLSchema')
child.setAttributeNS(XMLNS.BASE, 'xmlns:soap', 'http://schemas.xmlsoap.org/wsdl/soap/')
child.setAttributeNS(XMLNS.BASE, 'xmlns:tns', self.targetNamespace)
if self.name:
child.setAttributeNS(None, 'name', self.name)
# wsdl:import
for item in self.imports:
item.toDom()
# wsdl:message
for item in self.messages:
item.toDom()
# wsdl:portType
for item in self.portTypes:
item.toDom()
# wsdl:binding
for item in self.bindings:
item.toDom()
# wsdl:service
for item in self.services:
item.toDom()
def load(self, document):
# We save a reference to the DOM document to ensure that elements
# saved as "extensions" will continue to have a meaningful context
# for things like namespace references. The lifetime of the DOM
# document is bound to the lifetime of the WSDL instance.
self.document = document
definitions = DOM.getElement(document, 'definitions', None, None)
if definitions is None:
raise WSDLError(
'Missing <definitions> element.'
)
self.version = DOM.WSDLUriToVersion(definitions.namespaceURI)
NS_WSDL = DOM.GetWSDLUri(self.version)
self.targetNamespace = DOM.getAttr(definitions, 'targetNamespace',
None, None)
self.name = DOM.getAttr(definitions, 'name', None, None)
self.documentation = GetDocumentation(definitions)
#
# Retrieve all <wsdl:import>s and append the children of each imported
# document to the main document. The first iteration grabs the
# original <wsdl:import>s from the document, the second grabs any
# <wsdl:import>s introduced by those imports, and so on; break out
# when no more <wsdl:import>s remain.
#
imported = []
base_location = self.location
do_it = True
while do_it:
do_it = False
for element in DOM.getElements(definitions, 'import', NS_WSDL):
location = DOM.getAttr(element, 'location')
if base_location is not None:
location = basejoin(base_location, location)
if location not in imported:
do_it = True
self._import(document, element, base_location)
imported.append(location)
else:
definitions.removeChild(element)
base_location = None
#
# No more <wsdl:import>'s, now load up all other
# WSDL information items.
#
for element in DOM.getElements(definitions, None, None):
targetNamespace = DOM.getAttr(element, 'targetNamespace')
localName = element.localName
if not DOM.nsUriMatch(element.namespaceURI, NS_WSDL):
if localName == 'schema':
tns = DOM.getAttr(element, 'targetNamespace')
reader = SchemaReader(base_url=self.imports[tns].location)
schema = reader.loadFromNode(WSDLToolsAdapter(self),
element)
# schema.setBaseUrl(self.location)
self.types.addSchema(schema)
else:
self.extensions.append(element)
continue
elif localName == 'message':
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
message = self.addMessage(name, docs, targetNamespace)
parts = DOM.getElements(element, 'part', NS_WSDL)
message.load(parts)
continue
elif localName == 'portType':
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
ptype = self.addPortType(name, docs, targetNamespace)
#operations = DOM.getElements(element, 'operation', NS_WSDL)
#ptype.load(operations)
ptype.load(element)
continue
elif localName == 'binding':
name = DOM.getAttr(element, 'name')
type = DOM.getAttr(element, 'type', default=None)
if type is None:
raise WSDLError(
'Missing type attribute for binding %s.' % name
)
type = ParseQName(type, element)
docs = GetDocumentation(element)
binding = self.addBinding(name, type, docs, targetNamespace)
operations = DOM.getElements(element, 'operation', NS_WSDL)
binding.load(operations)
binding.load_ex(GetExtensions(element))
continue
elif localName == 'service':
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
service = self.addService(name, docs, targetNamespace)
ports = DOM.getElements(element, 'port', NS_WSDL)
service.load(ports)
service.load_ex(GetExtensions(element))
continue
elif localName == 'types':
self.types.documentation = GetDocumentation(element)
base_location = DOM.getAttr(element, 'base-location')
if base_location:
element.removeAttribute('base-location')
base_location = base_location or self.location
reader = SchemaReader(base_url=base_location)
for item in DOM.getElements(element, None, None):
if item.localName == 'schema':
schema = reader.loadFromNode(WSDLToolsAdapter(self), item)
# XXX <types> could have been imported
#schema.setBaseUrl(self.location)
schema.setBaseUrl(base_location)
self.types.addSchema(schema)
else:
self.types.addExtension(item)
# XXX remove the attribute
# element.removeAttribute('base-location')
continue
def _import(self, document, element, base_location=None):
'''Take the <import> element's children, clone them,
and add them to the main document. Support for relative
locations is a bit complicated: the original document context
is lost, so we need to store the base location in the DOM elements
representing <types>, by creating a special temporary
"base-location" attribute, and in <import>, by resolving
the relative "location" and storing it as "location".
document -- document we are loading
element -- DOM Element representing <import>
base_location -- location of document from which this
<import> was gleaned.
'''
namespace = DOM.getAttr(element, 'namespace', default=None)
location = DOM.getAttr(element, 'location', default=None)
if namespace is None or location is None:
raise WSDLError(
'Invalid import element (missing namespace or location).'
)
if base_location:
location = basejoin(base_location, location)
element.setAttributeNS(None, 'location', location)
obimport = self.addImport(namespace, location)
obimport._loaded = 1
importdoc = DOM.loadFromURL(location)
try:
if location.find('#') > -1:
idref = location.split('#')[-1]
imported = DOM.getElementById(importdoc, idref)
else:
imported = importdoc.documentElement
if imported is None:
raise WSDLError(
'Import target element not found for: %s' % location
)
imported_tns = DOM.findTargetNS(imported)
if imported_tns != namespace:
return
if imported.localName == 'definitions':
imported_nodes = imported.childNodes
else:
imported_nodes = [imported]
parent = element.parentNode
parent.removeChild(element)
for node in imported_nodes:
if node.nodeType != node.ELEMENT_NODE:
continue
child = DOM.importNode(document, node, 1)
parent.appendChild(child)
child.setAttribute('targetNamespace', namespace)
attrsNS = imported._attrsNS
for attrkey in attrsNS.keys():
if attrkey[0] == DOM.NS_XMLNS:
attr = attrsNS[attrkey].cloneNode(1)
child.setAttributeNode(attr)
#XXX Quick Hack, should be in WSDL Namespace.
if child.localName == 'import':
rlocation = child.getAttributeNS(None, 'location')
alocation = basejoin(location, rlocation)
child.setAttribute('location', alocation)
elif child.localName == 'types':
child.setAttribute('base-location', location)
finally:
importdoc.unlink()
return location
class Element:
"""A class that provides common functions for WSDL element classes."""
def __init__(self, name=None, documentation=''):
self.name = name
self.documentation = documentation
self.extensions = []
def addExtension(self, item):
item.parent = weakref.ref(self)
self.extensions.append(item)
def getWSDL(self):
"""Return the WSDL object that contains this information item."""
parent = self
while 1:
# skip any collections
if isinstance(parent, WSDL):
return parent
try: parent = parent.parent()
except: break
return None
class ImportElement(Element):
def __init__(self, namespace, location):
self.namespace = namespace
self.location = location
# def getWSDL(self):
# """Return the WSDL object that contains this Message Part."""
# return self.parent().parent()
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'import')
epc.setAttributeNS(None, 'namespace', self.namespace)
epc.setAttributeNS(None, 'location', self.location)
_loaded = None
class Types(Collection):
default = lambda self,k: k.targetNamespace
def __init__(self, parent):
Collection.__init__(self, parent)
self.documentation = ''
self.extensions = []
def addSchema(self, schema):
name = schema.targetNamespace
self[name] = schema
return schema
def addExtension(self, item):
self.extensions.append(item)
class Message(Element):
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.parts = Collection(self)
def addPart(self, name, type=None, element=None):
if self.parts.has_key(name):
raise WSDLError(
'Duplicate message part element: %s' % name
)
if type is None and element is None:
raise WSDLError(
'Missing type or element attribute for part: %s' % name
)
item = MessagePart(name)
item.element = element
item.type = type
self.parts[name] = item
return item
def load(self, elements):
for element in elements:
name = DOM.getAttr(element, 'name')
part = MessagePart(name)
self.parts[name] = part
elemref = DOM.getAttr(element, 'element', default=None)
typeref = DOM.getAttr(element, 'type', default=None)
if typeref is None and elemref is None:
raise WSDLError(
'No type or element attribute for part: %s' % name
)
if typeref is not None:
part.type = ParseTypeRef(typeref, element)
if elemref is not None:
part.element = ParseTypeRef(elemref, element)
# def getElementDeclaration(self):
# """Return the XMLSchema.ElementDeclaration instance or None"""
# element = None
# if self.element:
# nsuri,name = self.element
# wsdl = self.getWSDL()
# if wsdl.types.has_key(nsuri) and wsdl.types[nsuri].elements.has_key(name):
# element = wsdl.types[nsuri].elements[name]
# return element
#
# def getTypeDefinition(self):
# """Return the XMLSchema.TypeDefinition instance or None"""
# type = None
# if self.type:
# nsuri,name = self.type
# wsdl = self.getWSDL()
# if wsdl.types.has_key(nsuri) and wsdl.types[nsuri].types.has_key(name):
# type = wsdl.types[nsuri].types[name]
# return type
# def getWSDL(self):
# """Return the WSDL object that contains this Message Part."""
# return self.parent().parent()
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'message')
epc.setAttributeNS(None, 'name', self.name)
for part in self.parts:
part.toDom(epc._getNode())
class MessagePart(Element):
def __init__(self, name):
Element.__init__(self, name, '')
self.element = None
self.type = None
# def getWSDL(self):
# """Return the WSDL object that contains this Message Part."""
# return self.parent().parent().parent().parent()
def getTypeDefinition(self):
wsdl = self.getWSDL()
nsuri,name = self.type
schema = wsdl.types.get(nsuri, {})
return schema.get(name)
def getElementDeclaration(self):
wsdl = self.getWSDL()
nsuri,name = self.element
schema = wsdl.types.get(nsuri, {})
return schema.get(name)
def toDom(self, node):
"""node -- node representing message"""
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'part')
epc.setAttributeNS(None, 'name', self.name)
if self.element is not None:
ns,name = self.element
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'element', '%s:%s'%(prefix,name))
elif self.type is not None:
ns,name = self.type
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'type', '%s:%s'%(prefix,name))
class PortType(Element):
'''PortType has an anyAttribute, and thus must provide an extensible
mechanism for supporting such attributes. ResourceProperties is
specified in WS-ResourceProperties. wsa:Action is specified in
WS-Addressing.
Instance Data:
name -- name attribute
resourceProperties -- optional wsr:ResourceProperties attribute;
the value is a QName that is parsed into a (namespaceURI, name)
tuple representing a Global Element Declaration.
operations
'''
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.operations = Collection(self)
self.resourceProperties = None
# def getWSDL(self):
# return self.parent().parent()
def getTargetNamespace(self):
return self.targetNamespace or self.getWSDL().targetNamespace
def getResourceProperties(self):
return self.resourceProperties
def addOperation(self, name, documentation='', parameterOrder=None):
item = Operation(name, documentation, parameterOrder)
self.operations[name] = item
return item
def load(self, element):
self.name = DOM.getAttr(element, 'name')
self.documentation = GetDocumentation(element)
self.targetNamespace = DOM.getAttr(element, 'targetNamespace')
for nsuri in WSRF_V1_2.PROPERTIES.XSD_LIST:
if DOM.hasAttr(element, 'ResourceProperties', nsuri):
rpref = DOM.getAttr(element, 'ResourceProperties', nsuri)
self.resourceProperties = ParseQName(rpref, element)
NS_WSDL = DOM.GetWSDLUri(self.getWSDL().version)
elements = DOM.getElements(element, 'operation', NS_WSDL)
for element in elements:
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
param_order = DOM.getAttr(element, 'parameterOrder', default=None)
if param_order is not None:
param_order = param_order.split(' ')
operation = self.addOperation(name, docs, param_order)
item = DOM.getElement(element, 'input', None, None)
if item is not None:
name = DOM.getAttr(item, 'name')
docs = GetDocumentation(item)
msgref = DOM.getAttr(item, 'message')
message = ParseQName(msgref, item)
for WSA in WSA_LIST + WSAW_LIST:
action = DOM.getAttr(item, 'Action', WSA.ADDRESS, None)
if action: break
operation.setInput(message, name, docs, action)
item = DOM.getElement(element, 'output', None, None)
if item is not None:
name = DOM.getAttr(item, 'name')
docs = GetDocumentation(item)
msgref = DOM.getAttr(item, 'message')
message = ParseQName(msgref, item)
for WSA in WSA_LIST + WSAW_LIST:
action = DOM.getAttr(item, 'Action', WSA.ADDRESS, None)
if action: break
operation.setOutput(message, name, docs, action)
for item in DOM.getElements(element, 'fault', None):
name = DOM.getAttr(item, 'name')
docs = GetDocumentation(item)
msgref = DOM.getAttr(item, 'message')
message = ParseQName(msgref, item)
for WSA in WSA_LIST + WSAW_LIST:
action = DOM.getAttr(item, 'Action', WSA.ADDRESS, None)
if action: break
operation.addFault(message, name, docs, action)
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'portType')
epc.setAttributeNS(None, 'name', self.name)
if self.resourceProperties:
ns,name = self.resourceProperties
prefix = epc.getPrefix(ns)
epc.setAttributeNS(WSRF.PROPERTIES.LATEST, 'ResourceProperties',
'%s:%s'%(prefix,name))
for op in self.operations:
op.toDom(epc._getNode())
class Operation(Element):
def __init__(self, name, documentation='', parameterOrder=None):
Element.__init__(self, name, documentation)
self.parameterOrder = parameterOrder
self.faults = Collection(self)
self.input = None
self.output = None
def getWSDL(self):
"""Return the WSDL object that contains this Operation."""
return self.parent().parent().parent().parent()
def getPortType(self):
return self.parent().parent()
def getInputAction(self):
"""wsa:Action attribute"""
return GetWSAActionInput(self)
def getInputMessage(self):
if self.input is None:
return None
wsdl = self.getPortType().getWSDL()
return wsdl.messages[self.input.message]
def getOutputAction(self):
"""wsa:Action attribute"""
return GetWSAActionOutput(self)
def getOutputMessage(self):
if self.output is None:
return None
wsdl = self.getPortType().getWSDL()
return wsdl.messages[self.output.message]
def getFaultAction(self, name):
"""wsa:Action attribute"""
return GetWSAActionFault(self, name)
def getFaultMessage(self, name):
wsdl = self.getPortType().getWSDL()
return wsdl.messages[self.faults[name].message]
def addFault(self, message, name, documentation='', action=None):
if self.faults.has_key(name):
raise WSDLError(
'Duplicate fault element: %s' % name
)
item = MessageRole('fault', message, name, documentation, action)
self.faults[name] = item
return item
def setInput(self, message, name='', documentation='', action=None):
self.input = MessageRole('input', message, name, documentation, action)
self.input.parent = weakref.ref(self)
return self.input
def setOutput(self, message, name='', documentation='', action=None):
self.output = MessageRole('output', message, name, documentation, action)
self.output.parent = weakref.ref(self)
return self.output
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'operation')
epc.setAttributeNS(None, 'name', self.name)
node = epc._getNode()
if self.input:
self.input.toDom(node)
if self.output:
self.output.toDom(node)
for fault in self.faults:
fault.toDom(node)
class MessageRole(Element):
def __init__(self, type, message, name='', documentation='', action=None):
Element.__init__(self, name, documentation)
self.message = message
self.type = type
self.action = action
def getWSDL(self):
"""Return the WSDL object that contains this information item."""
parent = self
while 1:
# skip any collections
if isinstance(parent, WSDL):
return parent
try: parent = parent.parent()
except: break
return None
    def getMessage(self):
        """Return the Message object referenced by the message attribute,
        which is a (namespaceURI, name) tuple.
        """
wsdl = self.getWSDL()
return wsdl.messages[self.message]
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), self.type)
if not isinstance(self.message, basestring) and len(self.message) == 2:
ns,name = self.message
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'message', '%s:%s' %(prefix,name))
else:
epc.setAttributeNS(None, 'message', self.message)
if self.action:
epc.setAttributeNS(WSA.ADDRESS, 'Action', self.action)
if self.name:
epc.setAttributeNS(None, 'name', self.name)
class Binding(Element):
def __init__(self, name, type, documentation=''):
Element.__init__(self, name, documentation)
self.operations = Collection(self)
self.type = type
# def getWSDL(self):
# """Return the WSDL object that contains this binding."""
# return self.parent().parent()
def getPortType(self):
"""Return the PortType object associated with this binding."""
return self.getWSDL().portTypes[self.type]
def findBinding(self, kind):
for item in self.extensions:
if isinstance(item, kind):
return item
return None
def findBindings(self, kind):
return [ item for item in self.extensions if isinstance(item, kind) ]
def addOperationBinding(self, name, documentation=''):
item = OperationBinding(name, documentation)
self.operations[name] = item
return item
def load(self, elements):
for element in elements:
name = DOM.getAttr(element, 'name')
docs = GetDocumentation(element)
opbinding = self.addOperationBinding(name, docs)
opbinding.load_ex(GetExtensions(element))
item = DOM.getElement(element, 'input', None, None)
if item is not None:
#TODO: addInputBinding?
mbinding = MessageRoleBinding('input')
mbinding.documentation = GetDocumentation(item)
opbinding.input = mbinding
mbinding.load_ex(GetExtensions(item))
mbinding.parent = weakref.ref(opbinding)
item = DOM.getElement(element, 'output', None, None)
if item is not None:
mbinding = MessageRoleBinding('output')
mbinding.documentation = GetDocumentation(item)
opbinding.output = mbinding
mbinding.load_ex(GetExtensions(item))
mbinding.parent = weakref.ref(opbinding)
for item in DOM.getElements(element, 'fault', None):
name = DOM.getAttr(item, 'name')
mbinding = MessageRoleBinding('fault', name)
mbinding.documentation = GetDocumentation(item)
opbinding.faults[name] = mbinding
mbinding.load_ex(GetExtensions(item))
mbinding.parent = weakref.ref(opbinding)
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'binding':
transport = DOM.getAttr(e, 'transport', default=None)
style = DOM.getAttr(e, 'style', default='document')
ob = SoapBinding(transport, style)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'binding':
verb = DOM.getAttr(e, 'verb')
ob = HttpBinding(verb)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'binding')
epc.setAttributeNS(None, 'name', self.name)
ns,name = self.type
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, 'type', '%s:%s' %(prefix,name))
node = epc._getNode()
for ext in self.extensions:
ext.toDom(node)
for op_binding in self.operations:
op_binding.toDom(node)
class OperationBinding(Element):
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.input = None
self.output = None
self.faults = Collection(self)
# def getWSDL(self):
# """Return the WSDL object that contains this binding."""
# return self.parent().parent().parent().parent()
def getBinding(self):
"""Return the parent Binding object of the operation binding."""
return self.parent().parent()
def getOperation(self):
"""Return the abstract Operation associated with this binding."""
return self.getBinding().getPortType().operations[self.name]
def findBinding(self, kind):
for item in self.extensions:
if isinstance(item, kind):
return item
return None
def findBindings(self, kind):
return [ item for item in self.extensions if isinstance(item, kind) ]
def addInputBinding(self, binding):
if self.input is None:
self.input = MessageRoleBinding('input')
self.input.parent = weakref.ref(self)
self.input.addExtension(binding)
return binding
def addOutputBinding(self, binding):
if self.output is None:
self.output = MessageRoleBinding('output')
self.output.parent = weakref.ref(self)
self.output.addExtension(binding)
return binding
def addFaultBinding(self, name, binding):
        fault = self.faults.get(name, None)
        if fault is None:
            fault = MessageRoleBinding('fault', name)
            self.faults[name] = fault
        fault.addExtension(binding)
return binding
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'operation':
soapaction = DOM.getAttr(e, 'soapAction', default=None)
style = DOM.getAttr(e, 'style', default=None)
ob = SoapOperationBinding(soapaction, style)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'operation':
location = DOM.getAttr(e, 'location')
ob = HttpOperationBinding(location)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), 'operation')
epc.setAttributeNS(None, 'name', self.name)
node = epc._getNode()
for ext in self.extensions:
ext.toDom(node)
if self.input:
self.input.toDom(node)
if self.output:
self.output.toDom(node)
for fault in self.faults:
fault.toDom(node)
class MessageRoleBinding(Element):
def __init__(self, type, name='', documentation=''):
Element.__init__(self, name, documentation)
self.type = type
def findBinding(self, kind):
for item in self.extensions:
if isinstance(item, kind):
return item
return None
def findBindings(self, kind):
return [ item for item in self.extensions if isinstance(item, kind) ]
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'body':
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
parts = DOM.getAttr(e, 'parts', default=None)
use = DOM.getAttr(e, 'use', default=None)
if use is None:
raise WSDLError(
'Invalid soap:body binding element.'
)
ob = SoapBodyBinding(use, namespace, encstyle, parts)
self.addExtension(ob)
continue
elif ns in DOM.NS_SOAP_BINDING_ALL and name == 'fault':
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
name = DOM.getAttr(e, 'name', default=None)
use = DOM.getAttr(e, 'use', default=None)
if use is None or name is None:
raise WSDLError(
'Invalid soap:fault binding element.'
)
ob = SoapFaultBinding(name, use, namespace, encstyle)
self.addExtension(ob)
continue
elif ns in DOM.NS_SOAP_BINDING_ALL and name in (
'header', 'headerfault'
):
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
message = DOM.getAttr(e, 'message')
part = DOM.getAttr(e, 'part')
use = DOM.getAttr(e, 'use')
if name == 'header':
_class = SoapHeaderBinding
else:
_class = SoapHeaderFaultBinding
message = ParseQName(message, e)
ob = _class(message, part, use, namespace, encstyle)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'urlReplacement':
ob = HttpUrlReplacementBinding()
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'urlEncoded':
ob = HttpUrlEncodedBinding()
self.addExtension(ob)
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'multipartRelated':
ob = MimeMultipartRelatedBinding()
self.addExtension(ob)
ob.load_ex(GetExtensions(e))
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'content':
part = DOM.getAttr(e, 'part', default=None)
type = DOM.getAttr(e, 'type', default=None)
ob = MimeContentBinding(part, type)
self.addExtension(ob)
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'mimeXml':
part = DOM.getAttr(e, 'part', default=None)
ob = MimeXmlBinding(part)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), self.type)
node = epc._getNode()
for item in self.extensions:
if item: item.toDom(node)
class Service(Element):
def __init__(self, name, documentation=''):
Element.__init__(self, name, documentation)
self.ports = Collection(self)
def getWSDL(self):
return self.parent().parent()
def addPort(self, name, binding, documentation=''):
item = Port(name, binding, documentation)
self.ports[name] = item
return item
def load(self, elements):
for element in elements:
name = DOM.getAttr(element, 'name', default=None)
docs = GetDocumentation(element)
binding = DOM.getAttr(element, 'binding', default=None)
if name is None or binding is None:
raise WSDLError(
'Invalid port element.'
)
binding = ParseQName(binding, element)
port = self.addPort(name, binding, docs)
port.load_ex(GetExtensions(element))
def load_ex(self, elements):
for e in elements:
self.addExtension(e)
def toDom(self):
wsdl = self.getWSDL()
ep = ElementProxy(None, DOM.getElement(wsdl.document, None))
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), "service")
epc.setAttributeNS(None, "name", self.name)
node = epc._getNode()
for port in self.ports:
port.toDom(node)
class Port(Element):
def __init__(self, name, binding, documentation=''):
Element.__init__(self, name, documentation)
self.binding = binding
# def getWSDL(self):
# return self.parent().parent().getWSDL()
def getService(self):
"""Return the Service object associated with this port."""
return self.parent().parent()
def getBinding(self):
"""Return the Binding object that is referenced by this port."""
wsdl = self.getService().getWSDL()
return wsdl.bindings[self.binding]
def getPortType(self):
"""Return the PortType object that is referenced by this port."""
wsdl = self.getService().getWSDL()
binding = wsdl.bindings[self.binding]
return wsdl.portTypes[binding.type]
def getAddressBinding(self):
"""A convenience method to obtain the extension element used
as the address binding for the port."""
for item in self.extensions:
            if isinstance(item, (SoapAddressBinding, HttpAddressBinding)):
return item
raise WSDLError(
'No address binding found in port.'
)
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_SOAP_BINDING_ALL and name == 'address':
location = DOM.getAttr(e, 'location', default=None)
ob = SoapAddressBinding(location)
self.addExtension(ob)
continue
elif ns in DOM.NS_HTTP_BINDING_ALL and name == 'address':
location = DOM.getAttr(e, 'location', default=None)
ob = HttpAddressBinding(location)
self.addExtension(ob)
continue
else:
self.addExtension(e)
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLUri(wsdl.version), "port")
epc.setAttributeNS(None, "name", self.name)
ns,name = self.binding
prefix = epc.getPrefix(ns)
epc.setAttributeNS(None, "binding", "%s:%s" %(prefix,name))
node = epc._getNode()
for ext in self.extensions:
ext.toDom(node)
class SoapBinding:
def __init__(self, transport, style='rpc'):
self.transport = transport
self.style = style
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'binding')
if self.transport:
epc.setAttributeNS(None, "transport", self.transport)
if self.style:
epc.setAttributeNS(None, "style", self.style)
class SoapAddressBinding:
def __init__(self, location):
self.location = location
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'address')
epc.setAttributeNS(None, "location", self.location)
class SoapOperationBinding:
def __init__(self, soapAction=None, style=None):
self.soapAction = soapAction
self.style = style
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'operation')
if self.soapAction:
epc.setAttributeNS(None, 'soapAction', self.soapAction)
if self.style:
epc.setAttributeNS(None, 'style', self.style)
class SoapBodyBinding:
def __init__(self, use, namespace=None, encodingStyle=None, parts=None):
if not use in ('literal', 'encoded'):
raise WSDLError(
'Invalid use attribute value: %s' % use
)
self.encodingStyle = encodingStyle
self.namespace = namespace
        if isinstance(parts, basestring):
            parts = parts.split()
self.parts = parts
self.use = use
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'body')
epc.setAttributeNS(None, "use", self.use)
epc.setAttributeNS(None, "namespace", self.namespace)
class SoapFaultBinding:
def __init__(self, name, use, namespace=None, encodingStyle=None):
if not use in ('literal', 'encoded'):
raise WSDLError(
'Invalid use attribute value: %s' % use
)
self.encodingStyle = encodingStyle
self.namespace = namespace
self.name = name
self.use = use
def getWSDL(self):
return self.parent().getWSDL()
def toDom(self, node):
wsdl = self.getWSDL()
ep = ElementProxy(None, node)
        # serialize as a soap:fault element (not soap:body)
        epc = ep.createAppendElement(DOM.GetWSDLSoapBindingUri(wsdl.version), 'fault')
epc.setAttributeNS(None, "use", self.use)
epc.setAttributeNS(None, "name", self.name)
if self.namespace is not None:
epc.setAttributeNS(None, "namespace", self.namespace)
if self.encodingStyle is not None:
epc.setAttributeNS(None, "encodingStyle", self.encodingStyle)
class SoapHeaderBinding:
def __init__(self, message, part, use, namespace=None, encodingStyle=None):
if not use in ('literal', 'encoded'):
raise WSDLError(
'Invalid use attribute value: %s' % use
)
self.encodingStyle = encodingStyle
self.namespace = namespace
self.message = message
self.part = part
self.use = use
tagname = 'header'
class SoapHeaderFaultBinding(SoapHeaderBinding):
tagname = 'headerfault'
class HttpBinding:
def __init__(self, verb):
self.verb = verb
class HttpAddressBinding:
def __init__(self, location):
self.location = location
class HttpOperationBinding:
def __init__(self, location):
self.location = location
class HttpUrlReplacementBinding:
pass
class HttpUrlEncodedBinding:
pass
class MimeContentBinding:
def __init__(self, part=None, type=None):
self.part = part
self.type = type
class MimeXmlBinding:
def __init__(self, part=None):
self.part = part
class MimeMultipartRelatedBinding:
def __init__(self):
self.parts = []
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
            if ns in DOM.NS_MIME_BINDING_ALL and name == 'part':
                # load the part's own content bindings as well
                part = MimePartBinding()
                part.load_ex(GetExtensions(e))
                self.parts.append(part)
                continue
class MimePartBinding:
def __init__(self):
self.items = []
def load_ex(self, elements):
for e in elements:
ns, name = e.namespaceURI, e.localName
if ns in DOM.NS_MIME_BINDING_ALL and name == 'content':
part = DOM.getAttr(e, 'part', default=None)
type = DOM.getAttr(e, 'type', default=None)
ob = MimeContentBinding(part, type)
self.items.append(ob)
continue
elif ns in DOM.NS_MIME_BINDING_ALL and name == 'mimeXml':
part = DOM.getAttr(e, 'part', default=None)
ob = MimeXmlBinding(part)
self.items.append(ob)
continue
elif ns in DOM.NS_SOAP_BINDING_ALL and name == 'body':
encstyle = DOM.getAttr(e, 'encodingStyle', default=None)
namespace = DOM.getAttr(e, 'namespace', default=None)
parts = DOM.getAttr(e, 'parts', default=None)
use = DOM.getAttr(e, 'use', default=None)
if use is None:
raise WSDLError(
'Invalid soap:body binding element.'
)
ob = SoapBodyBinding(use, namespace, encstyle, parts)
self.items.append(ob)
continue
class WSDLError(Exception):
pass
def DeclareNSPrefix(writer, prefix, nsuri):
if writer.hasNSPrefix(nsuri):
return
writer.declareNSPrefix(prefix, nsuri)
def ParseTypeRef(value, element):
parts = value.split(':', 1)
if len(parts) == 1:
return (DOM.findTargetNS(element), value)
nsuri = DOM.findNamespaceURI(parts[0], element)
return (nsuri, parts[1])
def ParseQName(value, element):
nameref = value.split(':', 1)
if len(nameref) == 2:
nsuri = DOM.findNamespaceURI(nameref[0], element)
name = nameref[-1]
else:
nsuri = DOM.findTargetNS(element)
name = nameref[-1]
return nsuri, name
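# Illustrative sketch (hypothetical prefix/namespace bindings): given an
# element whose in-scope namespaces map 'tns' to 'http://example.org/stock',
# ParseQName('tns:GetQuote', element) would return
# ('http://example.org/stock', 'GetQuote'), while an unprefixed value such
# as 'GetQuote' resolves against the element's target namespace.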
def GetDocumentation(element):
docnode = DOM.getElement(element, 'documentation', None, None)
if docnode is not None:
return DOM.getElementText(docnode)
return ''
def GetExtensions(element):
return [ item for item in DOM.getElements(element, None, None)
if item.namespaceURI != DOM.NS_WSDL ]
def GetWSAActionFault(operation, name):
"""Find wsa:Action attribute, and return value or WSA.FAULT
for the default.
"""
attr = operation.faults[name].action
if attr is not None:
return attr
return WSA.FAULT
def GetWSAActionInput(operation):
"""Find wsa:Action attribute, and return value or the default."""
attr = operation.input.action
if attr is not None:
return attr
portType = operation.getPortType()
targetNamespace = portType.getTargetNamespace()
ptName = portType.name
msgName = operation.input.name
if not msgName:
msgName = operation.name + 'Request'
if targetNamespace.endswith('/'):
return '%s%s/%s' %(targetNamespace, ptName, msgName)
return '%s/%s/%s' %(targetNamespace, ptName, msgName)
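# For example (hypothetical names): an unnamed input of operation 'GetQuote'
# on portType 'StockQuote' with target namespace 'http://example.org/stock'
# yields the default action
# 'http://example.org/stock/StockQuote/GetQuoteRequest'.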
def GetWSAActionOutput(operation):
"""Find wsa:Action attribute, and return value or the default."""
attr = operation.output.action
if attr is not None:
return attr
targetNamespace = operation.getPortType().getTargetNamespace()
ptName = operation.getPortType().name
msgName = operation.output.name
if not msgName:
msgName = operation.name + 'Response'
if targetNamespace.endswith('/'):
return '%s%s/%s' %(targetNamespace, ptName, msgName)
return '%s/%s/%s' %(targetNamespace, ptName, msgName)
def FindExtensions(object, kind, t_type=type(())):
if isinstance(kind, t_type):
namespaceURI, name = kind
return [ item for item in object.extensions
if hasattr(item, 'nodeType') \
and DOM.nsUriMatch(namespaceURI, item.namespaceURI) \
and item.name == name ]
return [ item for item in object.extensions if isinstance(item, kind) ]
def FindExtension(object, kind, t_type=type(())):
if isinstance(kind, t_type):
namespaceURI, name = kind
for item in object.extensions:
if hasattr(item, 'nodeType') \
and DOM.nsUriMatch(namespaceURI, item.namespaceURI) \
and item.name == name:
return item
else:
for item in object.extensions:
if isinstance(item, kind):
return item
return None
class SOAPCallInfo:
"""SOAPCallInfo captures the important binding information about a
SOAP operation, in a structure that is easier to work with than
raw WSDL structures."""
def __init__(self, methodName):
self.methodName = methodName
self.inheaders = []
self.outheaders = []
self.inparams = []
self.outparams = []
self.retval = None
encodingStyle = DOM.NS_SOAP_ENC
documentation = ''
soapAction = None
transport = None
namespace = None
location = None
use = 'encoded'
style = 'rpc'
def addInParameter(self, name, type, namespace=None, element_type=0):
"""Add an input parameter description to the call info."""
parameter = ParameterInfo(name, type, namespace, element_type)
self.inparams.append(parameter)
return parameter
def addOutParameter(self, name, type, namespace=None, element_type=0):
"""Add an output parameter description to the call info."""
parameter = ParameterInfo(name, type, namespace, element_type)
self.outparams.append(parameter)
return parameter
def setReturnParameter(self, name, type, namespace=None, element_type=0):
"""Set the return parameter description for the call info."""
parameter = ParameterInfo(name, type, namespace, element_type)
self.retval = parameter
return parameter
def addInHeaderInfo(self, name, type, namespace, element_type=0,
mustUnderstand=0):
"""Add an input SOAP header description to the call info."""
headerinfo = HeaderInfo(name, type, namespace, element_type)
if mustUnderstand:
headerinfo.mustUnderstand = 1
self.inheaders.append(headerinfo)
return headerinfo
def addOutHeaderInfo(self, name, type, namespace, element_type=0,
mustUnderstand=0):
"""Add an output SOAP header description to the call info."""
headerinfo = HeaderInfo(name, type, namespace, element_type)
if mustUnderstand:
headerinfo.mustUnderstand = 1
self.outheaders.append(headerinfo)
return headerinfo
def getInParameters(self):
"""Return a sequence of the in parameters of the method."""
return self.inparams
def getOutParameters(self):
"""Return a sequence of the out parameters of the method."""
return self.outparams
def getReturnParameter(self):
"""Return param info about the return value of the method."""
return self.retval
def getInHeaders(self):
"""Return a sequence of the in headers of the method."""
return self.inheaders
def getOutHeaders(self):
"""Return a sequence of the out headers of the method."""
return self.outheaders
class ParameterInfo:
"""A ParameterInfo object captures parameter binding information."""
def __init__(self, name, type, namespace=None, element_type=0):
if element_type:
self.element_type = 1
if namespace is not None:
self.namespace = namespace
self.name = name
self.type = type
element_type = 0
namespace = None
default = None
class HeaderInfo(ParameterInfo):
"""A HeaderInfo object captures SOAP header binding information."""
def __init__(self, name, type, namespace, element_type=None):
ParameterInfo.__init__(self, name, type, namespace, element_type)
mustUnderstand = 0
actor = None
def callInfoFromWSDL(port, name):
"""Return a SOAPCallInfo given a WSDL port and operation name."""
wsdl = port.getService().getWSDL()
binding = port.getBinding()
portType = binding.getPortType()
operation = portType.operations[name]
opbinding = binding.operations[name]
messages = wsdl.messages
callinfo = SOAPCallInfo(name)
addrbinding = port.getAddressBinding()
if not isinstance(addrbinding, SoapAddressBinding):
raise ValueError, 'Unsupported binding type.'
callinfo.location = addrbinding.location
soapbinding = binding.findBinding(SoapBinding)
if soapbinding is None:
raise ValueError, 'Missing soap:binding element.'
callinfo.transport = soapbinding.transport
callinfo.style = soapbinding.style or 'document'
soap_op_binding = opbinding.findBinding(SoapOperationBinding)
if soap_op_binding is not None:
callinfo.soapAction = soap_op_binding.soapAction
callinfo.style = soap_op_binding.style or callinfo.style
parameterOrder = operation.parameterOrder
if operation.input is not None:
message = messages[operation.input.message]
msgrole = opbinding.input
mime = msgrole.findBinding(MimeMultipartRelatedBinding)
if mime is not None:
raise ValueError, 'Mime bindings are not supported.'
else:
for item in msgrole.findBindings(SoapHeaderBinding):
part = messages[item.message].parts[item.part]
header = callinfo.addInHeaderInfo(
part.name,
part.element or part.type,
item.namespace,
element_type = part.element and 1 or 0
)
header.encodingStyle = item.encodingStyle
body = msgrole.findBinding(SoapBodyBinding)
if body is None:
raise ValueError, 'Missing soap:body binding.'
callinfo.encodingStyle = body.encodingStyle
callinfo.namespace = body.namespace
callinfo.use = body.use
if body.parts is not None:
parts = []
for name in body.parts:
parts.append(message.parts[name])
else:
parts = message.parts.values()
for part in parts:
callinfo.addInParameter(
part.name,
part.element or part.type,
element_type = part.element and 1 or 0
)
if operation.output is not None:
try:
message = messages[operation.output.message]
except KeyError:
            if wsdl.strict:
                raise RuntimeError(
                    "Received message not defined in the WSDL schema: %s" %
                    operation.output.message)
            else:
                message = wsdl.addMessage(operation.output.message)
                print "Warning:", \
                      "Received message not defined in the WSDL schema.", \
                      "Adding it."
                print "Message:", operation.output.message
msgrole = opbinding.output
mime = msgrole.findBinding(MimeMultipartRelatedBinding)
if mime is not None:
raise ValueError, 'Mime bindings are not supported.'
else:
for item in msgrole.findBindings(SoapHeaderBinding):
part = messages[item.message].parts[item.part]
header = callinfo.addOutHeaderInfo(
part.name,
part.element or part.type,
item.namespace,
element_type = part.element and 1 or 0
)
header.encodingStyle = item.encodingStyle
body = msgrole.findBinding(SoapBodyBinding)
if body is None:
raise ValueError, 'Missing soap:body binding.'
callinfo.encodingStyle = body.encodingStyle
callinfo.namespace = body.namespace
callinfo.use = body.use
if body.parts is not None:
parts = []
for name in body.parts:
parts.append(message.parts[name])
else:
parts = message.parts.values()
if parts:
for part in parts:
callinfo.addOutParameter(
part.name,
part.element or part.type,
element_type = part.element and 1 or 0
)
return callinfo
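# A minimal usage sketch (hypothetical service/port/operation names, assuming
# a WSDL instance has already been loaded via this module):
#
#   service = wsdl.services['StockQuoteService']
#   port = service.ports['StockQuotePort']
#   callinfo = callInfoFromWSDL(port, 'GetQuote')
#   print callinfo.location, callinfo.soapAction
#   for param in callinfo.getInParameters():
#       print param.name, param.type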
|
Fale/ansible
|
refs/heads/devel
|
test/lib/ansible_test/_internal/powershell_import_analysis.py
|
31
|
"""Analyze powershell import statements."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from .io import (
read_text_file,
)
from .util import (
display,
)
from .util_common import (
resolve_csharp_ps_util,
)
from .data import (
data_context,
)
def get_powershell_module_utils_imports(powershell_targets):
"""Return a dictionary of module_utils names mapped to sets of powershell file paths.
:type powershell_targets: list[TestTarget]
:rtype: dict[str, set[str]]
"""
module_utils = enumerate_module_utils()
imports_by_target_path = {}
for target in powershell_targets:
imports_by_target_path[target.path] = extract_powershell_module_utils_imports(target.path, module_utils)
imports = dict([(module_util, set()) for module_util in module_utils])
for target_path in imports_by_target_path:
for module_util in imports_by_target_path[target_path]:
imports[module_util].add(target_path)
for module_util in sorted(imports):
if not imports[module_util]:
display.warning('No imports found which use the "%s" module_util.' % module_util)
return imports
def get_powershell_module_utils_name(path): # type: (str) -> str
"""Return a namespace and name from the given module_utils path."""
base_path = data_context().content.module_utils_powershell_path
if data_context().content.collection:
prefix = 'ansible_collections.' + data_context().content.collection.prefix + 'plugins.module_utils.'
else:
prefix = ''
name = prefix + os.path.splitext(os.path.relpath(path, base_path))[0].replace(os.path.sep, '.')
return name
def enumerate_module_utils():
"""Return a list of available module_utils imports.
:rtype: set[str]
"""
return set(get_powershell_module_utils_name(p)
for p in data_context().content.walk_files(data_context().content.module_utils_powershell_path)
if os.path.splitext(p)[1] == '.psm1')
def extract_powershell_module_utils_imports(path, module_utils):
"""Return a list of module_utils imports found in the specified source file.
:type path: str
:type module_utils: set[str]
:rtype: set[str]
"""
imports = set()
code = read_text_file(path)
if data_context().content.is_ansible and '# POWERSHELL_COMMON' in code:
imports.add('Ansible.ModuleUtils.Legacy')
lines = code.splitlines()
line_number = 0
for line in lines:
line_number += 1
match = re.search(r'(?i)^#\s*(?:requires\s+-module(?:s?)|ansiblerequires\s+-powershell)\s*((?:Ansible|ansible_collections|\.)\..+)', line)
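        # Illustrative lines this pattern is intended to match (hypothetical
        # module_utils names):
        #   #Requires -Module Ansible.ModuleUtils.Legacy
        #   #AnsibleRequires -PowerShell ansible_collections.ns.col.plugins.module_utils.example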
if not match:
continue
import_name = resolve_csharp_ps_util(match.group(1), path)
if import_name in module_utils:
imports.add(import_name)
elif data_context().content.is_ansible or \
import_name.startswith('ansible_collections.%s' % data_context().content.prefix):
display.warning('%s:%d Invalid module_utils import: %s' % (path, line_number, import_name))
return imports
|
Cazomino05/Test1
|
refs/heads/master
|
vendor/google-breakpad/src/tools/gyp/pylib/gyp/msvs_emulation.py
|
36
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
# passed literally through escaping so they can be singled to just the
# original %. Otherwise, trying to pass the literal representation that
# looks like an environment variable to the shell (e.g. %PATH%) would fail.
arg = arg.replace('%', '%%')
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def EncodeRspFileList(args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args: return ''
if args[0].startswith('call '):
call, program = args[0].split(' ', 1)
program = call + ' ' + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
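# Sketch of the resulting encoding (hypothetical arguments):
#   EncodeRspFileList(['cl.exe', '/Fo:out dir\\a.obj', 'a.cc'])
# yields something like:
#   cl.exe "/Fo:out dir\a.obj" "a.cc"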
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
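# e.g. _GenericRetrieve({'a': {'b': 1}}, 0, ('a', 'b')) returns 1, while
# _GenericRetrieve({'a': {'b': 1}}, 0, ('a', 'x')) returns the default 0.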
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
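# e.g. _DoRemapping(['a', 'b', 'z'], {'a': '1', 'b': '2'}.get) remaps and
# filters to ['1', '2']; passing a plain dict works too, since non-callable
# maps are wrapped with .get above.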
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
self.dxsdk_dir = _FindDirectXInstallation()
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
self.wdk_dir = os.environ.get('WDK_DIR')
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
replacements = {
'$(OutDir)\\': target_dir,
'$(TargetDir)\\': target_dir,
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(InputPath)': '${source}',
'$(InputName)': '${root}',
'$(ProjectName)': self.spec['target_name'],
'$(TargetName)': target_name,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if self.vs_version.Path():
replacements['$(VSInstallDir)'] = self.vs_version.Path()
replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(),
'VC') + '\\'
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
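  # e.g. AdjustLibraries(['-lfoo', 'bar.lib', 'baz'])
  #      -> ['foo.lib', 'bar.lib', 'baz.lib']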
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
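  # e.g. (hypothetical settings) a _GetWrapper over 'VCCLCompilerTool' turns a
  # 'WarningLevel' value of '3' into the flag '/W3' via prefix='/W'; when
  # append= is given, flags are accumulated onto that list instead of returned.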
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
    # There are two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
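  # For example (hypothetical config names): with msvs_target_platform 'x64',
  # _TargetConfig('Debug') returns 'Debug_x64'; with an 'x86' arch,
  # _TargetConfig('Release_x64') is remapped back to 'Release'.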
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info:
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc', '2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32'}, prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation, build_dir):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
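    # (e.g. Culture '1033', the en-US LCID, becomes hex(1033)[2:] == '409',
    # giving '/l409')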
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
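  # Illustrative (hypothetical args): BuildCygwinBashCommandLine(['echo', 'hi'],
  # 'out\\Release') yields roughly:
  #   call "<cygwin_dir>\\setup_env.bat" && set CYGWIN=nontsec &&
  #   bash -c "cd out/Release ; 'echo' 'hi'"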
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def HasExplicitIdlRules(self, spec):
"""Determine if there's an explicit rule for idl files. When there isn't we
need to generate implicit rules to build MIDL .idl files."""
return self._HasExplicitRuleForExtension(spec, 'idl')
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
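# Illustrative defaults for foo.idl: outputs are foo.h, dlldata.c, foo_i.c and
# foo_p.c (${root} stands for the basename of the input idl file).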
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(self, input, output, implicit, command,
cflags_c, cflags_cc, expand_special):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ['/Yc' + self._PchHeader()]
if command == 'cxx':
return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
self.output_obj, [])
elif command == 'cc':
return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
self.output_obj, [])
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'))
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
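# Illustrative (hypothetical values):
#   ExpandMacros('$(OutDir)\\foo.dll', {'$(OutDir)': 'out\\Release'})
#   returns 'out\\Release\\foo.dll'.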
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
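# Illustrative: _FormatAsEnvironmentBlock({'TMP': 'C:\\t'}) returns
# 'TMP=C:\\t\x00\x00'; each key=value pair is NUL-terminated and a final
# NUL closes the block.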
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
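# Illustrative (hypothetical path): given a line 'LOC:C:\\VC\\bin\\cl.exe' in
# the output, _ExtractCLPath returns 'C:\\VC\\bin\\cl.exe'.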
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirements (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to gyp to suppress file
  generation and use custom environment files that you prepare yourself."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running through WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
|
duyet-website/api.duyet.net
|
refs/heads/master
|
lib/boto/cloudtrail/layer1.py
|
125
|
# Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.cloudtrail import exceptions
from boto.compat import json
class CloudTrailConnection(AWSQueryConnection):
"""
AWS CloudTrail
This is the CloudTrail API Reference. It provides descriptions of
actions, data types, common parameters, and common errors for
CloudTrail.
CloudTrail is a web service that records AWS API calls for your
AWS account and delivers log files to an Amazon S3 bucket. The
recorded information includes the identity of the user, the start
time of the AWS API call, the source IP address, the request
parameters, and the response elements returned by the service.
As an alternative to using the API, you can use one of the AWS
SDKs, which consist of libraries and sample code for various
programming languages and platforms (Java, Ruby, .NET, iOS,
Android, etc.). The SDKs provide a convenient way to create
    programmatic access to AWS CloudTrail. For example, the SDKs take
care of cryptographically signing requests, managing errors, and
retrying requests automatically. For information about the AWS
SDKs, including how to download and install them, see the `Tools
for Amazon Web Services page`_.
See the CloudTrail User Guide for information about the data that
is included with each AWS API call listed in the log files.
"""
APIVersion = "2013-11-01"
DefaultRegionName = "us-east-1"
DefaultRegionEndpoint = "cloudtrail.us-east-1.amazonaws.com"
ServiceName = "CloudTrail"
TargetPrefix = "com.amazonaws.cloudtrail.v20131101.CloudTrail_20131101"
ResponseError = JSONResponseError
_faults = {
"InvalidMaxResultsException": exceptions.InvalidMaxResultsException,
"InvalidSnsTopicNameException": exceptions.InvalidSnsTopicNameException,
"InvalidS3BucketNameException": exceptions.InvalidS3BucketNameException,
"TrailAlreadyExistsException": exceptions.TrailAlreadyExistsException,
"InvalidTimeRangeException": exceptions.InvalidTimeRangeException,
"InvalidLookupAttributesException": exceptions.InvalidLookupAttributesException,
"InsufficientSnsTopicPolicyException": exceptions.InsufficientSnsTopicPolicyException,
"InvalidCloudWatchLogsLogGroupArnException": exceptions.InvalidCloudWatchLogsLogGroupArnException,
"InvalidCloudWatchLogsRoleArnException": exceptions.InvalidCloudWatchLogsRoleArnException,
"InvalidTrailNameException": exceptions.InvalidTrailNameException,
"CloudWatchLogsDeliveryUnavailableException": exceptions.CloudWatchLogsDeliveryUnavailableException,
"TrailNotFoundException": exceptions.TrailNotFoundException,
"S3BucketDoesNotExistException": exceptions.S3BucketDoesNotExistException,
"InvalidNextTokenException": exceptions.InvalidNextTokenException,
"InvalidS3PrefixException": exceptions.InvalidS3PrefixException,
"MaximumNumberOfTrailsExceededException": exceptions.MaximumNumberOfTrailsExceededException,
"InsufficientS3BucketPolicyException": exceptions.InsufficientS3BucketPolicyException,
}
def __init__(self, **kwargs):
region = kwargs.pop('region', None)
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
if 'host' not in kwargs or kwargs['host'] is None:
kwargs['host'] = region.endpoint
super(CloudTrailConnection, self).__init__(**kwargs)
self.region = region
def _required_auth_capability(self):
return ['hmac-v4']
def create_trail(self, name, s3_bucket_name, s3_key_prefix=None,
sns_topic_name=None, include_global_service_events=None,
cloud_watch_logs_log_group_arn=None,
cloud_watch_logs_role_arn=None):
"""
From the command line, use `create-subscription`.
Creates a trail that specifies the settings for delivery of
log data to an Amazon S3 bucket.
:type name: string
:param name: Specifies the name of the trail.
:type s3_bucket_name: string
:param s3_bucket_name: Specifies the name of the Amazon S3 bucket
designated for publishing log files.
:type s3_key_prefix: string
:param s3_key_prefix: Specifies the Amazon S3 key prefix that precedes
the name of the bucket you have designated for log file delivery.
:type sns_topic_name: string
:param sns_topic_name: Specifies the name of the Amazon SNS topic
defined for notification of log file delivery.
:type include_global_service_events: boolean
:param include_global_service_events: Specifies whether the trail is
publishing events from global services such as IAM to the log
files.
:type cloud_watch_logs_log_group_arn: string
:param cloud_watch_logs_log_group_arn: Specifies a log group name using
an Amazon Resource Name (ARN), a unique identifier that represents
the log group to which CloudTrail logs will be delivered. Not
required unless you specify CloudWatchLogsRoleArn.
:type cloud_watch_logs_role_arn: string
:param cloud_watch_logs_role_arn: Specifies the role for the CloudWatch
            Logs endpoint to assume to write to a user's log group.
"""
params = {'Name': name, 'S3BucketName': s3_bucket_name, }
if s3_key_prefix is not None:
params['S3KeyPrefix'] = s3_key_prefix
if sns_topic_name is not None:
params['SnsTopicName'] = sns_topic_name
if include_global_service_events is not None:
params['IncludeGlobalServiceEvents'] = include_global_service_events
if cloud_watch_logs_log_group_arn is not None:
params['CloudWatchLogsLogGroupArn'] = cloud_watch_logs_log_group_arn
if cloud_watch_logs_role_arn is not None:
params['CloudWatchLogsRoleArn'] = cloud_watch_logs_role_arn
return self.make_request(action='CreateTrail',
body=json.dumps(params))
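    # Illustrative usage (hypothetical names; assumes boto credentials are
    # configured):
    #   conn = CloudTrailConnection()
    #   conn.create_trail('my-trail', 'my-log-bucket', s3_key_prefix='logs/')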
def delete_trail(self, name):
"""
Deletes a trail.
:type name: string
:param name: The name of a trail to be deleted.
"""
params = {'Name': name, }
return self.make_request(action='DeleteTrail',
body=json.dumps(params))
def describe_trails(self, trail_name_list=None):
"""
Retrieves settings for the trail associated with the current
region for your account.
        :type trail_name_list: list
        :param trail_name_list: A list of trail names; settings are returned
            for each trail named in the list.
"""
params = {}
if trail_name_list is not None:
params['trailNameList'] = trail_name_list
return self.make_request(action='DescribeTrails',
body=json.dumps(params))
def get_trail_status(self, name):
"""
Returns a JSON-formatted list of information about the
specified trail. Fields include information on delivery
errors, Amazon SNS and Amazon S3 errors, and start and stop
logging times for each trail.
:type name: string
:param name: The name of the trail for which you are requesting the
current status.
"""
params = {'Name': name, }
return self.make_request(action='GetTrailStatus',
body=json.dumps(params))
def lookup_events(self, lookup_attributes=None, start_time=None,
end_time=None, max_results=None, next_token=None):
"""
Looks up API activity events captured by CloudTrail that
create, update, or delete resources in your account. Events
for a region can be looked up for the times in which you had
CloudTrail turned on in that region during the last seven
days. Lookup supports five different attributes: time range
(defined by a start time and end time), user name, event name,
resource type, and resource name. All attributes are optional.
        At most two attributes can be specified in any one lookup
        request: the time range and one other attribute. The
default number of results returned is 10, with a maximum of 50
possible. The response includes a token that you can use to
get the next page of results.
The rate of lookup requests is limited to one per second per
account. If this limit is exceeded, a throttling error occurs.
Events that occurred during the selected time range will not
be available for lookup if CloudTrail logging was not enabled
when the events occurred.
:type lookup_attributes: list
:param lookup_attributes: Contains a list of lookup attributes.
Currently the list can contain only one item.
:type start_time: timestamp
:param start_time: Specifies that only events that occur after or at
the specified time are returned. If the specified start time is
after the specified end time, an error is returned.
:type end_time: timestamp
:param end_time: Specifies that only events that occur before or at the
specified time are returned. If the specified end time is before
the specified start time, an error is returned.
:type max_results: integer
:param max_results: The number of events to return. Possible values are
1 through 50. The default is 10.
:type next_token: string
:param next_token: The token to use to get the next page of results
after a previous API call. This token must be passed in with the
            same parameters that were specified in the original call. For
example, if the original call specified an AttributeKey of
'Username' with a value of 'root', the call with NextToken should
include those same parameters.
"""
params = {}
if lookup_attributes is not None:
params['LookupAttributes'] = lookup_attributes
if start_time is not None:
params['StartTime'] = start_time
if end_time is not None:
params['EndTime'] = end_time
if max_results is not None:
params['MaxResults'] = max_results
if next_token is not None:
params['NextToken'] = next_token
return self.make_request(action='LookupEvents',
body=json.dumps(params))
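    # Illustrative usage (hypothetical values; each lookup attribute is a
    # dict with 'AttributeKey' and 'AttributeValue' keys):
    #   conn.lookup_events(
    #       lookup_attributes=[{'AttributeKey': 'Username',
    #                           'AttributeValue': 'root'}],
    #       max_results=10)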
def start_logging(self, name):
"""
Starts the recording of AWS API calls and log file delivery
for a trail.
:type name: string
:param name: The name of the trail for which CloudTrail logs AWS API
calls.
"""
params = {'Name': name, }
return self.make_request(action='StartLogging',
body=json.dumps(params))
def stop_logging(self, name):
"""
Suspends the recording of AWS API calls and log file delivery
for the specified trail. Under most circumstances, there is no
need to use this action. You can update a trail without
stopping it first. This action is the only way to stop
recording.
:type name: string
:param name: Communicates to CloudTrail the name of the trail for which
to stop logging AWS API calls.
"""
params = {'Name': name, }
return self.make_request(action='StopLogging',
body=json.dumps(params))
def update_trail(self, name, s3_bucket_name=None, s3_key_prefix=None,
sns_topic_name=None, include_global_service_events=None,
cloud_watch_logs_log_group_arn=None,
cloud_watch_logs_role_arn=None):
"""
From the command line, use `update-subscription`.
Updates the settings that specify delivery of log files.
Changes to a trail do not require stopping the CloudTrail
service. Use this action to designate an existing bucket for
log delivery. If the existing bucket has previously been a
target for CloudTrail log files, an IAM policy exists for the
bucket.
:type name: string
:param name: Specifies the name of the trail.
:type s3_bucket_name: string
:param s3_bucket_name: Specifies the name of the Amazon S3 bucket
designated for publishing log files.
:type s3_key_prefix: string
:param s3_key_prefix: Specifies the Amazon S3 key prefix that precedes
the name of the bucket you have designated for log file delivery.
:type sns_topic_name: string
:param sns_topic_name: Specifies the name of the Amazon SNS topic
defined for notification of log file delivery.
:type include_global_service_events: boolean
:param include_global_service_events: Specifies whether the trail is
publishing events from global services such as IAM to the log
files.
:type cloud_watch_logs_log_group_arn: string
:param cloud_watch_logs_log_group_arn: Specifies a log group name using
an Amazon Resource Name (ARN), a unique identifier that represents
the log group to which CloudTrail logs will be delivered. Not
required unless you specify CloudWatchLogsRoleArn.
:type cloud_watch_logs_role_arn: string
:param cloud_watch_logs_role_arn: Specifies the role for the CloudWatch
            Logs endpoint to assume to write to a user's log group.
"""
params = {'Name': name, }
if s3_bucket_name is not None:
params['S3BucketName'] = s3_bucket_name
if s3_key_prefix is not None:
params['S3KeyPrefix'] = s3_key_prefix
if sns_topic_name is not None:
params['SnsTopicName'] = sns_topic_name
if include_global_service_events is not None:
params['IncludeGlobalServiceEvents'] = include_global_service_events
if cloud_watch_logs_log_group_arn is not None:
params['CloudWatchLogsLogGroupArn'] = cloud_watch_logs_log_group_arn
if cloud_watch_logs_role_arn is not None:
params['CloudWatchLogsRoleArn'] = cloud_watch_logs_role_arn
return self.make_request(action='UpdateTrail',
body=json.dumps(params))
def make_request(self, action, body):
headers = {
'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
'Host': self.region.endpoint,
'Content-Type': 'application/x-amz-json-1.1',
'Content-Length': str(len(body)),
}
http_request = self.build_base_http_request(
method='POST', path='/', auth_path='/', params={},
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
return json.loads(response_body)
else:
json_body = json.loads(response_body)
fault_name = json_body.get('__type', None)
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
|
libracore/erpnext
|
refs/heads/v12
|
erpnext/regional/india/__init__.py
|
6
|
from __future__ import unicode_literals
states = [
'',
'Andaman and Nicobar Islands',
'Andhra Pradesh',
'Arunachal Pradesh',
'Assam',
'Bihar',
'Chandigarh',
'Chhattisgarh',
'Dadra and Nagar Haveli',
'Daman and Diu',
'Delhi',
'Goa',
'Gujarat',
'Haryana',
'Himachal Pradesh',
'Jammu and Kashmir',
'Jharkhand',
'Karnataka',
'Kerala',
'Lakshadweep Islands',
'Madhya Pradesh',
'Maharashtra',
'Manipur',
'Meghalaya',
'Mizoram',
'Nagaland',
'Odisha',
'Other Territory',
'Pondicherry',
'Punjab',
'Rajasthan',
'Sikkim',
'Tamil Nadu',
'Telangana',
'Tripura',
'Uttar Pradesh',
'Uttarakhand',
'West Bengal',
]
state_numbers = {
"Andaman and Nicobar Islands": "35",
"Andhra Pradesh": "37",
"Arunachal Pradesh": "12",
"Assam": "18",
"Bihar": "10",
"Chandigarh": "04",
"Chhattisgarh": "22",
"Dadra and Nagar Haveli": "26",
"Daman and Diu": "25",
"Delhi": "07",
"Goa": "30",
"Gujarat": "24",
"Haryana": "06",
"Himachal Pradesh": "02",
"Jammu and Kashmir": "01",
"Jharkhand": "20",
"Karnataka": "29",
"Kerala": "32",
"Lakshadweep Islands": "31",
"Madhya Pradesh": "23",
"Maharashtra": "27",
"Manipur": "14",
"Meghalaya": "17",
"Mizoram": "15",
"Nagaland": "13",
"Odisha": "21",
"Other Territory": "98",
"Pondicherry": "34",
"Punjab": "03",
"Rajasthan": "08",
"Sikkim": "11",
"Tamil Nadu": "33",
"Telangana": "36",
"Tripura": "16",
"Uttar Pradesh": "09",
"Uttarakhand": "05",
"West Bengal": "19",
}
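# Illustrative: these are the two-digit GST state codes that prefix a GSTIN,
# e.g. state_numbers['Maharashtra'] == "27".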
|
bOOm-X/spark
|
refs/heads/master
|
examples/src/main/python/ml/bucketed_random_projection_lsh_example.py
|
51
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import BucketedRandomProjectionLSH
from pyspark.ml.linalg import Vectors
from pyspark.sql.functions import col
# $example off$
from pyspark.sql import SparkSession
"""
An example demonstrating BucketedRandomProjectionLSH.
Run with:
bin/spark-submit examples/src/main/python/ml/bucketed_random_projection_lsh_example.py
"""
if __name__ == "__main__":
spark = SparkSession \
.builder \
.appName("BucketedRandomProjectionLSHExample") \
.getOrCreate()
# $example on$
dataA = [(0, Vectors.dense([1.0, 1.0]),),
(1, Vectors.dense([1.0, -1.0]),),
(2, Vectors.dense([-1.0, -1.0]),),
(3, Vectors.dense([-1.0, 1.0]),)]
dfA = spark.createDataFrame(dataA, ["id", "features"])
dataB = [(4, Vectors.dense([1.0, 0.0]),),
(5, Vectors.dense([-1.0, 0.0]),),
(6, Vectors.dense([0.0, 1.0]),),
(7, Vectors.dense([0.0, -1.0]),)]
dfB = spark.createDataFrame(dataB, ["id", "features"])
key = Vectors.dense([1.0, 0.0])
brp = BucketedRandomProjectionLSH(inputCol="features", outputCol="hashes", bucketLength=2.0,
numHashTables=3)
model = brp.fit(dfA)
# Feature Transformation
print("The hashed dataset where hashed values are stored in the column 'hashes':")
model.transform(dfA).show()
# Compute the locality sensitive hashes for the input rows, then perform approximate
# similarity join.
# We could avoid computing hashes by passing in the already-transformed dataset, e.g.
# `model.approxSimilarityJoin(transformedA, transformedB, 1.5)`
print("Approximately joining dfA and dfB on Euclidean distance smaller than 1.5:")
model.approxSimilarityJoin(dfA, dfB, 1.5, distCol="EuclideanDistance")\
.select(col("datasetA.id").alias("idA"),
col("datasetB.id").alias("idB"),
col("EuclideanDistance")).show()
# Compute the locality sensitive hashes for the input rows, then perform approximate nearest
# neighbor search.
# We could avoid computing hashes by passing in the already-transformed dataset, e.g.
# `model.approxNearestNeighbors(transformedA, key, 2)`
print("Approximately searching dfA for 2 nearest neighbors of the key:")
model.approxNearestNeighbors(dfA, key, 2).show()
# $example off$
spark.stop()
|
EdibleEd/vacbooru
|
refs/heads/master
|
VAB_upload.py
|
1
|
# TAKES a set of tuples of the form
# {source_url, source_hash, [tags]}
# OPTIONALLY TAKES
# RETURNS return code
import VAB_scraper
import argparse
import Utility as utl
import requests
from requests.auth import HTTPBasicAuth
from bs4 import BeautifulSoup
import os
import sys
# Actually builds the post request, then executes it on vacbooru
class VAB_upload:
def __init__(self, config):
self.config = config
self.user = config['username']
self.api_token = config['api_token']
print('Uploader init')
def go(self, tagset):
        # First thing to do is retrieve a tag list for the image.
print(tagset)
f = open(tagset['local_file'], 'rb')
fileToSend = { 'upload[file]' : f}
fff = { 'upload[tag_string]' : tagset['tag_string'],
'upload[rating]' : tagset['rating'],
'upload[source]' : tagset['source']}
r = requests.post('http://anubis/uploads.json', files=fileToSend, data=fff, auth=HTTPBasicAuth(self.user, self.api_token), verify=False)
f.close()
if('RuntimeError - duplicate' in r.text):
print("Duplicate file attempt")
a = tagset['local_file'].rfind(os.path.sep)
newDir = os.path.join(tagset['local_file'][:a], 'vab_duplicateupload')
try:
if not os.path.exists(newDir):
os.makedirs(newDir)
print(tagset['local_file'])
print(os.path.join(newDir,tagset['local_file'][a+1:]))
os.rename(tagset['local_file'], os.path.join(newDir, tagset['local_file'][a+1:]))
except:
print('File ' + tagset['local_file'][a+1:] + ' could not be moved')
print("Error:", sys.exc_info()[0])
return
else:
print(r.text)
            # Now that we have uploaded it, let's move it to a new location so we don't re-upload it later
a = tagset['local_file'].rfind(os.path.sep)
newDir = os.path.join(tagset['local_file'][:a], 'vab_successfulupload')
try:
if not os.path.exists(newDir):
os.makedirs(newDir)
print(tagset['local_file'])
print(os.path.join(newDir, tagset['local_file'][a+1:]))
os.rename(tagset['local_file'], os.path.join(newDir, tagset['local_file'][a+1:]))
except:
print('File ' + tagset['local_file'][a+1:] + ' could not be moved')
print("Error:", sys.exc_info()[0])
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Upload an image to the local dbu server", usage="%(prog)s [options]")
    parser.add_argument("image", help="Image to upload", metavar='I', type=str, nargs='+')
    parser.add_argument("--username", required=True, help="vacbooru username")
    parser.add_argument("--api-token", dest="api_token", required=True, help="vacbooru API token")
    args = parser.parse_args()
    # VAB_upload expects a config dict and go() expects a tagset dict; the
    # tag/rating/source values below are placeholders for a bare upload.
    loader = VAB_upload({'username': args.username, 'api_token': args.api_token})
    for image in args.image:
        loader.go({'local_file': image, 'tag_string': '', 'rating': 's',
                   'source': ''})
|
jlmadurga/django-oscar
|
refs/heads/master
|
tests/integration/catalogue/reviews/form_tests.py
|
40
|
from django.test import TestCase
from oscar.apps.catalogue.reviews import forms
from oscar.test.factories import create_product, UserFactory
class TestReviewForm(TestCase):
def setUp(self):
self.product = create_product()
self.reviewer = UserFactory()
self.data = {
'title': ' This product is lovely',
'body': 'I really like this cheese',
'score': 0,
'name': 'JR Hartley',
'email': 'hartley@example.com'
}
def test_cleans_title(self):
form = forms.ProductReviewForm(
product=self.product, user=self.reviewer, data=self.data)
self.assertTrue(form.is_valid())
review = form.save()
self.assertEqual("This product is lovely", review.title)
class TestVoteForm(TestCase):
def setUp(self):
self.product = create_product()
self.reviewer = UserFactory()
self.voter = UserFactory()
self.review = self.product.reviews.create(
title='This is nice',
score=3,
body="This is the body",
user=self.reviewer)
def test_allows_real_users_to_vote(self):
form = forms.VoteForm(self.review, self.voter, data={'delta': 1})
self.assertTrue(form.is_valid())
def test_prevents_users_from_voting_more_than_once(self):
self.review.vote_up(self.voter)
form = forms.VoteForm(self.review, self.voter, data={'delta': 1})
self.assertFalse(form.is_valid())
self.assertTrue(len(form.errors['__all__']) > 0)
def test_prevents_users_voting_on_their_own_reviews(self):
form = forms.VoteForm(self.review, self.reviewer, data={'delta': 1})
self.assertFalse(form.is_valid())
self.assertTrue(len(form.errors['__all__']) > 0)
|
xiangel/hue
|
refs/heads/master
|
desktop/core/ext-py/python-openid-2.2.5/openid/yadis/services.py
|
167
|
# -*- test-case-name: openid.test.test_services -*-
from openid.yadis.filters import mkFilter
from openid.yadis.discover import discover, DiscoveryFailure
from openid.yadis.etxrd import parseXRDS, iterServices, XRDSError
def getServiceEndpoints(input_url, flt=None):
"""Perform the Yadis protocol on the input URL and return an
iterable of resulting endpoint objects.
    @param flt: A filter object or something that is convertible to
a filter object (using mkFilter) that will be used to generate
endpoint objects. This defaults to generating BasicEndpoint
objects.
@param input_url: The URL on which to perform the Yadis protocol
@return: The normalized identity URL and an iterable of endpoint
objects generated by the filter function.
@rtype: (str, [endpoint])
@raises DiscoveryFailure: when Yadis fails to obtain an XRDS document.
"""
result = discover(input_url)
try:
endpoints = applyFilter(result.normalized_uri,
result.response_text, flt)
except XRDSError, err:
raise DiscoveryFailure(str(err), None)
return (result.normalized_uri, endpoints)
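# Illustrative usage (hypothetical URL; performs network discovery):
#   normalized_uri, endpoints = getServiceEndpoints('http://example.com/')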
def applyFilter(normalized_uri, xrd_data, flt=None):
"""Generate an iterable of endpoint objects given this input data,
presumably from the result of performing the Yadis protocol.
@param normalized_uri: The input URL, after following redirects,
as in the Yadis protocol.
    @param xrd_data: The XML text of the XRDS file fetched from the
        normalized URI.
@type xrd_data: str
"""
flt = mkFilter(flt)
et = parseXRDS(xrd_data)
endpoints = []
for service_element in iterServices(et):
endpoints.extend(
flt.getServiceEndpoints(normalized_uri, service_element))
return endpoints
|
javachengwc/hue
|
refs/heads/master
|
desktop/core/ext-py/pyformance-0.3.2/pyformance/meters/meter.py
|
33
|
import time
from threading import Lock
from ..stats.moving_average import ExpWeightedMovingAvg
class Meter(object):
"""
A meter metric which measures mean throughput and one-, five-, and fifteen-minute
exponentially-weighted moving average throughputs.
"""
def __init__(self, clock=time):
super(Meter, self).__init__()
self.lock = Lock()
self.clock = clock
self.clear()
def clear(self):
with self.lock:
self.start_time = self.clock.time()
self.counter = 0.0
self.m1rate = ExpWeightedMovingAvg(period=1, clock=self.clock)
self.m5rate = ExpWeightedMovingAvg(period=5, clock=self.clock)
self.m15rate = ExpWeightedMovingAvg(period=15, clock=self.clock)
def get_one_minute_rate(self):
return self.m1rate.get_rate()
def get_five_minute_rate(self):
return self.m5rate.get_rate()
def get_fifteen_minute_rate(self):
return self.m15rate.get_rate()
def tick(self):
self.m1rate.tick()
self.m5rate.tick()
self.m15rate.tick()
def mark(self, value=1):
with self.lock:
self.counter += value
self.m1rate.add(value)
self.m5rate.add(value)
self.m15rate.add(value)
def get_count(self):
return self.counter
def get_mean_rate(self):
if self.counter == 0:
return 0
elapsed = self.clock.time() - self.start_time
return self.counter / elapsed
def _convertNsRate(self, ratePerNs):
return ratePerNs
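# Illustrative usage:
#   m = Meter()
#   m.mark()                  # record one event
#   m.mark(5)                 # record five events at once
#   rate = m.get_mean_rate()  # mean events/second since creation or clear()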
|
breunigs/beets
|
refs/heads/master
|
beets/__init__.py
|
1
|
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
__version__ = '1.2.2'
__author__ = 'Adrian Sampson <adrian@radbox.org>'
import beets.library
from beets.util import confit
Library = beets.library.Library
config = confit.LazyConfig('beets', __name__)
|
tomchristie/django
|
refs/heads/master
|
django/contrib/auth/migrations/0006_require_contenttypes_0002.py
|
134
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('auth', '0005_alter_user_last_login_null'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
# Ensure the contenttypes migration is applied before sending
# post_migrate signals (which create ContentTypes).
]
|
megaserg/pants
|
refs/heads/master
|
tests/python/pants_test/tasks/task_test_base.py
|
3
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
from contextlib import closing
from StringIO import StringIO
from pants.backend.core.tasks.console_task import ConsoleTask
from pants.goal.goal import Goal
from pants.ivy.bootstrapper import Bootstrapper
from pants.util.contextutil import temporary_dir
from pants_test.base_test import BaseTest
# TODO: Find a better home for this?
def is_exe(name):
result = subprocess.call(['which', name], stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT)
return result == 0
def ensure_cached(task_cls, expected_num_artifacts=None):
"""Decorator for a task-executing unit test. Asserts that after running
the decorated test function, the cache for task_cls contains expected_num_artifacts.
Clears the task's cache before running the test.
:param task_cls: Class of the task to check the artifact cache for. (e.g. JarCreate)
:param expected_num_artifacts: Expected number of artifacts to be in the task's
cache after running the test. If unspecified, will
assert that the number of artifacts in the cache is
non-zero.
"""
def decorator(test_fn):
def wrapper(self, *args, **kwargs):
with temporary_dir() as artifact_cache:
self.set_options_for_scope('cache.{}'.format(self.options_scope),
write_to=[artifact_cache])
task_cache = os.path.join(artifact_cache, task_cls.stable_name())
os.mkdir(task_cache)
test_fn(self, *args, **kwargs)
num_artifacts = 0
for (_, _, files) in os.walk(task_cache):
num_artifacts += len(files)
if expected_num_artifacts is None:
self.assertNotEqual(num_artifacts, 0)
else:
self.assertEqual(num_artifacts, expected_num_artifacts)
return wrapper
return decorator
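# Illustrative usage (JarCreate is the example task cited in the docstring):
#   @ensure_cached(JarCreate, expected_num_artifacts=1)
#   def test_jar_create_populates_cache(self):
#       ...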
class TaskTestBase(BaseTest):
"""A baseclass useful for testing a single Task type."""
options_scope = 'test_scope'
@classmethod
def task_type(cls):
"""Subclasses must return the type of the Task subclass under test."""
raise NotImplementedError()
def setUp(self):
super(TaskTestBase, self).setUp()
self._testing_task_type = self.synthesize_task_subtype(self.task_type(), self.options_scope)
# We locate the workdir below the pants_workdir, which BaseTest locates within the BuildRoot.
# BaseTest cleans this up, so we don't need to. We give it a stable name, so that we can
# use artifact caching to speed up tests.
self._test_workdir = os.path.join(self.pants_workdir, self.task_type().stable_name())
os.mkdir(self._test_workdir)
# TODO: Push this down to JVM-related tests only? Seems wrong to have an ivy-specific
# action in this non-JVM-specific, high-level base class.
Bootstrapper.reset_instance()
@property
def test_workdir(self):
return self._test_workdir
def synthesize_task_subtype(self, task_type, options_scope):
"""Creates a synthetic subclass of the task type.
Note that passing in a stable options scope will speed up some tests, as the scope may appear
in the paths of tools used by the task, and if these are stable, tests can get artifact
cache hits when bootstrapping these tools. This doesn't hurt test isolation, as we reset
class-level state between each test.
# TODO: Use the task type directly once we re-do the Task lifecycle.
:param task_type: The task type to subtype.
:param options_scope: The scope to give options on the generated task type.
:return: A pair (type, options_scope)
"""
subclass_name = b'test_{0}_{1}'.format(task_type.__name__, options_scope)
return type(subclass_name, (task_type,), {'_stable_name': task_type._compute_stable_name(),
'options_scope': options_scope})
def set_options(self, **kwargs):
self.set_options_for_scope(self.options_scope, **kwargs)
def context(self, for_task_types=None, options=None, passthru_args=None, target_roots=None,
console_outstream=None, workspace=None, for_subsystems=None):
# Add in our task type.
for_task_types = [self._testing_task_type] + (for_task_types or [])
return super(TaskTestBase, self).context(for_task_types=for_task_types,
options=options,
passthru_args=passthru_args,
target_roots=target_roots,
console_outstream=console_outstream,
workspace=workspace,
for_subsystems=for_subsystems)
def create_task(self, context, workdir=None):
return self._testing_task_type(context, workdir or self._test_workdir)
class ConsoleTaskTestBase(TaskTestBase):
"""A base class useful for testing ConsoleTasks."""
def setUp(self):
Goal.clear()
super(ConsoleTaskTestBase, self).setUp()
task_type = self.task_type()
assert issubclass(task_type, ConsoleTask), \
'task_type() must return a ConsoleTask subclass, got %s' % task_type
def execute_task(self, targets=None, options=None):
"""Creates a new task and executes it with the given config, command line args and targets.
:param targets: Optional list of Target objects passed on the command line.
Returns the text output of the task.
"""
options = options or {}
with closing(StringIO()) as output:
self.set_options(**options)
context = self.context(target_roots=targets, console_outstream=output)
task = self.create_task(context)
task.execute()
return output.getvalue()
def execute_console_task(self, targets=None, extra_targets=None, options=None,
passthru_args=None, workspace=None):
"""Creates a new task and executes it with the given config, command line args and targets.
:param options: option values.
:param targets: optional list of Target objects passed on the command line.
:param extra_targets: optional list of extra targets in the context in addition to those
passed on the command line.
:param passthru_args: optional list of passthru_args
:param workspace: optional Workspace to pass into the context.
Returns the list of items returned from invoking the console task's console_output method.
"""
options = options or {}
self.set_options(**options)
context = self.context(target_roots=targets, passthru_args=passthru_args, workspace=workspace)
return self.execute_console_task_given_context(context, extra_targets=extra_targets)
def execute_console_task_given_context(self, context, extra_targets=None):
"""Creates a new task and executes it with the context and extra targets.
:param context: The pants run context to use.
:param extra_targets: An optional list of extra targets in the context in addition to those
passed on the command line.
:returns: The list of items returned from invoking the console task's console_output method.
:rtype: list of strings
"""
task = self.create_task(context)
return list(task.console_output(list(task.context.targets()) + list(extra_targets or ())))
def assert_entries(self, sep, *output, **kwargs):
"""Verifies the expected output text is flushed by the console task under test.
NB: order of entries is not tested, just presence.
sep: the expected output separator.
*output: the output entries expected between the separators
**options: additional options passed to execute_task.
"""
    # We expect each output line to be suffixed with the separator, so for sep=',' and [1, 2, 3] we expect:
# '1,2,3,' - splitting this by the separator we should get ['1', '2', '3', ''] - always an extra
# empty string if the separator is properly always a suffix and not applied just between
# entries.
self.assertEqual(sorted(list(output) + ['']), sorted((self.execute_task(**kwargs)).split(sep)))
def assert_console_output(self, *output, **kwargs):
"""Verifies the expected output entries are emitted by the console task under test.
NB: order of entries is not tested, just presence.
*output: the expected output entries
**kwargs: additional kwargs passed to execute_console_task.
"""
self.assertEqual(sorted(output), sorted(self.execute_console_task(**kwargs)))
def assert_console_output_contains(self, output, **kwargs):
"""Verifies the expected output string is emitted by the console task under test.
output: the expected output entry(ies)
**kwargs: additional kwargs passed to execute_console_task.
"""
self.assertIn(output, self.execute_console_task(**kwargs))
def assert_console_output_ordered(self, *output, **kwargs):
"""Verifies the expected output entries are emitted by the console task under test.
NB: order of entries is tested.
*output: the expected output entries in expected order
**kwargs: additional kwargs passed to execute_console_task.
"""
self.assertEqual(list(output), self.execute_console_task(**kwargs))
def assert_console_raises(self, exception, **kwargs):
"""Verifies the expected exception is raised by the console task under test.
**kwargs: additional kwargs are passed to execute_console_task.
"""
with self.assertRaises(exception):
self.execute_console_task(**kwargs)
|
TNT-Samuel/Coding-Projects
|
refs/heads/master
|
DNS Server/Source/Lib/site-packages/dask/array/tests/test_gufunc.py
|
2
|
from __future__ import absolute_import, division, print_function
from distutils.version import LooseVersion
import pytest
from numpy.testing import assert_equal
import dask.array as da
from dask.array.utils import assert_eq
import numpy as np
from dask.array.core import Array
from dask.array.gufunc import _parse_gufunc_signature, apply_gufunc, gufunc, as_gufunc
# Copied from `numpy.lib.tests.test_function_base.py`:
def test__parse_gufunc_signature():
assert_equal(_parse_gufunc_signature('(x)->()'), ([('x',)], ()))
assert_equal(_parse_gufunc_signature('(x,y)->()'),
([('x', 'y')], ()))
assert_equal(_parse_gufunc_signature('(x),(y)->()'),
([('x',), ('y',)], ()))
assert_equal(_parse_gufunc_signature('(x)->(y)'),
([('x',)], ('y',)))
assert_equal(_parse_gufunc_signature('(x)->(y),()'),
([('x',)], [('y',), ()]))
assert_equal(_parse_gufunc_signature('(),(a,b,c),(d)->(d,e)'),
([(), ('a', 'b', 'c'), ('d',)], ('d', 'e')))
with pytest.raises(ValueError):
_parse_gufunc_signature('(x)(y)->()')
with pytest.raises(ValueError):
_parse_gufunc_signature('(x),(y)->')
with pytest.raises(ValueError):
_parse_gufunc_signature('((x))->(x)')
with pytest.raises(ValueError):
_parse_gufunc_signature('(x)->(x),')
def test_apply_gufunc_01():
def stats(x):
return np.mean(x, axis=-1), np.std(x, axis=-1)
a = da.random.normal(size=(10, 20, 30), chunks=(5, 5, 30))
mean, std = apply_gufunc(stats, "(i)->(),()", a,
output_dtypes=2 * (a.dtype,))
assert mean.compute().shape == (10, 20)
assert std.compute().shape == (10, 20)
def test_apply_gufunc_01b():
def stats(x):
return np.mean(x, axis=-1), np.std(x, axis=-1)
a = da.random.normal(size=(10, 20, 30), chunks=5)
mean, std = apply_gufunc(stats, "(i)->(),()", a,
output_dtypes=2 * (a.dtype,),
allow_rechunk=True)
assert mean.compute().shape == (10, 20)
assert std.compute().shape == (10, 20)
@pytest.mark.skipif(LooseVersion(np.__version__) < '1.12.0',
reason="`np.vectorize(..., signature=...)` not supported yet")
@pytest.mark.parametrize('vectorize', [False, True])
def test_apply_gufunc_output_dtypes_string(vectorize):
def stats(x):
return np.mean(x, axis=-1)
a = da.random.normal(size=(10, 20, 30), chunks=(5, 5, 30))
mean = apply_gufunc(stats, "(i)->()", a, output_dtypes="f", vectorize=vectorize)
assert mean.compute().shape == (10, 20)
@pytest.mark.skipif(LooseVersion(np.__version__) < '1.12.0',
reason="`np.vectorize(..., signature=...)` not supported yet")
@pytest.mark.parametrize('vectorize', [False, True])
def test_apply_gufunc_output_dtypes_string_many_outputs(vectorize):
def stats(x):
return np.mean(x, axis=-1), np.std(x, axis=-1)
a = da.random.normal(size=(10, 20, 30), chunks=(5, 5, 30))
mean, std = apply_gufunc(stats, "(i)->(),()", a, output_dtypes="ff", vectorize=vectorize)
assert mean.compute().shape == (10, 20)
assert std.compute().shape == (10, 20)
def test_apply_gufunc_pass_additional_kwargs():
def foo(x, bar):
assert bar == 2
return x
ret = apply_gufunc(foo, "()->()", 1., output_dtypes="f", bar=2)
assert_eq(ret, np.array(1., dtype="f"))
@pytest.mark.xfail(reason="Currently np.einsum doesn't seem to broadcast correctly for this case")
def test_apply_gufunc_02():
def outer_product(x, y):
return np.einsum("...i,...j->...ij", x, y)
a = da.random.normal(size=( 20, 30), chunks=(5, 30))
b = da.random.normal(size=(10, 1, 40), chunks=(10, 1, 40))
c = apply_gufunc(outer_product, "(i),(j)->(i,j)", a, b, output_dtypes=a.dtype)
assert c.compute().shape == (10, 20, 30, 40)
def test_apply_gufunc_scalar_output():
def foo():
return 1
x = apply_gufunc(foo, "->()", output_dtypes=int)
assert x.compute() == 1
def test_apply_gufunc_elemwise_01():
def add(x, y):
return x + y
a = da.from_array(np.array([1, 2, 3]), chunks=2, name='a')
b = da.from_array(np.array([1, 2, 3]), chunks=2, name='b')
z = apply_gufunc(add, "(),()->()", a, b, output_dtypes=a.dtype)
assert_eq(z, np.array([2, 4, 6]))
def test_apply_gufunc_elemwise_01b():
def add(x, y):
return x + y
a = da.from_array(np.array([1, 2, 3]), chunks=2, name='a')
b = da.from_array(np.array([1, 2, 3]), chunks=1, name='b')
with pytest.raises(ValueError):
apply_gufunc(add, "(),()->()", a, b, output_dtypes=a.dtype)
def test_apply_gufunc_elemwise_02():
def addmul(x, y):
assert x.shape in ((2,), (1,))
return x + y, x * y
a = da.from_array(np.array([1, 2, 3]), chunks=2, name='a')
b = da.from_array(np.array([1, 2, 3]), chunks=2, name='b')
z1, z2 = apply_gufunc(addmul, "(),()->(),()", a, b, output_dtypes=2 * (a.dtype,))
assert_eq(z1, np.array([2, 4, 6]))
assert_eq(z2, np.array([1, 4, 9]))
def test_gufunc_vector_output():
def foo():
return np.array([1, 2, 3], dtype=int)
x = apply_gufunc(foo, "->(i_0)", output_dtypes=int, output_sizes={"i_0": 3})
assert x.chunks == ((3,),)
assert_eq(x, np.array([1, 2, 3]))
def test_apply_gufunc_elemwise_loop():
def foo(x):
assert x.shape in ((2,), (1,))
return 2 * x
a = da.from_array(np.array([1, 2, 3]), chunks=2, name='a')
z = apply_gufunc(foo, "()->()", a, output_dtypes=int)
assert z.chunks == ((2, 1),)
assert_eq(z, np.array([2, 4, 6]))
def test_apply_gufunc_elemwise_core():
def foo(x):
assert x.shape == (3,)
return 2 * x
a = da.from_array(np.array([1, 2, 3]), chunks=3, name='a')
z = apply_gufunc(foo, "(i)->(i)", a, output_dtypes=int)
assert z.chunks == ((3,),)
assert_eq(z, np.array([2, 4, 6]))
# TODO: In case single tuple output will get enabled:
# def test_apply_gufunc_one_scalar_output():
# def foo():
# return 1,
# x, = apply_gufunc(foo, "->(),", output_dtypes=(int,))
# assert x.compute() == 1
def test_apply_gufunc_two_scalar_output():
def foo():
return 1, 2
x, y = apply_gufunc(foo, "->(),()", output_dtypes=(int, int))
assert x.compute() == 1
assert y.compute() == 2
def test_apply_gufunc_two_mixed_outputs():
def foo():
return 1, np.ones((2, 3), dtype=float)
x, y = apply_gufunc(foo, "->(),(i,j)",
output_dtypes=(int, float),
output_sizes={'i': 2, 'j': 3})
assert x.compute() == 1
assert y.chunks == ((2,), (3,))
assert_eq(y, np.ones((2, 3), dtype=float))
def test_gufunc_two_inputs():
def foo(x, y):
return np.einsum('...ij,...jk->ik', x, y)
a = da.ones((2, 3), chunks=100, dtype=int)
b = da.ones((3, 4), chunks=100, dtype=int)
x = apply_gufunc(foo, "(i,j),(j,k)->(i,k)", a, b, output_dtypes=int)
assert_eq(x, 3 * np.ones((2, 4), dtype=int))
def test_gufunc_mixed_inputs():
def foo(x, y):
return x + y
a = np.ones((2, 1), dtype=int)
b = da.ones((1, 8), chunks=(2, 3), dtype=int)
x = apply_gufunc(foo, "(),()->()", a, b, output_dtypes=int)
assert_eq(x, 2 * np.ones((2, 8), dtype=int))
@pytest.mark.skipif(LooseVersion(np.__version__) < '1.12.0',
reason="`np.vectorize(..., signature=...)` not supported yet")
def test_gufunc():
x = da.random.normal(size=(10, 5), chunks=(2, 5))
def foo(x):
return np.mean(x, axis=-1)
gufoo = gufunc(foo, signature="(i)->()", output_dtypes=float, vectorize=True)
y = gufoo(x)
valy = y.compute()
assert isinstance(y, Array)
assert valy.shape == (10,)
@pytest.mark.skipif(LooseVersion(np.__version__) < '1.12.0',
reason="`np.vectorize(..., signature=...)` not supported yet")
def test_as_gufunc():
x = da.random.normal(size=(10, 5), chunks=(2, 5))
@as_gufunc("(i)->()", output_dtypes=float, vectorize=True)
def foo(x):
return np.mean(x, axis=-1)
y = foo(x)
valy = y.compute()
assert isinstance(y, Array)
assert valy.shape == (10,)
def test_apply_gufunc_broadcasting_loopdims():
def foo(x, y):
assert len(x.shape) == 2
assert len(y.shape) == 3
x, y = np.broadcast_arrays(x, y)
return x, y, x * y
a = da.random.normal(size=( 10, 30), chunks=(8, 30))
b = da.random.normal(size=(20, 1, 30), chunks=(3, 1, 30))
x, y, z = apply_gufunc(foo, "(i),(i)->(i),(i),(i)", a, b, output_dtypes=3 * (float,), vectorize=False)
assert x.compute().shape == (20, 10, 30)
assert y.compute().shape == (20, 10, 30)
assert z.compute().shape == (20, 10, 30)
def test_apply_gufunc_check_same_dimsizes():
def foo(x, y):
return x + y
a = da.random.normal(size=(3,), chunks=(2,))
b = da.random.normal(size=(4,), chunks=(2,))
with pytest.raises(ValueError) as excinfo:
apply_gufunc(foo, "(),()->()", a, b, output_dtypes=float, allow_rechunk=True)
assert "different lengths in arrays" in str(excinfo.value)
def test_apply_gufunc_check_coredim_chunksize():
def foo(x):
return np.sum(x, axis=-1)
a = da.random.normal(size=(8,), chunks=3)
with pytest.raises(ValueError) as excinfo:
da.apply_gufunc(foo, "(i)->()", a, output_dtypes=float, allow_rechunk=False)
assert "consists of multiple chunks" in str(excinfo.value)
def test_apply_gufunc_check_inhomogeneous_chunksize():
def foo(x, y):
return x + y
a = da.random.normal(size=(8,), chunks=((2, 2, 2, 2),))
b = da.random.normal(size=(8,), chunks=((2, 3, 3),))
with pytest.raises(ValueError) as excinfo:
da.apply_gufunc(foo, "(),()->()", a, b, output_dtypes=float, allow_rechunk=False)
assert "with different chunksize present" in str(excinfo.value)
|
KSanthanam/rethinkdb
|
refs/heads/next
|
test/common/rdb_unittest.py
|
5
|
#!/usr/bin/env python
# Copyright 2015 RethinkDB, all rights reserved.
import itertools, os, random, shutil, sys, unittest, warnings
import driver, utils
def main():
unittest.main(argv=[sys.argv[0]])
class RdbTestCase(unittest.TestCase):
# -- settings
servers = None # defaults to shards * replicas
shards = 1
replicas = 1
server_command_prefix = None
server_extra_options = None
fieldName = 'id'
recordsToGenerate = 0
    samplesPerShard = 5 # the number of records to change per shard when making changes
destructiveTest = False # if true the cluster should be restarted after this test
# -- class variables
dbName = None
tableName = None
db = None
table = None
cluster = None
_conn = None
r = utils.import_python_driver()
# -- unittest subclass variables
__currentResult = None
__problemCount = None
# --
def run(self, result=None):
if not all([self.dbName, self.tableName]):
defaultDb, defaultTable = utils.get_test_db_table()
if self.dbName is None:
self.__class__.dbName = defaultDb
if self.tableName is None:
self.__class__.tableName = defaultTable
self.__class__.db = self.r.db(self.dbName)
self.__class__.table = self.db.table(self.tableName)
# Allow detecting test failure in tearDown
self.__currentResult = result or self.defaultTestResult()
self.__problemCount = 0 if result is None else len(self.__currentResult.errors) + len(self.__currentResult.failures)
super(RdbTestCase, self).run(self.__currentResult)
@property
def conn(self):
'''Retrieve a valid connection to some server in the cluster'''
# -- check if we already have a good cached connection
if self.__class__._conn and self.__class__._conn.is_open():
try:
self.r.expr(1).run(self.__class__._conn)
return self.__class__._conn
except Exception: pass
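        # the cached connection is stale; close it before trying to reconnect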
        if self.__class__._conn is not None:
try:
self.__class__._conn.close()
except Exception: pass
self.__class__._conn = None
# -- try a new connection to each server in order
for server in self.cluster:
if not server.ready:
continue
try:
self.__class__._conn = self.r.connect(host=server.host, port=server.driver_port)
return self.__class__._conn
except Exception as e: pass
else:
# fail as we have run out of servers
raise Exception('Unable to get a connection to any server in the cluster')
def getPrimaryForShard(self, index, tableName=None, dbName=None):
if tableName is None:
tableName = self.tableName
if dbName is None:
dbName = self.dbName
serverName = self.r.db(dbName).table(tableName).config()['shards'].nth(index)['primary_replica'].run(self.conn)
for server in self.cluster:
if server.name == serverName:
return server
return None
def getReplicasForShard(self, index, tableName=None, dbName=None):
if tableName is None:
tableName = self.tableName
if dbName is None:
dbName = self.dbName
shardsData = self.r.db(dbName).table(tableName).config()['shards'].nth(index).run(self.conn)
replicaNames = [x for x in shardsData['replicas'] if x != shardsData['primary_replica']]
replicas = []
for server in self.cluster:
if server.name in replicaNames:
replicas.append(server)
return replicas
def getReplicaForShard(self, index, tableName=None, dbName=None):
        replicas = self.getReplicasForShard(index, tableName=tableName, dbName=dbName)
if replicas:
return replicas[0]
else:
return None
def checkCluster(self):
'''Check that all the servers are running and the cluster is in good shape. Errors on problems'''
assert self.cluster is not None, 'The cluster was None'
self.cluster.check()
assert [] == list(self.r.db('rethinkdb').table('current_issues').run(self.conn))
def setUp(self):
# -- start the servers
# - check on an existing cluster
if self.cluster is not None:
try:
self.checkCluster()
except:
self.__class__.cluster = None
self.__class__._conn = None
self.__class__.table = None
# - start new servers if necessary
# note: we start up enough servers to make sure they each have only one role
if self.cluster is None:
            initialServers = max(self.shards * self.replicas, self.servers)
            if self.servers is not None and initialServers > self.servers:
                raise ValueError('servers must always be >= shards * replicas. If you need another configuration you must set it up manually')
            self.__class__.cluster = driver.Cluster(
                initial_servers=initialServers,
wait_until_ready=True,
command_prefix=self.server_command_prefix,
extra_options=self.server_extra_options
)
# -- ensure db is available
if self.dbName is not None and self.dbName not in self.r.db_list().run(self.conn):
self.r.db_create(self.dbName).run(self.conn)
# -- setup test table
if self.tableName is not None:
# - ensure we have a clean table
if self.tableName in self.r.db(self.dbName).table_list().run(self.conn):
self.r.db(self.dbName).table_drop(self.tableName).run(self.conn)
self.r.db(self.dbName).table_create(self.tableName).run(self.conn)
self.__class__.table = self.r.db(self.dbName).table(self.tableName)
# - add initial records
if self.recordsToGenerate:
utils.populateTable(conn=self.conn, table=self.table, records=self.recordsToGenerate, fieldName=self.fieldName)
# - shard and replicate the table
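            # the first `shards` servers become primaries; the remaining
            # servers are handed out as replicas, `replicas - 1` per shard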
primaries = iter(self.cluster[:self.shards])
replicas = iter(self.cluster[self.shards:])
shardPlan = []
for primary in primaries:
chosenReplicas = [replicas.next().name for _ in range(0, self.replicas - 1)]
shardPlan.append({'primary_replica':primary.name, 'replicas':[primary.name] + chosenReplicas})
assert (self.r.db(self.dbName).table(self.tableName).config().update({'shards':shardPlan}).run(self.conn))['errors'] == 0
self.r.db(self.dbName).table(self.tableName).wait().run(self.conn)
def tearDown(self):
# -- verify that the servers are still running
lastError = None
for server in self.cluster:
if server.running is False:
continue
try:
server.check()
except Exception as e:
lastError = e
# -- check that there were not problems in this test
allGood = self.__problemCount == len(self.__currentResult.errors) + len(self.__currentResult.failures)
if lastError is not None or not allGood:
# -- stop all of the servers
try:
self.cluster.check_and_stop()
except Exception: pass
# -- save the server data
try:
# - create enclosing dir
name = self.id()
if name.startswith('__main__.'):
name = name[len('__main__.'):]
outputFolder = os.path.realpath(os.path.join(os.getcwd(), name))
if not os.path.isdir(outputFolder):
os.makedirs(outputFolder)
# - copy the servers data
for server in self.cluster:
shutil.copytree(server.data_path, os.path.join(outputFolder, os.path.basename(server.data_path)))
except Exception as e:
warnings.warn('Unable to copy server folder into results: %s' % str(e))
self.__class__.cluster = None
self.__class__._conn = None
self.__class__.table = None
if lastError:
raise lastError
if self.destructiveTest:
try:
self.cluster.check_and_stop()
except Exception: pass
self.__class__.cluster = None
self.__class__._conn = None
self.__class__.table = None
def makeChanges(self, tableName=None, dbName=None, samplesPerShard=None, connections=None):
'''make a minor change to records, and return those ids'''
if tableName is None:
tableName = self.tableName
if dbName is None:
dbName = self.dbName
if samplesPerShard is None:
samplesPerShard = self.samplesPerShard
if connections is None:
connections = itertools.cycle([self.conn])
else:
connections = itertools.cycle(connections)
changedRecordIds = []
for lower, upper in utils.getShardRanges(connections.next(), tableName):
conn = connections.next()
sampleIds = (x['id'] for x in self.r.db(dbName).table(tableName).between(lower, upper).sample(samplesPerShard).run(conn))
for thisId in sampleIds:
self.r.db(dbName).table(tableName).get(thisId).update({'randomChange':random.randint(0, 65536)}).run(conn)
changedRecordIds.append(thisId)
changedRecordIds.sort()
return changedRecordIds
|
izhukov/ansible
|
refs/heads/devel
|
lib/ansible/runner/lookup_plugins/file.py
|
153
|
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible import utils, errors
import os
import codecs
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, inject=None, **kwargs):
terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
ret = []
# this can happen if the variable contains a string, strictly not desired for lookup
# plugins, but users may try it, so make it work.
if not isinstance(terms, list):
terms = [ terms ]
for term in terms:
basedir_path = utils.path_dwim(self.basedir, term)
relative_path = None
playbook_path = None
# Special handling of the file lookup, used primarily when the
# lookup is done from a role. If the file isn't found in the
# basedir of the current file, use dwim_relative to look in the
# role/files/ directory, and finally the playbook directory
# itself (which will be relative to the current working dir)
if '_original_file' in inject:
relative_path = utils.path_dwim_relative(inject['_original_file'], 'files', term, self.basedir, check=False)
if 'playbook_dir' in inject:
playbook_path = os.path.join(inject['playbook_dir'], term)
for path in (basedir_path, relative_path, playbook_path):
if path and os.path.exists(path):
ret.append(codecs.open(path, encoding="utf8").read().rstrip())
break
else:
raise errors.AnsibleError("could not locate file in lookup: %s" % term)
return ret
|
ProjectX-Android/kernel_htc_msm8974
|
refs/heads/lollipop-5.1
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
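	# a trace line looks roughly like (illustrative):
	#   <idle>-0     [001]  4154.684111: schedule <-worker_thread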
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
liyichao/spark
|
refs/heads/master
|
python/pyspark/mllib/stat/distribution.py
|
137
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import namedtuple
__all__ = ['MultivariateGaussian']
class MultivariateGaussian(namedtuple('MultivariateGaussian', ['mu', 'sigma'])):
"""Represents a (mu, sigma) tuple
>>> m = MultivariateGaussian(Vectors.dense([11,12]),DenseMatrix(2, 2, (1.0, 3.0, 5.0, 2.0)))
>>> (m.mu, m.sigma.toArray())
(DenseVector([11.0, 12.0]), array([[ 1., 5.],[ 3., 2.]]))
>>> (m[0], m[1])
(DenseVector([11.0, 12.0]), array([[ 1., 5.],[ 3., 2.]]))
"""
|
reviewboard/ReviewBot
|
refs/heads/master
|
docs/releasenotes/conf.py
|
1
|
# -*- coding: utf-8 -*-
#
# Release Notes documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 27 18:32:43 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import unicode_literals
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('_ext'))
# Allow Sphinx to find the Review Bot modules.
parent_dir = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
sys.path.insert(0, os.path.join(parent_dir, 'bot'))
import reviewbot
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'beanbag_docutils.sphinx.ext.django_utils',
'beanbag_docutils.sphinx.ext.extlinks',
'beanbag_docutils.sphinx.ext.http_role',
'beanbag_docutils.sphinx.ext.intersphinx_utils',
'beanbag_docutils.sphinx.ext.retina_images',
'extralinks',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Release Notes'
copyright = '2017, Beanbag, Inc.'
author = 'Beanbag, Inc.'
bugtracker_url = 'https://reviewboard.org/bugs/%s'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join([str(i) for i in reviewbot.__version_info__[:2]])
# The full version, including alpha/beta/rc tags.
release = reviewbot.get_version_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'classic'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
html_title = 'Release Notes'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or
# 32x32 pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'ReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ReleaseNotes.tex', 'Release Notes Documentation',
'Beanbag, Inc.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = []
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = []
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Check whether reviewboard.org intersphinx lookups should use the local
# server.
if os.getenv('DOCS_USE_LOCAL_RBWEBSITE') == '1':
rbwebsite_url = 'http://localhost:8081'
else:
rbwebsite_url = 'https://www.reviewboard.org'
# Add references for intersphinx and custom roles.
intersphinx_mapping = {
'python': ('https://docs.python.org/2.7', None),
'rb3.0': ('%s/docs/manual/3.0/' % rbwebsite_url, None),
'rb4.0': ('%s/docs/manual/4.0/' % rbwebsite_url, None),
'reviewbot1.0': ('%s/docs/reviewbot/1.0/' % rbwebsite_url, None),
'reviewbot2.0': ('%s/docs/reviewbot/2.0/' % rbwebsite_url, None),
'reviewbot3.0': ('%s/docs/reviewbot/3.0/' % rbwebsite_url, None),
}
|
vnsofthe/odoo
|
refs/heads/8.0
|
addons/web_kanban_gauge/__openerp__.py
|
428
|
{
'name': 'Gauge Widget for Kanban',
'category': 'Hidden',
'description': """
This widget allows displaying gauges using the justgage library.
""",
'version': '1.0',
'depends': ['web_kanban'],
'data' : [
'views/web_kanban_gauge.xml',
],
'qweb': [
],
'auto_install': True,
}
|
drogenlied/qudi
|
refs/heads/master
|
logic/simple_data_logic.py
|
1
|
# -*- coding: utf-8 -*-
"""
Buffer for simple data
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
from qtpy import QtCore
import numpy as np
from logic.generic_logic import GenericLogic
class SimpleDataLogic(GenericLogic):
""" Logic module agreggating multiple hardware switches.
"""
    _modclass = 'simple_data'
_modtype = 'logic'
_in = {'simpledata': 'SimpleData'}
_out = {'simplelogic': 'SimpleDataLogic'}
sigRepeat = QtCore.Signal()
def on_activate(self, e):
""" Prepare logic module for work.
@param object e: Fysom state change notification
"""
self._data_logic = self.get_in_connector('simpledata')
self.stopRequest = False
self.bufferLength = 1000
self.sigRepeat.connect(self.measureLoop, QtCore.Qt.QueuedConnection)
def on_deactivate(self, e):
""" Deactivate modeule.
@param object e: Fysom state change notification
"""
self.stopMeasure()
def startMeasure(self):
""" Start measurement: zero the buffer and call loop function."""
self.window_len = 50
self.buf = np.zeros((self.bufferLength, self._data_logic.getChannels()))
self.smooth = np.zeros((self.bufferLength + self.window_len - 1, self._data_logic.getChannels()))
self.lock()
self.sigRepeat.emit()
def stopMeasure(self):
""" Ask the measurement loop to stop. """
self.stopRequest = True
def measureLoop(self):
""" Measure 10 values, add them to buffer and remove the 10 oldest values.
"""
if self.stopRequest:
self.stopRequest = False
self.unlock()
return
data = [self._data_logic.getData() for i in range(10)]
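        # treat buf as a ring buffer: shift everything left by 10 so the
        # fresh samples can be written into the last 10 slots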
self.buf = np.roll(self.buf, -10, axis=0)
        self.buf[-10:] = data
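        # smooth each channel with a normalized Hanning window; the buffer is
        # mirrored at both ends (np.r_) to reduce boundary artifacts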
w = np.hanning(self.window_len)
s = np.r_[self.buf[self.window_len-1:0:-1], self.buf, self.buf[-1:-self.window_len:-1]]
for channel in range(self._data_logic.getChannels()):
convolved = np.convolve(w/w.sum(), s[:, channel], mode='valid')
self.smooth[:, channel] = convolved
self.sigRepeat.emit()
|
motatoes/rainbowstream
|
refs/heads/master
|
setup.py
|
6
|
from setuptools import setup, find_packages
import os
import os.path
# Bumped version
version = '1.3.1'
# Require
install_requires = [
"python-dateutil",
"arrow",
"requests==2.5.3",
"pyfiglet",
"twitter",
"Pillow",
"PySocks"
]
# Copy default config if not exists
default = os.path.expanduser("~") + os.sep + '.rainbow_config.json'
if not os.path.isfile(default):
cmd = 'cp rainbowstream/colorset/config ' + default
os.system(cmd)
cmd = 'chmod 777 ' + default
os.system(cmd)
# Setup
setup(name='rainbowstream',
version=version,
description="A smart and nice Twitter client on terminal.",
long_description=open("./README.rst", "r").read(),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='twitter, command-line tools, stream API',
author='Vu Nhat Minh',
author_email='nhatminh179@gmail.com',
url='http://www.rainbowstream.org/',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=install_requires,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
rainbowstream=rainbowstream.rainbow:fly
""",
)
|
MediaMath/Diamond
|
refs/heads/master
|
src/collectors/mmcassandra/mmcassandra.py
|
1
|
import subprocess, socket, math
import diamond.collector
def parse_line(line):
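    # e.g. parse_line("Read Latency: 1.234 ms.") -> ("Read Latency", 1.234)  (illustrative)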
metric_name, rhs = line.strip().split(':', 1)
rhs = rhs.strip()
if ' ' in rhs:
str_value, units = rhs.split(' ', 1)
if units not in ('ms', 'ms.'):
raise ValueError("Cannot parse " + repr(line))
else:
str_value = rhs
try:
value = float(str_value)
except:
value = str_value
return metric_name, value
class Keyspace(object):
def __init__(self, name, stats, tables):
self.name = name
self.stats = stats
self.tables = tables
class Table(object):
def __init__(self, name, stats):
self.name = name
self.stats = stats
def clean_key(key):
return key.replace(' ', '_').replace(',', '_').replace('(', '').replace(')', '')
bad_keyspaces = ('system', 'system_traces')
class ColumnFamilyStatsCollector(diamond.collector.Collector):
last_read = {}
last_write = {}
def collect(self):
for keyspace in self.cfstats():
if keyspace.name not in bad_keyspaces:
for (key, value) in keyspace.stats:
name = 'cassandra.cfstats.{}.{}'.format(
keyspace.name, key)
self.publish(name, value)
for table in keyspace.tables:
for (key, value) in table.stats:
name = 'cassandra.cfstats.{}.{}.{}'.format(
keyspace.name, table.name, key)
self.publish(name, value)
def get_periodic_rw(self, history_dict, key, value):
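        # returns the delta since the previous poll (0 on the first call),
        # turning monotonically increasing counters into per-interval values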
if history_dict.get(key, 0) == 0:
history_dict[key] = value
periodic_value = value - history_dict.get(key, 0)
history_dict[key] = value
return periodic_value
def cfstats(self):
output = subprocess.check_output(['nodetool', 'cfstats'])
lines = [line for line in output.splitlines()
if line and (line != '----------------')]
        # cfstats output is structured in a very specific way: all lines are
        # key: value pairs prefixed by tabs, and everything indented belongs
        # to the keyspace (one tab) or table (two tabs) declared above it.
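        # e.g. (illustrative; leading tabs shown as \t):
        #   Keyspace: demo
        #   \tRead Count: 42
        #   \t\tTable: users
        #   \t\tLocal read count: 17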
keyspaces = []
ks_name = ""
table_name = ""
for line in lines:
try:
tab_count = len(line) - len(line.lstrip('\t'))
if tab_count == 0:
key, value = parse_line(line)
assert key == 'Keyspace'
ks_name = value
keyspaces.append(Keyspace(value, [], []))
elif tab_count == 1:
key, value = parse_line(line)
if not math.isnan(value):
if key == "Read Count":
value = self.get_periodic_rw(
ColumnFamilyStatsCollector.last_read,
ks_name, value)
elif key == "Write Count":
value = self.get_periodic_rw(
ColumnFamilyStatsCollector.last_write,
ks_name, value)
keyspaces[-1].stats.append((clean_key(key), value))
elif tab_count == 2:
key, value = parse_line(line)
if key == 'Table':
table_name = value
keyspaces[-1].tables.append(Table(value, []))
else:
if not math.isnan(value):
key_name = ks_name + table_name
if key == "Local read count":
rate_value = self.get_periodic_rw(
ColumnFamilyStatsCollector.last_read,
key_name, value)
keyspaces[-1].tables[-1].stats.append(
(clean_key("Local_read_rate"), rate_value))
elif key == "Local write count":
rate_value = self.get_periodic_rw(
ColumnFamilyStatsCollector.last_write,
key_name, value)
keyspaces[-1].tables[-1].stats.append(
(clean_key("Local_write_rate"), rate_value))
keyspaces[-1].tables[-1].stats.append((clean_key(key), value))
else:
raise ValueError
except ValueError:
self.log.error("Unable to parse line: %s" % line)
return keyspaces
|
CiscoSystems/tempest
|
refs/heads/master
|
tempest/api/compute/images/test_list_image_filters_negative.py
|
5
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import exceptions
from tempest import test
CONF = config.CONF
class ListImageFiltersNegativeTestJSON(base.BaseV2ComputeTest):
@classmethod
def resource_setup(cls):
super(ListImageFiltersNegativeTestJSON, cls).resource_setup()
if not CONF.service_available.glance:
skip_msg = ("%s skipped as glance is not available" % cls.__name__)
raise cls.skipException(skip_msg)
cls.client = cls.images_client
@test.attr(type=['negative', 'gate'])
def test_get_nonexistent_image(self):
# Check raises a NotFound
nonexistent_image = data_utils.rand_uuid()
self.assertRaises(exceptions.NotFound, self.client.get_image,
nonexistent_image)
|
stack-of-tasks/rbdlpy
|
refs/heads/master
|
tutorial/lib/python2.7/site-packages/OpenGL/GL/ARB/texture_query_lod.py
|
9
|
'''OpenGL extension ARB.texture_query_lod
This module customises the behaviour of the
OpenGL.raw.GL.ARB.texture_query_lod to provide a more
Python-friendly API
Overview (from the spec)
This extension provides a new set of fragment shader texture
functions (textureLOD) that return the results of automatic
level-of-detail computations that would be performed if a texture
lookup were performed.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/texture_query_lod.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.texture_query_lod import *
from OpenGL.raw.GL.ARB.texture_query_lod import _EXTENSION_NAME
def glInitTextureQueryLodARB():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
|
abloomston/sympy
|
refs/heads/master
|
sympy/strategies/core.py
|
94
|
""" Generic SymPy-Independent Strategies """
from __future__ import print_function, division
from sympy.core.compatibility import get_function_name
identity = lambda x: x
def exhaust(rule):
""" Apply a rule repeatedly until it has no effect """
def exhaustive_rl(expr):
new, old = rule(expr), expr
        while new != old:
new, old = rule(new), new
return new
return exhaustive_rl
def memoize(rule):
""" Memoized version of a rule """
cache = {}
def memoized_rl(expr):
if expr in cache:
return cache[expr]
else:
result = rule(expr)
cache[expr] = result
return result
return memoized_rl
def condition(cond, rule):
""" Only apply rule if condition is true """
def conditioned_rl(expr):
if cond(expr):
return rule(expr)
else:
return expr
return conditioned_rl
def chain(*rules):
"""
Compose a sequence of rules so that they apply to the expr sequentially
"""
def chain_rl(expr):
for rule in rules:
expr = rule(expr)
return expr
return chain_rl
def debug(rule, file=None):
""" Print out before and after expressions each time rule is used """
if file is None:
from sys import stdout
file = stdout
def debug_rl(*args, **kwargs):
expr = args[0]
result = rule(*args, **kwargs)
if result != expr:
file.write("Rule: %s\n" % get_function_name(rule))
file.write("In: %s\nOut: %s\n\n"%(expr, result))
return result
return debug_rl
def null_safe(rule):
""" Return original expr if rule returns None """
def null_safe_rl(expr):
result = rule(expr)
if result is None:
return expr
else:
return result
return null_safe_rl
def tryit(rule):
""" Return original expr if rule raises exception """
def try_rl(expr):
try:
return rule(expr)
except Exception:
return expr
return try_rl
def do_one(*rules):
""" Try each of the rules until one works. Then stop. """
def do_one_rl(expr):
for rl in rules:
result = rl(expr)
if result != expr:
return result
return expr
return do_one_rl
def switch(key, ruledict):
""" Select a rule based on the result of key called on the function """
def switch_rl(expr):
rl = ruledict.get(key(expr), identity)
return rl(expr)
return switch_rl
def minimize(*rules, **kwargs):
""" Select result of rules that minimizes objective
>>> from sympy.strategies import minimize
>>> inc = lambda x: x + 1
>>> dec = lambda x: x - 1
>>> rl = minimize(inc, dec)
>>> rl(4)
3
>>> rl = minimize(inc, dec, objective=lambda x: -x) # maximize
>>> rl(4)
5
"""
objective = kwargs.get('objective', identity)
def minrule(expr):
return min([rule(expr) for rule in rules], key=objective)
return minrule
|
Workday/OpenFrame
|
refs/heads/master
|
build/android/incremental_install/__init__.py
|
57
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
zhukaixy/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/test/test_glob.py
|
84
|
import glob
import os
import shutil
import sys
import unittest
from test.support import (run_unittest, TESTFN, skip_unless_symlink,
can_symlink, create_empty_file)
class GlobTests(unittest.TestCase):
def norm(self, *parts):
return os.path.normpath(os.path.join(self.tempdir, *parts))
def mktemp(self, *parts):
filename = self.norm(*parts)
base, file = os.path.split(filename)
if not os.path.exists(base):
os.makedirs(base)
create_empty_file(filename)
def setUp(self):
self.tempdir = TESTFN + "_dir"
self.mktemp('a', 'D')
self.mktemp('aab', 'F')
self.mktemp('.aa', 'G')
self.mktemp('.bb', 'H')
self.mktemp('aaa', 'zzzF')
self.mktemp('ZZZ')
self.mktemp('a', 'bcd', 'EF')
self.mktemp('a', 'bcd', 'efg', 'ha')
if can_symlink():
os.symlink(self.norm('broken'), self.norm('sym1'))
os.symlink('broken', self.norm('sym2'))
os.symlink(os.path.join('a', 'bcd'), self.norm('sym3'))
def tearDown(self):
shutil.rmtree(self.tempdir)
def glob(self, *parts):
if len(parts) == 1:
pattern = parts[0]
else:
pattern = os.path.join(*parts)
p = os.path.join(self.tempdir, pattern)
res = glob.glob(p)
self.assertEqual(list(glob.iglob(p)), res)
bres = [os.fsencode(x) for x in res]
self.assertEqual(glob.glob(os.fsencode(p)), bres)
self.assertEqual(list(glob.iglob(os.fsencode(p))), bres)
return res
def assertSequencesEqual_noorder(self, l1, l2):
l1 = list(l1)
l2 = list(l2)
self.assertEqual(set(l1), set(l2))
self.assertEqual(sorted(l1), sorted(l2))
def test_glob_literal(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('a'), [self.norm('a')])
eq(self.glob('a', 'D'), [self.norm('a', 'D')])
eq(self.glob('aab'), [self.norm('aab')])
eq(self.glob('zymurgy'), [])
res = glob.glob('*')
self.assertEqual({type(r) for r in res}, {str})
res = glob.glob(os.path.join(os.curdir, '*'))
self.assertEqual({type(r) for r in res}, {str})
res = glob.glob(b'*')
self.assertEqual({type(r) for r in res}, {bytes})
res = glob.glob(os.path.join(os.fsencode(os.curdir), b'*'))
self.assertEqual({type(r) for r in res}, {bytes})
def test_glob_one_directory(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa']))
eq(self.glob('*a'), map(self.norm, ['a', 'aaa']))
eq(self.glob('.*'), map(self.norm, ['.aa', '.bb']))
eq(self.glob('?aa'), map(self.norm, ['aaa']))
eq(self.glob('aa?'), map(self.norm, ['aaa', 'aab']))
eq(self.glob('aa[ab]'), map(self.norm, ['aaa', 'aab']))
eq(self.glob('*q'), [])
def test_glob_nested_directory(self):
eq = self.assertSequencesEqual_noorder
if os.path.normcase("abCD") == "abCD":
# case-sensitive filesystem
eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF')])
else:
# case insensitive filesystem
eq(self.glob('a', 'bcd', 'E*'), [self.norm('a', 'bcd', 'EF'),
self.norm('a', 'bcd', 'efg')])
eq(self.glob('a', 'bcd', '*g'), [self.norm('a', 'bcd', 'efg')])
def test_glob_directory_names(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('*', 'D'), [self.norm('a', 'D')])
eq(self.glob('*', '*a'), [])
eq(self.glob('a', '*', '*', '*a'),
[self.norm('a', 'bcd', 'efg', 'ha')])
eq(self.glob('?a?', '*F'), [self.norm('aaa', 'zzzF'),
self.norm('aab', 'F')])
def test_glob_directory_with_trailing_slash(self):
# Patterns ending with a slash shouldn't match non-dirs
res = glob.glob(self.norm('Z*Z') + os.sep)
self.assertEqual(res, [])
res = glob.glob(self.norm('ZZZ') + os.sep)
self.assertEqual(res, [])
# When there is a wildcard pattern which ends with os.sep, glob()
# doesn't blow up.
res = glob.glob(self.norm('aa*') + os.sep)
self.assertEqual(len(res), 2)
# either of these results is reasonable
self.assertIn(set(res), [
{self.norm('aaa'), self.norm('aab')},
{self.norm('aaa') + os.sep, self.norm('aab') + os.sep},
])
def test_glob_bytes_directory_with_trailing_slash(self):
# Same as test_glob_directory_with_trailing_slash, but with a
# bytes argument.
res = glob.glob(os.fsencode(self.norm('Z*Z') + os.sep))
self.assertEqual(res, [])
res = glob.glob(os.fsencode(self.norm('ZZZ') + os.sep))
self.assertEqual(res, [])
res = glob.glob(os.fsencode(self.norm('aa*') + os.sep))
self.assertEqual(len(res), 2)
# either of these results is reasonable
self.assertIn(set(res), [
{os.fsencode(self.norm('aaa')),
os.fsencode(self.norm('aab'))},
{os.fsencode(self.norm('aaa') + os.sep),
os.fsencode(self.norm('aab') + os.sep)},
])
@skip_unless_symlink
def test_glob_symlinks(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('sym3'), [self.norm('sym3')])
eq(self.glob('sym3', '*'), [self.norm('sym3', 'EF'),
self.norm('sym3', 'efg')])
self.assertIn(self.glob('sym3' + os.sep),
[[self.norm('sym3')], [self.norm('sym3') + os.sep]])
eq(self.glob('*', '*F'),
[self.norm('aaa', 'zzzF'),
self.norm('aab', 'F'), self.norm('sym3', 'EF')])
@skip_unless_symlink
def test_glob_broken_symlinks(self):
eq = self.assertSequencesEqual_noorder
eq(self.glob('sym*'), [self.norm('sym1'), self.norm('sym2'),
self.norm('sym3')])
eq(self.glob('sym1'), [self.norm('sym1')])
eq(self.glob('sym2'), [self.norm('sym2')])
@unittest.skipUnless(sys.platform == "win32", "Win32 specific test")
def test_glob_magic_in_drive(self):
eq = self.assertSequencesEqual_noorder
eq(glob.glob('*:'), [])
eq(glob.glob(b'*:'), [])
eq(glob.glob('?:'), [])
eq(glob.glob(b'?:'), [])
eq(glob.glob('\\\\?\\c:\\'), ['\\\\?\\c:\\'])
eq(glob.glob(b'\\\\?\\c:\\'), [b'\\\\?\\c:\\'])
eq(glob.glob('\\\\*\\*\\'), [])
eq(glob.glob(b'\\\\*\\*\\'), [])
def check_escape(self, arg, expected):
self.assertEqual(glob.escape(arg), expected)
self.assertEqual(glob.escape(os.fsencode(arg)), os.fsencode(expected))
def test_escape(self):
check = self.check_escape
check('abc', 'abc')
check('[', '[[]')
check('?', '[?]')
check('*', '[*]')
check('[[_/*?*/_]]', '[[][[]_/[*][?][*]/_]]')
check('/[[_/*?*/_]]/', '/[[][[]_/[*][?][*]/_]]/')
@unittest.skipUnless(sys.platform == "win32", "Win32 specific test")
def test_escape_windows(self):
check = self.check_escape
check('?:?', '?:[?]')
check('*:*', '*:[*]')
check(r'\\?\c:\?', r'\\?\c:\[?]')
check(r'\\*\*\*', r'\\*\*\[*]')
check('//?/c:/?', '//?/c:/[?]')
check('//*/*/*', '//*/*/[*]')
def test_main():
run_unittest(GlobTests)
if __name__ == "__main__":
test_main()
|
dataxu/ansible
|
refs/heads/dx-stable-2.5
|
lib/ansible/plugins/cache/base.py
|
232
|
# (c) 2017, ansible by Red Hat
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# moved actual classes to __init__; kept here for backward compat with 3rd parties
from ansible.plugins.cache import BaseCacheModule, BaseFileCacheModule
|
geekboxzone/lollipop_external_chromium_org
|
refs/heads/geekbox
|
third_party/closure_linter/closure_linter/gjslint.py
|
95
|
#!/usr/bin/env python
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Checks JavaScript files for common style guide violations.
gjslint.py is designed to be used as a PRESUBMIT script to check for javascript
style guide violations. As of now, it checks for the following violations:
* Missing and extra spaces
* Lines longer than 80 characters
* Missing newline at end of file
* Missing semicolon after function declaration
* Valid JsDoc including parameter matching
Someday it will validate to the best of its ability against the entirety of the
JavaScript style guide.
This file is a front end that parses arguments and flags. The core of the code
is in tokenizer.py and checker.py.
"""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)',
'nnaze@google.com (Nathan Naze)',)
import errno
import itertools
import os
import platform
import re
import sys
import time
import gflags as flags
from closure_linter import errorrecord
from closure_linter import runner
from closure_linter.common import erroraccumulator
from closure_linter.common import simplefileflags as fileflags
# Attempt import of multiprocessing (should be available in Python 2.6 and up).
try:
# pylint: disable=g-import-not-at-top
import multiprocessing
except ImportError:
multiprocessing = None
FLAGS = flags.FLAGS
flags.DEFINE_boolean('unix_mode', False,
'Whether to emit warnings in standard unix format.')
flags.DEFINE_boolean('beep', True, 'Whether to beep when errors are found.')
flags.DEFINE_boolean('time', False, 'Whether to emit timing statistics.')
flags.DEFINE_boolean('quiet', False, 'Whether to minimize logged messages. '
'Most useful for per-file linting, such as that performed '
'by the presubmit linter service.')
flags.DEFINE_boolean('check_html', False,
'Whether to check javascript in html files.')
flags.DEFINE_boolean('summary', False,
'Whether to show an error count summary.')
flags.DEFINE_list('additional_extensions', None, 'List of additional file '
'extensions (not js) that should be treated as '
'JavaScript files.')
flags.DEFINE_boolean('multiprocess',
                     platform.system() == 'Linux' and bool(multiprocessing),
'Whether to attempt parallelized linting using the '
'multiprocessing module. Enabled by default on Linux '
'if the multiprocessing module is present (Python 2.6+). '
'Otherwise disabled by default. '
'Disabling may make debugging easier.')
flags.ADOPT_module_key_flags(fileflags)
flags.ADOPT_module_key_flags(runner)
GJSLINT_ONLY_FLAGS = ['--unix_mode', '--beep', '--nobeep', '--time',
'--check_html', '--summary', '--quiet']
def _MultiprocessCheckPaths(paths):
"""Run _CheckPath over mutltiple processes.
Tokenization, passes, and checks are expensive operations. Running in a
single process, they can only run on one CPU/core. Instead,
shard out linting over all CPUs with multiprocessing to parallelize.
Args:
paths: paths to check.
Yields:
errorrecord.ErrorRecords for any found errors.
"""
pool = multiprocessing.Pool()
path_results = pool.imap(_CheckPath, paths)
for results in path_results:
for result in results:
yield result
# Force destruct before returning, as this can sometimes raise spurious
# "interrupted system call" (EINTR), which we can ignore.
try:
pool.close()
pool.join()
del pool
except OSError as err:
    if err.errno != errno.EINTR:
raise err
def _CheckPaths(paths):
"""Run _CheckPath on all paths in one thread.
Args:
paths: paths to check.
Yields:
errorrecord.ErrorRecords for any found errors.
"""
for path in paths:
results = _CheckPath(path)
for record in results:
yield record
def _CheckPath(path):
"""Check a path and return any errors.
Args:
path: paths to check.
Returns:
A list of errorrecord.ErrorRecords for any found errors.
"""
error_handler = erroraccumulator.ErrorAccumulator()
runner.Run(path, error_handler)
make_error_record = lambda err: errorrecord.MakeErrorRecord(path, err)
return map(make_error_record, error_handler.GetErrors())
def _GetFilePaths(argv):
suffixes = ['.js']
if FLAGS.additional_extensions:
suffixes += ['.%s' % ext for ext in FLAGS.additional_extensions]
if FLAGS.check_html:
suffixes += ['.html', '.htm']
return fileflags.GetFileList(argv, 'JavaScript', suffixes)
# Error printing functions
def _PrintFileSummary(paths, records):
"""Print a detailed summary of the number of errors in each file."""
paths = list(paths)
paths.sort()
for path in paths:
path_errors = [e for e in records if e.path == path]
print '%s: %d' % (path, len(path_errors))
def _PrintFileSeparator(path):
print '----- FILE : %s -----' % path
def _PrintSummary(paths, error_records):
"""Print a summary of the number of errors and files."""
error_count = len(error_records)
all_paths = set(paths)
all_paths_count = len(all_paths)
  if error_count == 0:
print '%d files checked, no errors found.' % all_paths_count
new_error_count = len([e for e in error_records if e.new_error])
error_paths = set([e.path for e in error_records])
error_paths_count = len(error_paths)
no_error_paths_count = all_paths_count - error_paths_count
if (error_count or new_error_count) and not FLAGS.quiet:
error_noun = 'error' if error_count == 1 else 'errors'
new_error_noun = 'error' if new_error_count == 1 else 'errors'
error_file_noun = 'file' if error_paths_count == 1 else 'files'
ok_file_noun = 'file' if no_error_paths_count == 1 else 'files'
print ('Found %d %s, including %d new %s, in %d %s (%d %s OK).' %
(error_count,
error_noun,
new_error_count,
new_error_noun,
error_paths_count,
error_file_noun,
no_error_paths_count,
ok_file_noun))
def _PrintErrorRecords(error_records):
"""Print error records strings in the expected format."""
current_path = None
for record in error_records:
if current_path != record.path:
current_path = record.path
if not FLAGS.unix_mode:
_PrintFileSeparator(current_path)
print record.error_string
def _FormatTime(t):
"""Formats a duration as a human-readable string.
Args:
t: A duration in seconds.
Returns:
A formatted duration string.
"""
if t < 1:
return '%dms' % round(t * 1000)
else:
return '%.2fs' % t
def main(argv=None):
"""Main function.
Args:
argv: Sequence of command line arguments.
"""
if argv is None:
argv = flags.FLAGS(sys.argv)
if FLAGS.time:
start_time = time.time()
suffixes = ['.js']
if FLAGS.additional_extensions:
suffixes += ['.%s' % ext for ext in FLAGS.additional_extensions]
if FLAGS.check_html:
suffixes += ['.html', '.htm']
paths = fileflags.GetFileList(argv, 'JavaScript', suffixes)
if FLAGS.multiprocess:
records_iter = _MultiprocessCheckPaths(paths)
else:
records_iter = _CheckPaths(paths)
records_iter, records_iter_copy = itertools.tee(records_iter, 2)
_PrintErrorRecords(records_iter_copy)
error_records = list(records_iter)
_PrintSummary(paths, error_records)
exit_code = 0
# If there are any errors
if error_records:
exit_code += 1
# If there are any new errors
if [r for r in error_records if r.new_error]:
exit_code += 2
if exit_code:
if FLAGS.summary:
_PrintFileSummary(paths, error_records)
if FLAGS.beep:
# Make a beep noise.
sys.stdout.write(chr(7))
# Write out instructions for using fixjsstyle script to fix some of the
# reported errors.
fix_args = []
for flag in sys.argv[1:]:
for f in GJSLINT_ONLY_FLAGS:
if flag.startswith(f):
break
else:
fix_args.append(flag)
if not FLAGS.quiet:
print """
Some of the errors reported by GJsLint may be auto-fixable using the script
fixjsstyle. Please double check any changes it makes and report any bugs. The
script can be run by executing:
fixjsstyle %s """ % ' '.join(fix_args)
if FLAGS.time:
print 'Done in %s.' % _FormatTime(time.time() - start_time)
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
Aravinthu/odoo
|
refs/heads/master
|
addons/account_payment/controllers/payment.py
|
3
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from werkzeug import url_encode
from odoo import http, _
from odoo.http import request, route
class PaymentPortal(http.Controller):
@route('/invoice/pay/<int:invoice_id>/form_tx', type='json', auth="public", website=True)
def invoice_pay_form(self, acquirer_id, invoice_id, save_token=False, access_token=None, **kwargs):
""" Json method that creates a payment.transaction, used to create a
transaction when the user clicks on 'pay now' button on the payment
form.
:return html: form containing all values related to the acquirer to
redirect customers to the acquirer website """
success_url = kwargs.get('success_url', '/my')
callback_method = kwargs.get('callback_method', '')
invoice_sudo = request.env['account.invoice'].sudo().browse(invoice_id)
if not invoice_sudo:
return False
try:
acquirer = request.env['payment.acquirer'].browse(int(acquirer_id))
        except (ValueError, TypeError):
return False
token = request.env['payment.token'].sudo() # currently no support of payment tokens
tx = request.env['payment.transaction'].sudo()._check_or_create_invoice_tx(
invoice_sudo,
acquirer,
payment_token=token,
tx_type='form_save' if save_token else 'form',
add_tx_values={
'callback_model_id': request.env['ir.model'].sudo().search([('model', '=', invoice_sudo._name)], limit=1).id,
'callback_res_id': invoice_sudo.id,
'callback_method': callback_method,
})
# set the transaction id into the session
request.session['portal_invoice_%s_transaction_id' % invoice_sudo.id] = tx.id
return tx.render_invoice_button(
invoice_sudo,
success_url,
submit_txt=_('Pay & Confirm'),
render_values={
'type': 'form_save' if save_token else 'form',
'alias_usage': _('If we store your payment information on our server, subscription payments will be made automatically.'),
}
)
@http.route('/invoice/pay/<int:invoice_id>/s2s_token_tx', type='http', auth='public', website=True)
def invoice_pay_token(self, invoice_id, pm_id=None, **kwargs):
""" Use a token to perform a s2s transaction """
error_url = kwargs.get('error_url', '/my')
success_url = kwargs.get('success_url', '/my')
callback_method = kwargs.get('callback_method', '')
access_token = kwargs.get('access_token')
params = {}
if access_token:
params['access_token'] = access_token
invoice_sudo = request.env['account.invoice'].sudo().browse(invoice_id)
if not invoice_sudo:
params['error'] = 'pay_invoice_invalid_doc'
return request.redirect('%s?%s' % (error_url, url_encode(params)))
try:
token = request.env['payment.token'].sudo().browse(int(pm_id))
except (ValueError, TypeError):
token = False
if not token:
params['error'] = 'pay_invoice_invalid_token'
return request.redirect('%s?%s' % (error_url, url_encode(params)))
# find an existing tx or create a new one
tx = request.env['payment.transaction'].sudo()._check_or_create_invoice_tx(
invoice_sudo,
token.acquirer_id,
payment_token=token,
tx_type='server2server',
add_tx_values={
'callback_model_id': request.env['ir.model'].sudo().search([('model', '=', invoice_sudo._name)], limit=1).id,
'callback_res_id': invoice_sudo.id,
'callback_method': callback_method,
})
# set the transaction id into the session
request.session['portal_invoice_%s_transaction_id' % invoice_sudo.id] = tx.id
# proceed to the payment
res = tx.confirm_invoice_token()
if res is not True:
params['error'] = res
return request.redirect('%s?%s' % (error_url, url_encode(params)))
params['success'] = 'pay_invoice'
return request.redirect('%s?%s' % (success_url, url_encode(params)))
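# Illustrative only (hypothetical host, invoice id and token id): the s2s route
# above is type='http', so once a payment token exists it can be exercised with
# a plain GET. A sketch using the `requests` library:
#     import requests
#     resp = requests.get(
#         'https://example.com/invoice/pay/42/s2s_token_tx',
#         params={'pm_id': 7, 'success_url': '/my', 'error_url': '/my'},
#     )
#     # on success the server redirects to '/my?success=pay_invoice'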
|
Bashar/django
|
refs/heads/master
|
django/contrib/sessions/exceptions.py
|
931
|
from django.core.exceptions import SuspiciousOperation
class InvalidSessionKey(SuspiciousOperation):
"""Invalid characters in session key"""
pass
class SuspiciousSession(SuspiciousOperation):
"""The session may be tampered with"""
pass
|
darkserver/darkadmin
|
refs/heads/master
|
src/modules/nginx.py
|
1
|
import json, os, subprocess, re
from log import *
cfg = {}
user = None
def process(args, config, userdata):
global cfg, user
cfg = config
user = userdata
return {
'enable' : site_enable,
'disable' : site_disable,
'list' : list_sites,
'add' : site_add,
'del' : site_remove,
}.get(args[1], help)(args[:1] + args[2:])
def help(args):
return \
'Available commands:\n\n' \
' list list all sites with availability status\n' \
' enable <domain> enables site\n' \
' disable <domain> disables site\n' \
' add <domain> <type> adds site with type where type can be: php, django\n' \
' del <domain> deletes site information, doesn\'t delete files\n'
def site_enable(args):
sites = args[1:]
reload_nginx = False
ret = []
for s in sites:
src = os.path.join(cfg['nginx:sites_available'], user.pw_name, s)
dst = os.path.join(cfg['nginx:sites_enabled'], user.pw_name, s)
if os.path.isfile(src):
if os.path.exists(dst):
ret.append({'message': 'Site %s is already enabled' % s})
else:
reload_nginx = True
os.symlink(src, dst)
ret.append({'message': 'Site %s enabled' % s})
else:
ret.append({'message': 'No site called %s' % s})
if reload_nginx:
devnull = open('/dev/null', 'w')
subprocess.call(cfg['nginx:reload_cmd'].split(' '), stdout=devnull)
return json.dumps(ret)
def site_disable(args):
sites = args[1:]
reload_nginx = False
ret = []
for s in sites:
dst = os.path.join(cfg['nginx:sites_enabled'], user.pw_name, s)
if os.path.exists(dst):
reload_nginx = True
os.unlink(dst)
ret.append({ 'message': 'Site %s disabled' % s})
else:
ret.append({'message': 'Site %s is already disabled' % s})
if reload_nginx:
devnull = open('/dev/null', 'w')
subprocess.call(cfg['nginx:reload_cmd'].split(' '), stdout=devnull)
return json.dumps(ret)
def list_sites(args):
data = []
saval = os.listdir(os.path.join(cfg['nginx:sites_available'], user.pw_name))
for s in saval:
status = os.path.isfile(os.path.join(cfg['nginx:sites_enabled'], user.pw_name, s))
data.append({
'domain' : s,
'status' : status,
})
# reverse order for sorting
for d in data:
d['domain'] = d['domain'].split('.')
d['domain'].reverse()
data = sorted(data, key=lambda k: k['domain'])
# reverse again to show correct domain names
for d in data:
d['domain'].reverse()
d['domain'] = '.'.join(d['domain'])
return json.dumps({"sites":data})
def format_list_sites(data):
ret = 'Your sites:\n\n'
counter = 0
lennum = len(data) / 10 + 1
lenname = 1
for site in data:
if lenname < len(site['domain']):
lenname = len(site['domain'])
for site in data:
counter += 1
num = ('{0: >#%s}' % lennum).format(counter)
name = ('{0: >%s}' % lenname).format(site['domain'])
        if site['status']:
ret += ' %s \033[1;32mon\033[0m %s\n' % (num, name)
else:
ret += ' %s \033[1;31moff\033[0m %s\n' % (num, name)
return ret
def site_add(args):
_help = 'Format: add <domain>[,<domain1>[,<domainn>]] <type>'
if len(args) < 3:
return _help
domains = args[1].split(',')
nginx_logdir_user = '/var/log/nginx/%s' % (user.pw_name)
nginx_logdir_site = '%s/%s' % (nginx_logdir_user, domains[0])
conf = {}
if args[2] == 'php':
conf = _parse_config('modules/nginx/php.tpl')['%domain%']
conf['locations']['~ \.php?$']['fastcgi_pass'] = 'unix:/var/lib/darkadmin/php/%s.sock' % (user.pw_name)
elif args[2] == 'django':
if len(args) < 4:
return _help
conf = _parse_config('modules/nginx/django.tpl')['%domain%']
conf['locations']['/admin/static']['alias'] = '%s/sites/%s/admin/media' % (user.pw_dir, domains[0])
conf['locations']['/static']['alias'] = '%s/sites/%s/static' % (user.pw_dir, domains[0])
conf['locations']['/']['fastcgi_pass'] = 'unix:/var/lib/darkadmin/django/%s/%s.sock' % (user.pw_name, domains[0])
conf['server_name'] = ' '.join(domains)
conf['root'] = '%s/sites/%s' % (user.pw_dir, domains[0])
conf['access_log'] = '%s/access.log' % (nginx_logdir_site)
conf['error_log'] = '%s/error.log' % (nginx_logdir_site)
fdata = _compose_config({domains[0] : conf})
fname = os.path.join(cfg['nginx:sites_available'], user.pw_name, domains[0])
f = open(fname, 'w')
f.write(fdata)
f.close()
# create nginx log dirs or nginx will give error
if not os.path.exists(nginx_logdir_user):
os.mkdir(nginx_logdir_user)
if not os.path.exists(nginx_logdir_site):
os.mkdir(nginx_logdir_site)
r = site_enable(['json', domains[0]])
return "Added %s\n%s" % (args[1], r)
def site_remove(args):
r = site_disable(['json', args[1]])
fname = os.path.join(cfg['nginx:sites_available'], user.pw_name, args[1])
os.unlink(fname)
return "Deleted %s\n%s" % (args[1], r)
def _compose_config(config):
data = \
"server {\n"
sortby = [
'server_name',
'listen',
'root',
'-',
'access_log',
'error_log',
'-',
'locations',
]
sortbyid = 0
for site, c in config.iteritems():
dothis = True
while dothis == True:
for var, val in c.iteritems():
if sortbyid >= len(sortby):
dothis = False
continue
if sortby[sortbyid] == '-':
data += '\n'
sortbyid += 1
continue
if var != sortby[sortbyid]:
continue
                if val is None or val == [] or val == {}:
continue
                if var == 'locations':
                    i = 0
                    for name, location in val.iteritems():
                        i += 1
                        data += "\tlocation %s {\n" % name
                        # distinct names here: reusing var/val would clobber the
                        # outer loop variables and break the `i < len(val)`
                        # blank-line check below
                        for lvar, lval in location.iteritems():
                            if lval is None or lval == [] or lval == {}:
                                continue
                            data += "\t\t%s %s;\n" % (lvar, lval)
                        data += "\t}\n"
                        if i < len(val):
                            data += "\n"
else:
data += '\t%s %s;\n' % (var, val)
sortbyid += 1
data += "}\n"
return data
def _parse_config(config):
data = {}
_srv = False
_level = 0
server = None
server_data = []
location = None
for l in open(config):
if _level < 0:
err("Error while parsing config %s" % config)
return {}
        l = l.strip()
        # strip comments before matching
        l = re.sub('#(.*)$', '', l)
m = re.match('^(.*){$', l)
if m:
_level += 1
m = re.match('^server\s+{$', l)
if m:
server = None
server_data = {
'locations' : {},
'fastcgi_params':{},
}
_srv = True
continue
m = re.match('^location\s+(.+)\s+{$', l)
if m:
location = m.group(1)
server_data['locations'][location] = {'fastcgi_params':{}}
continue
m = re.match('^}$', l)
if m:
_level -= 1
if location:
location = None
if _level == 0:
                data[server] = server_data
_srv = False
continue
m = re.match('^(\w+)\s+(.*)$', l)
if m:
val = m.group(2).split('\s')[0].replace(';', '')
if location:
if m.group(1) == 'fastcgi_param':
server_data['locations'][location]['fastcgi_params'][val.split()[0]] = val.split()[1]
else:
server_data['locations'][location][m.group(1)] = val
else:
if m.group(1) == 'fastcgi_param':
server_data['fastcgi_params'][val.split()[0]] = val.split()[1]
else:
server_data[m.group(1)] = val
if m.group(1) == 'server_name':
server = val
if _level < 0:
err("Error while parsing config %s" % config)
return {}
return data
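# A minimal round-trip sketch (hypothetical site values). Note _compose_config
# expects every key in its `sortby` table to be present and non-empty, much
# like the dicts _parse_config builds from the bundled templates:
if __name__ == '__main__':
    print _compose_config({'example.com': {
        'server_name': 'example.com',
        'listen': '80',
        'root': '/var/www/example.com',
        'access_log': '/var/log/nginx/example.com/access.log',
        'error_log': '/var/log/nginx/example.com/error.log',
        'locations': {'/': {'index': 'index.html'}},
    }})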
|
FireWRT/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/host/lib/scons-2.3.1/SCons/Options/PathOption.py
|
11
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PathOption.py 2014/03/02 14:18:15 garyo"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
class _PathOptionClass(object):
def warn(self):
global warned
if not warned:
msg = "The PathOption() function is deprecated; use the PathVariable() function instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
warned = True
def __call__(self, *args, **kw):
self.warn()
return SCons.Variables.PathVariable(*args, **kw)
def PathAccept(self, *args, **kw):
self.warn()
return SCons.Variables.PathVariable.PathAccept(*args, **kw)
def PathIsDir(self, *args, **kw):
self.warn()
return SCons.Variables.PathVariable.PathIsDir(*args, **kw)
def PathIsDirCreate(self, *args, **kw):
self.warn()
return SCons.Variables.PathVariable.PathIsDirCreate(*args, **kw)
def PathIsFile(self, *args, **kw):
self.warn()
return SCons.Variables.PathVariable.PathIsFile(*args, **kw)
def PathExists(self, *args, **kw):
self.warn()
return SCons.Variables.PathVariable.PathExists(*args, **kw)
PathOption = _PathOptionClass()
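# Deprecated spelling this shim keeps alive (hypothetical variable name and
# default); the call forwards to SCons.Variables.PathVariable and warns once:
#     opts = Variables()   # the modern replacement for Options()
#     opts.Add(PathOption('qtdir', 'Directory containing Qt', '/usr/lib/qt',
#                         PathOption.PathIsDir))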
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
h4/fuit-webdev
|
refs/heads/master
|
examples/lesson2/3.2/2.3.1.py
|
1
|
# -*- encoding: utf-8 -*-
"""
Logical AND
"""
print 'a' and 'b'           # -> 'b'  (both truthy: the last operand wins)
print '' and 'b'            # -> ''   (short-circuits on the first falsy operand)
print 'a' and 'b' and 'c'   # -> 'c'
|
emon10005/sympy
|
refs/heads/master
|
sympy/parsing/__init__.py
|
135
|
"""Used for translating a string into a SymPy expression. """
|
shanil-puri/mase
|
refs/heads/master
|
src/smoteok.py
|
10
|
from __future__ import print_function, division
from ok import *
import sys
sys.dont_write_bytecode = True
from smote import *
@ok
def _some():
rseed(1)
s = Some(16)
for i in xrange(100000):
s += i
assert sorted(s.any)== [ 5852, 24193, 28929, 38266,
41764, 42926, 51310, 52203,
54651, 56743, 59368, 60794,
61888, 82586, 83018, 88462]
print(s.hi())
weather="""
outlook,
temperature,
humidity,?windy,play
sunny , 85, 85, FALSE, no # an interesting case
sunny , 80, 90, TRUE , no
overcast , 83, 86, FALSE, yes
rainy , 70, 96, FALSE, yes
rainy , 68, 80, FALSE, yes
rainy , 65, 70, TRUE , no
overcast , 64, 65, TRUE ,
yes
sunny , 72, 95, FALSE, no
sunny , 69, 70, FALSE, yes
rainy , 75, 80, FALSE, yes
sunny , 75, 70, TRUE , yes
overcast , 72, 90, TRUE , yes
overcast , 81, 75, FALSE, yes
rainy , 71, 91, TRUE , no"""
|
olexiim/edx-platform
|
refs/heads/master
|
common/djangoapps/external_auth/tests/test_shib.py
|
7
|
# -*- coding: utf-8 -*-
"""
Tests for Shibboleth Authentication
@jbau
"""
import unittest
from ddt import ddt, data
from django.conf import settings
from django.http import HttpResponseRedirect
from django.test import TestCase
from django.test.client import RequestFactory, Client as DjangoTestClient
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser, User
from django.utils.importlib import import_module
from edxmako.tests import mako_middleware_process_request
from external_auth.models import ExternalAuthMap
from external_auth.views import (
shib_login, course_specific_login, course_specific_register, _flatten_to_ascii
)
from mock import patch
from xmodule.modulestore.tests.django_utils import TEST_DATA_MOCK_MODULESTORE
from student.views import create_account, change_enrollment
from student.models import UserProfile, CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore import ModuleStoreEnum
# Shib is supposed to provide 'REMOTE_USER', 'givenName', 'sn', 'mail', 'Shib-Identity-Provider'
# attributes via request.META. We can count on 'Shib-Identity-Provider', and 'REMOTE_USER' being present
# b/c of how mod_shib works but should test the behavior with the rest of the attributes present/missing
# For the sake of python convention we'll make all of these variable names ALL_CAPS
# These values would all returned from request.META, so they need to be str, not unicode
IDP = 'https://idp.stanford.edu/'
REMOTE_USER = 'test_user@stanford.edu'
MAILS = [None, '', 'test_user@stanford.edu'] # unicode shouldn't be in emails, would fail django's email validator
DISPLAYNAMES = [None, '', 'Jason 包']
GIVENNAMES = [None, '', 'jasön; John; bob'] # At Stanford, the givenNames can be a list delimited by ';'
SNS = [None, '', '包; smith'] # At Stanford, the sns can be a list delimited by ';'
def gen_all_identities():
"""
A generator for all combinations of test inputs.
Each generated item is a dict that represents what a shib IDP
could potentially pass to django via request.META, i.e.
setting (or not) request.META['givenName'], etc.
"""
def _build_identity_dict(mail, display_name, given_name, surname):
""" Helper function to return a dict of test identity """
meta_dict = {'Shib-Identity-Provider': IDP,
'REMOTE_USER': REMOTE_USER}
if display_name is not None:
meta_dict['displayName'] = display_name
if mail is not None:
meta_dict['mail'] = mail
if given_name is not None:
meta_dict['givenName'] = given_name
if surname is not None:
meta_dict['sn'] = surname
return meta_dict
for mail in MAILS:
for given_name in GIVENNAMES:
for surname in SNS:
for display_name in DISPLAYNAMES:
yield _build_identity_dict(mail, display_name, given_name, surname)
@ddt
@override_settings(MODULESTORE=TEST_DATA_MOCK_MODULESTORE, SESSION_ENGINE='django.contrib.sessions.backends.cache')
class ShibSPTest(ModuleStoreTestCase):
"""
Tests for the Shibboleth SP, which communicates via request.META
(Apache environment variables set by mod_shib)
"""
request_factory = RequestFactory()
def setUp(self):
super(ShibSPTest, self).setUp(create_user=False)
self.test_user_id = ModuleStoreEnum.UserID.test
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_exception_shib_login(self):
"""
Tests that we get the error page when there is no REMOTE_USER
or Shib-Identity-Provider in request.META
"""
no_remote_user_request = self.request_factory.get('/shib-login')
no_remote_user_request.META.update({'Shib-Identity-Provider': IDP})
no_remote_user_request.user = AnonymousUser()
mako_middleware_process_request(no_remote_user_request)
no_remote_user_response = shib_login(no_remote_user_request)
self.assertEqual(no_remote_user_response.status_code, 403)
self.assertIn("identity server did not return your ID information", no_remote_user_response.content)
no_idp_request = self.request_factory.get('/shib-login')
no_idp_request.META.update({'REMOTE_USER': REMOTE_USER})
no_idp_response = shib_login(no_idp_request)
self.assertEqual(no_idp_response.status_code, 403)
self.assertIn("identity server did not return your ID information", no_idp_response.content)
def _assert_shib_login_is_logged(self, audit_log_call, remote_user):
"""Asserts that shibboleth login attempt is being logged"""
remote_user = _flatten_to_ascii(remote_user) # django usernames have to be ascii
method_name, args, _kwargs = audit_log_call
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 1)
self.assertIn(u'logged in via Shibboleth', args[0])
self.assertIn(remote_user, args[0])
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_shib_login(self):
"""
Tests that:
* shib credentials that match an existing ExternalAuthMap with a linked active user logs the user in
* shib credentials that match an existing ExternalAuthMap with a linked inactive user shows error page
* shib credentials that match an existing ExternalAuthMap without a linked user and also match the email
of an existing user without an existing ExternalAuthMap links the two and log the user in
* shib credentials that match an existing ExternalAuthMap without a linked user and also match the email
of an existing user that already has an ExternalAuthMap causes an error (403)
* shib credentials that do not match an existing ExternalAuthMap causes the registration form to appear
"""
user_w_map = UserFactory.create(email='withmap@stanford.edu')
extauth = ExternalAuthMap(external_id='withmap@stanford.edu',
external_email='',
external_domain='shib:https://idp.stanford.edu/',
external_credentials="",
user=user_w_map)
user_wo_map = UserFactory.create(email='womap@stanford.edu')
user_w_map.save()
user_wo_map.save()
extauth.save()
inactive_user = UserFactory.create(email='inactive@stanford.edu')
inactive_user.is_active = False
inactive_extauth = ExternalAuthMap(external_id='inactive@stanford.edu',
external_email='',
external_domain='shib:https://idp.stanford.edu/',
external_credentials="",
user=inactive_user)
inactive_user.save()
inactive_extauth.save()
idps = ['https://idp.stanford.edu/', 'https://someother.idp.com/']
remote_users = ['withmap@stanford.edu', 'womap@stanford.edu',
'testuser2@someother_idp.com', 'inactive@stanford.edu']
for idp in idps:
for remote_user in remote_users:
request = self.request_factory.get('/shib-login')
request.session = import_module(settings.SESSION_ENGINE).SessionStore() # empty session
request.META.update({'Shib-Identity-Provider': idp,
'REMOTE_USER': remote_user,
'mail': remote_user})
request.user = AnonymousUser()
mako_middleware_process_request(request)
with patch('external_auth.views.AUDIT_LOG') as mock_audit_log:
response = shib_login(request)
audit_log_calls = mock_audit_log.method_calls
if idp == "https://idp.stanford.edu/" and remote_user == 'withmap@stanford.edu':
self.assertIsInstance(response, HttpResponseRedirect)
self.assertEqual(request.user, user_w_map)
self.assertEqual(response['Location'], '/')
# verify logging:
self.assertEquals(len(audit_log_calls), 2)
self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
method_name, args, _kwargs = audit_log_calls[1]
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 1)
self.assertIn(u'Login success', args[0])
self.assertIn(remote_user, args[0])
elif idp == "https://idp.stanford.edu/" and remote_user == 'inactive@stanford.edu':
self.assertEqual(response.status_code, 403)
self.assertIn("Account not yet activated: please look for link in your email", response.content)
# verify logging:
self.assertEquals(len(audit_log_calls), 2)
self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
method_name, args, _kwargs = audit_log_calls[1]
self.assertEquals(method_name, 'warning')
self.assertEquals(len(args), 1)
self.assertIn(u'is not active after external login', args[0])
# self.assertEquals(remote_user, args[1])
elif idp == "https://idp.stanford.edu/" and remote_user == 'womap@stanford.edu':
self.assertIsNotNone(ExternalAuthMap.objects.get(user=user_wo_map))
self.assertIsInstance(response, HttpResponseRedirect)
self.assertEqual(request.user, user_wo_map)
self.assertEqual(response['Location'], '/')
# verify logging:
self.assertEquals(len(audit_log_calls), 2)
self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
method_name, args, _kwargs = audit_log_calls[1]
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 1)
self.assertIn(u'Login success', args[0])
self.assertIn(remote_user, args[0])
elif idp == "https://someother.idp.com/" and remote_user in \
['withmap@stanford.edu', 'womap@stanford.edu', 'inactive@stanford.edu']:
self.assertEqual(response.status_code, 403)
self.assertIn("You have already created an account using an external login", response.content)
# no audit logging calls
self.assertEquals(len(audit_log_calls), 0)
else:
self.assertEqual(response.status_code, 200)
self.assertContains(response,
("Preferences for {platform_name}"
.format(platform_name=settings.PLATFORM_NAME)))
# no audit logging calls
self.assertEquals(len(audit_log_calls), 0)
def _base_test_extauth_auto_activate_user_with_flag(self, log_user_string="inactive@stanford.edu"):
"""
        Tests that FEATURES['BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'] makes extauth automatically
        link users, activate them, and log them in
"""
inactive_user = UserFactory.create(email='inactive@stanford.edu')
inactive_user.is_active = False
inactive_user.save()
request = self.request_factory.get('/shib-login')
request.session = import_module(settings.SESSION_ENGINE).SessionStore() # empty session
request.META.update({
'Shib-Identity-Provider': 'https://idp.stanford.edu/',
'REMOTE_USER': 'inactive@stanford.edu',
'mail': 'inactive@stanford.edu'
})
request.user = AnonymousUser()
with patch('external_auth.views.AUDIT_LOG') as mock_audit_log:
response = shib_login(request)
audit_log_calls = mock_audit_log.method_calls
# reload user from db, since the view function works via db side-effects
inactive_user = User.objects.get(id=inactive_user.id)
self.assertIsNotNone(ExternalAuthMap.objects.get(user=inactive_user))
self.assertTrue(inactive_user.is_active)
self.assertIsInstance(response, HttpResponseRedirect)
self.assertEqual(request.user, inactive_user)
self.assertEqual(response['Location'], '/')
# verify logging:
self.assertEquals(len(audit_log_calls), 3)
self._assert_shib_login_is_logged(audit_log_calls[0], log_user_string)
method_name, args, _kwargs = audit_log_calls[2]
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 1)
self.assertIn(u'Login success', args[0])
self.assertIn(log_user_string, args[0])
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
@patch.dict(settings.FEATURES, {'BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH': True, 'SQUELCH_PII_IN_LOGS': False})
def test_extauth_auto_activate_user_with_flag_no_squelch(self):
"""
Wrapper to run base_test_extauth_auto_activate_user_with_flag with {'SQUELCH_PII_IN_LOGS': False}
"""
self._base_test_extauth_auto_activate_user_with_flag(log_user_string="inactive@stanford.edu")
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
@patch.dict(settings.FEATURES, {'BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH': True, 'SQUELCH_PII_IN_LOGS': True})
def test_extauth_auto_activate_user_with_flag_squelch(self):
"""
Wrapper to run base_test_extauth_auto_activate_user_with_flag with {'SQUELCH_PII_IN_LOGS': True}
"""
self._base_test_extauth_auto_activate_user_with_flag(log_user_string="user.id: 1")
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
@data(*gen_all_identities())
def test_registration_form(self, identity):
"""
Tests the registration form showing up with the proper parameters.
Uses django test client for its session support
"""
client = DjangoTestClient()
# identity k/v pairs will show up in request.META
response = client.get(path='/shib-login/', data={}, follow=False, **identity)
self.assertEquals(response.status_code, 200)
mail_input_HTML = '<input class="" id="email" type="email" name="email"'
if not identity.get('mail'):
self.assertContains(response, mail_input_HTML)
else:
self.assertNotContains(response, mail_input_HTML)
sn_empty = not identity.get('sn')
given_name_empty = not identity.get('givenName')
displayname_empty = not identity.get('displayName')
fullname_input_html = '<input id="name" type="text" name="name"'
if sn_empty and given_name_empty and displayname_empty:
self.assertContains(response, fullname_input_html)
else:
self.assertNotContains(response, fullname_input_html)
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
@data(*gen_all_identities())
def test_registration_form_submit(self, identity):
"""
        Tests user creation after the registration form that pops up is submitted. If there is no shib
ExternalAuthMap in the session, then the created user should take the username and email from the
request.
Uses django test client for its session support
"""
# First we pop the registration form
client = DjangoTestClient()
response1 = client.get(path='/shib-login/', data={}, follow=False, **identity)
# Then we have the user answer the registration form
# These are unicode because request.POST returns unicode
postvars = {'email': u'post_email@stanford.edu',
'username': u'post_username', # django usernames can't be unicode
'password': u'post_pássword',
'name': u'post_náme',
'terms_of_service': u'true',
'honor_code': u'true'}
# use RequestFactory instead of TestClient here because we want access to request.user
request2 = self.request_factory.post('/create_account', data=postvars)
request2.session = client.session
request2.user = AnonymousUser()
mako_middleware_process_request(request2)
with patch('student.views.AUDIT_LOG') as mock_audit_log:
_response2 = create_account(request2)
user = request2.user
mail = identity.get('mail')
# verify logging of login happening during account creation:
audit_log_calls = mock_audit_log.method_calls
self.assertEquals(len(audit_log_calls), 3)
method_name, args, _kwargs = audit_log_calls[0]
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 1)
self.assertIn(u'Login success on new account creation', args[0])
self.assertIn(u'post_username', args[0])
method_name, args, _kwargs = audit_log_calls[1]
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 2)
self.assertIn(u'User registered with external_auth', args[0])
self.assertEquals(u'post_username', args[1])
method_name, args, _kwargs = audit_log_calls[2]
self.assertEquals(method_name, 'info')
self.assertEquals(len(args), 3)
self.assertIn(u'Updated ExternalAuthMap for ', args[0])
self.assertEquals(u'post_username', args[1])
self.assertEquals(u'test_user@stanford.edu', args[2].external_id)
# check that the created user has the right email, either taken from shib or user input
if mail:
self.assertEqual(user.email, mail)
self.assertEqual(list(User.objects.filter(email=postvars['email'])), [])
self.assertIsNotNone(User.objects.get(email=mail)) # get enforces only 1 such user
else:
self.assertEqual(user.email, postvars['email'])
self.assertEqual(list(User.objects.filter(email=mail)), [])
self.assertIsNotNone(User.objects.get(email=postvars['email'])) # get enforces only 1 such user
# check that the created user profile has the right name, either taken from shib or user input
profile = UserProfile.objects.get(user=user)
sn_empty = not identity.get('sn')
given_name_empty = not identity.get('givenName')
displayname_empty = not identity.get('displayName')
if displayname_empty:
if sn_empty and given_name_empty:
self.assertEqual(profile.name, postvars['name'])
else:
self.assertEqual(profile.name, request2.session['ExternalAuthMap'].external_name)
self.assertNotIn(u';', profile.name)
else:
self.assertEqual(profile.name, request2.session['ExternalAuthMap'].external_name)
self.assertEqual(profile.name, identity.get('displayName').decode('utf-8'))
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
@data(None, "", "shib:https://idp.stanford.edu/")
def test_course_specific_login_and_reg(self, domain):
"""
Tests that the correct course specific login and registration urls work for shib
"""
course = CourseFactory.create(
org='MITx',
number='999',
display_name='Robot Super Course',
user_id=self.test_user_id,
)
# Test for cases where course is found
# set domains
# temporarily set the branch to draft-preferred so we can update the course
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course.id):
course.enrollment_domain = domain
self.store.update_item(course, self.test_user_id)
# setting location to test that GET params get passed through
login_request = self.request_factory.get('/course_specific_login/MITx/999/Robot_Super_Course' +
'?course_id=MITx/999/Robot_Super_Course' +
'&enrollment_action=enroll')
_reg_request = self.request_factory.get('/course_specific_register/MITx/999/Robot_Super_Course' +
'?course_id=MITx/999/course/Robot_Super_Course' +
'&enrollment_action=enroll')
login_response = course_specific_login(login_request, 'MITx/999/Robot_Super_Course')
reg_response = course_specific_register(login_request, 'MITx/999/Robot_Super_Course')
if domain and "shib" in domain:
self.assertIsInstance(login_response, HttpResponseRedirect)
self.assertEqual(login_response['Location'],
reverse('shib-login') +
'?course_id=MITx/999/Robot_Super_Course' +
'&enrollment_action=enroll')
            self.assertIsInstance(reg_response, HttpResponseRedirect)
self.assertEqual(reg_response['Location'],
reverse('shib-login') +
'?course_id=MITx/999/Robot_Super_Course' +
'&enrollment_action=enroll')
else:
self.assertIsInstance(login_response, HttpResponseRedirect)
self.assertEqual(login_response['Location'],
reverse('signin_user') +
'?course_id=MITx/999/Robot_Super_Course' +
'&enrollment_action=enroll')
            self.assertIsInstance(reg_response, HttpResponseRedirect)
self.assertEqual(reg_response['Location'],
reverse('register_user') +
'?course_id=MITx/999/Robot_Super_Course' +
'&enrollment_action=enroll')
# Now test for non-existent course
# setting location to test that GET params get passed through
login_request = self.request_factory.get('/course_specific_login/DNE/DNE/DNE' +
'?course_id=DNE/DNE/DNE' +
'&enrollment_action=enroll')
_reg_request = self.request_factory.get('/course_specific_register/DNE/DNE/DNE' +
'?course_id=DNE/DNE/DNE/Robot_Super_Course' +
'&enrollment_action=enroll')
login_response = course_specific_login(login_request, 'DNE/DNE/DNE')
reg_response = course_specific_register(login_request, 'DNE/DNE/DNE')
self.assertIsInstance(login_response, HttpResponseRedirect)
self.assertEqual(login_response['Location'],
reverse('signin_user') +
'?course_id=DNE/DNE/DNE' +
'&enrollment_action=enroll')
        self.assertIsInstance(reg_response, HttpResponseRedirect)
self.assertEqual(reg_response['Location'],
reverse('register_user') +
'?course_id=DNE/DNE/DNE' +
'&enrollment_action=enroll')
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_enrollment_limit_by_domain(self):
"""
Tests that the enrollmentDomain setting is properly limiting enrollment to those who have
the proper external auth
"""
        # create 2 courses, one with limited enrollment, one without
shib_course = CourseFactory.create(
org='Stanford',
number='123',
display_name='Shib Only',
enrollment_domain='shib:https://idp.stanford.edu/',
user_id=self.test_user_id,
)
open_enroll_course = CourseFactory.create(
org='MITx',
number='999',
display_name='Robot Super Course',
enrollment_domain='',
user_id=self.test_user_id,
)
# create 3 kinds of students, external_auth matching shib_course, external_auth not matching, no external auth
shib_student = UserFactory.create()
shib_student.save()
extauth = ExternalAuthMap(external_id='testuser@stanford.edu',
external_email='',
external_domain='shib:https://idp.stanford.edu/',
external_credentials="",
user=shib_student)
extauth.save()
other_ext_student = UserFactory.create()
other_ext_student.username = "teststudent2"
other_ext_student.email = "teststudent2@other.edu"
other_ext_student.save()
extauth = ExternalAuthMap(external_id='testuser1@other.edu',
external_email='',
external_domain='shib:https://other.edu/',
external_credentials="",
user=other_ext_student)
extauth.save()
int_student = UserFactory.create()
int_student.username = "teststudent3"
int_student.email = "teststudent3@gmail.com"
int_student.save()
        # Test the two cases for courses, limited and not
for course in [shib_course, open_enroll_course]:
for student in [shib_student, other_ext_student, int_student]:
request = self.request_factory.post('/change_enrollment')
request.POST.update({'enrollment_action': 'enroll',
'course_id': course.id.to_deprecated_string()})
request.user = student
response = change_enrollment(request)
# If course is not limited or student has correct shib extauth then enrollment should be allowed
if course is open_enroll_course or student is shib_student:
self.assertEqual(response.status_code, 200)
self.assertTrue(CourseEnrollment.is_enrolled(student, course.id))
else:
self.assertEqual(response.status_code, 400)
self.assertFalse(CourseEnrollment.is_enrolled(student, course.id))
@unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
def test_shib_login_enrollment(self):
"""
A functionality test that a student with an existing shib login
        can auto-enroll in a class with GET or POST params. Also tests the redirect functionality of
the 'next' GET/POST param
"""
student = UserFactory.create()
extauth = ExternalAuthMap(external_id='testuser@stanford.edu',
external_email='',
external_domain='shib:https://idp.stanford.edu/',
external_credentials="",
internal_password="password",
user=student)
student.set_password("password")
student.save()
extauth.save()
course = CourseFactory.create(
org='Stanford',
number='123',
display_name='Shib Only',
enrollment_domain='shib:https://idp.stanford.edu/',
user_id=self.test_user_id,
)
# use django test client for sessions and url processing
# no enrollment before trying
self.assertFalse(CourseEnrollment.is_enrolled(student, course.id))
self.client.logout()
request_kwargs = {'path': '/shib-login/',
'data': {'enrollment_action': 'enroll', 'course_id': course.id.to_deprecated_string(), 'next': '/testredirect'},
'follow': False,
'REMOTE_USER': 'testuser@stanford.edu',
'Shib-Identity-Provider': 'https://idp.stanford.edu/'}
response = self.client.get(**request_kwargs)
# successful login is a redirect to "/"
self.assertEqual(response.status_code, 302)
self.assertEqual(response['location'], 'http://testserver/testredirect')
# now there is enrollment
self.assertTrue(CourseEnrollment.is_enrolled(student, course.id))
# Clean up and try again with POST (doesn't happen with real production shib, doing this for test coverage)
self.client.logout()
CourseEnrollment.unenroll(student, course.id)
self.assertFalse(CourseEnrollment.is_enrolled(student, course.id))
response = self.client.post(**request_kwargs)
# successful login is a redirect to "/"
self.assertEqual(response.status_code, 302)
self.assertEqual(response['location'], 'http://testserver/testredirect')
# now there is enrollment
self.assertTrue(CourseEnrollment.is_enrolled(student, course.id))
class ShibUtilFnTest(TestCase):
"""
Tests util functions in shib module
"""
def test__flatten_to_ascii(self):
DIACRITIC = u"àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝâêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿÄËÏÖÜŸåÅçÇ" # pylint: disable=invalid-name
STR_DIACRI = "àèìòùÀÈÌÒÙáéíóúýÁÉÍÓÚÝâêîôûÂÊÎÔÛãñõÃÑÕäëïöüÿÄËÏÖÜŸåÅçÇ" # pylint: disable=invalid-name
FLATTENED = u"aeiouAEIOUaeiouyAEIOUYaeiouAEIOUanoANOaeiouyAEIOUYaAcC" # pylint: disable=invalid-name
self.assertEqual(_flatten_to_ascii('jasön'), 'jason') # umlaut
self.assertEqual(_flatten_to_ascii('Jason包'), 'Jason') # mandarin, so it just gets dropped
self.assertEqual(_flatten_to_ascii('abc'), 'abc') # pass through
unicode_test = _flatten_to_ascii(DIACRITIC)
self.assertEqual(unicode_test, FLATTENED)
self.assertIsInstance(unicode_test, unicode)
str_test = _flatten_to_ascii(STR_DIACRI)
self.assertEqual(str_test, FLATTENED)
self.assertIsInstance(str_test, str)
|
widowild/messcripts
|
refs/heads/master
|
exercice/python2/solutions/exercice_10_18.py
|
1
|
#! /usr/bin/env python
# -*- coding:Utf-8 -*-
def voyelle(cu):
"teste si le caractère unicode <cu> est une voyelle"
if cu in u"AEIOUYÀÉÈÊËÎÏÔÛÙaeiouyàéèêëîïôûù":
return 1
else:
return 0
# Test :
if __name__ == '__main__':
print voyelle(u"g"), voyelle(u"O"), voyelle(u"à"), voyelle(u"É")
|
abligh/xen
|
refs/heads/master
|
tools/python/xen/xend/XendDPCI.py
|
44
|
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (c) 2008 NEC Corporation
# Yosuke Iwamatsu <y-iwamatsu at ab jp nec com>
#============================================================================
from xen.xend.XendBase import XendBase
from xen.xend.XendPPCI import XendPPCI
from xen.xend import XendAPIStore
from xen.xend import uuid as genuuid
import XendDomain, XendNode
from XendError import *
from XendTask import XendTask
from XendLogging import log
class XendDPCI(XendBase):
"""Representation of a passthrough PCI device."""
def getClass(self):
return "DPCI"
def getAttrRO(self):
attrRO = ['virtual_domain',
'virtual_bus',
'virtual_slot',
'virtual_func',
'virtual_name',
'VM',
'PPCI',
'hotplug_slot',
'key',
'options']
return XendBase.getAttrRO() + attrRO
def getAttrRW(self):
attrRW = []
return XendBase.getAttrRW() + attrRW
def getAttrInst(self):
attrInst = ['VM',
'PPCI',
'hotplug_slot',
'key']
return XendBase.getAttrInst() + attrInst
def getMethods(self):
methods = ['destroy']
return XendBase.getMethods() + methods
def getFuncs(self):
funcs = ['create']
return XendBase.getFuncs() + funcs
getClass = classmethod(getClass)
getAttrRO = classmethod(getAttrRO)
getAttrRW = classmethod(getAttrRW)
getAttrInst = classmethod(getAttrInst)
getMethods = classmethod(getMethods)
getFuncs = classmethod(getFuncs)
def create(self, dpci_struct):
# Check if VM is valid
xendom = XendDomain.instance()
if not xendom.is_valid_vm(dpci_struct['VM']):
raise InvalidHandleError('VM', dpci_struct['VM'])
dom = xendom.get_vm_by_uuid(dpci_struct['VM'])
# Check if PPCI is valid
xennode = XendNode.instance()
ppci_uuid = xennode.get_ppci_by_uuid(dpci_struct['PPCI'])
if not ppci_uuid:
raise InvalidHandleError('PPCI', dpci_struct['PPCI'])
for existing_dpci in XendAPIStore.get_all('DPCI'):
if ppci_uuid == existing_dpci.get_PPCI():
raise DirectPCIError("Device is in use")
# Assign PPCI to VM
try:
dpci_ref = XendTask.log_progress(0, 100, dom.create_dpci,
dpci_struct)
        except XendError, e:
            raise DirectPCIError("Failed to assign device: %s" % str(e))
        # TODO: Retrieve virtual PCI device information.
return dpci_ref
create = classmethod(create)
def get_by_VM(cls, VM_ref):
result = []
for dpci in XendAPIStore.get_all("DPCI"):
if dpci.get_VM() == VM_ref:
result.append(dpci.get_uuid())
return result
get_by_VM = classmethod(get_by_VM)
def __init__(self, uuid, record):
XendBase.__init__(self, uuid, record)
self.virtual_domain = -1
self.virtual_bus = -1
self.virtual_slot = -1
self.virtual_func = -1
self.VM = record['VM']
self.PPCI = record['PPCI']
self.hotplug_slot = int(record['hotplug_slot'], 16)
self.key = record['key']
        if 'options' in record:
            self.options = record['options']
        else:
            self.options = ''  # avoid an unset attribute in get_options()
def destroy(self):
xendom = XendDomain.instance()
dom = xendom.get_vm_by_uuid(self.get_VM())
if not dom:
raise InvalidHandleError("VM", self.get_VM())
XendTask.log_progress(0, 100, dom.destroy_dpci, self.get_uuid())
def get_virtual_domain(self):
return self.virtual_domain
def get_virtual_bus(self):
return self.virtual_bus
def get_virtual_slot(self):
return self.virtual_slot
def get_virtual_func(self):
return self.virtual_func
def get_virtual_name(self):
return "%04x:%02x:%02x.%01x" % (self.virtual_domain, self.virtual_bus,
self.virtual_slot, self.virtual_func)
def get_VM(self):
return self.VM
def get_PPCI(self):
return self.PPCI
def get_hotplug_slot(self):
return "%d" % self.hotplug_slot
def get_key(self):
return self.key
def get_options(self):
return self.options
|
40223212/2015cd_midterm2
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/xml/etree/cElementTree.py
|
876
|
# Deprecated alias for xml.etree.ElementTree
from xml.etree.ElementTree import *
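# Kept so that legacy imports keep working, e.g.:
#     import xml.etree.cElementTree as ET
#     tree = ET.parse('data.xml')   # now the plain ElementTree implementation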
|
KickAssCoin/P2Pool-KIK
|
refs/heads/master
|
p2pool/bitcoin/script.py
|
282
|
from p2pool.util import math, pack
def reads_nothing(f):
return None, f
def protoPUSH(length):
return lambda f: pack.read(f, length)
def protoPUSHDATA(size_len):
def _(f):
length_str, f = pack.read(f, size_len)
length = math.string_to_natural(length_str[::-1].lstrip(chr(0)))
data, f = pack.read(f, length)
return data, f
return _
opcodes = {}
for i in xrange(256):
opcodes[i] = 'UNK_' + str(i), reads_nothing
opcodes[0] = 'PUSH', lambda f: ('', f)
for i in xrange(1, 76):
opcodes[i] = 'PUSH', protoPUSH(i)
opcodes[76] = 'PUSH', protoPUSHDATA(1)
opcodes[77] = 'PUSH', protoPUSHDATA(2)
opcodes[78] = 'PUSH', protoPUSHDATA(4)
opcodes[79] = 'PUSH', lambda f: ('\x81', f)
for i in xrange(81, 97):
opcodes[i] = 'PUSH', lambda f, _i=i: (chr(_i - 80), f)
opcodes[172] = 'CHECKSIG', reads_nothing
opcodes[173] = 'CHECKSIGVERIFY', reads_nothing
opcodes[174] = 'CHECKMULTISIG', reads_nothing
opcodes[175] = 'CHECKMULTISIGVERIFY', reads_nothing
def parse(script):
f = script, 0
while pack.size(f):
opcode_str, f = pack.read(f, 1)
opcode = ord(opcode_str)
opcode_name, read_func = opcodes[opcode]
opcode_arg, f = read_func(f)
yield opcode_name, opcode_arg
def get_sigop_count(script):
weights = {
'CHECKSIG': 1,
'CHECKSIGVERIFY': 1,
'CHECKMULTISIG': 20,
'CHECKMULTISIGVERIFY': 20,
}
return sum(weights.get(opcode_name, 0) for opcode_name, opcode_arg in parse(script))
def create_push_script(datums): # datums can be ints or strs
res = []
for datum in datums:
if isinstance(datum, (int, long)):
if datum == -1 or 1 <= datum <= 16:
res.append(chr(datum + 80))
continue
negative = datum < 0
datum = math.natural_to_string(abs(datum))
if datum and ord(datum[0]) & 128:
datum = '\x00' + datum
if negative:
datum = chr(ord(datum[0]) + 128) + datum[1:]
datum = datum[::-1]
if len(datum) < 76:
res.append(chr(len(datum)))
        elif len(datum) <= 0xff:
            res.append(chr(76)) # OP_PUSHDATA1 -- opcode as a byte; a bare int would break ''.join
            res.append(chr(len(datum)))
        elif len(datum) <= 0xffff:
            res.append(chr(77)) # OP_PUSHDATA2
            res.append(pack.IntType(16).pack(len(datum)))
        elif len(datum) <= 0xffffffff:
            res.append(chr(78)) # OP_PUSHDATA4
            res.append(pack.IntType(32).pack(len(datum)))
else:
raise ValueError('string too long')
res.append(datum)
return ''.join(res)
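# A small round-trip sketch (arbitrary sample values): create_push_script()
# emits a raw byte string and parse() walks it back into (name, arg) pairs.
if __name__ == '__main__':
    s = create_push_script([5, 'hello'])
    for opcode_name, opcode_arg in parse(s):
        print opcode_name, repr(opcode_arg) # PUSH '\x05', then PUSH 'hello'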
|
Vixionar/django
|
refs/heads/master
|
django/shortcuts.py
|
135
|
"""
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
import warnings
from django.core import urlresolvers
from django.db.models.base import ModelBase
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.http import (
Http404, HttpResponse, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.template import RequestContext, loader
from django.template.context import _current_app_undefined
from django.template.engine import (
_context_instance_undefined, _dictionary_undefined, _dirs_undefined,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from django.utils.functional import Promise
def render_to_response(template_name, context=None,
context_instance=_context_instance_undefined,
content_type=None, status=None, dirs=_dirs_undefined,
dictionary=_dictionary_undefined, using=None):
"""
    Returns an HttpResponse whose content is filled with the result of calling
django.template.loader.render_to_string() with the passed arguments.
"""
if (context_instance is _context_instance_undefined
and dirs is _dirs_undefined
and dictionary is _dictionary_undefined):
# No deprecated arguments were passed - use the new code path
content = loader.render_to_string(template_name, context, using=using)
else:
# Some deprecated arguments were passed - use the legacy code path
content = loader.render_to_string(
template_name, context, context_instance, dirs, dictionary,
using=using)
return HttpResponse(content, content_type, status)
def render(request, template_name, context=None,
context_instance=_context_instance_undefined,
content_type=None, status=None, current_app=_current_app_undefined,
dirs=_dirs_undefined, dictionary=_dictionary_undefined,
using=None):
"""
    Returns an HttpResponse whose content is filled with the result of calling
django.template.loader.render_to_string() with the passed arguments.
Uses a RequestContext by default.
"""
if (context_instance is _context_instance_undefined
and current_app is _current_app_undefined
and dirs is _dirs_undefined
and dictionary is _dictionary_undefined):
# No deprecated arguments were passed - use the new code path
# In Django 1.10, request should become a positional argument.
content = loader.render_to_string(
template_name, context, request=request, using=using)
else:
# Some deprecated arguments were passed - use the legacy code path
if context_instance is not _context_instance_undefined:
if current_app is not _current_app_undefined:
raise ValueError('If you provide a context_instance you must '
'set its current_app before calling render()')
else:
context_instance = RequestContext(request)
if current_app is not _current_app_undefined:
warnings.warn(
"The current_app argument of render is deprecated. "
"Set the current_app attribute of request instead.",
RemovedInDjango110Warning, stacklevel=2)
request.current_app = current_app
# Directly set the private attribute to avoid triggering the
# warning in RequestContext.__init__.
context_instance._current_app = current_app
content = loader.render_to_string(
template_name, context, context_instance, dirs, dictionary,
using=using)
return HttpResponse(content, content_type, status)
def redirect(to, *args, **kwargs):
"""
Returns an HttpResponseRedirect to the appropriate URL for the arguments
passed.
The arguments could be:
* A model: the model's `get_absolute_url()` function will be called.
* A view name, possibly with arguments: `urlresolvers.reverse()` will
be used to reverse-resolve the name.
* A URL, which will be used as-is for the redirect location.
By default issues a temporary redirect; pass permanent=True to issue a
    permanent redirect.
"""
if kwargs.pop('permanent', False):
redirect_class = HttpResponsePermanentRedirect
else:
redirect_class = HttpResponseRedirect
return redirect_class(resolve_url(to, *args, **kwargs))
def _get_queryset(klass):
"""
Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
get_object_or_404 and get_list_or_404 more DRY.
Raises a ValueError if klass is not a Model, Manager, or QuerySet.
"""
if isinstance(klass, QuerySet):
return klass
elif isinstance(klass, Manager):
manager = klass
elif isinstance(klass, ModelBase):
manager = klass._default_manager
else:
if isinstance(klass, type):
klass__name = klass.__name__
else:
klass__name = klass.__class__.__name__
raise ValueError("Object is of type '%s', but must be a Django Model, "
"Manager, or QuerySet" % klass__name)
return manager.all()
def get_object_or_404(klass, *args, **kwargs):
"""
Uses get() to return an object, or raises a Http404 exception if the object
does not exist.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the get() query.
    Note: Like with get(), a MultipleObjectsReturned will be raised if more than one
object is found.
"""
queryset = _get_queryset(klass)
try:
return queryset.get(*args, **kwargs)
except queryset.model.DoesNotExist:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
"""
Uses filter() to return a list of objects, or raise a Http404 exception if
the list is empty.
klass may be a Model, Manager, or QuerySet object. All other passed
arguments and keyword arguments are used in the filter() query.
"""
queryset = _get_queryset(klass)
obj_list = list(queryset.filter(*args, **kwargs))
if not obj_list:
raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
return obj_list
def resolve_url(to, *args, **kwargs):
"""
Return a URL appropriate for the arguments passed.
The arguments could be:
* A model: the model's `get_absolute_url()` function will be called.
* A view name, possibly with arguments: `urlresolvers.reverse()` will
be used to reverse-resolve the name.
* A URL, which will be returned as-is.
"""
# If it's a model, use get_absolute_url()
if hasattr(to, 'get_absolute_url'):
return to.get_absolute_url()
if isinstance(to, Promise):
# Expand the lazy instance, as it can cause issues when it is passed
# further to some Python functions like urlparse.
to = force_text(to)
if isinstance(to, six.string_types):
# Handle relative URLs
if to.startswith(('./', '../')):
return to
# Next try a reverse URL resolution.
try:
return urlresolvers.reverse(to, args=args, kwargs=kwargs)
except urlresolvers.NoReverseMatch:
# If this is a callable, re-raise.
if callable(to):
raise
# If this doesn't "feel" like a URL, re-raise.
if '/' not in to and '.' not in to:
raise
# Finally, fall back and assume it's a URL
return to
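# Typical usage sketch (hypothetical Article model, template and URL name):
#     from django.shortcuts import get_object_or_404, redirect, render
#     def article_detail(request, pk):
#         article = get_object_or_404(Article, pk=pk)  # Http404 if missing
#         return render(request, 'articles/detail.html', {'article': article})
#     def article_vote(request, pk):
#         record_vote(request, pk)                     # hypothetical helper
#         return redirect('article-detail', pk=pk)     # reversed by URL name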
|
sukeesh/Jarvis
|
refs/heads/master
|
jarviscli/__main__.py
|
2
|
# -*- coding: utf-8 -*-
import Jarvis
import colorama
import sys
def check_python_version():
return sys.version_info[0] == 3
def main():
# enable color on windows
colorama.init()
# start Jarvis
jarvis = Jarvis.Jarvis()
command = " ".join(sys.argv[1:]).strip()
jarvis.executor(command)
if __name__ == '__main__':
if check_python_version():
main()
else:
print("Sorry! Only Python 3 supported.")
|
rhosqeauto/InfraRed
|
refs/heads/master
|
plugins/virsh/filter_plugins/wirehosts.py
|
3
|
from __future__ import print_function
from ansible import errors
def create_ifaces(source_node, target_node, bridge_pattern, iface):
for dst_cnt in range(int(target_node.get('num', 1))):
source_iface = iface.copy()
source_iface['model'] = source_iface['src_model']
for key in ['src_model', 'connect_to']:
try:
del source_iface[key]
except KeyError:
pass
if int(source_node.get('num', 1)) == 1:
source_iface['network'] = bridge_pattern.format("0", str(dst_cnt))
else:
source_iface['network'] = bridge_pattern.format("%s", str(dst_cnt))
source_iface['needs_formatting'] = True
source_node['interfaces'].append(source_iface)
for src_cnt in range(int(source_node.get('num', 1))):
target_iface = iface.copy()
for key in ['src_model', 'connect_to']:
try:
del target_iface[key]
except KeyError:
pass
if int(target_node.get('num', 1)) == 1:
target_iface['network'] = bridge_pattern.format(str(src_cnt), "0")
else:
target_iface['network'] = bridge_pattern.format(str(src_cnt), "%s")
target_iface['needs_formatting'] = True
target_node['interfaces'].append(target_iface)
def wire_node(nodes, node):
if 'interfaces' not in node:
return node
interfaces = node['interfaces']
node['interfaces'] = []
for iface in interfaces:
if 'connect_to' in iface:
try:
remote_node = nodes[iface['connect_to']]
except KeyError:
raise errors.AnsibleRuntimeError(
"Node %s does not exist in this topology!" %
iface['connect_to'])
bridge_pattern = "{:s}{{:s}}{:s}{{:s}}".format(
node['name'][:4], iface['connect_to'][:4])
create_ifaces(node, remote_node, bridge_pattern, iface)
else:
node['interfaces'].append(iface)
def wire_nodes(nodes):
for node in nodes.values():
wire_node(nodes, node)
return nodes
class FilterModule(object):
''' Wire nodes filter '''
def filters(self):
return {
'wirenodes': wire_nodes
}
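# Usage sketch: in a play this runs as "{{ topology_nodes | wirenodes }}"
# (hypothetical variable name), or straight from Python:
#     nodes = {
#         'controller': {'name': 'controller', 'num': 1, 'interfaces': [
#             {'network': 'data', 'connect_to': 'compute', 'src_model': 'virtio'}]},
#         'compute': {'name': 'compute', 'num': 2, 'interfaces': []},
#     }
#     wire_nodes(nodes)  # mutates both nodes, adding paired bridge interfaces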
|
tonysyu/deli
|
refs/heads/master
|
deli/utils/data_structures.py
|
1
|
from __future__ import absolute_import
from traits.api import Dict, Event, HasStrictTraits
class NoisyDict(HasStrictTraits):
""" Dict-like object that fires an event when keys are added or changed.
"""
#: Event fired when a new key is added or changed.
updated = Event
# The actual dictionary data that this class wraps.
_dict_data = Dict({})
def __init__(self, *args, **kwargs):
self.update(*args, **kwargs)
def __getitem__(self, name):
return self._dict_data[name]
def __setitem__(self, name, value):
self.update({name: value})
def update(self, *args, **kwargs):
data = dict(*args, **kwargs)
event = {}
for name in data:
event.setdefault('added', []).append(name)
self._dict_data.update(data)
self.updated = event
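# A quick usage sketch: listeners subscribe to `updated` through the standard
# Traits notification API.
if __name__ == '__main__':
    def report(event):
        print('added or changed: %s' % event['added'])
    d = NoisyDict(a=1)
    d.on_trait_change(report, 'updated')
    d['b'] = 2  # fires `updated` with {'added': ['b']}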
|
maddabini/robotframework-selenium2library
|
refs/heads/master
|
test/lib/mockito/spying.py
|
70
|
#!/usr/bin/env python
# coding: utf-8
'''Spying on real objects.'''
from invocation import RememberedProxyInvocation, VerifiableInvocation
from mocking import TestDouble
__author__ = "Serhiy Oplakanets <serhiy@oplakanets.com>"
__copyright__ = "Copyright 2009-2010, Mockito Contributors"
__license__ = "MIT"
__maintainer__ = "Mockito Maintainers"
__email__ = "mockito-python@googlegroups.com"
__all__ = ['spy']
def spy(original_object):
return Spy(original_object)
class Spy(TestDouble):
strict = True # spies always have to check if method exists
def __init__(self, original_object):
self.original_object = original_object
self.invocations = []
self.verification = None
def __getattr__(self, name):
if self.verification:
return VerifiableInvocation(self, name)
else:
return RememberedProxyInvocation(self, name)
def remember(self, invocation):
self.invocations.insert(0, invocation)
def pull_verification(self):
v = self.verification
self.verification = None
return v
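# A minimal usage sketch: the spy forwards calls to the real object while
# remembering them for later verification with mockito's verify().
if __name__ == '__main__':
    class Dog(object):
        def bark(self):
            return 'woof!'
    rex = spy(Dog())
    print rex.bark()  # 'woof!' -- the real method runs and is remembered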
|
srijanmishra/RouteFlow
|
refs/heads/master
|
pox/pox/lib/packet/llc.py
|
27
|
# Copyright 2013 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
import struct
from packet_base import packet_base
from ethernet import ethernet
from packet_utils import *
class llc (packet_base):
"802.2 LLC header, possibly with SNAP header"
MIN_LEN = 3
def __init__ (self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
self.next = None
self.length = self.MIN_LEN
self.dsap = None
self.ssap = None
self.control = None
self.oui = None
self.eth_type = ethernet.INVALID_TYPE
if raw is not None:
self.parse(raw)
self._init(kw)
@property
def has_snap (self):
return self.oui is not None
def __str__ (self):
#TODO: include field values!
s = "[LLC"
if self.has_snap:
s += "+SNAP t:%04x" % (self.eth_type,)
else:
s += " ssap:0x%02x dsap:0x%02x c:%s" % (self.ssap, self.dsap,
self.control)
s += "]"
return s
def parse (self, raw):
assert isinstance(raw, bytes)
self.raw = raw
dlen = len(raw)
if dlen < self.MIN_LEN:
self.msg('(llc parse) warning: packet data too short')
return
self.length = 3
(self.dsap, self.ssap, self.control) \
= struct.unpack('!BBB', raw[:self.MIN_LEN])
if ((self.control & 1) == 0) or ((self.control & 3) == 2):
if dlen < self.length + 1:
self.msg('(llc parse) warning: packet data too short')
return
self.control |= (ord(raw[3:4]) << 8)
self.length = 4
if (self.ssap & 0xfe) == 0xaa:
if (self.dsap & 0xfe) == 0xaa:
# Oh snap
if dlen < self.length + 5:
self.msg('(llc parse) warning: incomplete SNAP')
return
self.oui = raw[self.length:self.length+3]
self.length += 3
self.eth_type = struct.unpack("!H", raw[self.length:self.length+2])[0]
self.length += 2
self.parsed = True
self.next = ethernet.parse_next(self, self.eth_type, raw, self.length,
allow_llc = False)
@property
def effective_ethertype (self):
return ethernet._get_effective_ethertype(self)
@property
def type (self):
"""
This is just an alias for eth_type.
It's annoying that the ethertype on an ethernet packet is in the
'type' attribute, and for vlan/llc it's in the 'eth_type' attribute.
We should probably normalize this. For now, we at least have this.
"""
return self.eth_type
def hdr (self, payload):
r = struct.pack("!BB", self.dsap, self.ssap)
if self.length == 3 or self.length == 8:
# One byte control
r += struct.pack("!B", self.control)
else:
#FIXME: this is sloppy
r += chr(self.control & 0xff)
r += chr((self.control>>8) & 0xff)
if self.has_snap:
# SNAP
r += self.oui
r += struct.pack("!H", self.eth_type)
return r
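# A minimal parse sketch (not part of POX; byte values chosen only for
# illustration): DSAP=0xAA, SSAP=0xAA and control=0x03 signal a SNAP header,
# followed by a 3-byte OUI and a 16-bit EtherType (0x0800, i.e. IPv4).
#
#     raw = struct.pack('!BBB', 0xaa, 0xaa, 0x03) + b'\x00\x00\x00' \
#           + struct.pack('!H', 0x0800)
#     pkt = llc(raw=raw)
#     assert pkt.has_snap and pkt.eth_type == 0x0800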
|
mosra/m.css
|
refs/heads/master
|
documentation/test_python/content_parse_docstrings/content_parse_docstrings.py
|
1
|
"""This module has a *serious* docstring. And a :ref:`Class`.
And module **details** as well."""
import enum
class Class:
"""This class has a *serious* docstring.
With a multi-line summary. Relative reference to :ref:`a_property` works
even from a summary.
And class **details** as well."""
@property
def a_property(self) -> float:
"""The :ref:`a_property` has a *serious* docstring.
And property **details** as well."""
class Enum(enum.Enum):
"""This enum has a *serious* docstring. :ref:`VALUE` works from a summary.
:value ANOTHER: Values can be documented from a docstring, too.
And enum **details** as well."""
VALUE = 3
ANOTHER = 4
Enum.VALUE.__doc__ = """Enum value docstrings are *processed* as well.
The :ref:`ANOTHER` value is documented from within the :ref:`Enum` itself.
"""
def function(a: str, b: int) -> float:
"""This :ref:`function()` has a *serious* docstring.
:param a: And parameter docs, referring to :ref:`function()` as well.
On multiple lines.
:param b: *Wow.*
:return: This too. In the :ref:`function()`.
And details.
**Amazing**."""
def empty_docstring(): pass
def summary_only():
"""This is just a summary."""
# This should check we handle reST parsing errors gracefully. It will probably
# look extra weird in the output, but that's okay -- it's an error, after
# all.
def this_function_has_bad_docs(a, b) -> str:
"""This function has bad docs. It's freaking terrible.
Yes.
Really.
:broken: yes
"""
|
chirilo/kitsune
|
refs/heads/master
|
kitsune/questions/tests/test_utils.py
|
16
|
from nose.tools import eq_
from kitsune.questions.models import Question, Answer
from kitsune.questions.tests import question, answer
from kitsune.questions.utils import (
num_questions, num_answers, num_solutions, mark_content_as_spam)
from kitsune.sumo.tests import TestCase
from kitsune.users.tests import user
class ContributionCountTestCase(TestCase):
def test_num_questions(self):
"""Answers are counted correctly on a user."""
u = user(save=True)
eq_(num_questions(u), 0)
q1 = question(creator=u, save=True)
eq_(num_questions(u), 1)
q2 = question(creator=u, save=True)
eq_(num_questions(u), 2)
q1.delete()
eq_(num_questions(u), 1)
q2.delete()
eq_(num_questions(u), 0)
def test_num_answers(self):
u = user(save=True)
q = question(save=True)
eq_(num_answers(u), 0)
a1 = answer(creator=u, question=q, save=True)
eq_(num_answers(u), 1)
a2 = answer(creator=u, question=q, save=True)
eq_(num_answers(u), 2)
a1.delete()
eq_(num_answers(u), 1)
a2.delete()
eq_(num_answers(u), 0)
def test_num_solutions(self):
u = user(save=True)
q1 = question(save=True)
q2 = question(save=True)
a1 = answer(creator=u, question=q1, save=True)
a2 = answer(creator=u, question=q2, save=True)
eq_(num_solutions(u), 0)
q1.solution = a1
q1.save()
eq_(num_solutions(u), 1)
q2.solution = a2
q2.save()
eq_(num_solutions(u), 2)
q1.solution = None
q1.save()
eq_(num_solutions(u), 1)
a2.delete()
eq_(num_solutions(u), 0)
class FlagUserContentAsSpamTestCase(TestCase):
def test_flag_content_as_spam(self):
# Create some questions and answers by the user.
u = user(save=True)
question(creator=u, save=True)
question(creator=u, save=True)
answer(creator=u, save=True)
answer(creator=u, save=True)
answer(creator=u, save=True)
# Verify they are not marked as spam yet.
eq_(2, Question.objects.filter(is_spam=False, creator=u).count())
eq_(0, Question.objects.filter(is_spam=True, creator=u).count())
eq_(3, Answer.objects.filter(is_spam=False, creator=u).count())
eq_(0, Answer.objects.filter(is_spam=True, creator=u).count())
# Flag content as spam and verify it is updated.
mark_content_as_spam(u, user(save=True))
eq_(0, Question.objects.filter(is_spam=False, creator=u).count())
eq_(2, Question.objects.filter(is_spam=True, creator=u).count())
eq_(0, Answer.objects.filter(is_spam=False, creator=u).count())
eq_(3, Answer.objects.filter(is_spam=True, creator=u).count())
|
nozuono/calibre-webserver
|
refs/heads/master
|
src/tinycss/token_data.py
|
19
|
# coding: utf8
"""
tinycss.token_data
------------------
Shared data for both implementations (Cython and Python) of the tokenizer.
:copyright: (c) 2012 by Simon Sapin.
:license: BSD, see LICENSE for more details.
"""
from __future__ import unicode_literals
import re
import sys
import operator
import functools
import string
# * Raw strings with the r'' notation are used so that \ do not need
# to be escaped.
# * Names and regexps are separated by a tabulation.
# * Macros are re-ordered so that only previous definitions are needed.
# * {} are used for macro substitution with ``string.Formatter``,
# so other uses of { or } have been doubled.
# * The syntax is otherwise compatible with re.compile.
# * Some parentheses were added to add capturing groups.
# (in unicode, DIMENSION and URI)
# *** Willful violation: ***
# Numbers can take a + or - sign, but the sign is a separate DELIM token.
# Since comments are allowed anywhere between tokens, this makes
# the following valid. It means 10 negative pixels:
# margin-top: -/**/10px
# This makes parsing numbers a pain, so instead we'll do the same as Firefox
# and make the sign part of the 'num' macro. The above CSS will be invalid.
# See discussion:
# http://lists.w3.org/Archives/Public/www-style/2011Oct/0028.html
MACROS = r'''
nl \n|\r\n|\r|\f
w [ \t\r\n\f]*
nonascii [^\0-\237]
unicode \\([0-9a-f]{{1,6}})(\r\n|[ \n\r\t\f])?
simple_escape [^\n\r\f0-9a-f]
escape {unicode}|\\{simple_escape}
nmstart [_a-z]|{nonascii}|{escape}
nmchar [_a-z0-9-]|{nonascii}|{escape}
name {nmchar}+
ident [-]?{nmstart}{nmchar}*
num [-+]?(?:[0-9]*\.[0-9]+|[0-9]+)
string1 \"([^\n\r\f\\"]|\\{nl}|{escape})*\"
string2 \'([^\n\r\f\\']|\\{nl}|{escape})*\'
string {string1}|{string2}
badstring1 \"([^\n\r\f\\"]|\\{nl}|{escape})*\\?
badstring2 \'([^\n\r\f\\']|\\{nl}|{escape})*\\?
badstring {badstring1}|{badstring2}
badcomment1 \/\*[^*]*\*+([^/*][^*]*\*+)*
badcomment2 \/\*[^*]*(\*+[^/*][^*]*)*
badcomment {badcomment1}|{badcomment2}
baduri1 url\({w}([!#$%&*-~]|{nonascii}|{escape})*{w}
baduri2 url\({w}{string}{w}
baduri3 url\({w}{badstring}
baduri {baduri1}|{baduri2}|{baduri3}
'''.replace(r'\0', '\0').replace(r'\237', '\237')
# Removed these tokens. Instead, they’re tokenized as two DELIM each.
# INCLUDES ~=
# DASHMATCH |=
# They are only used in selectors but selectors3 also have ^=, *= and $=.
# We don’t actually parse selectors anyway
# Re-ordered so that the longest match is always the first.
# For example, "url('foo')" matches URI, BAD_URI, FUNCTION and IDENT,
# but URI would always be a longer match than the others.
TOKENS = r'''
S [ \t\r\n\f]+
URI url\({w}({string}|([!#$%&*-\[\]-~]|{nonascii}|{escape})*){w}\)
BAD_URI {baduri}
FUNCTION {ident}\(
UNICODE-RANGE u\+[0-9a-f?]{{1,6}}(-[0-9a-f]{{1,6}})?
IDENT {ident}
ATKEYWORD @{ident}
HASH #{name}
DIMENSION ({num})({ident})
PERCENTAGE {num}%
NUMBER {num}
STRING {string}
BAD_STRING {badstring}
COMMENT \/\*[^*]*\*+([^/*][^*]*\*+)*\/
BAD_COMMENT {badcomment}
: :
; ;
{ \{{
} \}}
( \(
) \)
[ \[
] \]
CDO <!--
CDC -->
'''
# Strings with {macro} expanded
COMPILED_MACROS = {}
COMPILED_TOKEN_REGEXPS = [] # [(name, regexp.match)] ordered
COMPILED_TOKEN_INDEXES = {} # {name: i} helper for the C speedups
# Indexed by codepoint value of the first character of a token.
# Codepoints >= 160 (aka nonascii) all use the index 160.
# values are (i, name, regexp.match)
TOKEN_DISPATCH = []
try:
unichr
except NameError:
# Python 3
unichr = chr
unicode = str
def _init():
"""Import-time initialization."""
COMPILED_MACROS.clear()
for line in MACROS.splitlines():
if line.strip():
name, value = line.split('\t')
COMPILED_MACROS[name.strip()] = '(?:%s)' \
% value.format(**COMPILED_MACROS)
COMPILED_TOKEN_REGEXPS[:] = (
(
name.strip(),
re.compile(
value.format(**COMPILED_MACROS),
# Case-insensitive when matching eg. uRL(foo)
# but preserve the case in extracted groups
re.I
).match
)
for line in TOKENS.splitlines()
if line.strip()
for name, value in [line.split('\t')]
)
COMPILED_TOKEN_INDEXES.clear()
for i, (name, regexp) in enumerate(COMPILED_TOKEN_REGEXPS):
COMPILED_TOKEN_INDEXES[name] = i
dispatch = [[] for i in range(161)]
for chars, names in [
(' \t\r\n\f', ['S']),
('uU', ['URI', 'BAD_URI', 'UNICODE-RANGE']),
# \ is an escape outside of another token
(string.ascii_letters + '\\_-' + unichr(160), ['FUNCTION', 'IDENT']),
(string.digits + '.+-', ['DIMENSION', 'PERCENTAGE', 'NUMBER']),
('@', ['ATKEYWORD']),
('#', ['HASH']),
('\'"', ['STRING', 'BAD_STRING']),
('/', ['COMMENT', 'BAD_COMMENT']),
('<', ['CDO']),
('-', ['CDC']),
]:
for char in chars:
dispatch[ord(char)].extend(names)
for char in ':;{}()[]':
dispatch[ord(char)] = [char]
TOKEN_DISPATCH[:] = (
[
(index,) + COMPILED_TOKEN_REGEXPS[index]
for name in names
for index in [COMPILED_TOKEN_INDEXES[name]]
]
for names in dispatch
)
_init()
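# Illustrative check (not part of the original module): after _init(), each
# entry of COMPILED_TOKEN_REGEXPS is a (name, match) pair with all macros
# expanded, so the DIMENSION regexp captures the number and the unit:
#
#     >>> dict(COMPILED_TOKEN_REGEXPS)['DIMENSION']('12px').group(1, 2)
#     ('12', 'px')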
def _unicode_replace(match, int=int, unichr=unichr, maxunicode=sys.maxunicode):
codepoint = int(match.group(1), 16)
if codepoint <= maxunicode:
return unichr(codepoint)
else:
return '\N{REPLACEMENT CHARACTER}' # U+FFFD
UNICODE_UNESCAPE = functools.partial(
re.compile(COMPILED_MACROS['unicode'], re.I).sub,
_unicode_replace)
NEWLINE_UNESCAPE = functools.partial(
re.compile(r'()\\' + COMPILED_MACROS['nl']).sub,
'')
SIMPLE_UNESCAPE = functools.partial(
re.compile(r'\\(%s)' % COMPILED_MACROS['simple_escape'] , re.I).sub,
# Same as r'\1', but faster on CPython
operator.methodcaller('group', 1))
FIND_NEWLINES = lambda x : list(re.compile(COMPILED_MACROS['nl']).finditer(x))
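# Illustrative examples (not in the original module) of the unescaping
# helpers defined above:
#
#     >>> UNICODE_UNESCAPE(r'\26 B')  # U+0026 AMPERSAND; trailing space eaten
#     '&B'
#     >>> SIMPLE_UNESCAPE(r'\%')      # a simple escape just drops the backslash
#     '%'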
class Token(object):
"""A single atomic token.
.. attribute:: is_container
Always ``False``.
Helps to tell :class:`Token` apart from :class:`ContainerToken`.
.. attribute:: type
The type of token as a string:
``S``
A sequence of white space
``IDENT``
An identifier: a name that does not start with a digit.
A name is a sequence of letters, digits, ``_``, ``-``, escaped
characters and non-ASCII characters. Eg: ``margin-left``
``HASH``
``#`` followed immediately by a name. Eg: ``#ff8800``
``ATKEYWORD``
``@`` followed immediately by an identifier. Eg: ``@page``
``URI``
Eg: ``url(foo)`` The content may or may not be quoted.
``UNICODE-RANGE``
``U+`` followed by one or two hexadecimal
Unicode codepoints. Eg: ``U+20-00FF``
``INTEGER``
An integer with an optional ``+`` or ``-`` sign
``NUMBER``
A non-integer number with an optional ``+`` or ``-`` sign
``DIMENSION``
An integer or number followed immediately by an
identifier (the unit). Eg: ``12px``
``PERCENTAGE``
An integer or number followed immediately by ``%``
``STRING``
A string, quoted with ``"`` or ``'``
``:`` or ``;``
That character.
``DELIM``
A single character not matched in another token. Eg: ``,``
See the source of the :mod:`.token_data` module for the precise
regular expressions that match various tokens.
Note that other token types exist in the early tokenization steps,
but these are ignored, are syntax errors, or are later transformed
into :class:`ContainerToken` or :class:`FunctionToken`.
.. attribute:: value
The parsed value:
* INTEGER, NUMBER, PERCENTAGE or DIMENSION tokens: the numeric value
as an int or float.
* STRING tokens: the unescaped string without quotes
* URI tokens: the unescaped URI without quotes or
``url(`` and ``)`` markers.
* IDENT, ATKEYWORD or HASH tokens: the unescaped token,
with ``@`` or ``#`` markers left as-is
* Other tokens: same as :attr:`as_css`
*Unescaped* refers to the various escaping methods based on the
backslash ``\`` character in CSS syntax.
.. attribute:: unit
* DIMENSION tokens: the normalized (unescaped, lower-case)
unit name as a string. eg. ``'px'``
* PERCENTAGE tokens: the string ``'%'``
* Other tokens: ``None``
.. attribute:: line
The line number in the CSS source of the start of this token.
.. attribute:: column
The column number (inside a source line) of the start of this token.
"""
is_container = False
__slots__ = 'type', '_as_css', 'value', 'unit', 'line', 'column'
def __init__(self, type_, css_value, value, unit, line, column):
self.type = type_
self._as_css = css_value
self.value = value
self.unit = unit
self.line = line
self.column = column
def as_css(self):
"""
Return as a Unicode string the CSS representation of the token,
as parsed in the source.
"""
return self._as_css
def __repr__(self):
return ('<Token {0.type} at {0.line}:{0.column} {0.value!r}{1}>'
.format(self, self.unit or ''))
class ContainerToken(object):
"""A token that contains other (nested) tokens.
.. attribute:: is_container
Always ``True``.
Helps to tell :class:`ContainerToken` apart from :class:`Token`.
.. attribute:: type
The type of token as a string. One of ``{``, ``(``, ``[`` or
``FUNCTION``. For ``FUNCTION``, the object is actually a
:class:`FunctionToken`.
.. attribute:: unit
Always ``None``. Included to make :class:`ContainerToken` behave
more like :class:`Token`.
.. attribute:: content
A list of :class:`Token` or nested :class:`ContainerToken`,
not including the opening or closing token.
.. attribute:: line
The line number in the CSS source of the start of this token.
.. attribute:: column
The column number (inside a source line) of the start of this token.
"""
is_container = True
unit = None
__slots__ = 'type', '_css_start', '_css_end', 'content', 'line', 'column'
def __init__(self, type_, css_start, css_end, content, line, column):
self.type = type_
self._css_start = css_start
self._css_end = css_end
self.content = content
self.line = line
self.column = column
def as_css(self):
"""
Return as a Unicode string the CSS representation of the token,
as parsed in the source.
"""
parts = [self._css_start]
parts.extend(token.as_css() for token in self.content)
parts.append(self._css_end)
return ''.join(parts)
format_string = '<ContainerToken {0.type} at {0.line}:{0.column}>'
def __repr__(self):
return (self.format_string + ' {0.content}').format(self)
class FunctionToken(ContainerToken):
"""A specialized :class:`ContainerToken` for a ``FUNCTION`` group.
Has an additional attribute:
.. attribute:: function_name
The unescaped name of the function, with the ``(`` marker removed.
"""
__slots__ = 'function_name',
def __init__(self, type_, css_start, css_end, function_name, content,
line, column):
super(FunctionToken, self).__init__(
type_, css_start, css_end, content, line, column)
# Remove the ( marker:
self.function_name = function_name[:-1]
format_string = ('<FunctionToken {0.function_name}() at '
'{0.line}:{0.column}>')
class TokenList(list):
"""
A mixed list of :class:`~.token_data.Token` and
:class:`~.token_data.ContainerToken` objects.
This is a subclass of the builtin :class:`~builtins.list` type.
It can be iterated, indexed and sliced as usual, but also has some
additional API:
"""
@property
def line(self):
"""The line number in the CSS source of the first token."""
return self[0].line
@property
def column(self):
"""The column number (inside a source line) of the first token."""
return self[0].column
def as_css(self):
"""
Return as a Unicode string the CSS representation of the tokens,
as parsed in the source.
"""
return ''.join(token.as_css() for token in self)
def load_c_tokenizer():
from calibre.constants import plugins
tokenizer, err = plugins['tokenizer']
if err:
raise RuntimeError('Failed to load module tokenizer: %s' % err)
tokens = list(':;(){}[]') + ['DELIM', 'INTEGER', 'STRING']
tokenizer.init(COMPILED_TOKEN_REGEXPS, UNICODE_UNESCAPE, NEWLINE_UNESCAPE, SIMPLE_UNESCAPE, FIND_NEWLINES, TOKEN_DISPATCH, COMPILED_TOKEN_INDEXES, *tokens)
return tokenizer
|
jblackburne/scikit-learn
|
refs/heads/master
|
examples/covariance/plot_sparse_cov.py
|
300
|
"""
======================================
Sparse inverse covariance estimation
======================================
Using the GraphLasso estimator to learn a covariance and sparse precision
from a small number of samples.
To estimate a probabilistic model (e.g. a Gaussian model), estimating the
precision matrix, that is the inverse covariance matrix, is as important
as estimating the covariance matrix. Indeed a Gaussian model is
parametrized by the precision matrix.
To be in favorable recovery conditions, we sample the data from a model
with a sparse inverse covariance matrix. In addition, we ensure that the
data is not too correlated (limiting the largest coefficient of the
precision matrix) and that there are no small coefficients in the
precision matrix that cannot be recovered. Moreover, with a small
number of observations, it is easier to recover a correlation matrix
rather than a covariance, thus we scale the time series.
Here, the number of samples is slightly larger than the number of
dimensions, thus the empirical covariance is still invertible. However,
as the observations are strongly correlated, the empirical covariance
matrix is ill-conditioned and as a result its inverse, the empirical
precision matrix, is very far from the ground truth.
If we use l2 shrinkage, as with the Ledoit-Wolf estimator, the small
number of samples forces us to shrink a lot. As a result, the
Ledoit-Wolf precision is fairly close to the ground truth precision, which
is not far from being diagonal, but the off-diagonal structure is lost.
The l1-penalized estimator can recover part of this off-diagonal
structure. It learns a sparse precision. It is not able to
recover the exact sparsity pattern: it detects too many non-zero
coefficients. However, the highest non-zero coefficients of the l1
estimate correspond to the non-zero coefficients in the ground truth.
Finally, the coefficients of the l1 precision estimate are biased toward
zero: because of the penalty, they are all smaller than the corresponding
ground truth value, as can be seen in the figure.
Note that the color range of the precision matrices is tweaked to
improve readability of the figure. The full range of values of the
empirical precision is not displayed.
The alpha parameter of the GraphLasso, which sets the sparsity of the model,
is chosen by internal cross-validation in GraphLassoCV. As can be
seen in figure 2, the grid on which the cross-validation score is computed is
iteratively refined in the neighborhood of the maximum.
"""
print(__doc__)
# author: Gael Varoquaux <gael.varoquaux@inria.fr>
# License: BSD 3 clause
# Copyright: INRIA
import numpy as np
from scipy import linalg
from sklearn.datasets import make_sparse_spd_matrix
from sklearn.covariance import GraphLassoCV, ledoit_wolf
import matplotlib.pyplot as plt
##############################################################################
# Generate the data
n_samples = 60
n_features = 20
prng = np.random.RandomState(1)
prec = make_sparse_spd_matrix(n_features, alpha=.98,
smallest_coef=.4,
largest_coef=.7,
random_state=prng)
cov = linalg.inv(prec)
d = np.sqrt(np.diag(cov))
cov /= d
cov /= d[:, np.newaxis]
prec *= d
prec *= d[:, np.newaxis]
X = prng.multivariate_normal(np.zeros(n_features), cov, size=n_samples)
X -= X.mean(axis=0)
X /= X.std(axis=0)
##############################################################################
# Estimate the covariance
emp_cov = np.dot(X.T, X) / n_samples
model = GraphLassoCV()
model.fit(X)
cov_ = model.covariance_
prec_ = model.precision_
lw_cov_, _ = ledoit_wolf(X)
lw_prec_ = linalg.inv(lw_cov_)
##############################################################################
# Plot the results
plt.figure(figsize=(10, 6))
plt.subplots_adjust(left=0.02, right=0.98)
# plot the covariances
covs = [('Empirical', emp_cov), ('Ledoit-Wolf', lw_cov_),
('GraphLasso', cov_), ('True', cov)]
vmax = cov_.max()
for i, (name, this_cov) in enumerate(covs):
plt.subplot(2, 4, i + 1)
plt.imshow(this_cov, interpolation='nearest', vmin=-vmax, vmax=vmax,
cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.title('%s covariance' % name)
# plot the precisions
precs = [('Empirical', linalg.inv(emp_cov)), ('Ledoit-Wolf', lw_prec_),
('GraphLasso', prec_), ('True', prec)]
vmax = .9 * prec_.max()
for i, (name, this_prec) in enumerate(precs):
ax = plt.subplot(2, 4, i + 5)
plt.imshow(np.ma.masked_equal(this_prec, 0),
interpolation='nearest', vmin=-vmax, vmax=vmax,
cmap=plt.cm.RdBu_r)
plt.xticks(())
plt.yticks(())
plt.title('%s precision' % name)
ax.set_axis_bgcolor('.7')
# plot the model selection metric
plt.figure(figsize=(4, 3))
plt.axes([.2, .15, .75, .7])
plt.plot(model.cv_alphas_, np.mean(model.grid_scores, axis=1), 'o-')
plt.axvline(model.alpha_, color='.5')
plt.title('Model selection')
plt.ylabel('Cross-validation score')
plt.xlabel('alpha')
plt.show()
|
frouty/odoo_oph
|
refs/heads/dev_70
|
addons/note/tests/test_note.py
|
427
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tests import common
class TestNote(common.TransactionCase):
def test_bug_lp_1156215(self):
"""ensure any users can create new users"""
cr, uid = self.cr, self.uid
IMD = self.registry('ir.model.data')
Users = self.registry('res.users')
_, demo_user = IMD.get_object_reference(cr, uid, 'base', 'user_demo')
_, group_id = IMD.get_object_reference(cr, uid, 'base', 'group_erp_manager')
Users.write(cr, uid, [demo_user], {
'groups_id': [(4, group_id)],
})
# must not fail
Users.create(cr, demo_user, {
'name': 'test bug lp:1156215',
'login': 'lp_1156215',
})
|
vmindru/ansible
|
refs/heads/devel
|
lib/ansible/modules/system/known_hosts.py
|
29
|
#!/usr/bin/python
# Copyright: (c) 2014, Matthew Vernon <mcv21@cam.ac.uk>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: known_hosts
short_description: Add or remove a host from the C(known_hosts) file
description:
- The C(known_hosts) module lets you add or remove host keys from the C(known_hosts) file.
- Starting with Ansible 2.2, multiple entries per host are allowed, but only one for each key type supported by ssh.
This is useful if you're going to want to use the M(git) module over ssh, for example.
- If you have a very large number of host keys to manage, you will find the M(template) module more useful.
version_added: "1.9"
options:
name:
aliases: [ 'host' ]
description:
- The host to add or remove (must match a host specified in key). It will be converted to lowercase so that ssh-keygen can find it.
required: true
key:
description:
- The SSH public host key, as a string (required if state=present, optional when state=absent, in which case all keys for the host are removed).
The key must be in the right format for ssh (see sshd(8), section "SSH_KNOWN_HOSTS FILE FORMAT").
Specifically, the key should not match the format that is found in an SSH pubkey file, but should rather have the hostname prepended to a
line that includes the pubkey, the same way that it would appear in the known_hosts file. The value prepended to the line must also match
the value of the name parameter.
path:
description:
- The known_hosts file to edit
default: "(homedir)+/.ssh/known_hosts"
hash_host:
description:
- Hash the hostname in the known_hosts file
type: bool
default: 'no'
version_added: "2.3"
state:
description:
- I(present) to add the host key, I(absent) to remove it.
choices: [ "present", "absent" ]
default: present
requirements: [ ]
author: "Matthew Vernon (@mcv21)"
'''
EXAMPLES = '''
- name: tell the host about our servers it might want to ssh to
known_hosts:
path: /etc/ssh/ssh_known_hosts
name: foo.com.invalid
key: "{{ lookup('file', 'pubkeys/foo.com.invalid') }}"
'''
# Makes sure public host keys are present or absent in the given known_hosts
# file.
#
# Arguments
# =========
# name = hostname whose key should be added (alias: host)
# key = line(s) to add to known_hosts file
# path = the known_hosts file to edit (default: ~/.ssh/known_hosts)
# hash_host = yes|no (default: no) hash the hostname in the known_hosts file
# state = absent|present (default: present)
import base64
import errno
import hashlib
import hmac
import os
import os.path
import re
import tempfile
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.file import FileLock
from ansible.module_utils._text import to_bytes, to_native
def enforce_state(module, params):
"""
Add or remove key.
"""
host = params["name"].lower()
key = params.get("key", None)
path = params.get("path")
hash_host = params.get("hash_host")
state = params.get("state")
# Find the ssh-keygen binary
sshkeygen = module.get_bin_path("ssh-keygen", True)
if not key and state != "absent":
module.fail_json(msg="No key specified when adding a host")
if key and hash_host:
key = hash_host_key(host, key)
# Trailing newline in files gets lost, so re-add if necessary
if key and not key.endswith('\n'):
key += '\n'
sanity_check(module, host, key, sshkeygen)
found, replace_or_add, found_line = search_for_host_key(module, host, key, path, sshkeygen)
params['diff'] = compute_diff(path, found_line, replace_or_add, state, key)
# We will change state if found==True & state!="present"
# or found==False & state=="present"
# i.e. found XOR (state=="present")
# Alternatively, if replace is true (i.e. key present, and we must change
# it)
if module.check_mode:
module.exit_json(changed=replace_or_add or (state == "present") != found,
diff=params['diff'])
# Now do the work.
# Only remove whole host if found and no key provided
if found and not key and state == "absent":
module.run_command([sshkeygen, '-R', host, '-f', path], check_rc=True)
params['changed'] = True
# Next, add a new (or replacing) entry
if replace_or_add or found != (state == "present"):
try:
inf = open(path, "r")
except IOError as e:
if e.errno == errno.ENOENT:
inf = None
else:
module.fail_json(msg="Failed to read %s: %s" % (path, str(e)))
try:
with tempfile.NamedTemporaryFile(mode='w+', dir=os.path.dirname(path), delete=False) as outf:
if inf is not None:
for line_number, line in enumerate(inf):
if found_line == (line_number + 1) and (replace_or_add or state == 'absent'):
continue # skip this line to replace its key
outf.write(line)
inf.close()
if state == 'present':
outf.write(key)
except (IOError, OSError) as e:
module.fail_json(msg="Failed to write to file %s: %s" % (path, to_native(e)))
else:
module.atomic_move(outf.name, path)
params['changed'] = True
return params
def sanity_check(module, host, key, sshkeygen):
'''Check supplied key is sensible
host and key are parameters provided by the user; If the host
provided is inconsistent with the key supplied, then this function
quits, providing an error to the user.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
'''
# If no key supplied, we're doing a removal, and have nothing to check here.
if not key:
return
# Rather than parsing the key ourselves, get ssh-keygen to do it
# (this is essential for hashed keys, but otherwise useful, as the
# key question is whether ssh-keygen thinks the key matches the host).
# The approach is to write the key to a temporary file,
# and then attempt to look up the specified host in that file.
if re.search(r'\S+(\s+)?,(\s+)?', host):
module.fail_json(msg="Comma separated list of names is not supported. "
"Please pass a single name to lookup in the known_hosts file.")
with tempfile.NamedTemporaryFile(mode='w+') as outf:
try:
outf.write(key)
outf.flush()
except IOError as e:
module.fail_json(msg="Failed to write to temporary file %s: %s" %
(outf.name, to_native(e)))
sshkeygen_command = [sshkeygen, '-F', host, '-f', outf.name]
rc, stdout, stderr = module.run_command(sshkeygen_command)
if stdout == '': # host not found
module.fail_json(msg="Host parameter does not match hashed host field in supplied key")
def search_for_host_key(module, host, key, path, sshkeygen):
'''search_for_host_key(module,host,key,path,sshkeygen) -> (found,replace_or_add,found_line)
Looks up host and keytype in the known_hosts file path; if it's there, looks to see
if one of those entries matches key. Returns:
found (Boolean): is host found in path?
replace_or_add (Boolean): is the key in path different to that supplied by user?
found_line (int or None): the line where a key of the same type was found
if found=False, then replace is always False.
sshkeygen is the path to ssh-keygen, found earlier with get_bin_path
'''
if os.path.exists(path) is False:
return False, False, None
sshkeygen_command = [sshkeygen, '-F', host, '-f', path]
# openssh >=6.4 has changed ssh-keygen behaviour such that it returns
# 1 if no host is found, whereas previously it returned 0
rc, stdout, stderr = module.run_command(sshkeygen_command, check_rc=False)
if stdout == '' and stderr == '' and (rc == 0 or rc == 1):
return False, False, None # host not found, no other errors
if rc != 0: # something went wrong
module.fail_json(msg="ssh-keygen failed (rc=%d, stdout='%s',stderr='%s')" % (rc, stdout, stderr))
# If user supplied no key, we don't want to try and replace anything with it
if not key:
return True, False, None
lines = stdout.split('\n')
new_key = normalize_known_hosts_key(key)
for lnum, l in enumerate(lines):
if l == '':
continue
elif l[0] == '#': # info output from ssh-keygen; contains the line number where key was found
try:
# This output format has been hardcoded in ssh-keygen since at least OpenSSH 4.0
# It always outputs the non-localized comment before the found key
found_line = int(re.search(r'found: line (\d+)', l).group(1))
except IndexError:
module.fail_json(msg="failed to parse output of ssh-keygen for line number: '%s'" % l)
else:
found_key = normalize_known_hosts_key(l)
if new_key['host'][:3] == '|1|' and found_key['host'][:3] == '|1|': # do not change host hash if already hashed
new_key['host'] = found_key['host']
if new_key == found_key: # found a match
return True, False, found_line # found exactly the same key, don't replace
elif new_key['type'] == found_key['type']: # found a different key for the same key type
return True, True, found_line
# No match found, return found and replace, but no line
return True, True, None
def hash_host_key(host, key):
hmac_key = os.urandom(20)
hashed_host = hmac.new(hmac_key, to_bytes(host), hashlib.sha1).digest()
parts = key.strip().split()
# @ indicates the optional marker field used for @cert-authority or @revoked
i = 1 if parts[0][0] == '@' else 0
parts[i] = '|1|%s|%s' % (to_native(base64.b64encode(hmac_key)), to_native(base64.b64encode(hashed_host)))
return ' '.join(parts)
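# Illustrative output shape (values here are made up, not computed): a hashed
# entry follows OpenSSH's HashKnownHosts format,
#   '|1|<base64 salt>|<base64 HMAC-SHA1 of host> ssh-rsa AAAAB3Nz...'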
def normalize_known_hosts_key(key):
'''
Transform a key, either taken from a known_host file or provided by the
user, into a normalized form.
The host part (which might include multiple hostnames or be hashed) gets
replaced by the provided host. Also, any spurious information gets removed
from the end (like the username@host tag usually present in hostkeys, but
absent in known_hosts files)
'''
key = key.strip() # trim trailing newline
k = key.split()
d = dict()
# The optional "marker" field, used for @cert-authority or @revoked
if k[0][0] == '@':
d['options'] = k[0]
d['host'] = k[1]
d['type'] = k[2]
d['key'] = k[3]
else:
d['host'] = k[0]
d['type'] = k[1]
d['key'] = k[2]
return d
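# Illustrative example (not part of the module): a user-supplied line such as
#   'example.com ssh-rsa AAAAB3Nz... user@host'
# normalizes to
#   {'host': 'example.com', 'type': 'ssh-rsa', 'key': 'AAAAB3Nz...'}
# so it compares equal to the corresponding known_hosts entry, with any
# trailing comment dropped.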
def compute_diff(path, found_line, replace_or_add, state, key):
diff = {
'before_header': path,
'after_header': path,
'before': '',
'after': '',
}
try:
inf = open(path, "r")
except IOError as e:
if e.errno == errno.ENOENT:
diff['before_header'] = '/dev/null'
else:
diff['before'] = inf.read()
inf.close()
lines = diff['before'].splitlines(1)
if (replace_or_add or state == 'absent') and found_line is not None and 1 <= found_line <= len(lines):
del lines[found_line - 1]
if state == 'present' and (replace_or_add or found_line is None):
lines.append(key)
diff['after'] = ''.join(lines)
return diff
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True, type='str', aliases=['host']),
key=dict(required=False, type='str'),
path=dict(default="~/.ssh/known_hosts", type='path'),
hash_host=dict(required=False, type='bool', default=False),
state=dict(default='present', choices=['absent', 'present']),
),
supports_check_mode=True
)
results = enforce_state(module, module.params)
module.exit_json(**results)
if __name__ == '__main__':
main()
|
RoyFerry/mecha-2.6.35-gb-mr
|
refs/heads/master
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack, drawn as a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for some time, but not too long; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
found in any parent, then create it as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
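# Illustrative input (an assumed ftrace line; the format matches the regex
# above):
#   'bash-16939 [000] 6075.461561: mutex_unlock <-tracing_record_cmdline'
# parses to ('6075.461561', 'mutex_unlock', 'tracing_record_cmdline').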
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
Jaiglissechef-i9100/f4ktion_kernel
|
refs/heads/lp-5.0
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack, drawn as a textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for some time, but not too long; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
found in any parent, then create it as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
xfumihiro/powerline
|
refs/heads/develop
|
powerline/bindings/pdb/__init__.py
|
34
|
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import pdb
from powerline.pdb import PDBPowerline
from powerline.lib.encoding import get_preferred_output_encoding
from powerline.lib.unicode import unicode
if sys.version_info < (3,):
# XXX The below classes make code compatible with PDBpp which uses pyrepl
# which does not expect unicode or anything above ASCII. They are
# not needed at all if pdbpp is not used, but that's not always the
# case.
class PowerlineRenderBytesResult(bytes):
def __new__(cls, s, encoding=None):
encoding = encoding or s.encoding
if isinstance(s, PowerlineRenderResult):
return s.encode(encoding)
self = bytes.__new__(cls, s.encode(encoding) if isinstance(s, unicode) else s)
self.encoding = encoding
return self
for meth in (
'__contains__',
'partition', 'rpartition',
'split', 'rsplit',
'count', 'join',
):
exec((
'def {0}(self, *args):\n'
' if any((isinstance(arg, unicode) for arg in args)):\n'
' return self.__unicode__().{0}(*args)\n'
' else:\n'
' return bytes.{0}(self, *args)'
).format(meth))
for meth in (
'find', 'rfind',
'index', 'rindex',
):
exec((
'def {0}(self, *args):\n'
' if any((isinstance(arg, unicode) for arg in args)):\n'
' args = [arg.encode(self.encoding) if isinstance(arg, unicode) else arg for arg in args]\n'
' return bytes.{0}(self, *args)'
).format(meth))
def __len__(self):
return len(self.decode(self.encoding))
def __getitem__(self, *args):
return PowerlineRenderBytesResult(bytes.__getitem__(self, *args), encoding=self.encoding)
def __getslice__(self, *args):
return PowerlineRenderBytesResult(bytes.__getslice__(self, *args), encoding=self.encoding)
@staticmethod
def add(encoding, *args):
if any((isinstance(arg, unicode) for arg in args)):
return PowerlineRenderResult(''.join((
arg
if isinstance(arg, unicode)
else arg.decode(encoding)
for arg in args
)), encoding)
else:
return PowerlineRenderBytesResult(b''.join(args), encoding=encoding)
def __add__(self, other):
return self.add(self.encoding, self, other)
def __radd__(self, other):
return self.add(self.encoding, other, self)
def __unicode__(self):
return PowerlineRenderResult(self)
class PowerlineRenderResult(unicode):
def __new__(cls, s, encoding=None):
encoding = (
encoding
or getattr(s, 'encoding', None)
or get_preferred_output_encoding()
)
if isinstance(s, unicode):
self = unicode.__new__(cls, s)
else:
self = unicode.__new__(cls, s, encoding, 'replace')
self.encoding = encoding
return self
def __str__(self):
return PowerlineRenderBytesResult(self)
def __getitem__(self, *args):
return PowerlineRenderResult(unicode.__getitem__(self, *args))
def __getslice__(self, *args):
return PowerlineRenderResult(unicode.__getslice__(self, *args))
@staticmethod
def add(encoding, *args):
return PowerlineRenderResult(''.join((
arg
if isinstance(arg, unicode)
else arg.decode(encoding)
for arg in args
)), encoding)
def __add__(self, other):
return self.add(self.encoding, self, other)
def __radd__(self, other):
return self.add(self.encoding, other, self)
def encode(self, *args, **kwargs):
return PowerlineRenderBytesResult(unicode.encode(self, *args, **kwargs), args[0])
else:
PowerlineRenderResult = str
def use_powerline_prompt(cls):
'''Decorator that installs the powerline prompt on the class
:param pdb.Pdb cls:
Class that should be decorated.
:return:
``cls`` argument or a class derived from it. The latter is used to turn
old-style classes into new-style classes.
'''
@property
def prompt(self):
try:
powerline = self.powerline
except AttributeError:
powerline = PDBPowerline()
powerline.setup(self)
self.powerline = powerline
return PowerlineRenderResult(powerline.render(side='left'))
@prompt.setter
def prompt(self, _):
pass
@prompt.deleter
def prompt(self):
pass
if not hasattr(cls, '__class__'):
# Old-style class: make it new-style or @property will not work.
old_cls = cls
class cls(cls, object):
__module__ = cls.__module__
__doc__ = cls.__doc__
cls.__name__ = old_cls.__name__
cls.prompt = prompt
return cls
def main():
'''Run module as a script
Uses the :py:func:`pdb.main` function directly, but prior to that it replaces
the :py:class:`pdb.Pdb` class with a powerline-specific subclass.
'''
orig_pdb = pdb.Pdb
@use_powerline_prompt
class Pdb(pdb.Pdb, object):
def __init__(self):
orig_pdb.__init__(self)
pdb.Pdb = Pdb
return pdb.main()
|
amarzavery/AutoRest
|
refs/heads/master
|
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyArray/auto_rest_swagger_bat_array_service/exceptions.py
|
687
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.exceptions import (
ClientException,
SerializationError,
DeserializationError,
TokenExpiredError,
ClientRequestError,
AuthenticationError,
HttpOperationError,
ValidationError,
)
|
gauribhoite/personfinder
|
refs/heads/master
|
env/google_appengine/lib/django-1.4/django/utils/regex_helper.py
|
86
|
"""
Functions for reversing a regular expression (used in reverse URL resolving).
Used internally by Django and not intended for external use.
This is not, and is not intended to be, a complete reg-exp decompiler. It
should be good enough for a large class of URLs, however.
"""
# Mapping of an escape character to a representative of that class. So, e.g.,
# "\w" is replaced by "x" in a reverse URL. A value of None means to ignore
# this sequence. Any missing key is mapped to itself.
ESCAPE_MAPPINGS = {
"A": None,
"b": None,
"B": None,
"d": u"0",
"D": u"x",
"s": u" ",
"S": u"x",
"w": u"x",
"W": u"!",
"Z": None,
}
class Choice(list):
"""
Used to represent multiple possibilities at this point in a pattern string.
We use a distinguished type, rather than a list, so that the usage in the
code is clear.
"""
class Group(list):
"""
Used to represent a capturing group in the pattern string.
"""
class NonCapture(list):
"""
Used to represent a non-capturing group in the pattern string.
"""
def normalize(pattern):
"""
Given a reg-exp pattern, normalizes it to a list of forms that suffice for
reverse matching. This does the following:
(1) For any repeating sections, keeps the minimum number of occurrences
permitted (this means zero for optional groups).
(2) If an optional group includes parameters, include one occurrence of
that group (along with the zero occurrence case from step (1)).
(3) Select the first (essentially an arbitrary) element from any character
class. Select an arbitrary character for any unordered class (e.g. '.'
or '\w') in the pattern.
(4) Ignore comments and any of the reg-exp flags that won't change
what we construct ("iLmsu"). "(?x)" is an error, however.
(5) Raise an error on all other non-capturing (?...) forms (e.g.
look-ahead and look-behind matches) and any disjunctive ('|')
constructs.
Django's URLs for forward resolving are either all positional arguments or
all keyword arguments. That is assumed here, as well. Although reverse
resolving can be done using positional args when keyword args are
specified, the two cannot be mixed in the same reverse() call.
"""
# Do a linear scan to work out the special features of this pattern. The
# idea is that we scan once here and collect all the information we need to
# make future decisions.
result = []
non_capturing_groups = []
consume_next = True
pattern_iter = next_char(iter(pattern))
num_args = 0
# A "while" loop is used here because later on we need to be able to peek
# at the next character and possibly go around without consuming another
# one at the top of the loop.
try:
ch, escaped = pattern_iter.next()
except StopIteration:
return zip([u''], [[]])
try:
while True:
if escaped:
result.append(ch)
elif ch == '.':
# Replace "any character" with an arbitrary representative.
result.append(u".")
elif ch == '|':
# FIXME: One day we should do this, but not in 1.0.
raise NotImplementedError
elif ch == "^":
pass
elif ch == '$':
break
elif ch == ')':
# This can only be the end of a non-capturing group, since all
# other unescaped parentheses are handled by the grouping
# section later (and the full group is handled there).
#
# We regroup everything inside the capturing group so that it
# can be quantified, if necessary.
start = non_capturing_groups.pop()
inner = NonCapture(result[start:])
result = result[:start] + [inner]
elif ch == '[':
# Replace ranges with the first character in the range.
ch, escaped = pattern_iter.next()
result.append(ch)
ch, escaped = pattern_iter.next()
while escaped or ch != ']':
ch, escaped = pattern_iter.next()
elif ch == '(':
# Some kind of group.
ch, escaped = pattern_iter.next()
if ch != '?' or escaped:
# A positional group
name = "_%d" % num_args
num_args += 1
result.append(Group(((u"%%(%s)s" % name), name)))
walk_to_end(ch, pattern_iter)
else:
ch, escaped = pattern_iter.next()
if ch in "iLmsu#":
# All of these are ignorable. Walk to the end of the
# group.
walk_to_end(ch, pattern_iter)
elif ch == ':':
# Non-capturing group
non_capturing_groups.append(len(result))
elif ch != 'P':
# Anything else, other than a named group, is something
# we cannot reverse.
raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
else:
ch, escaped = pattern_iter.next()
if ch not in ('<', '='):
raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
# We are in a named capturing group. Extract the name and
# then skip to the end.
if ch == '<':
terminal_char = '>'
# We are in a named backreference.
else:
terminal_char = ')'
name = []
ch, escaped = pattern_iter.next()
while ch != terminal_char:
name.append(ch)
ch, escaped = pattern_iter.next()
param = ''.join(name)
# Named backreferences have already consumed the
# parenthesis.
if terminal_char != ')':
result.append(Group(((u"%%(%s)s" % param), param)))
walk_to_end(ch, pattern_iter)
else:
result.append(Group(((u"%%(%s)s" % param), None)))
elif ch in "*?+{":
# Quantifiers affect the previous item in the result list.
count, ch = get_quantifier(ch, pattern_iter)
if ch:
# We had to look ahead, but it wasn't needed to compute the
# quantifier, so use this character next time around the
# main loop.
consume_next = False
if count == 0:
if contains(result[-1], Group):
# If we are quantifying a capturing group (or
# something containing such a group) and the minimum is
# zero, we must also handle the case of one occurrence
# being present. All the quantifiers (except {0,0},
# which we conveniently ignore) that have a 0 minimum
# also allow a single occurrence.
result[-1] = Choice([None, result[-1]])
else:
result.pop()
elif count > 1:
result.extend([result[-1]] * (count - 1))
else:
# Anything else is a literal.
result.append(ch)
if consume_next:
ch, escaped = pattern_iter.next()
else:
consume_next = True
except StopIteration:
pass
except NotImplementedError:
# A case of using the disjunctive form. No results for you!
return zip([u''], [[]])
return zip(*flatten_result(result))
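# Illustrative example (not part of the module): reversing a pattern with one
# named group.
#
#     >>> normalize(r'^article/(?P<pk>\d+)/$')
#     [(u'article/%(pk)s/', ['pk'])]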
def next_char(input_iter):
"""
An iterator that yields the next character from "input_iter", respecting
escape sequences. An escaped character is replaced by a representative of
its class (e.g. \w -> "x"). If the escaped character is one that is
skipped, it is not returned (the next character is returned instead).
Yields the next character, along with a boolean indicating whether it came
from an escape sequence (and was thus replaced by a representative) or not.
"""
for ch in input_iter:
if ch != '\\':
yield ch, False
continue
ch = input_iter.next()
representative = ESCAPE_MAPPINGS.get(ch, ch)
if representative is None:
continue
yield representative, True
def walk_to_end(ch, input_iter):
"""
The iterator is currently inside a capturing group. We want to walk to the
close of this group, skipping over any nested groups and handling escaped
parentheses correctly.
"""
if ch == '(':
nesting = 1
else:
nesting = 0
for ch, escaped in input_iter:
if escaped:
continue
elif ch == '(':
nesting += 1
elif ch == ')':
if not nesting:
return
nesting -= 1
def get_quantifier(ch, input_iter):
"""
Parse a quantifier from the input, where "ch" is the first character in the
quantifier.
Returns the minimum number of occurrences permitted by the quantifier and
either None or the next character from the input_iter if the next character
is not part of the quantifier.
"""
if ch in '*?+':
try:
ch2, escaped = input_iter.next()
except StopIteration:
ch2 = None
if ch2 == '?':
ch2 = None
if ch == '+':
return 1, ch2
return 0, ch2
quant = []
while ch != '}':
ch, escaped = input_iter.next()
quant.append(ch)
quant = quant[:-1]
values = ''.join(quant).split(',')
# Consume the trailing '?', if necessary.
try:
ch, escaped = input_iter.next()
except StopIteration:
ch = None
if ch == '?':
ch = None
return int(values[0]), ch
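# Illustrative behaviour (not part of the module): '*' and '?' have a minimum
# of zero, '+' a minimum of one, and '{m,n}' a minimum of m.
#
#     >>> get_quantifier('{', next_char(iter('2,4}')))
#     (2, None)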
def contains(source, inst):
"""
Returns True if the "source" contains an instance of "inst". False,
otherwise.
"""
if isinstance(source, inst):
return True
if isinstance(source, NonCapture):
for elt in source:
if contains(elt, inst):
return True
return False
def flatten_result(source):
"""
Turns the given source sequence into a list of reg-exp possibilities and
their arguments. Returns a list of strings and a list of argument lists.
Each of the two lists will be of the same length.
"""
if source is None:
return [u''], [[]]
if isinstance(source, Group):
if source[1] is None:
params = []
else:
params = [source[1]]
return [source[0]], [params]
result = [u'']
result_args = [[]]
pos = last = 0
for pos, elt in enumerate(source):
if isinstance(elt, basestring):
continue
piece = u''.join(source[last:pos])
if isinstance(elt, Group):
piece += elt[0]
param = elt[1]
else:
param = None
last = pos + 1
for i in range(len(result)):
result[i] += piece
if param:
result_args[i].append(param)
if isinstance(elt, (Choice, NonCapture)):
if isinstance(elt, NonCapture):
elt = [elt]
inner_result, inner_args = [], []
for item in elt:
res, args = flatten_result(item)
inner_result.extend(res)
inner_args.extend(args)
new_result = []
new_args = []
for item, args in zip(result, result_args):
for i_item, i_args in zip(inner_result, inner_args):
new_result.append(item + i_item)
new_args.append(args[:] + i_args)
result = new_result
result_args = new_args
if pos >= last:
piece = u''.join(source[last:])
for i in range(len(result)):
result[i] += piece
return result, result_args
|
Outernet-Project/rpi-linux
|
refs/heads/outernet-3.18.y
|
tools/perf/scripts/python/sctop.py
|
1996
|
# system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
common_callchain, id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
raw_syscalls__sys_enter(**locals())
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
nnethercote/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/service-workers/service-worker/resources/redirect.py
|
158
|
def main(request, response):
if 'Status' in request.GET:
status = int(request.GET["Status"])
else:
status = 302
headers = []
url = request.GET['Redirect']
headers.append(("Location", url))
if "ACAOrigin" in request.GET:
for item in request.GET["ACAOrigin"].split(","):
headers.append(("Access-Control-Allow-Origin", item))
for suffix in ["Headers", "Methods", "Credentials"]:
query = "ACA%s" % suffix
header = "Access-Control-Allow-%s" % suffix
if query in request.GET:
headers.append((header, request.GET[query]))
if "ACEHeaders" in request.GET:
headers.append(("Access-Control-Expose-Headers", request.GET["ACEHeaders"]))
return status, headers, ""
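# Illustrative request (a sketch, not part of the handler): fetching
#   redirect.py?Redirect=/target&Status=307&ACAOrigin=*
# yields a 307 response carrying "Location: /target" and
# "Access-Control-Allow-Origin: *" headers.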
|
zmeda/web-summit-2015-recap-zalando
|
refs/heads/master
|
node_modules/grunt-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
|
383
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
r"""Code to validate and convert settings of the Microsoft build tools.
This file contains code to validate and convert settings of the Microsoft
build tools. The functions ConvertToMSBuildSettings(), ValidateMSVSSettings(),
and ValidateMSBuildSettings() are the entry points.
This file was created by comparing the projects created by Visual Studio 2008
and Visual Studio 2010 for all available settings through the user interface.
The MSBuild schemas were also considered. They are typically found in the
MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
"""
import sys
import re
# Dictionaries of settings validators. The key is the tool name, the value is
# a dictionary mapping setting names to validation functions.
_msvs_validators = {}
_msbuild_validators = {}
# A dictionary of settings converters. The key is the tool name, the value is
# a dictionary mapping setting names to conversion functions.
_msvs_to_msbuild_converters = {}
# Tool name mapping from MSVS to MSBuild.
_msbuild_name_of_tool = {}
class _Tool(object):
"""Represents a tool used by MSVS or MSBuild.
Attributes:
msvs_name: The name of the tool in MSVS.
msbuild_name: The name of the tool in MSBuild.
"""
def __init__(self, msvs_name, msbuild_name):
self.msvs_name = msvs_name
self.msbuild_name = msbuild_name
def _AddTool(tool):
"""Adds a tool to the four dictionaries used to process settings.
This only defines the tool. Each setting also needs to be added.
Args:
tool: The _Tool object to be added.
"""
_msvs_validators[tool.msvs_name] = {}
_msbuild_validators[tool.msbuild_name] = {}
_msvs_to_msbuild_converters[tool.msvs_name] = {}
_msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
def _GetMSBuildToolSettings(msbuild_settings, tool):
"""Returns an MSBuild tool dictionary. Creates it if needed."""
return msbuild_settings.setdefault(tool.msbuild_name, {})
class _Type(object):
"""Type of settings (Base class)."""
def ValidateMSVS(self, value):
"""Verifies that the value is legal for MSVS.
Args:
value: the value to check for this type.
Raises:
ValueError if value is not valid for MSVS.
"""
def ValidateMSBuild(self, value):
"""Verifies that the value is legal for MSBuild.
Args:
value: the value to check for this type.
Raises:
ValueError if value is not valid for MSBuild.
"""
def ConvertToMSBuild(self, value):
"""Returns the MSBuild equivalent of the MSVS value given.
Args:
value: the MSVS value to convert.
Returns:
the MSBuild equivalent.
Raises:
ValueError if value is not valid.
"""
return value
class _String(_Type):
"""A setting that's just a string."""
def ValidateMSVS(self, value):
if not isinstance(value, basestring):
raise ValueError('expected string; got %r' % value)
def ValidateMSBuild(self, value):
if not isinstance(value, basestring):
raise ValueError('expected string; got %r' % value)
def ConvertToMSBuild(self, value):
# Convert the macros
return ConvertVCMacrosToMSBuild(value)
class _StringList(_Type):
"""A settings that's a list of strings."""
def ValidateMSVS(self, value):
if not isinstance(value, basestring) and not isinstance(value, list):
raise ValueError('expected string list; got %r' % value)
def ValidateMSBuild(self, value):
if not isinstance(value, basestring) and not isinstance(value, list):
raise ValueError('expected string list; got %r' % value)
def ConvertToMSBuild(self, value):
# Convert the macros
if isinstance(value, list):
return [ConvertVCMacrosToMSBuild(i) for i in value]
else:
return ConvertVCMacrosToMSBuild(value)
class _Boolean(_Type):
"""Boolean settings, can have the values 'false' or 'true'."""
def _Validate(self, value):
if value != 'true' and value != 'false':
raise ValueError('expected bool; got %r' % value)
def ValidateMSVS(self, value):
self._Validate(value)
def ValidateMSBuild(self, value):
self._Validate(value)
def ConvertToMSBuild(self, value):
self._Validate(value)
return value
class _Integer(_Type):
"""Integer settings."""
def __init__(self, msbuild_base=10):
_Type.__init__(self)
self._msbuild_base = msbuild_base
def ValidateMSVS(self, value):
# Try to convert, this will raise ValueError if invalid.
self.ConvertToMSBuild(value)
def ValidateMSBuild(self, value):
# Try to convert, this will raise ValueError if invalid.
int(value, self._msbuild_base)
def ConvertToMSBuild(self, value):
    msbuild_format = '%d' if self._msbuild_base == 10 else '0x%04x'
return msbuild_format % int(value)
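# Illustrative conversions (not in the original file):
#   _Integer().ConvertToMSBuild('33')                  # -> '33'
#   _Integer(msbuild_base=16).ConvertToMSBuild('255')  # -> '0x00ff'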
class _Enumeration(_Type):
"""Type of settings that is an enumeration.
In MSVS, the values are indexes like '0', '1', and '2'.
MSBuild uses text labels that are more representative, like 'Win32'.
Constructor args:
label_list: an array of MSBuild labels that correspond to the MSVS index.
In the rare cases where MSVS has skipped an index value, None is
used in the array to indicate the unused spot.
new: an array of labels that are new to MSBuild.
"""
def __init__(self, label_list, new=None):
_Type.__init__(self)
self._label_list = label_list
self._msbuild_values = set(value for value in label_list
if value is not None)
if new is not None:
self._msbuild_values.update(new)
def ValidateMSVS(self, value):
# Try to convert. It will raise an exception if not valid.
self.ConvertToMSBuild(value)
def ValidateMSBuild(self, value):
if value not in self._msbuild_values:
raise ValueError('unrecognized enumerated value %s' % value)
def ConvertToMSBuild(self, value):
index = int(value)
if index < 0 or index >= len(self._label_list):
raise ValueError('index value (%d) not in expected range [0, %d)' %
(index, len(self._label_list)))
label = self._label_list[index]
if label is None:
raise ValueError('converted value for %s not specified.' % value)
return label
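# Illustrative conversion (not in the original file): an enumeration maps an
# MSVS index to its MSBuild label, e.g.
#   _Enumeration(['Neither', 'Speed', 'Size']).ConvertToMSBuild('1')  # -> 'Speed'
# while ValidateMSBuild() accepts only the known labels ('Fast' would raise
# ValueError here).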
# Instantiate the various generic types.
_boolean = _Boolean()
_integer = _Integer()
# For now, we don't do any special validation on these types:
_string = _String()
_file_name = _String()
_folder_name = _String()
_file_list = _StringList()
_folder_list = _StringList()
_string_list = _StringList()
# Some boolean settings went from numerical values to boolean. The
# mapping is 0: default, 1: false, 2: true.
_newly_boolean = _Enumeration(['', 'false', 'true'])
def _Same(tool, name, setting_type):
"""Defines a setting that has the same name in MSVS and MSBuild.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
name: the name of the setting.
setting_type: the type of this setting.
"""
_Renamed(tool, name, name, setting_type)
def _Renamed(tool, msvs_name, msbuild_name, setting_type):
"""Defines a setting for which the name has changed.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
msvs_name: the name of the MSVS setting.
msbuild_name: the name of the MSBuild setting.
setting_type: the type of this setting.
"""
def _Translate(value, msbuild_settings):
msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
_msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
_msbuild_validators[tool.msbuild_name][msbuild_name] = (
setting_type.ValidateMSBuild)
_msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
_MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
setting_type)
def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
msbuild_settings_name, setting_type):
"""Defines a setting that may have moved to a new section.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
msvs_settings_name: the MSVS name of the setting.
msbuild_tool_name: the name of the MSBuild tool to place the setting under.
msbuild_settings_name: the MSBuild name of the setting.
setting_type: the type of this setting.
"""
def _Translate(value, msbuild_settings):
tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
_msvs_validators[tool.msvs_name][msvs_settings_name] = (
setting_type.ValidateMSVS)
validator = setting_type.ValidateMSBuild
_msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
_msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
def _MSVSOnly(tool, name, setting_type):
"""Defines a setting that is only found in MSVS.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
name: the name of the setting.
setting_type: the type of this setting.
"""
def _Translate(unused_value, unused_msbuild_settings):
# Since this is for MSVS only settings, no translation will happen.
pass
_msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
_msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _MSBuildOnly(tool, name, setting_type):
"""Defines a setting that is only found in MSBuild.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
name: the name of the setting.
setting_type: the type of this setting.
"""
def _Translate(value, msbuild_settings):
# Let msbuild-only properties get translated as-is from msvs_settings.
tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
tool_settings[name] = value
_msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
_msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
def _ConvertedToAdditionalOption(tool, msvs_name, flag):
"""Defines a setting that's handled via a command line option in MSBuild.
Args:
tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_name: the name of the MSVS setting that, when 'true', becomes a flag.
    flag: the flag to append to the AdditionalOptions setting.
"""
def _Translate(value, msbuild_settings):
if value == 'true':
tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
if 'AdditionalOptions' in tool_settings:
new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
else:
new_flags = flag
tool_settings['AdditionalOptions'] = new_flags
_msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
_msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
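# Illustrative translation (a sketch, not in the original file): with the
# DefaultCharIsUnsigned directive registered further below, converting
# {'VCCLCompilerTool': {'DefaultCharIsUnsigned': 'true'}} appends '/J' to the
# ClCompile AdditionalOptions.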
def _CustomGeneratePreprocessedFile(tool, msvs_name):
def _Translate(value, msbuild_settings):
tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
if value == '0':
tool_settings['PreprocessToFile'] = 'false'
tool_settings['PreprocessSuppressLineNumbers'] = 'false'
elif value == '1': # /P
tool_settings['PreprocessToFile'] = 'true'
tool_settings['PreprocessSuppressLineNumbers'] = 'false'
elif value == '2': # /EP /P
tool_settings['PreprocessToFile'] = 'true'
tool_settings['PreprocessSuppressLineNumbers'] = 'true'
else:
raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
# Create a bogus validator that looks for '0', '1', or '2'
msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
_msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
msbuild_validator = _boolean.ValidateMSBuild
msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
_msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
fix_vc_macro_slashes_regex = re.compile(
r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)
# Regular expression to detect keys that were generated by exclusion lists
_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
"""Verify that 'setting' is valid if it is generated from an exclusion list.
If the setting appears to be generated from an exclusion list, the root name
is checked.
Args:
setting: A string that is the setting name to validate
settings: A dictionary where the keys are valid settings
error_msg: The message to emit in the event of error
stderr: The stream receiving the error messages.
"""
# This may be unrecognized because it's an exclusion list. If the
# setting name has the _excluded suffix, then check the root name.
unrecognized = True
m = re.match(_EXCLUDED_SUFFIX_RE, setting)
if m:
root_setting = m.group(1)
unrecognized = root_setting not in settings
if unrecognized:
# We don't know this setting. Give a warning.
print >> stderr, error_msg
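# Illustrative behaviour (not in the original file): a generated key such as
# 'AdditionalIncludeDirectories_excluded' passes when
# 'AdditionalIncludeDirectories' is a known setting; any other unknown key
# emits the warning on stderr.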
def FixVCMacroSlashes(s):
"""Replace macros which have excessive following slashes.
These macros are known to have a built-in trailing slash. Furthermore, many
scripts hiccup on processing paths with extra slashes in the middle.
This list is probably not exhaustive. Add as needed.
"""
if '$' in s:
s = fix_vc_macro_slashes_regex.sub(r'\1', s)
return s
def ConvertVCMacrosToMSBuild(s):
"""Convert the the MSVS macros found in the string to the MSBuild equivalent.
This list is probably not exhaustive. Add as needed.
"""
if '$' in s:
replace_map = {
'$(ConfigurationName)': '$(Configuration)',
'$(InputDir)': '%(RelativeDir)',
'$(InputExt)': '%(Extension)',
'$(InputFileName)': '%(Filename)%(Extension)',
'$(InputName)': '%(Filename)',
'$(InputPath)': '%(Identity)',
'$(ParentName)': '$(ProjectFileName)',
'$(PlatformName)': '$(Platform)',
'$(SafeInputName)': '%(Filename)',
}
for old, new in replace_map.iteritems():
s = s.replace(old, new)
s = FixVCMacroSlashes(s)
return s
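# Illustrative conversions (not in the original file):
#   FixVCMacroSlashes('$(IntDir)\\foo.obj')        # -> '$(IntDir)foo.obj'
#   ConvertVCMacrosToMSBuild('$(InputName).obj')   # -> '%(Filename).obj'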
def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
"""Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
Args:
msvs_settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
Returns:
A dictionary of MSBuild settings. The key is either the MSBuild tool name
or the empty string (for the global settings). The values are themselves
dictionaries of settings and their values.
"""
msbuild_settings = {}
for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
if msvs_tool_name in _msvs_to_msbuild_converters:
msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
if msvs_setting in msvs_tool:
# Invoke the translation function.
try:
msvs_tool[msvs_setting](msvs_value, msbuild_settings)
except ValueError, e:
print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
'%s' % (msvs_tool_name, msvs_setting, e))
else:
_ValidateExclusionSetting(msvs_setting,
msvs_tool,
('Warning: unrecognized setting %s/%s '
'while converting to MSBuild.' %
(msvs_tool_name, msvs_setting)),
stderr)
else:
print >> stderr, ('Warning: unrecognized tool %s while converting to '
'MSBuild.' % msvs_tool_name)
return msbuild_settings
def ValidateMSVSSettings(settings, stderr=sys.stderr):
"""Validates that the names of the settings are valid for MSVS.
Args:
settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
"""
_ValidateSettings(_msvs_validators, settings, stderr)
def ValidateMSBuildSettings(settings, stderr=sys.stderr):
"""Validates that the names of the settings are valid for MSBuild.
Args:
settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
"""
_ValidateSettings(_msbuild_validators, settings, stderr)
def _ValidateSettings(validators, settings, stderr):
"""Validates that the settings are valid for MSBuild or MSVS.
We currently only validate the names of the settings, not their values.
Args:
validators: A dictionary of tools and their validators.
settings: A dictionary. The key is the tool name. The values are
themselves dictionaries of settings and their values.
stderr: The stream receiving the error messages.
"""
for tool_name in settings:
if tool_name in validators:
tool_validators = validators[tool_name]
for setting, value in settings[tool_name].iteritems():
if setting in tool_validators:
try:
tool_validators[setting](value)
except ValueError, e:
print >> stderr, ('Warning: for %s/%s, %s' %
(tool_name, setting, e))
else:
_ValidateExclusionSetting(setting,
tool_validators,
('Warning: unrecognized setting %s/%s' %
(tool_name, setting)),
stderr)
else:
print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
# MSVS and MSBuild names of the tools.
_compile = _Tool('VCCLCompilerTool', 'ClCompile')
_link = _Tool('VCLinkerTool', 'Link')
_midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
_masm = _Tool('MASM', 'MASM')
_AddTool(_compile)
_AddTool(_link)
_AddTool(_midl)
_AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
_AddTool(_masm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
_msbuild_validators['ManifestResourceCompile'] = {}
# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
# ClCompile in MSBuild.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
# the schema of the MSBuild ClCompile settings.
# Options that have the same name in MSVS and MSBuild
_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_compile, 'AdditionalOptions', _string_list)
_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
_Same(_compile, 'BrowseInformationFile', _file_name)
_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
_Same(_compile, 'StringPooling', _boolean) # /GF
_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
_Same(_compile, 'UseFullPaths', _boolean) # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)
_Same(_compile, 'AssemblerOutput',
_Enumeration(['NoListing',
'AssemblyCode', # /FA
'All', # /FAcs
'AssemblyAndMachineCode', # /FAc
'AssemblyAndSourceCode'])) # /FAs
_Same(_compile, 'BasicRuntimeChecks',
_Enumeration(['Default',
'StackFrameRuntimeCheck', # /RTCs
'UninitializedLocalUsageCheck', # /RTCu
'EnableFastChecks'])) # /RTC1
_Same(_compile, 'BrowseInformation',
_Enumeration(['false',
'true', # /FR
'true'])) # /Fr
_Same(_compile, 'CallingConvention',
_Enumeration(['Cdecl', # /Gd
'FastCall', # /Gr
'StdCall', # /Gz
'VectorCall'])) # /Gv
_Same(_compile, 'CompileAs',
_Enumeration(['Default',
'CompileAsC', # /TC
'CompileAsCpp'])) # /TP
_Same(_compile, 'DebugInformationFormat',
_Enumeration(['', # Disabled
'OldStyle', # /Z7
None,
'ProgramDatabase', # /Zi
'EditAndContinue'])) # /ZI
_Same(_compile, 'EnableEnhancedInstructionSet',
_Enumeration(['NotSet',
'StreamingSIMDExtensions', # /arch:SSE
'StreamingSIMDExtensions2', # /arch:SSE2
'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
'NoExtensions', # /arch:IA32 (vs2012+)
# This one only exists in the new msbuild format.
'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
]))
_Same(_compile, 'ErrorReporting',
_Enumeration(['None', # /errorReport:none
'Prompt', # /errorReport:prompt
'Queue'], # /errorReport:queue
                   new=['Send'])) # /errorReport:send
_Same(_compile, 'ExceptionHandling',
_Enumeration(['false',
'Sync', # /EHsc
'Async'], # /EHa
new=['SyncCThrow'])) # /EHs
_Same(_compile, 'FavorSizeOrSpeed',
_Enumeration(['Neither',
'Speed', # /Ot
'Size'])) # /Os
_Same(_compile, 'FloatingPointModel',
_Enumeration(['Precise', # /fp:precise
'Strict', # /fp:strict
'Fast'])) # /fp:fast
_Same(_compile, 'InlineFunctionExpansion',
_Enumeration(['Default',
'OnlyExplicitInline', # /Ob1
'AnySuitable'], # /Ob2
new=['Disabled'])) # /Ob0
_Same(_compile, 'Optimization',
_Enumeration(['Disabled', # /Od
'MinSpace', # /O1
'MaxSpeed', # /O2
'Full'])) # /Ox
_Same(_compile, 'RuntimeLibrary',
_Enumeration(['MultiThreaded', # /MT
'MultiThreadedDebug', # /MTd
'MultiThreadedDLL', # /MD
'MultiThreadedDebugDLL'])) # /MDd
_Same(_compile, 'StructMemberAlignment',
_Enumeration(['Default',
'1Byte', # /Zp1
'2Bytes', # /Zp2
'4Bytes', # /Zp4
'8Bytes', # /Zp8
'16Bytes'])) # /Zp16
_Same(_compile, 'WarningLevel',
_Enumeration(['TurnOffAllWarnings', # /W0
'Level1', # /W1
'Level2', # /W2
'Level3', # /W3
'Level4'], # /W4
new=['EnableAllWarnings'])) # /Wall
# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
_boolean) # /Gy
_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
_boolean) # /Oi
_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
_file_name) # Used with /Yc and /Yu
_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
_file_name) # /Fp
_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
_Enumeration(['NotUsing', # VS recognized '' for this value too.
'Create', # /Yc
'Use'])) # /Yu
_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
# MSVS options not found in MSBuild.
_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
_MSBuildOnly(_compile, 'CompileAsManaged',
_Enumeration([], new=['false',
'true', # /clr
'Pure', # /clr:pure
'Safe', # /clr:safe
'OldSyntax'])) # /clr:oldSyntax
_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
# Defines a setting that needs very customized processing
_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
# Directives for converting MSVS VCLinkerTool to MSBuild Link.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
# the schema of the MSBuild Link settings.
# Options that have the same name in MSVS and MSBuild
_Same(_link, 'AdditionalDependencies', _file_list)
_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
# /MANIFESTDEPENDENCY:
_Same(_link, 'AdditionalManifestDependencies', _file_list)
_Same(_link, 'AdditionalOptions', _string_list)
_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
_Same(_link, 'BaseAddress', _string) # /BASE
_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
_Same(_link, 'FunctionOrder', _file_name) # /ORDER
_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
_Same(_link, 'GenerateMapFile', _boolean) # /MAP
_Same(_link, 'HeapCommitSize', _string)
_Same(_link, 'HeapReserveSize', _string) # /HEAP
_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
_Same(_link, 'KeyFile', _file_name) # /KEYFILE
_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
_Same(_link, 'MapFileName', _file_name)
_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
_Same(_link, 'MergeSections', _string) # /MERGE
_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
_Same(_link, 'OutputFile', _file_name) # /OUT
_Same(_link, 'PerUserRedirection', _boolean)
_Same(_link, 'Profile', _boolean) # /PROFILE
_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
_Same(_link, 'RegisterOutput', _boolean)
_Same(_link, 'SetChecksum', _boolean) # /RELEASE
_Same(_link, 'StackCommitSize', _string)
_Same(_link, 'StackReserveSize', _string) # /STACK
_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
_Same(_link, 'Version', _string) # /VERSION
_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
_subsystem_enumeration = _Enumeration(
['NotSet',
'Console', # /SUBSYSTEM:CONSOLE
'Windows', # /SUBSYSTEM:WINDOWS
'Native', # /SUBSYSTEM:NATIVE
'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
'EFI ROM', # /SUBSYSTEM:EFI_ROM
'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
new=['POSIX']) # /SUBSYSTEM:POSIX
_target_machine_enumeration = _Enumeration(
['NotSet',
'MachineX86', # /MACHINE:X86
None,
'MachineARM', # /MACHINE:ARM
'MachineEBC', # /MACHINE:EBC
'MachineIA64', # /MACHINE:IA64
None,
'MachineMIPS', # /MACHINE:MIPS
'MachineMIPS16', # /MACHINE:MIPS16
'MachineMIPSFPU', # /MACHINE:MIPSFPU
'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
None,
None,
None,
'MachineSH4', # /MACHINE:SH4
None,
'MachineTHUMB', # /MACHINE:THUMB
'MachineX64']) # /MACHINE:X64
_Same(_link, 'AssemblyDebug',
_Enumeration(['',
'true', # /ASSEMBLYDEBUG
'false'])) # /ASSEMBLYDEBUG:DISABLE
_Same(_link, 'CLRImageType',
_Enumeration(['Default',
'ForceIJWImage', # /CLRIMAGETYPE:IJW
                    'ForcePureILImage', # /CLRIMAGETYPE:PURE
                    'ForceSafeILImage'])) # /CLRIMAGETYPE:SAFE
_Same(_link, 'CLRThreadAttribute',
_Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE
'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA
'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA
_Same(_link, 'DataExecutionPrevention',
_Enumeration(['',
'false', # /NXCOMPAT:NO
'true'])) # /NXCOMPAT
_Same(_link, 'Driver',
_Enumeration(['NotSet',
'Driver', # /Driver
'UpOnly', # /DRIVER:UPONLY
'WDM'])) # /DRIVER:WDM
_Same(_link, 'LinkTimeCodeGeneration',
_Enumeration(['Default',
'UseLinkTimeCodeGeneration', # /LTCG
'PGInstrument', # /LTCG:PGInstrument
'PGOptimization', # /LTCG:PGOptimize
'PGUpdate'])) # /LTCG:PGUpdate
_Same(_link, 'ShowProgress',
_Enumeration(['NotSet',
'LinkVerbose', # /VERBOSE
'LinkVerboseLib'], # /VERBOSE:Lib
new=['LinkVerboseICF', # /VERBOSE:ICF
'LinkVerboseREF', # /VERBOSE:REF
'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH
'LinkVerboseCLR'])) # /VERBOSE:CLR
_Same(_link, 'SubSystem', _subsystem_enumeration)
_Same(_link, 'TargetMachine', _target_machine_enumeration)
_Same(_link, 'UACExecutionLevel',
_Enumeration(['AsInvoker', # /level='asInvoker'
'HighestAvailable', # /level='highestAvailable'
'RequireAdministrator'])) # /level='requireAdministrator'
_Same(_link, 'MinimumRequiredVersion', _string)
_Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
_Enumeration(['NoErrorReport', # /ERRORREPORT:NONE
'PromptImmediately', # /ERRORREPORT:PROMPT
'QueueForNextLogin'], # /ERRORREPORT:QUEUE
new=['SendErrorReport'])) # /ERRORREPORT:SEND
_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
_file_list) # /NODEFAULTLIB
_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY
_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET
_Moved(_link, 'GenerateManifest', '', _boolean)
_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
_Moved(_link, 'LinkIncremental', '', _newly_boolean)
_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH
_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false'
_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
_MSBuildOnly(_link, 'ForceFileOutput',
_Enumeration([], new=['Enabled', # /FORCE
# /FORCE:MULTIPLE
'MultiplyDefinedSymbolOnly',
'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED
_MSBuildOnly(_link, 'CreateHotPatchableImage',
_Enumeration([], new=['Enabled', # /FUNCTIONPADMIN
'X86Image', # /FUNCTIONPADMIN:5
'X64Image', # /FUNCTIONPADMIN:6
'ItaniumImage'])) # /FUNCTIONPADMIN:16
_MSBuildOnly(_link, 'CLRSupportLastError',
_Enumeration([], new=['Enabled', # /CLRSupportLastError
'Disabled', # /CLRSupportLastError:NO
# /CLRSupportLastError:SYSTEMDLL
'SystemDlls']))
# Directives for converting VCResourceCompilerTool to ResourceCompile.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
# the schema of the MSBuild ResourceCompile settings.
_Same(_rc, 'AdditionalOptions', _string_list)
_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_rc, 'Culture', _Integer(msbuild_base=16))
_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X
_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D
_Same(_rc, 'ResourceOutputFileName', _string) # /fo
_Same(_rc, 'ShowProgress', _boolean) # /v
# There is no UI in Visual Studio 2008 to set the following properties.
# However, they are found in CL and other tools. Include them here for
# completeness, as they are very likely to have the same usage pattern.
_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u
# MSBuild options not found in MSVS.
_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n
_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
# Directives for converting VCMIDLTool to Midl.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
# the schema of the MSBuild Midl settings.
_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I
_Same(_midl, 'AdditionalOptions', _string_list)
_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt
_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation
_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check
_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum
_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref
_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data
_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf
_Same(_midl, 'GenerateTypeLibrary', _boolean)
_Same(_midl, 'HeaderFileName', _file_name) # /h
_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir
_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid
_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203
_Same(_midl, 'OutputDirectory', _string) # /out
_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D
_Same(_midl, 'ProxyFileName', _file_name) # /proxy
_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o
_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_midl, 'TypeLibraryName', _file_name) # /tlb
_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U
_Same(_midl, 'WarnAsError', _boolean) # /WX
_Same(_midl, 'DefaultCharType',
_Enumeration(['Unsigned', # /char unsigned
'Signed', # /char signed
'Ascii'])) # /char ascii7
_Same(_midl, 'TargetEnvironment',
_Enumeration(['NotSet',
'Win32', # /env win32
'Itanium', # /env ia64
'X64'])) # /env x64
_Same(_midl, 'EnableErrorChecks',
_Enumeration(['EnableCustom',
'None', # /error none
'All'])) # /error all
_Same(_midl, 'StructMemberAlignment',
_Enumeration(['NotSet',
'1', # Zp1
'2', # Zp2
'4', # Zp4
'8'])) # Zp8
_Same(_midl, 'WarningLevel',
_Enumeration(['0', # /W0
'1', # /W1
'2', # /W2
'3', # /W3
'4'])) # /W4
_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata
_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
_boolean) # /robust
# MSBuild options not found in MSVS.
_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config
_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub
_MSBuildOnly(_midl, 'GenerateClientFiles',
_Enumeration([], new=['Stub', # /client stub
'None'])) # /client none
_MSBuildOnly(_midl, 'GenerateServerFiles',
             _Enumeration([], new=['Stub', # /server stub
                                   'None'])) # /server none
_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL
_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub
_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn
_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_midl, 'TypeLibFormat',
_Enumeration([], new=['NewFormat', # /newtlb
'OldFormat'])) # /oldtlb
# Directives for converting VCLibrarianTool to Lib.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
# the schema of the MSBuild Lib settings.
_Same(_lib, 'AdditionalDependencies', _file_list)
_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
_Same(_lib, 'AdditionalOptions', _string_list)
_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT
_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE
_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB
_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF
_Same(_lib, 'OutputFile', _file_name) # /OUT
_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
_Same(_lib, 'TargetMachine', _target_machine_enumeration)
# TODO(jeanluc) _link defines the same value that gets moved to
# ProjectReference. We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
_Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
'QueueForNextLogin', # /ERRORREPORT:QUEUE
'SendErrorReport', # /ERRORREPORT:SEND
'NoErrorReport'])) # /ERRORREPORT:NONE
_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
_MSBuildOnly(_lib, 'Verbose', _boolean)
# Directives for converting VCManifestTool to Mt.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
# the schema of the MSBuild Mt settings.
# Options that have the same name in MSVS and MSBuild
_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest
_Same(_manifest, 'AdditionalOptions', _string_list)
_Same(_manifest, 'AssemblyIdentity', _string) # /identity:
_Same(_manifest, 'ComponentFileName', _file_name) # /dll
_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs
_Same(_manifest, 'InputResourceManifests', _string) # /inputresource
_Same(_manifest, 'OutputManifestFile', _file_name) # /out
_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs
_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements
_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo
_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb:
_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate
_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
_Same(_manifest, 'VerboseOutput', _boolean) # /verbose
# Options that have moved location.
_MovedAndRenamed(_manifest, 'ManifestResourceFile',
'ManifestResourceCompile',
'ResourceOutputFileName',
_file_name)
_Moved(_manifest, 'EmbedManifest', '', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
# MSBuild options not found in MSVS.
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category
_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
_file_name) # /managedassemblyname
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
# Directives for MASM.
# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
# MSBuild MASM settings.
# Options that have the same name in MSVS and MSBuild.
_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
|
devopservices/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/gce.py
|
305
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Franck Cuny <franck.cuny@gmail.com>, 2014
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import pprint
USER_AGENT_PRODUCT = "Ansible-gce"
USER_AGENT_VERSION = "v1"
def gce_connect(module, provider=None):
"""Return a Google Cloud Engine connection."""
service_account_email = module.params.get('service_account_email', None)
pem_file = module.params.get('pem_file', None)
project_id = module.params.get('project_id', None)
# If any of the values are not given as parameters, check the appropriate
# environment variables.
if not service_account_email:
service_account_email = os.environ.get('GCE_EMAIL', None)
if not project_id:
project_id = os.environ.get('GCE_PROJECT', None)
if not pem_file:
pem_file = os.environ.get('GCE_PEM_FILE_PATH', None)
# If we still don't have one or more of our credentials, attempt to
# get the remaining values from the libcloud secrets file.
if service_account_email is None or pem_file is None:
try:
import secrets
except ImportError:
secrets = None
if hasattr(secrets, 'GCE_PARAMS'):
if not service_account_email:
service_account_email = secrets.GCE_PARAMS[0]
if not pem_file:
pem_file = secrets.GCE_PARAMS[1]
keyword_params = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
if not project_id:
project_id = keyword_params.get('project', None)
# If we *still* don't have the credentials we need, then it's time to
# just fail out.
if service_account_email is None or pem_file is None or project_id is None:
module.fail_json(msg='Missing GCE connection parameters in libcloud '
'secrets file.')
return None
    # Allow for passing in libcloud Google DNS (e.g., Provider.GOOGLE).
if provider is None:
provider = Provider.GCE
try:
gce = get_driver(provider)(service_account_email, pem_file,
datacenter=module.params.get('zone', None),
project=project_id)
gce.connection.user_agent_append("%s/%s" % (
USER_AGENT_PRODUCT, USER_AGENT_VERSION))
except (RuntimeError, ValueError), e:
module.fail_json(msg=str(e), changed=False)
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
return gce
def unexpected_error_msg(error):
"""Create an error string based on passed in error."""
return 'Unexpected response: ' + pprint.pformat(vars(error))
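# Illustrative call site (a sketch, not part of this snippet): a module that
# embeds this file is expected to have imported the libcloud driver machinery,
# e.g.
#   from libcloud.compute.types import Provider
#   from libcloud.compute.providers import get_driver
#   gce = gce_connect(module)  # params, then GCE_* env vars, then secrets.py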
|
ericfc/django
|
refs/heads/master
|
tests/test_client/auth_backends.py
|
315
|
from django.contrib.auth.backends import ModelBackend
class TestClientBackend(ModelBackend):
pass
|
ramitsurana/boto
|
refs/heads/develop
|
tests/unit/emr/test_connection.py
|
34
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto.utils
from datetime import datetime
from time import time
from tests.unit import AWSMockServiceTestCase
from boto.emr.connection import EmrConnection
from boto.emr.emrobject import BootstrapAction, BootstrapActionList, \
ClusterStateChangeReason, ClusterStatus, ClusterSummaryList, \
ClusterSummary, ClusterTimeline, InstanceInfo, \
InstanceList, InstanceGroupInfo, \
InstanceGroup, InstanceGroupList, JobFlow, \
JobFlowStepList, Step, StepSummaryList, \
Cluster, RunJobFlowResponse
# These tests are just checking the basic structure of
# the Elastic MapReduce code, by picking a few calls
# and verifying we get the expected results with mocked
# responses. The integration tests actually verify the
# API calls interact with the service correctly.
class TestListClusters(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<ListClustersResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<ListClustersResult>
<Clusters>
<member>
<Id>j-aaaaaaaaaaaa</Id>
<Status>
<StateChangeReason>
<Message>Terminated by user request</Message>
<Code>USER_REQUEST</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:26Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<Name>analytics test</Name>
<NormalizedInstanceHours>10</NormalizedInstanceHours>
</member>
<member>
<Id>j-aaaaaaaaaaaab</Id>
<Status>
<StateChangeReason>
<Message>Terminated by user request</Message>
<Code>USER_REQUEST</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-21T02:53:08Z</CreationDateTime>
<ReadyDateTime>2014-01-21T02:56:40Z</ReadyDateTime>
<EndDateTime>2014-01-21T03:40:22Z</EndDateTime>
</Timeline>
</Status>
<Name>test job</Name>
<NormalizedInstanceHours>20</NormalizedInstanceHours>
</member>
</Clusters>
</ListClustersResult>
<ResponseMetadata>
<RequestId>aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee</RequestId>
</ResponseMetadata>
</ListClustersResponse>
"""
def test_list_clusters(self):
self.set_http_response(status_code=200)
response = self.service_connection.list_clusters()
self.assert_request_parameters({
'Action': 'ListClusters',
'Version': '2009-03-31',
})
self.assertTrue(isinstance(response, ClusterSummaryList))
self.assertEqual(len(response.clusters), 2)
self.assertTrue(isinstance(response.clusters[0], ClusterSummary))
self.assertEqual(response.clusters[0].name, 'analytics test')
self.assertEqual(response.clusters[0].normalizedinstancehours, '10')
self.assertTrue(isinstance(response.clusters[0].status, ClusterStatus))
self.assertEqual(response.clusters[0].status.state, 'TERMINATED')
self.assertTrue(isinstance(response.clusters[0].status.timeline, ClusterTimeline))
self.assertEqual(response.clusters[0].status.timeline.creationdatetime, '2014-01-24T01:21:21Z')
self.assertEqual(response.clusters[0].status.timeline.readydatetime, '2014-01-24T01:25:26Z')
self.assertEqual(response.clusters[0].status.timeline.enddatetime, '2014-01-24T02:19:46Z')
self.assertTrue(isinstance(response.clusters[0].status.statechangereason, ClusterStateChangeReason))
self.assertEqual(response.clusters[0].status.statechangereason.code, 'USER_REQUEST')
self.assertEqual(response.clusters[0].status.statechangereason.message, 'Terminated by user request')
def test_list_clusters_created_before(self):
self.set_http_response(status_code=200)
date = datetime.now()
response = self.service_connection.list_clusters(created_before=date)
self.assert_request_parameters({
'Action': 'ListClusters',
'CreatedBefore': date.strftime(boto.utils.ISO8601),
'Version': '2009-03-31'
})
def test_list_clusters_created_after(self):
self.set_http_response(status_code=200)
date = datetime.now()
response = self.service_connection.list_clusters(created_after=date)
self.assert_request_parameters({
'Action': 'ListClusters',
'CreatedAfter': date.strftime(boto.utils.ISO8601),
'Version': '2009-03-31'
})
def test_list_clusters_states(self):
self.set_http_response(status_code=200)
response = self.service_connection.list_clusters(cluster_states=[
'RUNNING',
'WAITING'
])
self.assert_request_parameters({
'Action': 'ListClusters',
'ClusterStates.member.1': 'RUNNING',
'ClusterStates.member.2': 'WAITING',
'Version': '2009-03-31'
})
class TestListInstanceGroups(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<ListInstanceGroupsResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<ListInstanceGroupsResult>
<InstanceGroups>
<member>
<Id>ig-aaaaaaaaaaaaa</Id>
<InstanceType>m1.large</InstanceType>
<Market>ON_DEMAND</Market>
<Status>
<StateChangeReason>
<Message>Job flow terminated</Message>
<Code>CLUSTER_TERMINATED</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:08Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<Name>Master instance group</Name>
<RequestedInstanceCount>1</RequestedInstanceCount>
<RunningInstanceCount>0</RunningInstanceCount>
<InstanceGroupType>MASTER</InstanceGroupType>
</member>
<member>
<Id>ig-aaaaaaaaaaab</Id>
<InstanceType>m1.large</InstanceType>
<Market>ON_DEMAND</Market>
<Status>
<StateChangeReason>
<Message>Job flow terminated</Message>
<Code>CLUSTER_TERMINATED</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:26Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<Name>Core instance group</Name>
<RequestedInstanceCount>2</RequestedInstanceCount>
<RunningInstanceCount>0</RunningInstanceCount>
<InstanceGroupType>CORE</InstanceGroupType>
</member>
</InstanceGroups>
</ListInstanceGroupsResult>
<ResponseMetadata>
<RequestId>aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee</RequestId>
</ResponseMetadata>
</ListInstanceGroupsResponse>
"""
def test_list_instance_groups(self):
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.list_instance_groups()
response = self.service_connection.list_instance_groups(cluster_id='j-123')
self.assert_request_parameters({
'Action': 'ListInstanceGroups',
'ClusterId': 'j-123',
'Version': '2009-03-31'
})
self.assertTrue(isinstance(response, InstanceGroupList))
self.assertEqual(len(response.instancegroups), 2)
self.assertTrue(isinstance(response.instancegroups[0], InstanceGroupInfo))
self.assertEqual(response.instancegroups[0].id, 'ig-aaaaaaaaaaaaa')
self.assertEqual(response.instancegroups[0].instancegrouptype, "MASTER")
self.assertEqual(response.instancegroups[0].instancetype, "m1.large")
self.assertEqual(response.instancegroups[0].market, "ON_DEMAND")
self.assertEqual(response.instancegroups[0].name, "Master instance group")
self.assertEqual(response.instancegroups[0].requestedinstancecount, '1')
self.assertEqual(response.instancegroups[0].runninginstancecount, '0')
self.assertTrue(isinstance(response.instancegroups[0].status, ClusterStatus))
self.assertEqual(response.instancegroups[0].status.state, 'TERMINATED')
# status.statechangereason is not parsed into an object
#self.assertEqual(response.instancegroups[0].status.statechangereason.code, 'CLUSTER_TERMINATED')
class TestListInstances(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<ListInstancesResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<ListInstancesResult>
<Instances>
<member>
<Id>ci-123456789abc</Id>
<Status>
<StateChangeReason>
<Message>Cluster was terminated.</Message>
<Code>CLUSTER_TERMINATED</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:26Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:25Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<PrivateDnsName>ip-10-0-0-60.us-west-1.compute.internal</PrivateDnsName>
<PublicIpAddress>54.0.0.1</PublicIpAddress>
<PublicDnsName>ec2-54-0-0-1.us-west-1.compute.amazonaws.com</PublicDnsName>
<Ec2InstanceId>i-aaaaaaaa</Ec2InstanceId>
<PrivateIpAddress>10.0.0.60</PrivateIpAddress>
</member>
<member>
<Id>ci-123456789abd</Id>
<Status>
<StateChangeReason>
<Message>Cluster was terminated.</Message>
<Code>CLUSTER_TERMINATED</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:26Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:25Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<PrivateDnsName>ip-10-0-0-61.us-west-1.compute.internal</PrivateDnsName>
<PublicIpAddress>54.0.0.2</PublicIpAddress>
<PublicDnsName>ec2-54-0-0-2.us-west-1.compute.amazonaws.com</PublicDnsName>
<Ec2InstanceId>i-aaaaaaab</Ec2InstanceId>
<PrivateIpAddress>10.0.0.61</PrivateIpAddress>
</member>
<member>
<Id>ci-123456789abe3</Id>
<Status>
<StateChangeReason>
<Message>Cluster was terminated.</Message>
<Code>CLUSTER_TERMINATED</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:33Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:08Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<PrivateDnsName>ip-10-0-0-62.us-west-1.compute.internal</PrivateDnsName>
<PublicIpAddress>54.0.0.3</PublicIpAddress>
<PublicDnsName>ec2-54-0-0-3.us-west-1.compute.amazonaws.com</PublicDnsName>
<Ec2InstanceId>i-aaaaaaac</Ec2InstanceId>
<PrivateIpAddress>10.0.0.62</PrivateIpAddress>
</member>
</Instances>
</ListInstancesResult>
<ResponseMetadata>
<RequestId>aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee</RequestId>
</ResponseMetadata>
</ListInstancesResponse>
"""
def test_list_instances(self):
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.list_instances()
response = self.service_connection.list_instances(cluster_id='j-123')
self.assertTrue(isinstance(response, InstanceList))
self.assertEqual(len(response.instances), 3)
self.assertTrue(isinstance(response.instances[0], InstanceInfo))
self.assertEqual(response.instances[0].ec2instanceid, 'i-aaaaaaaa')
self.assertEqual(response.instances[0].id, 'ci-123456789abc')
        self.assertEqual(response.instances[0].privatednsname, 'ip-10-0-0-60.us-west-1.compute.internal')
        self.assertEqual(response.instances[0].privateipaddress, '10.0.0.60')
        self.assertEqual(response.instances[0].publicdnsname, 'ec2-54-0-0-1.us-west-1.compute.amazonaws.com')
        self.assertEqual(response.instances[0].publicipaddress, '54.0.0.1')
self.assert_request_parameters({
'Action': 'ListInstances',
'ClusterId': 'j-123',
'Version': '2009-03-31'
})
def test_list_instances_with_group_id(self):
self.set_http_response(200)
response = self.service_connection.list_instances(
cluster_id='j-123', instance_group_id='abc')
self.assert_request_parameters({
'Action': 'ListInstances',
'ClusterId': 'j-123',
'InstanceGroupId': 'abc',
'Version': '2009-03-31'
})
def test_list_instances_with_types(self):
self.set_http_response(200)
response = self.service_connection.list_instances(
cluster_id='j-123', instance_group_types=[
'MASTER',
'TASK'
])
self.assert_request_parameters({
'Action': 'ListInstances',
'ClusterId': 'j-123',
'InstanceGroupTypes.member.1': 'MASTER',
'InstanceGroupTypes.member.2': 'TASK',
'Version': '2009-03-31'
})
class TestListSteps(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""<ListStepsResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<ListStepsResult>
<Steps>
<member>
<Id>abc123</Id>
<Status>
<StateChangeReason/>
<Timeline>
<CreationDateTime>2014-07-01T00:00:00.000Z</CreationDateTime>
</Timeline>
<State>PENDING</State>
</Status>
<Name>Step 1</Name>
<Config>
<Jar>/home/hadoop/lib/emr-s3distcp-1.0.jar</Jar>
<Args>
<member>--src</member>
<member>hdfs:///data/test/</member>
<member>--dest</member>
<member>s3n://test/data</member>
</Args>
<Properties/>
</Config>
<ActionOnFailure>CONTINUE</ActionOnFailure>
</member>
<member>
<Id>def456</Id>
<Status>
<StateChangeReason/>
<Timeline>
<CreationDateTime>2014-07-01T00:00:00.000Z</CreationDateTime>
</Timeline>
<State>COMPLETED</State>
</Status>
<Name>Step 2</Name>
<Config>
<MainClass>my.main.SomeClass</MainClass>
<Jar>s3n://test/jars/foo.jar</Jar>
</Config>
<ActionOnFailure>CONTINUE</ActionOnFailure>
</member>
<member>
<Id>ghi789</Id>
<Status>
<StateChangeReason/>
<Timeline>
<CreationDateTime>2014-07-01T00:00:00.000Z</CreationDateTime>
</Timeline>
<State>FAILED</State>
</Status>
<Name>Step 3</Name>
<Config>
<Jar>s3n://test/jars/bar.jar</Jar>
<Args>
<member>-arg</member>
<member>value</member>
</Args>
<Properties/>
</Config>
<ActionOnFailure>TERMINATE_CLUSTER</ActionOnFailure>
</member>
</Steps>
</ListStepsResult>
<ResponseMetadata>
<RequestId>eff31ee5-0342-11e4-b3c7-9de5a93f6fcb</RequestId>
</ResponseMetadata>
</ListStepsResponse>
"""
def test_list_steps(self):
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.list_steps()
response = self.service_connection.list_steps(cluster_id='j-123')
self.assert_request_parameters({
'Action': 'ListSteps',
'ClusterId': 'j-123',
'Version': '2009-03-31'
})
self.assertTrue(isinstance(response, StepSummaryList))
self.assertEqual(response.steps[0].name, 'Step 1')
valid_states = [
'PENDING',
'RUNNING',
'COMPLETED',
'CANCELLED',
'FAILED',
'INTERRUPTED'
]
# Check for step states
for step in response.steps:
self.assertIn(step.status.state, valid_states)
# Check for step config
step = response.steps[0]
self.assertEqual(step.config.jar,
'/home/hadoop/lib/emr-s3distcp-1.0.jar')
self.assertEqual(len(step.config.args), 4)
self.assertEqual(step.config.args[0].value, '--src')
self.assertEqual(step.config.args[1].value, 'hdfs:///data/test/')
step = response.steps[1]
self.assertEqual(step.config.mainclass, 'my.main.SomeClass')
def test_list_steps_with_states(self):
self.set_http_response(200)
response = self.service_connection.list_steps(
cluster_id='j-123', step_states=[
'COMPLETED',
'FAILED'
])
self.assert_request_parameters({
'Action': 'ListSteps',
'ClusterId': 'j-123',
'StepStateList.member.1': 'COMPLETED',
'StepStateList.member.2': 'FAILED',
'Version': '2009-03-31'
})
self.assertTrue(isinstance(response, StepSummaryList))
self.assertEqual(response.steps[0].name, 'Step 1')
class TestListBootstrapActions(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""<ListBootstrapActionsOutput></ListBootstrapActionsOutput>"""
def test_list_bootstrap_actions(self):
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.list_bootstrap_actions()
response = self.service_connection.list_bootstrap_actions(cluster_id='j-123')
self.assert_request_parameters({
'Action': 'ListBootstrapActions',
'ClusterId': 'j-123',
'Version': '2009-03-31'
})
class TestDescribeCluster(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<DescribeClusterResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<DescribeClusterResult>
<Cluster>
<Id>j-aaaaaaaaa</Id>
<Tags/>
<Ec2InstanceAttributes>
<Ec2AvailabilityZone>us-west-1c</Ec2AvailabilityZone>
<Ec2KeyName>my_secret_key</Ec2KeyName>
</Ec2InstanceAttributes>
<RunningAmiVersion>2.4.2</RunningAmiVersion>
<VisibleToAllUsers>true</VisibleToAllUsers>
<Status>
<StateChangeReason>
<Message>Terminated by user request</Message>
<Code>USER_REQUEST</Code>
</StateChangeReason>
<State>TERMINATED</State>
<Timeline>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<ReadyDateTime>2014-01-24T01:25:26Z</ReadyDateTime>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</Timeline>
</Status>
<AutoTerminate>false</AutoTerminate>
<Name>test analytics</Name>
<RequestedAmiVersion>2.4.2</RequestedAmiVersion>
<Applications>
<member>
<Name>hadoop</Name>
<Version>1.0.3</Version>
</member>
</Applications>
<TerminationProtected>false</TerminationProtected>
<MasterPublicDnsName>ec2-184-0-0-1.us-west-1.compute.amazonaws.com</MasterPublicDnsName>
<NormalizedInstanceHours>10</NormalizedInstanceHours>
<ServiceRole>my-service-role</ServiceRole>
</Cluster>
</DescribeClusterResult>
<ResponseMetadata>
<RequestId>aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee</RequestId>
</ResponseMetadata>
</DescribeClusterResponse>
"""
def test_describe_cluster(self):
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.describe_cluster()
response = self.service_connection.describe_cluster(cluster_id='j-123')
self.assertTrue(isinstance(response, Cluster))
self.assertEqual(response.id, 'j-aaaaaaaaa')
self.assertEqual(response.runningamiversion, '2.4.2')
self.assertEqual(response.visibletoallusers, 'true')
self.assertEqual(response.autoterminate, 'false')
self.assertEqual(response.name, 'test analytics')
self.assertEqual(response.requestedamiversion, '2.4.2')
self.assertEqual(response.terminationprotected, 'false')
self.assertEqual(response.ec2instanceattributes.ec2availabilityzone, "us-west-1c")
self.assertEqual(response.ec2instanceattributes.ec2keyname, 'my_secret_key')
self.assertEqual(response.status.state, 'TERMINATED')
self.assertEqual(response.applications[0].name, 'hadoop')
self.assertEqual(response.applications[0].version, '1.0.3')
self.assertEqual(response.masterpublicdnsname, 'ec2-184-0-0-1.us-west-1.compute.amazonaws.com')
self.assertEqual(response.normalizedinstancehours, '10')
self.assertEqual(response.servicerole, 'my-service-role')
self.assert_request_parameters({
'Action': 'DescribeCluster',
'ClusterId': 'j-123',
'Version': '2009-03-31'
})
class TestDescribeStep(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""<DescribeStepOutput></DescribeStepOutput>"""
def test_describe_step(self):
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.describe_step()
with self.assertRaises(TypeError):
self.service_connection.describe_step(cluster_id='j-123')
with self.assertRaises(TypeError):
self.service_connection.describe_step(step_id='abc')
response = self.service_connection.describe_step(
cluster_id='j-123', step_id='abc')
self.assert_request_parameters({
'Action': 'DescribeStep',
'ClusterId': 'j-123',
'StepId': 'abc',
'Version': '2009-03-31'
})
class TestAddJobFlowSteps(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<AddJobFlowStepsOutput>
<StepIds>
<member>Foo</member>
<member>Bar</member>
</StepIds>
</AddJobFlowStepsOutput>
"""
def test_add_jobflow_steps(self):
self.set_http_response(200)
response = self.service_connection.add_jobflow_steps(
jobflow_id='j-123', steps=[])
# Make sure the correct object is returned, as this was
# previously set to incorrectly return an empty instance
# of RunJobFlowResponse.
self.assertTrue(isinstance(response, JobFlowStepList))
self.assertEqual(response.stepids[0].value, 'Foo')
self.assertEqual(response.stepids[1].value, 'Bar')
class TestBuildTagList(AWSMockServiceTestCase):
connection_class = EmrConnection
def test_key_without_value_encoding(self):
input_dict = {
'KeyWithNoValue': '',
'AnotherKeyWithNoValue': None
}
res = self.service_connection._build_tag_list(input_dict)
# Keys are outputted in ascending key order.
expected = {
'Tags.member.1.Key': 'AnotherKeyWithNoValue',
'Tags.member.2.Key': 'KeyWithNoValue'
}
self.assertEqual(expected, res)
def test_key_full_key_value_encoding(self):
input_dict = {
'FirstKey': 'One',
'SecondKey': 'Two'
}
res = self.service_connection._build_tag_list(input_dict)
# Keys are outputted in ascending key order.
expected = {
'Tags.member.1.Key': 'FirstKey',
'Tags.member.1.Value': 'One',
'Tags.member.2.Key': 'SecondKey',
'Tags.member.2.Value': 'Two'
}
self.assertEqual(expected, res)
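# Illustrative sketch (editor's note, not part of the boto test suite):
# _build_tag_list flattens a tag dict into sorted, 1-indexed request
# parameters, omitting the Value parameter for empty values, e.g.
#     {'B': '2', 'A': ''} -> {'Tags.member.1.Key': 'A',
#                             'Tags.member.2.Key': 'B',
#                             'Tags.member.2.Value': '2'}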
class TestAddTag(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""<AddTagsResponse
xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<AddTagsResult/>
<ResponseMetadata>
<RequestId>88888888-8888-8888-8888-888888888888</RequestId>
</ResponseMetadata>
</AddTagsResponse>
"""
def test_add_mix_of_tags_with_without_values(self):
input_tags = {
'FirstKey': 'One',
'SecondKey': 'Two',
'ZzzNoValue': ''
}
self.set_http_response(200)
with self.assertRaises(TypeError):
self.service_connection.add_tags()
with self.assertRaises(TypeError):
self.service_connection.add_tags('j-123')
with self.assertRaises(AssertionError):
self.service_connection.add_tags('j-123', [])
response = self.service_connection.add_tags('j-123', input_tags)
self.assertTrue(response)
self.assert_request_parameters({
'Action': 'AddTags',
'ResourceId': 'j-123',
'Tags.member.1.Key': 'FirstKey',
'Tags.member.1.Value': 'One',
'Tags.member.2.Key': 'SecondKey',
'Tags.member.2.Value': 'Two',
'Tags.member.3.Key': 'ZzzNoValue',
'Version': '2009-03-31'
})
class TestRemoveTag(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""<RemoveTagsResponse
xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<RemoveTagsResult/>
<ResponseMetadata>
<RequestId>88888888-8888-8888-8888-888888888888</RequestId>
</ResponseMetadata>
</RemoveTagsResponse>
"""
def test_remove_tags(self):
input_tags = {
'FirstKey': 'One',
'SecondKey': 'Two',
'ZzzNoValue': ''
}
self.set_http_response(200)
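        # Note: input_tags above and the raises checks below mirror
        # TestAddTag (they exercise add_tags argument validation); the
        # remove_tags call itself is exercised afterwards.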
with self.assertRaises(TypeError):
self.service_connection.add_tags()
with self.assertRaises(TypeError):
self.service_connection.add_tags('j-123')
with self.assertRaises(AssertionError):
self.service_connection.add_tags('j-123', [])
response = self.service_connection.remove_tags('j-123', ['FirstKey', 'SecondKey'])
self.assertTrue(response)
self.assert_request_parameters({
'Action': 'RemoveTags',
'ResourceId': 'j-123',
'TagKeys.member.1': 'FirstKey',
'TagKeys.member.2': 'SecondKey',
'Version': '2009-03-31'
})
class DescribeJobFlowsTestBase(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<DescribeJobFlowsResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<DescribeJobFlowsResult>
<JobFlows>
<member>
<AmiVersion>2.4.2</AmiVersion>
<ExecutionStatusDetail>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<LastStateChangeReason>Terminated by user request</LastStateChangeReason>
<StartDateTime>2014-01-24T01:25:26Z</StartDateTime>
<ReadyDateTime>2014-01-24T01:25:26Z</ReadyDateTime>
<State>TERMINATED</State>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
</ExecutionStatusDetail>
<BootstrapActions/>
<VisibleToAllUsers>true</VisibleToAllUsers>
<SupportedProducts/>
<Name>test analytics</Name>
<JobFlowId>j-aaaaaa</JobFlowId>
<Steps>
<member>
<ExecutionStatusDetail>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<StartDateTime>2014-01-24T01:25:26Z</StartDateTime>
<State>COMPLETED</State>
<EndDateTime>2014-01-24T01:26:08Z</EndDateTime>
</ExecutionStatusDetail>
<StepConfig>
<HadoopJarStep>
<Args>
<member>s3://us-west-1.elasticmapreduce/libs/hive/hive-script</member>
<member>--base-path</member>
<member>s3://us-west-1.elasticmapreduce/libs/hive/</member>
<member>--install-hive</member>
<member>--hive-versions</member>
<member>0.11.0.1</member>
</Args>
<Jar>s3://us-west-1.elasticmapreduce/libs/script-runner/script-runner.jar</Jar>
<Properties/>
</HadoopJarStep>
<Name>Setup hive</Name>
<ActionOnFailure>TERMINATE_JOB_FLOW</ActionOnFailure>
</StepConfig>
</member>
</Steps>
<Instances>
<Placement>
<AvailabilityZone>us-west-1c</AvailabilityZone>
</Placement>
<MasterInstanceType>m1.large</MasterInstanceType>
<Ec2KeyName>my_key</Ec2KeyName>
<KeepJobFlowAliveWhenNoSteps>true</KeepJobFlowAliveWhenNoSteps>
<InstanceGroups>
<member>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<InstanceRunningCount>0</InstanceRunningCount>
<StartDateTime>2014-01-24T01:23:56Z</StartDateTime>
<ReadyDateTime>2014-01-24T01:25:08Z</ReadyDateTime>
<State>ENDED</State>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
<InstanceRequestCount>1</InstanceRequestCount>
<InstanceType>m1.large</InstanceType>
<LastStateChangeReason>Job flow terminated</LastStateChangeReason>
<Market>ON_DEMAND</Market>
<InstanceGroupId>ig-aaaaaa</InstanceGroupId>
<InstanceRole>MASTER</InstanceRole>
<Name>Master instance group</Name>
</member>
<member>
<CreationDateTime>2014-01-24T01:21:21Z</CreationDateTime>
<InstanceRunningCount>0</InstanceRunningCount>
<StartDateTime>2014-01-24T01:25:26Z</StartDateTime>
<ReadyDateTime>2014-01-24T01:25:26Z</ReadyDateTime>
<State>ENDED</State>
<EndDateTime>2014-01-24T02:19:46Z</EndDateTime>
<InstanceRequestCount>2</InstanceRequestCount>
<InstanceType>m1.large</InstanceType>
<LastStateChangeReason>Job flow terminated</LastStateChangeReason>
<Market>ON_DEMAND</Market>
<InstanceGroupId>ig-aaaaab</InstanceGroupId>
<InstanceRole>CORE</InstanceRole>
<Name>Core instance group</Name>
</member>
</InstanceGroups>
<SlaveInstanceType>m1.large</SlaveInstanceType>
<MasterInstanceId>i-aaaaaa</MasterInstanceId>
<HadoopVersion>1.0.3</HadoopVersion>
<NormalizedInstanceHours>12</NormalizedInstanceHours>
<MasterPublicDnsName>ec2-184-0-0-1.us-west-1.compute.amazonaws.com</MasterPublicDnsName>
<InstanceCount>3</InstanceCount>
<TerminationProtected>false</TerminationProtected>
</Instances>
</member>
</JobFlows>
</DescribeJobFlowsResult>
<ResponseMetadata>
<RequestId>aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee</RequestId>
</ResponseMetadata>
</DescribeJobFlowsResponse>
"""
class TestDescribeJobFlows(DescribeJobFlowsTestBase):
def test_describe_jobflows_response(self):
self.set_http_response(200)
response = self.service_connection.describe_jobflows()
self.assertTrue(isinstance(response, list))
jf = response[0]
self.assertTrue(isinstance(jf, JobFlow))
self.assertEqual(jf.amiversion, '2.4.2')
self.assertEqual(jf.visibletoallusers, 'true')
self.assertEqual(jf.name, 'test analytics')
self.assertEqual(jf.jobflowid, 'j-aaaaaa')
self.assertEqual(jf.ec2keyname, 'my_key')
self.assertEqual(jf.masterinstancetype, 'm1.large')
self.assertEqual(jf.availabilityzone, 'us-west-1c')
self.assertEqual(jf.keepjobflowalivewhennosteps, 'true')
self.assertEqual(jf.slaveinstancetype, 'm1.large')
self.assertEqual(jf.masterinstanceid, 'i-aaaaaa')
self.assertEqual(jf.hadoopversion, '1.0.3')
self.assertEqual(jf.normalizedinstancehours, '12')
self.assertEqual(jf.masterpublicdnsname, 'ec2-184-0-0-1.us-west-1.compute.amazonaws.com')
self.assertEqual(jf.instancecount, '3')
self.assertEqual(jf.terminationprotected, 'false')
self.assertTrue(isinstance(jf.steps, list))
step = jf.steps[0]
self.assertTrue(isinstance(step, Step))
self.assertEqual(step.jar, 's3://us-west-1.elasticmapreduce/libs/script-runner/script-runner.jar')
self.assertEqual(step.name, 'Setup hive')
self.assertEqual(step.actiononfailure, 'TERMINATE_JOB_FLOW')
self.assertTrue(isinstance(jf.instancegroups, list))
ig = jf.instancegroups[0]
self.assertTrue(isinstance(ig, InstanceGroup))
self.assertEqual(ig.creationdatetime, '2014-01-24T01:21:21Z')
self.assertEqual(ig.state, 'ENDED')
self.assertEqual(ig.instancerequestcount, '1')
self.assertEqual(ig.instancetype, 'm1.large')
self.assertEqual(ig.laststatechangereason, 'Job flow terminated')
self.assertEqual(ig.market, 'ON_DEMAND')
self.assertEqual(ig.instancegroupid, 'ig-aaaaaa')
self.assertEqual(ig.instancerole, 'MASTER')
self.assertEqual(ig.name, 'Master instance group')
def test_describe_jobflows_no_args(self):
self.set_http_response(200)
self.service_connection.describe_jobflows()
self.assert_request_parameters({
'Action': 'DescribeJobFlows',
}, ignore_params_values=['Version'])
def test_describe_jobflows_filtered(self):
self.set_http_response(200)
now = datetime.now()
a_bit_before = datetime.fromtimestamp(time() - 1000)
self.service_connection.describe_jobflows(states=['WAITING', 'RUNNING'], jobflow_ids=['j-aaaaaa', 'j-aaaaab'], created_after=a_bit_before, created_before=now)
self.assert_request_parameters({
'Action': 'DescribeJobFlows',
'JobFlowIds.member.1': 'j-aaaaaa',
'JobFlowIds.member.2': 'j-aaaaab',
'JobFlowStates.member.1': 'WAITING',
'JobFlowStates.member.2': 'RUNNING',
'CreatedAfter': a_bit_before.strftime(boto.utils.ISO8601),
'CreatedBefore': now.strftime(boto.utils.ISO8601),
}, ignore_params_values=['Version'])
class TestDescribeJobFlow(DescribeJobFlowsTestBase):
def test_describe_jobflow(self):
self.set_http_response(200)
response = self.service_connection.describe_jobflow('j-aaaaaa')
self.assertTrue(isinstance(response, JobFlow))
self.assert_request_parameters({
'Action': 'DescribeJobFlows',
'JobFlowIds.member.1': 'j-aaaaaa',
}, ignore_params_values=['Version'])
class TestRunJobFlow(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
return b"""
<RunJobFlowResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
<RunJobFlowResult>
<JobFlowId>j-ZKIY4CKQRX72</JobFlowId>
</RunJobFlowResult>
<ResponseMetadata>
<RequestId>aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee</RequestId>
</ResponseMetadata>
</RunJobFlowResponse>
"""
def test_run_jobflow_service_role(self):
self.set_http_response(200)
response = self.service_connection.run_jobflow(
'EmrCluster', service_role='EMR_DefaultRole')
self.assertTrue(response)
self.assert_request_parameters({
'Action': 'RunJobFlow',
'Version': '2009-03-31',
'ServiceRole': 'EMR_DefaultRole',
'Name': 'EmrCluster' },
ignore_params_values=['ActionOnFailure', 'Instances.InstanceCount',
'Instances.KeepJobFlowAliveWhenNoSteps',
'Instances.MasterInstanceType',
'Instances.SlaveInstanceType'])
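# Usage sketch (editor's illustration, not part of the boto test suite):
# the call under test corresponds roughly to
#     conn = EmrConnection(aws_access_key_id='...',
#                          aws_secret_access_key='...')
#     conn.run_jobflow('EmrCluster', service_role='EMR_DefaultRole')
# which issues Action=RunJobFlow with the parameters asserted above.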
|
renpytom/python-for-android
|
refs/heads/master
|
src/buildlib/jinja2.egg/jinja2/filters.py
|
199
|
# -*- coding: utf-8 -*-
"""
jinja2.filters
~~~~~~~~~~~~~~
Bundled jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import math
from random import choice
from operator import itemgetter
from itertools import imap, groupby
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode
from jinja2.runtime import Undefined
from jinja2.exceptions import FilterArgumentError, SecurityError
_word_re = re.compile(r'\w+(?u)')
def contextfilter(f):
"""Decorator for marking context dependent filters. The current
:class:`Context` will be passed as first argument.
"""
f.contextfilter = True
return f
def evalcontextfilter(f):
"""Decorator for marking eval-context dependent filters. An eval
context object is passed as first argument. For more information
about the eval context, see :ref:`eval-context`.
.. versionadded:: 2.4
"""
f.evalcontextfilter = True
return f
def environmentfilter(f):
"""Decorator for marking evironment dependent filters. The current
:class:`Environment` is passed to the filter as first argument.
"""
f.environmentfilter = True
return f
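# Editor's sketch (hypothetical filter, not part of jinja2): the attribute
# set by these decorators tells the environment which extra first argument
# to pass when the filter is invoked, e.g.:
#     @environmentfilter
#     def do_first_or_undef(environment, seq):
#         return next(iter(seq), environment.undefined('empty sequence'))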
def do_forceescape(value):
"""Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, '__html__'):
value = value.__html__()
return escape(unicode(value))
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
"""Return a copy of the value with all occurrences of a substring
replaced with a new one. The first argument is the substring
that should be replaced, the second is the replacement string.
If the optional third argument ``count`` is given, only the first
``count`` occurrences are replaced:
.. sourcecode:: jinja
{{ "Hello World"|replace("Hello", "Goodbye") }}
-> Goodbye World
{{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
-> d'oh, d'oh, aaargh
"""
if count is None:
count = -1
if not eval_ctx.autoescape:
return unicode(s).replace(unicode(old), unicode(new), count)
if hasattr(old, '__html__') or hasattr(new, '__html__') and \
not hasattr(s, '__html__'):
s = escape(s)
else:
s = soft_unicode(s)
return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
"""Convert a value to uppercase."""
return soft_unicode(s).upper()
def do_lower(s):
"""Convert a value to lowercase."""
return soft_unicode(s).lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
"""Create an SGML/XML attribute string based on the items in a dict.
All values that are neither `none` nor `undefined` are automatically
escaped:
.. sourcecode:: html+jinja
<ul{{ {'class': 'my_list', 'missing': none,
'id': 'list-%d'|format(variable)}|xmlattr }}>
...
</ul>
Results in something like this:
.. sourcecode:: html
<ul class="my_list" id="list-42">
...
</ul>
    As you can see it automatically prepends a space in front of the item
    if the filter returned something, unless the second parameter is false.
"""
rv = u' '.join(
u'%s="%s"' % (escape(key), escape(value))
for key, value in d.iteritems()
if value is not None and not isinstance(value, Undefined)
)
if autospace and rv:
rv = u' ' + rv
if _eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_capitalize(s):
"""Capitalize a value. The first character will be uppercase, all others
lowercase.
"""
return soft_unicode(s).capitalize()
def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
return soft_unicode(s).title()
def do_dictsort(value, case_sensitive=False, by='key'):
"""Sort a dict and yield (key, value) pairs. Because python dicts are
unsorted you may want to use this function to order them by either
key or value:
.. sourcecode:: jinja
{% for item in mydict|dictsort %}
sort the dict by key, case insensitive
        {% for item in mydict|dictsort(true) %}
sort the dict by key, case sensitive
{% for item in mydict|dictsort(false, 'value') %}
sort the dict by key, case insensitive, sorted
normally and ordered by value.
"""
if by == 'key':
pos = 0
elif by == 'value':
pos = 1
else:
raise FilterArgumentError('You can only sort by either '
'"key" or "value"')
def sort_func(item):
value = item[pos]
if isinstance(value, basestring) and not case_sensitive:
value = value.lower()
return value
return sorted(value.items(), key=sort_func)
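# Editor's illustration: do_dictsort({'b': 1, 'A': 2}) -> [('A', 2), ('b', 1)]
# because the default sort lowercases string keys before comparing.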
def do_sort(value, reverse=False, case_sensitive=False):
    """Sort an iterable. Per default it sorts ascending, if you pass it
    true as first argument it will reverse the sorting. If the iterable
    is made of strings the second parameter can be used to control the
    case sensitiveness of the comparison which is disabled by default.
    .. sourcecode:: jinja
        {% for item in iterable|sort %}
            ...
        {% endfor %}
    """
    if not case_sensitive:
        def sort_func(item):
            if isinstance(item, basestring):
                item = item.lower()
            return item
    else:
        sort_func = None
    return sorted(value, key=sort_func, reverse=reverse)
def do_default(value, default_value=u'', boolean=False):
"""If the value is undefined it will return the passed default value,
otherwise the value of the variable:
.. sourcecode:: jinja
{{ my_variable|default('my_variable is not defined') }}
This will output the value of ``my_variable`` if the variable was
defined, otherwise ``'my_variable is not defined'``. If you want
to use default with variables that evaluate to false you have to
set the second parameter to `true`:
.. sourcecode:: jinja
{{ ''|default('the string was empty', true) }}
"""
if (boolean and not value) or isinstance(value, Undefined):
return default_value
return value
@evalcontextfilter
def do_join(eval_ctx, value, d=u''):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
"""
    # no automatic escaping? joining is a lot easier then
if not eval_ctx.autoescape:
return unicode(d).join(imap(unicode, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, '__html__'):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
if hasattr(item, '__html__'):
do_escape = True
else:
value[idx] = unicode(item)
if do_escape:
d = escape(d)
else:
d = unicode(d)
return d.join(value)
    # no html involved, do normal joining
return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
"""Centers the value in a field of a given width."""
return unicode(value).center(width)
@environmentfilter
def do_first(environment, seq):
"""Return the first item of a sequence."""
try:
return iter(seq).next()
except StopIteration:
return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return iter(reversed(seq)).next()
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
@environmentfilter
def do_random(environment, seq):
"""Return a random item from the sequence."""
try:
return choice(seq)
except IndexError:
return environment.undefined('No random item, sequence was empty.')
def do_filesizeformat(value, binary=False):
"""Format the value like a 'human-readable' file size (i.e. 13 KB,
4.1 MB, 102 bytes, etc). Per default decimal prefixes are used (mega,
giga, etc.), if the second parameter is set to `True` the binary
prefixes are used (mebi, gibi).
"""
bytes = float(value)
base = binary and 1024 or 1000
middle = binary and 'i' or ''
if bytes < base:
return "%d Byte%s" % (bytes, bytes != 1 and 's' or '')
elif bytes < base * base:
return "%.1f K%sB" % (bytes / base, middle)
elif bytes < base * base * base:
return "%.1f M%sB" % (bytes / (base * base), middle)
return "%.1f G%sB" % (bytes / (base * base * base), middle)
def do_pprint(value, verbose=False):
"""Pretty print a variable. Useful for debugging.
With Jinja 1.2 onwards you can pass it a parameter. If this parameter
is truthy the output will be more verbose (this requires `pretty`)
"""
return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False):
"""Converts URLs in plain text into clickable links.
If you pass the filter an additional integer it will shorten the urls
to that number. Also a third argument exists that makes the urls
"nofollow":
.. sourcecode:: jinja
{{ mytext|urlize(40, true) }}
links are shortened to 40 chars and defined with rel="nofollow"
"""
rv = urlize(value, trim_url_limit, nofollow)
if eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_indent(s, width=4, indentfirst=False):
"""Return a copy of the passed string, each line indented by
4 spaces. The first line is not indented. If you want to
change the number of spaces or indent the first line too
you can pass additional parameters to the filter:
.. sourcecode:: jinja
{{ mytext|indent(2, true) }}
indent by two spaces and indent the first line too.
"""
indention = u' ' * width
rv = (u'\n' + indention).join(s.splitlines())
if indentfirst:
rv = indention + rv
return rv
def do_truncate(s, length=255, killwords=False, end='...'):
"""Return a truncated copy of the string. The length is specified
with the first parameter which defaults to ``255``. If the second
parameter is ``true`` the filter will cut the text at length. Otherwise
it will try to save the last word. If the text was in fact
truncated it will append an ellipsis sign (``"..."``). If you want a
different ellipsis sign than ``"..."`` you can specify it using the
third parameter.
    .. sourcecode:: jinja
{{ mytext|truncate(300, false, '»') }}
truncate mytext to 300 chars, don't split up words, use a
right pointing double arrow as ellipsis sign.
"""
if len(s) <= length:
return s
elif killwords:
return s[:length] + end
words = s.split(' ')
result = []
m = 0
for word in words:
m += len(word) + 1
if m > length:
break
result.append(word)
result.append(end)
return u' '.join(result)
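# Editor's illustration: do_truncate('foo bar baz', 9) -> 'foo bar ...'
# (the last word is dropped rather than split); with killwords=True the
# result is the hard cut 'foo bar b...'.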
def do_wordwrap(s, width=79, break_long_words=True):
"""
Return a copy of the string passed to the filter wrapped after
``79`` characters. You can override this default using the first
parameter. If you set the second parameter to `false` Jinja will not
split words apart if they are longer than `width`.
"""
import textwrap
return u'\n'.join(textwrap.wrap(s, width=width, expand_tabs=False,
replace_whitespace=False,
break_long_words=break_long_words))
def do_wordcount(s):
"""Count the words in that string."""
return len(_word_re.findall(s))
def do_int(value, default=0):
"""Convert the value into an integer. If the
conversion doesn't work it will return ``0``. You can
override this default using the first parameter.
"""
try:
return int(value)
except (TypeError, ValueError):
# this quirk is necessary so that "42.23"|int gives 42.
try:
return int(float(value))
except (TypeError, ValueError):
return default
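# Editor's illustration: do_int('42.23') -> 42 via the float fallback
# noted above, while do_int('x') -> 0 (the default).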
def do_float(value, default=0.0):
"""Convert the value into a floating point number. If the
conversion doesn't work it will return ``0.0``. You can
override this default using the first parameter.
"""
try:
return float(value)
except (TypeError, ValueError):
return default
def do_format(value, *args, **kwargs):
"""
Apply python string formatting on an object:
.. sourcecode:: jinja
{{ "%s - %s"|format("Hello?", "Foo!") }}
-> Hello? - Foo!
"""
if args and kwargs:
raise FilterArgumentError('can\'t handle positional and keyword '
'arguments at the same time')
return soft_unicode(value) % (kwargs or args)
def do_trim(value):
"""Strip leading and trailing whitespace."""
return soft_unicode(value).strip()
def do_striptags(value):
"""Strip SGML/XML tags and replace adjacent whitespace by one space.
"""
if hasattr(value, '__html__'):
value = value.__html__()
return Markup(unicode(value)).striptags()
def do_slice(value, slices, fill_with=None):
"""Slice an iterator and return a list of lists containing
those items. Useful if you want to create a div containing
three ul tags that represent columns:
.. sourcecode:: html+jinja
<div class="columwrapper">
{%- for column in items|slice(3) %}
<ul class="column-{{ loop.index }}">
{%- for item in column %}
<li>{{ item }}</li>
{%- endfor %}
</ul>
{%- endfor %}
</div>
If you pass it a second argument it's used to fill missing
values on the last iteration.
"""
seq = list(value)
length = len(seq)
items_per_slice = length // slices
slices_with_extra = length % slices
offset = 0
for slice_number in xrange(slices):
start = offset + slice_number * items_per_slice
if slice_number < slices_with_extra:
offset += 1
end = offset + (slice_number + 1) * items_per_slice
tmp = seq[start:end]
if fill_with is not None and slice_number >= slices_with_extra:
tmp.append(fill_with)
yield tmp
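# Editor's illustration: list(do_slice(range(10), 3)) yields
# [[0, 1, 2, 3], [4, 5, 6], [7, 8, 9]] -- the remainder goes to the
# earliest columns; fill_with pads the later, shorter ones instead.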
def do_batch(value, linecount, fill_with=None):
"""
A filter that batches items. It works pretty much like `slice`
just the other way round. It returns a list of lists with the
given number of items. If you provide a second parameter this
is used to fill missing items. See this example:
.. sourcecode:: html+jinja
<table>
{%- for row in items|batch(3, ' ') %}
<tr>
{%- for column in row %}
<td>{{ column }}</td>
{%- endfor %}
</tr>
{%- endfor %}
</table>
"""
result = []
tmp = []
for item in value:
if len(tmp) == linecount:
yield tmp
tmp = []
tmp.append(item)
if tmp:
if fill_with is not None and len(tmp) < linecount:
tmp += [fill_with] * (linecount - len(tmp))
yield tmp
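# Editor's illustration: list(do_batch(range(7), 3, 0)) yields
# [[0, 1, 2], [3, 4, 5], [6, 0, 0]] -- rows of three, last row padded.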
def do_round(value, precision=0, method='common'):
"""Round the number to a given precision. The first
parameter specifies the precision (default is ``0``), the
second the rounding method:
- ``'common'`` rounds either up or down
- ``'ceil'`` always rounds up
- ``'floor'`` always rounds down
If you don't specify a method ``'common'`` is used.
.. sourcecode:: jinja
{{ 42.55|round }}
-> 43.0
{{ 42.55|round(1, 'floor') }}
-> 42.5
Note that even if rounded to 0 precision, a float is returned. If
you need a real integer, pipe it through `int`:
.. sourcecode:: jinja
{{ 42.55|round|int }}
-> 43
"""
    if method not in ('common', 'ceil', 'floor'):
        raise FilterArgumentError('method must be common, ceil or floor')
    if precision < 0:
        raise FilterArgumentError('precision must be a positive integer '
                                  'or zero.')
    if method == 'common':
        return round(value, precision)
    func = getattr(math, method)
    if precision:
        return func(value * (10 ** precision)) / (10 ** precision)
else:
return func(value)
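# Editor's illustration (with the 10 ** precision exponent fix above):
#   do_round(42.55)              -> 43.0
#   do_round(42.55, 1, 'floor')  -> 42.5
#   do_round(42.55, 1, 'ceil')   -> 42.6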
@environmentfilter
def do_groupby(environment, value, attribute):
"""Group a sequence of objects by a common attribute.
If you for example have a list of dicts or objects that represent persons
with `gender`, `first_name` and `last_name` attributes and you want to
group all users by genders you can do something like the following
snippet:
.. sourcecode:: html+jinja
<ul>
{% for group in persons|groupby('gender') %}
<li>{{ group.grouper }}<ul>
{% for person in group.list %}
<li>{{ person.first_name }} {{ person.last_name }}</li>
{% endfor %}</ul></li>
{% endfor %}
</ul>
Additionally it's possible to use tuple unpacking for the grouper and
list:
.. sourcecode:: html+jinja
<ul>
{% for grouper, list in persons|groupby('gender') %}
...
{% endfor %}
</ul>
As you can see the item we're grouping by is stored in the `grouper`
attribute and the `list` contains all the objects that have this grouper
in common.
"""
expr = lambda x: environment.getitem(x, attribute)
return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
class _GroupTuple(tuple):
__slots__ = ()
grouper = property(itemgetter(0))
list = property(itemgetter(1))
def __new__(cls, (key, value)):
return tuple.__new__(cls, (key, list(value)))
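# Editor's illustration: [{'gender': 'f'}, {'gender': 'm'}, {'gender': 'f'}]
# |groupby('gender') yields the pairs ('f', [two dicts]) and ('m', [one]),
# each a _GroupTuple exposing .grouper and .list, sorted by grouper.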
def do_list(value):
"""Convert the value into a list. If it was a string the returned list
will be a list of characters.
"""
return list(value)
def do_mark_safe(value):
"""Mark the value as safe which means that in an environment with automatic
escaping enabled this variable will not be escaped.
"""
return Markup(value)
def do_mark_unsafe(value):
"""Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
return unicode(value)
def do_reverse(value):
"""Reverse the object or return an iterator the iterates over it the other
way round.
"""
if isinstance(value, basestring):
return value[::-1]
try:
return reversed(value)
except TypeError:
try:
rv = list(value)
rv.reverse()
return rv
except TypeError:
raise FilterArgumentError('argument must be iterable')
@environmentfilter
def do_attr(environment, obj, name):
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo["bar"]`` just that always an attribute is returned and items are not
looked up.
See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
"""
try:
name = str(name)
except UnicodeError:
pass
else:
try:
value = getattr(obj, name)
except AttributeError:
pass
else:
if environment.sandboxed and not \
environment.is_safe_attribute(obj, name, value):
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name)
FILTERS = {
'attr': do_attr,
'replace': do_replace,
'upper': do_upper,
'lower': do_lower,
'escape': escape,
'e': escape,
'forceescape': do_forceescape,
'capitalize': do_capitalize,
'title': do_title,
'default': do_default,
'd': do_default,
'join': do_join,
'count': len,
'dictsort': do_dictsort,
'sort': do_sort,
'length': len,
'reverse': do_reverse,
'center': do_center,
'indent': do_indent,
'first': do_first,
'last': do_last,
'random': do_random,
'filesizeformat': do_filesizeformat,
'pprint': do_pprint,
'truncate': do_truncate,
'wordwrap': do_wordwrap,
'wordcount': do_wordcount,
'int': do_int,
'float': do_float,
'string': soft_unicode,
'list': do_list,
'urlize': do_urlize,
'format': do_format,
'trim': do_trim,
'striptags': do_striptags,
'slice': do_slice,
'batch': do_batch,
'sum': sum,
'abs': abs,
'round': do_round,
'groupby': do_groupby,
'safe': do_mark_safe,
'xmlattr': do_xmlattr
}
|
tmpgit/intellij-community
|
refs/heads/master
|
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__main__.py
|
466
|
import sys
from .main import main
sys.exit(main("lib2to3.fixes"))
|
menpo/menpo
|
refs/heads/master
|
menpo/__init__.py
|
2
|
from . import base
from . import feature
from . import image
from . import io
from . import landmark
from . import math
from . import model
from . import shape
from . import transform
from . import visualize
from ._version import __version__
|
wdzhou/mantid
|
refs/heads/master
|
scripts/test/SANSCommandInterfaceTest.py
|
3
|
from __future__ import (absolute_import, division, print_function)
import unittest
import mantid
import os
import isis_instrument as instruments
import ISISCommandInterface as command_iface
from reducer_singleton import ReductionSingleton
import isis_reduction_steps as reduction_steps
from mantid.simpleapi import *
from mantid.kernel import DateAndTime
import random
import math
class SANSCommandInterfaceGetAndSetTransmissionSettings(unittest.TestCase):
def test_that_gets_transmission_monitor(self):
# Arrange
trans_spectrum = 4
command_iface.Clean()
command_iface.SANS2D()
ReductionSingleton().transmission_calculator.trans_mon = trans_spectrum
# Act
result = command_iface.GetTransmissionMonitorSpectrum()
# Assert
self.assertEqual(trans_spectrum, result, 'The transmission spectrum should be set to 4.')
def test_setting_transmission_monitor_to_valid_input(self):
# Arrange
trans_spectrum = 4
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionMonitorSpectrum(trans_mon = trans_spectrum)
# Assert
self.assertEqual(trans_spectrum, command_iface.GetTransmissionMonitorSpectrum(), 'The transmission spectrum should be set to 4.')
def test_setting_transmission_monitor_to_invalid_input_does_not_set(self):
# Arrange
trans_spectrum = 4
trans_spectrum_invalid = '23434_yh'
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionMonitorSpectrum(trans_mon = trans_spectrum)
command_iface.SetTransmissionMonitorSpectrum(trans_mon = trans_spectrum_invalid)
# Assert
self.assertEqual(trans_spectrum, command_iface.GetTransmissionMonitorSpectrum(), 'The transmission spectrum should be set to 4.')
def test_that_gets_transmission_monitor_shift(self):
# Arrange
trans_spectrum_shift = -55
command_iface.Clean()
command_iface.SANS2D()
ReductionSingleton().get_instrument().monitor_4_offset = trans_spectrum_shift
# Act
result = command_iface.GetTransmissionMonitorSpectrumShift()
# Assert
self.assertEqual(trans_spectrum_shift, result, 'The transmission monitor shift should be set to -55.')
def test_setting_shift_to_valid_value(self):
# Arrange
trans_spectrum_shift = -55.0
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionMonitorSpectrumShift(trans_mon_shift = trans_spectrum_shift)
# Assert
self.assertEqual(trans_spectrum_shift, command_iface.GetTransmissionMonitorSpectrumShift(), 'The transmission monitor shift should be set to -55.')
def test_setting_shift_with_invalid_input(self):
# Arrange
trans_spectrum_shift = '-55_thg'
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionMonitorSpectrumShift(trans_mon_shift = trans_spectrum_shift)
# Assert
self.assertEqual(None, command_iface.GetTransmissionMonitorSpectrumShift(), 'The transmission monitor shift should be None.')
def test_that_gets_transmission_radius(self):
# Arrange
trans_radius = 23/1000
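        # true division (from the __future__ import above): 0.023 m == 23 mm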
command_iface.Clean()
command_iface.SANS2D()
ReductionSingleton().transmission_calculator.radius = trans_radius
# Act
result = command_iface.GetTransmissionRadiusInMM()
# Assert
self.assertEqual(trans_radius*1000, result, 'The transmission radius should be set to 23 mm.')
def test_setting_radius_to_valid_value(self):
# Arrange
trans_radius = 23
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionRadiusInMM(trans_radius = trans_radius)
# Assert
self.assertEqual(trans_radius, command_iface.GetTransmissionRadiusInMM(), 'The transmission radius should be set to 23.')
def test_setting_radius_with_invalid_input(self):
# Arrange
trans_radius = '23_yh'
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionRadiusInMM(trans_radius = trans_radius)
# Assert
self.assertEqual(None, command_iface.GetTransmissionRadiusInMM(), 'The transmission radius should be None.')
def test_that_gets_non_empty_roi_files(self):
# Arrange
trans_roi_files = ['roi_file1.xml', 'roi_file2.xml']
command_iface.Clean()
command_iface.SANS2D()
ReductionSingleton().transmission_calculator.roi_files = trans_roi_files
# Act
result = command_iface.GetTransmissionROI()
# Assert
self.assertEqual(trans_roi_files, result, 'The transmission roi should have two entries')
def test_that_gets_None_for_empty_roi_files(self):
# Arrange
command_iface.Clean()
command_iface.SANS2D()
# Act
result = command_iface.GetTransmissionROI()
# Assert
self.assertEqual(None, result, 'The transmission roi should be None')
def test_setting_roi_file_for_valid_input(self):
# Arrange
trans_roi_files = ['file1.xml', 'file2.xml']
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionROI(trans_roi_files = trans_roi_files)
# Assert
result = ReductionSingleton().transmission_calculator.roi_files
self.assertEqual(2, len(result), 'The transmission roi list should have two entries')
self.assertEqual("file1.xml", result[0], 'The first file should be file1.xml')
self.assertEqual("file2.xml", result[1], 'The second file should be file2.xml')
def test_setting_roi_file_for_invalid_input(self):
# Arrange
trans_roi_files = ['file1g', 'file2.xml']
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionROI(trans_roi_files = trans_roi_files)
# Assert
self.assertEqual(0, len(ReductionSingleton().transmission_calculator.roi_files), 'The transmission roi list should be empty.')
def test_that_gets_non_empty_mask_files(self):
# Arrange
trans_mask_files = ['mask_file1.xml', 'mask_file2.xml']
command_iface.Clean()
command_iface.SANS2D()
ReductionSingleton().transmission_calculator.mask_files = trans_mask_files
# Act
result = command_iface.GetTransmissionMask()
# Assert
self.assertEqual(trans_mask_files, result, 'The transmission mask should have two entries')
def test_that_gets_None_for_empty_mask_files(self):
# Arrange
command_iface.Clean()
command_iface.SANS2D()
# Act
result = command_iface.GetTransmissionMask()
# Assert
self.assertEqual(None, result, 'The transmission mask should be None')
def test_setting_mask_file_for_valid_input(self):
# Arrange
trans_mask_files = ['file1.xml', 'file2.xml']
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionMask(trans_mask_files = trans_mask_files)
# Assert
result = ReductionSingleton().transmission_calculator.mask_files
self.assertEqual(2, len(result), 'The transmission mask list should have two entries')
self.assertEqual("file1.xml", result[0], 'The first file should be file1.xml')
self.assertEqual("file2.xml", result[1], 'The second file should be file2.xml')
def test_setting_mask_file_for_invalid_input(self):
# Arrange
trans_mask_files = " file1g, file2.xml "
command_iface.Clean()
command_iface.SANS2D()
# Act
command_iface.SetTransmissionMask(trans_mask_files = trans_mask_files)
# Assert
self.assertEqual(0, len(ReductionSingleton().transmission_calculator.mask_files), 'The transmission mask list should be empty.')
class TestEventWorkspaceCheck(unittest.TestCase):
def _create_file_name(self, name):
temp_save_dir = config['defaultsave.directory']
if (temp_save_dir == ''):
temp_save_dir = os.getcwd()
return os.path.join(temp_save_dir, name + '.nxs')
def addSampleLogEntry(self, log_name, ws, start_time, extra_time_shift):
number_of_times = 10
for i in range(0, number_of_times):
val = random.randrange(0, 10, 1)
date = DateAndTime(start_time)
date += int(i*1e9)
date += int(extra_time_shift*1e9)
AddTimeSeriesLog(ws, Name=log_name, Time=date.__str__().strip(), Value=val)
def _clean_up(self, file_name):
if os.path.exists(file_name):
os.remove(file_name)
def test_that_histogram_workspace_is_detected(self):
# Arrange
ws = CreateSampleWorkspace()
self.addSampleLogEntry('proton_charge', ws, "2010-01-01T00:00:00", 0.0)
file_name = self._create_file_name('dummy')
SaveNexus(Filename= file_name, InputWorkspace=ws)
# Act
result = command_iface.check_if_event_workspace(file_name)
self.assertFalse(result)
# Clean Up
self._clean_up(file_name)
DeleteWorkspace(ws)
class SANSCommandInterfaceGetAndSetQResolutionSettings(unittest.TestCase):
    # Test the input and output mechanisms for the QResolution settings
def test_full_setup_for_circular_apertures(self):
# Arrange
command_iface.Clean()
command_iface.SANS2D()
a1 = 2 # in mm
a2 = 3 # in mm
delta_r = 4 # in mm
collimation_length = 10 # in m
norm = reduction_steps.CalculateNormISIS()
ReductionSingleton().to_Q = reduction_steps.ConvertToQISIS(norm)
# Act
command_iface.set_q_resolution_a1(a1 = a1)
command_iface.set_q_resolution_a2(a2 = a2)
command_iface.set_q_resolution_delta_r(delta_r = delta_r)
command_iface.set_q_resolution_collimation_length(collimation_length = collimation_length)
command_iface.set_q_resolution_use(use = True)
ReductionSingleton().to_Q._set_up_q_resolution_parameters()
# Assert
a1_stored = ReductionSingleton().to_Q.get_q_resolution_a1() # in m
a1_expected = a1/1000.
self.assertEqual(a1_stored, a1_expected)
a2_stored = ReductionSingleton().to_Q.get_q_resolution_a2() # in m
a2_expected = a2/1000.
self.assertEqual(a2_stored, a2_expected)
collimation_length_stored = ReductionSingleton().to_Q.get_q_resolution_collimation_length() # in m
collimation_length_expected = collimation_length
self.assertEqual(collimation_length_stored, collimation_length_expected)
delta_r_stored = ReductionSingleton().to_Q.get_q_resolution_delta_r() # in m
delta_r_expected = delta_r/1000.
self.assertEqual(delta_r_stored, delta_r_expected)
def test_full_setup_for_rectangular_apertures(self):
# Arrange
command_iface.Clean()
command_iface.SANS2D()
a1 = 2 # in mm
a2 = 3 # in mm
delta_r = 4 # in mm
collimation_length = 10 # in m
h1 = 9 # in mm
w1 = 8 # in mm
h2 = 7 # in mm
w2 = 5 # in mm
norm = reduction_steps.CalculateNormISIS()
ReductionSingleton().to_Q = reduction_steps.ConvertToQISIS(norm)
# Act
command_iface.set_q_resolution_a1(a1 = a1)
command_iface.set_q_resolution_a2(a2 = a2)
command_iface.set_q_resolution_delta_r(delta_r = delta_r)
command_iface.set_q_resolution_h1(h1 = h1)
command_iface.set_q_resolution_w1(w1 = w1)
command_iface.set_q_resolution_h2(h2 = h2)
command_iface.set_q_resolution_w2(w2 = w2)
command_iface.set_q_resolution_collimation_length(collimation_length = collimation_length)
command_iface.set_q_resolution_use(use = True)
ReductionSingleton().to_Q._set_up_q_resolution_parameters()
# Assert
a1_stored = ReductionSingleton().to_Q.get_q_resolution_a1() # in m
a1_expected = 2*math.sqrt((h1/1000.*h1/1000. + w1/1000.*w1/1000.)/6)
self.assertEqual(a1_stored, a1_expected)
a2_stored = ReductionSingleton().to_Q.get_q_resolution_a2() # in m
a2_expected = 2*math.sqrt((h2/1000.*h2/1000. + w2/1000.*w2/1000.)/6)
self.assertEqual(a2_stored, a2_expected)
collimation_length_stored = ReductionSingleton().to_Q.get_q_resolution_collimation_length() # in m
collimation_length_expected = collimation_length
self.assertEqual(collimation_length_stored, collimation_length_expected)
delta_r_stored = ReductionSingleton().to_Q.get_q_resolution_delta_r() # in m
delta_r_expected = delta_r/1000.
self.assertEqual(delta_r_stored, delta_r_expected)
def test_full_setup_for_rectangular_apertures_which_are_only_partially_specified(self):
# Arrange
command_iface.Clean()
command_iface.SANS2D()
a1 = 2 # in mm
a2 = 3 # in mm
delta_r = 4 # in mm
collimation_length = 10 # in m
h1 = 9 # in mm
w1 = 8 # in mm
h2 = 7 # in mm
# We take out w2, hence we don't have a full rectangular spec
norm = reduction_steps.CalculateNormISIS()
ReductionSingleton().to_Q = reduction_steps.ConvertToQISIS(norm)
# Act
command_iface.set_q_resolution_a1(a1 = a1)
command_iface.set_q_resolution_a2(a2 = a2)
command_iface.set_q_resolution_delta_r(delta_r = delta_r)
command_iface.set_q_resolution_h1(h1 = h1)
command_iface.set_q_resolution_w1(w1 = w1)
command_iface.set_q_resolution_h2(h2 = h2)
command_iface.set_q_resolution_collimation_length(collimation_length = collimation_length)
command_iface.set_q_resolution_use(use = True)
ReductionSingleton().to_Q._set_up_q_resolution_parameters()
# Assert
a1_stored = ReductionSingleton().to_Q.get_q_resolution_a1() # in m
a1_expected = a1/1000.
self.assertEqual(a1_stored, a1_expected)
a2_stored = ReductionSingleton().to_Q.get_q_resolution_a2() # in m
a2_expected = a2/1000.
self.assertEqual(a2_stored, a2_expected)
collimation_length_stored = ReductionSingleton().to_Q.get_q_resolution_collimation_length() # in m
collimation_length_expected = collimation_length
self.assertEqual(collimation_length_stored, collimation_length_expected)
delta_r_stored = ReductionSingleton().to_Q.get_q_resolution_delta_r() # in m
delta_r_expected = delta_r/1000.
self.assertEqual(delta_r_stored, delta_r_expected)
class TestLARMORCommand(unittest.TestCase):
def test_that_default_idf_is_being_selected(self):
command_iface.Clean()
# Act
command_iface.LARMOR()
# Assert
instrument = ReductionSingleton().get_instrument()
idf_file_path = instrument.get_idf_file_path()
file_name = os.path.basename(idf_file_path)
expected_name = "LARMOR_Definition.xml"
self.assertEqual(file_name, expected_name)
    def test_that_selected_idf_is_being_selected(self):
command_iface.Clean()
selected_idf = "LARMOR_Definition_8tubes.xml"
# Act
command_iface.LARMOR(selected_idf)
# Assert
instrument = ReductionSingleton().get_instrument()
idf_file_path = instrument.get_idf_file_path()
file_name = os.path.basename(idf_file_path)
expected_name = selected_idf
self.assertEqual(file_name, expected_name)
def test_that_for_non_existing_false_is_returned(self):
command_iface.Clean()
selected_idf = "LARMOR_Definition_NONEXIST.xml"
# Act + Assert
        self.assertFalse(command_iface.LARMOR(selected_idf),
                         "A non-existent IDF path should return False")
class TestMaskFile(unittest.TestCase):
def test_throws_for_user_file_with_invalid_extension(self):
# Arrange
file_name = "/path1/path2/user_file.abc"
command_iface.Clean()
command_iface.SANS2D()
# Act + Assert
args = [file_name]
self.assertRaises(RuntimeError, command_iface.MaskFile, *args)
class SANSCommandInterfaceGetAndSetBackgroundCorrectionSettings(unittest.TestCase):
def _do_test_correct_setting(self, run_number, is_time, is_mon, is_mean, mon_numbers):
# Assert that settings were set
setting = ReductionSingleton().get_dark_run_setting(is_time, is_mon)
self.assertEqual(setting.run_number, run_number)
self.assertEqual(setting.time, is_time)
self.assertEqual(setting.mean, is_mean)
self.assertEqual(setting.mon, is_mon)
self.assertEqual(setting.mon_numbers, mon_numbers)
# Assert that other settings are None. Hence set up all combinations and remove the one which
# has been set up earlier
combinations = [[True, True], [True, False], [False, True], [False, False]]
selected_combination = [is_time, is_mon]
combinations.remove(selected_combination)
for combination in combinations:
self.assertTrue(ReductionSingleton().get_dark_run_setting(combination[0], combination[1]) is None)
def test_that_correct_setting_can_be_passed_in(self):
# Arrange
run_number = "test12345"
is_time = True
is_mon = True
is_mean = False
mon_numbers= None
command_iface.Clean()
command_iface.LOQ()
# Act
command_iface.set_background_correction(run_number, is_time,
is_mon, is_mean, mon_numbers)
# Assert
self._do_test_correct_setting(run_number, is_time, is_mon, is_mean, mon_numbers)
if __name__ == "__main__":
unittest.main()
|
nbeaver/numpy
|
refs/heads/master
|
numpy/distutils/__init__.py
|
87
|
from __future__ import division, absolute_import, print_function
import sys
from .__version__ import version as __version__
# Must import local ccompiler ASAP in order to get
# customized CCompiler.spawn effective.
from . import ccompiler
from . import unixccompiler
from .info import __doc__
from .npy_pkg_config import *
# If numpy is installed, add distutils.test()
try:
from . import __config__
# Normally numpy is installed if the above import works, but an interrupted
# in-place build could also have left a __config__.py. In that case the
# next import may still fail, so keep it inside the try block.
from numpy.testing.nosetester import _numpy_tester
test = _numpy_tester().test
except ImportError:
pass
|
rebstar6/servo
|
refs/heads/master
|
components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py
|
142
|
import WebIDL
def WebIDLTest(parser, harness):
parser.parse("""
callback interface TestCallbackInterface {
attribute boolean bool;
};
""")
results = parser.finish()
iface = results[0]
harness.ok(iface.isCallback(), "Interface should be a callback")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface TestInterface {
};
callback interface TestCallbackInterface : TestInterface {
attribute boolean bool;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow non-callback parent of callback interface")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface TestInterface : TestCallbackInterface {
};
callback interface TestCallbackInterface {
attribute boolean bool;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow callback parent of non-callback interface")
parser = parser.reset()
parser.parse("""
callback interface TestCallbackInterface1 {
void foo();
};
callback interface TestCallbackInterface2 {
void foo(DOMString arg);
void foo(TestCallbackInterface1 arg);
};
callback interface TestCallbackInterface3 {
void foo(DOMString arg);
void foo(TestCallbackInterface1 arg);
static void bar();
};
callback interface TestCallbackInterface4 {
void foo(DOMString arg);
void foo(TestCallbackInterface1 arg);
static void bar();
const long baz = 5;
};
callback interface TestCallbackInterface5 {
static attribute boolean bool;
void foo();
};
callback interface TestCallbackInterface6 {
void foo(DOMString arg);
void foo(TestCallbackInterface1 arg);
void bar();
};
callback interface TestCallbackInterface7 {
static attribute boolean bool;
};
callback interface TestCallbackInterface8 {
attribute boolean bool;
};
callback interface TestCallbackInterface9 : TestCallbackInterface1 {
void foo();
};
callback interface TestCallbackInterface10 : TestCallbackInterface1 {
void bar();
};
""")
results = parser.finish()
for (i, iface) in enumerate(results):
harness.check(iface.isSingleOperationInterface(), i < 4,
"Interface %s should be a single operation interface" %
iface.identifier.name)
|
beeftornado/sentry
|
refs/heads/master
|
src/sentry/runner/initializer.py
|
1
|
from __future__ import absolute_import, print_function
import click
import logging
import os
import six
from django.conf import settings
from sentry.utils import metrics, warnings
from sentry.utils.sdk import configure_sdk
from sentry.utils.warnings import DeprecatedSettingWarning
from sentry.utils.compat import map
logger = logging.getLogger("sentry.runner.initializer")
def register_plugins(settings, raise_on_plugin_load_failure=False):
from pkg_resources import iter_entry_points
from sentry.plugins.base import plugins
# entry_points={
# 'sentry.plugins': [
# 'phabricator = sentry_phabricator.plugins:PhabricatorPlugin'
# ],
# },
for ep in iter_entry_points("sentry.plugins"):
try:
plugin = ep.load()
except Exception:
import traceback
click.echo(
"Failed to load plugin %r:\n%s" % (ep.name, traceback.format_exc()), err=True
)
if raise_on_plugin_load_failure:
raise
else:
plugins.register(plugin)
for plugin in plugins.all(version=None):
init_plugin(plugin)
from sentry import integrations
from sentry.utils.imports import import_string
for integration_path in settings.SENTRY_DEFAULT_INTEGRATIONS:
try:
integration_cls = import_string(integration_path)
except Exception:
import traceback
click.echo(
"Failed to load integration %r:\n%s" % (integration_path, traceback.format_exc()),
err=True,
)
else:
integrations.register(integration_cls)
for integration in integrations.all():
try:
integration.setup()
except AttributeError:
pass
def init_plugin(plugin):
from sentry.plugins.base import bindings
plugin.setup(bindings)
# Register contexts from plugins if necessary
if hasattr(plugin, "get_custom_contexts"):
from sentry.interfaces.contexts import contexttype
for cls in plugin.get_custom_contexts() or ():
contexttype(cls)
if hasattr(plugin, "get_cron_schedule") and plugin.is_enabled():
schedules = plugin.get_cron_schedule()
if schedules:
settings.CELERYBEAT_SCHEDULE.update(schedules)
if hasattr(plugin, "get_worker_imports") and plugin.is_enabled():
imports = plugin.get_worker_imports()
if imports:
settings.CELERY_IMPORTS += tuple(imports)
if hasattr(plugin, "get_worker_queues") and plugin.is_enabled():
from kombu import Queue
for queue in plugin.get_worker_queues():
try:
name, routing_key = queue
except ValueError:
name = routing_key = queue
q = Queue(name, routing_key=routing_key)
q.durable = False
settings.CELERY_QUEUES.append(q)
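# Illustration (hedged): init_plugin() accepts worker queues either as plain
# strings or as (name, routing_key) pairs -- the try/except ValueError above
# normalizes both shapes. A hypothetical plugin hook returning both forms:
def _example_get_worker_queues():  # illustrative only, not a real plugin
    return [
        "demo-queue",                    # name doubles as the routing key
        ("demo-events", "demo.events"),  # explicit (name, routing_key) pair
    ]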
def initialize_receivers():
# force signal registration
import sentry.receivers # NOQA
def get_asset_version(settings):
path = os.path.join(settings.STATIC_ROOT, "version")
try:
with open(path) as fp:
return fp.read().strip()
except IOError:
from time import time
return int(time())
# Options which must get extracted into Django settings while
# bootstrapping. Everything else will get validated and used
# as a part of OptionsManager.
options_mapper = {
# 'cache.backend': 'SENTRY_CACHE',
# 'cache.options': 'SENTRY_CACHE_OPTIONS',
# 'system.databases': 'DATABASES',
# 'system.debug': 'DEBUG',
"system.secret-key": "SECRET_KEY",
"mail.backend": "EMAIL_BACKEND",
"mail.host": "EMAIL_HOST",
"mail.port": "EMAIL_PORT",
"mail.username": "EMAIL_HOST_USER",
"mail.password": "EMAIL_HOST_PASSWORD",
"mail.use-tls": "EMAIL_USE_TLS",
"mail.from": "SERVER_EMAIL",
"mail.subject-prefix": "EMAIL_SUBJECT_PREFIX",
"github-login.client-id": "GITHUB_APP_ID",
"github-login.client-secret": "GITHUB_API_SECRET",
"github-login.require-verified-email": "GITHUB_REQUIRE_VERIFIED_EMAIL",
"github-login.base-domain": "GITHUB_BASE_DOMAIN",
"github-login.api-domain": "GITHUB_API_DOMAIN",
"github-login.extended-permissions": "GITHUB_EXTENDED_PERMISSIONS",
"github-login.organization": "GITHUB_ORGANIZATION",
}
# Just reuse the integration app for Single Org / Self-Hosted as
# it doesn't make much sense to use 2 separate apps for SSO and
# integration.
if settings.SENTRY_SINGLE_ORGANIZATION:
options_mapper.update(
{"github-app.client-id": "GITHUB_APP_ID", "github-app.client-secret": "GITHUB_API_SECRET"}
)
def bootstrap_options(settings, config=None):
"""
Quickly bootstrap options that come in from a config file
and convert options into Django settings that are
required to even initialize the rest of the app.
"""
# Make sure our options have gotten registered
from sentry.options import load_defaults
load_defaults()
options = {}
if config is not None:
# Attempt to load our config yaml file
from sentry.utils.yaml import safe_load
from yaml.parser import ParserError
from yaml.scanner import ScannerError
try:
with open(config, "rb") as fp:
options = safe_load(fp)
except IOError:
# Gracefully fail if yaml file doesn't exist
pass
except (AttributeError, ParserError, ScannerError) as e:
from .importer import ConfigurationError
raise ConfigurationError("Malformed config.yml file: %s" % six.text_type(e))
# Empty options file, so fail gracefully
if options is None:
options = {}
# Options needs to be a dict
elif not isinstance(options, dict):
from .importer import ConfigurationError
raise ConfigurationError("Malformed config.yml file")
from sentry.conf.server import DEAD
# First move options from settings into options
for k, v in six.iteritems(options_mapper):
if getattr(settings, v, DEAD) is not DEAD and k not in options:
warnings.warn(DeprecatedSettingWarning(options_mapper[k], "SENTRY_OPTIONS['%s']" % k))
options[k] = getattr(settings, v)
# Stuff everything else into SENTRY_OPTIONS
# these will be validated later after bootstrapping
for k, v in six.iteritems(options):
settings.SENTRY_OPTIONS[k] = v
    # Now go back through all of SENTRY_OPTIONS and promote
    # back into settings. This catches the case when values are defined
    # only in SENTRY_OPTIONS and there is no config.yml file.
for o in (settings.SENTRY_DEFAULT_OPTIONS, settings.SENTRY_OPTIONS):
for k, v in six.iteritems(o):
if k in options_mapper:
# Map the mail.backend aliases to something Django understands
if k == "mail.backend":
try:
v = settings.SENTRY_EMAIL_BACKEND_ALIASES[v]
except KeyError:
pass
# Escalate the few needed to actually get the app bootstrapped into settings
setattr(settings, options_mapper[k], v)
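# Illustration (hedged sketch): the promotion logic above in isolation, using
# a stand-in settings object and a made-up option value.
def _example_promote_option():
    class _FakeSettings(object):
        SENTRY_OPTIONS = {"mail.host": "smtp.example.com"}

    fake = _FakeSettings()
    for key, value in fake.SENTRY_OPTIONS.items():
        if key in options_mapper:
            # "mail.host" maps to EMAIL_HOST, so Django-level code sees it
            setattr(fake, options_mapper[key], value)
    return fake.EMAIL_HOST  # -> "smtp.example.com"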
def configure_structlog():
"""
Make structlog comply with all of our options.
"""
from django.conf import settings
import logging.config
import structlog
from sentry import options
from sentry.logging import LoggingFormat
WrappedDictClass = structlog.threadlocal.wrap_dict(dict)
kwargs = {
"context_class": WrappedDictClass,
"wrapper_class": structlog.stdlib.BoundLogger,
"cache_logger_on_first_use": True,
"processors": [
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.format_exc_info,
structlog.processors.StackInfoRenderer(),
structlog.processors.UnicodeDecoder(),
],
}
fmt_from_env = os.environ.get("SENTRY_LOG_FORMAT")
if fmt_from_env:
settings.SENTRY_OPTIONS["system.logging-format"] = fmt_from_env.lower()
fmt = options.get("system.logging-format")
if fmt == LoggingFormat.HUMAN:
from sentry.logging.handlers import HumanRenderer
kwargs["processors"].extend(
[structlog.processors.ExceptionPrettyPrinter(), HumanRenderer()]
)
elif fmt == LoggingFormat.MACHINE:
from sentry.logging.handlers import JSONRenderer
kwargs["processors"].append(JSONRenderer())
structlog.configure(**kwargs)
lvl = os.environ.get("SENTRY_LOG_LEVEL")
if lvl:
levelNames = logging._levelNames if not six.PY3 else logging._nameToLevel
if lvl not in levelNames:
raise AttributeError("%s is not a valid logging level." % lvl)
settings.LOGGING["root"].update({"level": lvl or settings.LOGGING["default_level"]})
if lvl:
for logger in settings.LOGGING["overridable"]:
try:
settings.LOGGING["loggers"][logger].update({"level": lvl})
except KeyError:
raise KeyError("%s is not a defined logger." % logger)
logging.config.dictConfig(settings.LOGGING)
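# Illustration (hedged): once configure_structlog() has run, application code
# logs key/value pairs and the processor chain above handles level, positional
# args, and rendering. The logger name and fields here are made up.
def _example_structlog_usage():
    import structlog

    log = structlog.get_logger("sentry.demo")
    log.info("task.finished", task_id=123, duration_ms=42)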
def initialize_app(config, skip_service_validation=False):
settings = config["settings"]
bootstrap_options(settings, config["options"])
configure_structlog()
    # Commonly, setups don't correctly configure themselves for production
    # environments, so let's try to provide a bit more guidance
if settings.CELERY_ALWAYS_EAGER and not settings.DEBUG:
warnings.warn(
"Sentry is configured to run asynchronous tasks in-process. "
"This is not recommended within production environments. "
"See https://docs.sentry.io/on-premise/server/queue/ for more information."
)
if settings.SENTRY_SINGLE_ORGANIZATION:
settings.SENTRY_FEATURES["organizations:create"] = False
if not hasattr(settings, "SUDO_COOKIE_SECURE"):
settings.SUDO_COOKIE_SECURE = getattr(settings, "SESSION_COOKIE_SECURE", False)
if not hasattr(settings, "SUDO_COOKIE_DOMAIN"):
settings.SUDO_COOKIE_DOMAIN = getattr(settings, "SESSION_COOKIE_DOMAIN", None)
if not hasattr(settings, "SUDO_COOKIE_PATH"):
settings.SUDO_COOKIE_PATH = getattr(settings, "SESSION_COOKIE_PATH", "/")
if not hasattr(settings, "CSRF_COOKIE_SECURE"):
settings.CSRF_COOKIE_SECURE = getattr(settings, "SESSION_COOKIE_SECURE", False)
if not hasattr(settings, "CSRF_COOKIE_DOMAIN"):
settings.CSRF_COOKIE_DOMAIN = getattr(settings, "SESSION_COOKIE_DOMAIN", None)
if not hasattr(settings, "CSRF_COOKIE_PATH"):
settings.CSRF_COOKIE_PATH = getattr(settings, "SESSION_COOKIE_PATH", "/")
settings.CACHES["default"]["VERSION"] = settings.CACHE_VERSION
settings.ASSET_VERSION = get_asset_version(settings)
settings.STATIC_URL = settings.STATIC_URL.format(version=settings.ASSET_VERSION)
if getattr(settings, "SENTRY_DEBUGGER", None) is None:
settings.SENTRY_DEBUGGER = settings.DEBUG
monkeypatch_model_unpickle()
import django
django.setup()
monkeypatch_django_migrations()
apply_legacy_settings(settings)
bind_cache_to_option_store()
register_plugins(settings)
initialize_receivers()
validate_options(settings)
validate_snuba()
configure_sdk()
setup_services(validate=not skip_service_validation)
from django.utils import timezone
from sentry.app import env
from sentry.runner.settings import get_sentry_conf
env.data["config"] = get_sentry_conf()
env.data["start_date"] = timezone.now()
def setup_services(validate=True):
from sentry import (
analytics,
buffer,
digests,
newsletter,
nodestore,
quotas,
ratelimits,
search,
tagstore,
tsdb,
)
from .importer import ConfigurationError
from sentry.utils.settings import reraise_as
service_list = (
analytics,
buffer,
digests,
newsletter,
nodestore,
quotas,
ratelimits,
search,
tagstore,
tsdb,
)
for service in service_list:
if validate:
try:
service.validate()
except AttributeError as exc:
reraise_as(
ConfigurationError(
u"{} service failed to call validate()\n{}".format(
service.__name__, six.text_type(exc)
)
)
)
try:
service.setup()
except AttributeError as exc:
if not hasattr(service, "setup") or not callable(service.setup):
reraise_as(
ConfigurationError(
u"{} service failed to call setup()\n{}".format(
service.__name__, six.text_type(exc)
)
)
)
raise
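# Illustration (hedged): each service module passed through setup_services()
# is expected to expose validate() and setup(); a missing hook surfaces as
# AttributeError and is converted to ConfigurationError above. A minimal
# stand-in satisfying that contract (hypothetical, for illustration only):
class _ExampleService(object):
    __name__ = "example"

    def validate(self):
        pass  # raise here to signal misconfiguration

    def setup(self):
        pass  # one-time initialization, runs after validation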
def validate_options(settings):
from sentry.options import default_manager
default_manager.validate(settings.SENTRY_OPTIONS, warn=True)
import django.db.models.base
model_unpickle = django.db.models.base.model_unpickle
def __model_unpickle_compat(model_id, attrs=None, factory=None):
if attrs is not None or factory is not None:
metrics.incr("django.pickle.loaded_19_pickle.__model_unpickle_compat", sample_rate=1)
logger.error(
"django.compat.model-unpickle-compat",
extra={"model_id": model_id, "attrs": attrs, "factory": factory},
exc_info=True,
)
return model_unpickle(model_id)
def __simple_class_factory_compat(model, attrs):
return model
def monkeypatch_model_unpickle():
# https://code.djangoproject.com/ticket/27187
# Django 1.10 breaks pickle compat with 1.9 models.
django.db.models.base.model_unpickle = __model_unpickle_compat
    # Django 1.10 needs this to unpickle 1.9 models, but we can't branch while
    # monkeypatching, or our monkeypatched funcs won't be pickleable.
    # So just vendor simple_class_factory from 1.9.
django.db.models.base.simple_class_factory = __simple_class_factory_compat
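# Illustration (hedged): per https://code.djangoproject.com/ticket/27187, a
# pickle written under Django 1.9 embeds a call shaped roughly like
# ``model_unpickle(model_id, attrs, simple_class_factory)``, while Django 1.10
# expects ``model_unpickle(model_id)``. The shim above accepts both:
#
#     __model_unpickle_compat(("app_label", "ModelName"))                # 1.10-style
#     __model_unpickle_compat(("app_label", "ModelName"), [], factory)   # 1.9-style, logged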
def monkeypatch_django_migrations():
# This monkeypatches django's migration executor with our own, which
# adds some small but important customizations.
import sentry.new_migrations.monkey # NOQA
def bind_cache_to_option_store():
# The default ``OptionsStore`` instance is initialized without the cache
# backend attached. The store itself utilizes the cache during normal
# operation, but can't use the cache before the options (which typically
# includes the cache configuration) have been bootstrapped from the legacy
# settings and/or configuration values. Those options should have been
# loaded at this point, so we can plug in the cache backend before
# continuing to initialize the remainder of the application.
from django.core.cache import cache as default_cache
from sentry.options import default_store
default_store.cache = default_cache
def show_big_error(message):
if isinstance(message, six.string_types):
lines = message.strip().splitlines()
else:
lines = message
maxline = max(map(len, lines))
click.echo("", err=True)
click.secho("!!!%s!!!" % ("!" * min(maxline, 80),), err=True, fg="red")
click.secho("!! %s !!" % "".center(maxline), err=True, fg="red")
for line in lines:
click.secho("!! %s !!" % line.center(maxline), err=True, fg="red")
click.secho("!! %s !!" % "".center(maxline), err=True, fg="red")
click.secho("!!!%s!!!" % ("!" * min(maxline, 80),), err=True, fg="red")
click.echo("", err=True)
def apply_legacy_settings(settings):
from sentry import options
# SENTRY_USE_QUEUE used to determine if Celery was eager or not
if hasattr(settings, "SENTRY_USE_QUEUE"):
warnings.warn(
DeprecatedSettingWarning(
"SENTRY_USE_QUEUE",
"CELERY_ALWAYS_EAGER",
"https://docs.sentry.io/on-premise/server/queue/",
)
)
settings.CELERY_ALWAYS_EAGER = not settings.SENTRY_USE_QUEUE
for old, new in (
("SENTRY_ADMIN_EMAIL", "system.admin-email"),
("SENTRY_URL_PREFIX", "system.url-prefix"),
("SENTRY_SYSTEM_MAX_EVENTS_PER_MINUTE", "system.rate-limit"),
("SENTRY_ENABLE_EMAIL_REPLIES", "mail.enable-replies"),
("SENTRY_SMTP_HOSTNAME", "mail.reply-hostname"),
("MAILGUN_API_KEY", "mail.mailgun-api-key"),
("SENTRY_FILESTORE", "filestore.backend"),
("SENTRY_FILESTORE_OPTIONS", "filestore.options"),
("GOOGLE_CLIENT_ID", "auth-google.client-id"),
("GOOGLE_CLIENT_SECRET", "auth-google.client-secret"),
):
if new not in settings.SENTRY_OPTIONS and hasattr(settings, old):
warnings.warn(DeprecatedSettingWarning(old, "SENTRY_OPTIONS['%s']" % new))
settings.SENTRY_OPTIONS[new] = getattr(settings, old)
if hasattr(settings, "SENTRY_REDIS_OPTIONS"):
if "redis.clusters" in settings.SENTRY_OPTIONS:
raise Exception(
"Cannot specify both SENTRY_OPTIONS['redis.clusters'] option and SENTRY_REDIS_OPTIONS setting."
)
else:
warnings.warn(
DeprecatedSettingWarning(
"SENTRY_REDIS_OPTIONS",
'SENTRY_OPTIONS["redis.clusters"]',
removed_in_version="8.5",
)
)
settings.SENTRY_OPTIONS["redis.clusters"] = {"default": settings.SENTRY_REDIS_OPTIONS}
else:
# Provide backwards compatibility to plugins expecting there to be a
# ``SENTRY_REDIS_OPTIONS`` setting by using the ``default`` cluster.
# This should be removed when ``SENTRY_REDIS_OPTIONS`` is officially
# deprecated. (This also assumes ``FLAG_NOSTORE`` on the configuration
# option.)
settings.SENTRY_REDIS_OPTIONS = options.get("redis.clusters")["default"]
if not hasattr(settings, "SENTRY_URL_PREFIX"):
url_prefix = options.get("system.url-prefix", silent=True)
if not url_prefix:
# HACK: We need to have some value here for backwards compatibility
url_prefix = "http://sentry.example.com"
settings.SENTRY_URL_PREFIX = url_prefix
if settings.TIME_ZONE != "UTC":
# non-UTC timezones are not supported
show_big_error("TIME_ZONE should be set to UTC")
# Set ALLOWED_HOSTS if it's not already available
if not settings.ALLOWED_HOSTS:
settings.ALLOWED_HOSTS = ["*"]
if hasattr(settings, "SENTRY_ALLOW_REGISTRATION"):
warnings.warn(
DeprecatedSettingWarning(
"SENTRY_ALLOW_REGISTRATION", 'SENTRY_FEATURES["auth:register"]'
)
)
settings.SENTRY_FEATURES["auth:register"] = settings.SENTRY_ALLOW_REGISTRATION
settings.DEFAULT_FROM_EMAIL = settings.SENTRY_OPTIONS.get(
"mail.from", settings.SENTRY_DEFAULT_OPTIONS.get("mail.from")
)
# HACK(mattrobenolt): This is a one-off assertion for a system.secret-key value.
# If this becomes a pattern, we could add another flag to the OptionsManager to cover this, but for now
# this is the only value that should prevent the app from booting up. Currently FLAG_REQUIRED is used to
# trigger the Installation Wizard, not abort startup.
if not settings.SENTRY_OPTIONS.get("system.secret-key"):
from .importer import ConfigurationError
raise ConfigurationError(
"`system.secret-key` MUST be set. Use 'sentry config generate-secret-key' to get one."
)
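# Illustration (hedged): the (old, new) loop above rewrites legacy module-level
# settings into SENTRY_OPTIONS with a deprecation warning. For example, a
# config that still sets ``SENTRY_ADMIN_EMAIL = "admin@example.com"`` (value
# made up) ends up as ``SENTRY_OPTIONS["system.admin-email"]``, and the old
# name is reported via DeprecatedSettingWarning.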
def validate_snuba():
"""
Make sure everything related to Snuba is in sync.
This covers a few cases:
* When you have features related to Snuba, you must also
have Snuba fully configured correctly to continue.
* If you have Snuba specific search/tagstore/tsdb backends,
you must also have a Snuba compatible eventstream backend
otherwise no data will be written into Snuba.
* If you only have Snuba related eventstream, yell that you
probably want the other backends otherwise things are weird.
"""
if not settings.DEBUG:
return
has_all_snuba_required_backends = (
settings.SENTRY_SEARCH == "sentry.search.snuba.EventsDatasetSnubaSearchBackend"
and settings.SENTRY_TAGSTORE == "sentry.tagstore.snuba.SnubaTagStorage"
and
# TODO(mattrobenolt): Remove ServiceDelegator check
settings.SENTRY_TSDB
in ("sentry.tsdb.redissnuba.RedisSnubaTSDB", "sentry.utils.services.ServiceDelegator")
)
eventstream_is_snuba = (
settings.SENTRY_EVENTSTREAM == "sentry.eventstream.snuba.SnubaEventStream"
or settings.SENTRY_EVENTSTREAM == "sentry.eventstream.kafka.KafkaEventStream"
)
# All good here, it doesn't matter what else is going on
if has_all_snuba_required_backends and eventstream_is_snuba:
return
from sentry.features import requires_snuba as snuba_features
snuba_enabled_features = set()
for feature in snuba_features:
if settings.SENTRY_FEATURES.get(feature, False):
snuba_enabled_features.add(feature)
if snuba_enabled_features and not eventstream_is_snuba:
from .importer import ConfigurationError
show_big_error(
"""
You have features enabled which require Snuba,
but you don't have any Snuba compatible configuration.
Features you have enabled:
%s
See: https://github.com/getsentry/snuba#sentry--snuba
"""
% "\n".join(snuba_enabled_features)
)
raise ConfigurationError("Cannot continue without Snuba configured.")
if not eventstream_is_snuba:
from .importer import ConfigurationError
show_big_error(
"""
It appears that you are requiring Snuba,
but your SENTRY_EVENTSTREAM is not compatible.
Current settings:
SENTRY_SEARCH = %r
SENTRY_TAGSTORE = %r
SENTRY_TSDB = %r
SENTRY_EVENTSTREAM = %r
See: https://github.com/getsentry/snuba#sentry--snuba"""
% (
settings.SENTRY_SEARCH,
settings.SENTRY_TAGSTORE,
settings.SENTRY_TSDB,
settings.SENTRY_EVENTSTREAM,
)
)
raise ConfigurationError("Cannot continue without Snuba configured correctly.")
if eventstream_is_snuba and not has_all_snuba_required_backends:
show_big_error(
"""
You are using a Snuba compatible eventstream
without configuring search/tagstore/tsdb also to use Snuba.
This is probably not what you want.
Current settings:
SENTRY_SEARCH = %r
SENTRY_TAGSTORE = %r
SENTRY_TSDB = %r
SENTRY_EVENTSTREAM = %r
See: https://github.com/getsentry/snuba#sentry--snuba"""
% (
settings.SENTRY_SEARCH,
settings.SENTRY_TAGSTORE,
settings.SENTRY_TSDB,
settings.SENTRY_EVENTSTREAM,
)
)
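# Illustration (hedged sketch): the branches of validate_snuba() reduce to a
# decision over two booleans; this hypothetical helper mirrors the outcomes.
def _example_snuba_state(has_snuba_backends, eventstream_is_snuba):
    if has_snuba_backends and eventstream_is_snuba:
        return "ok"
    if not eventstream_is_snuba:
        return "error: Snuba backends/features without a Snuba eventstream"
    return "warning: Snuba eventstream without Snuba search/tagstore/tsdb"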
|
ltowarek/budget-supervisor
|
refs/heads/dependabot/pip/django-3.1.10
|
budgetsupervisor/budget/tests/test_views.py
|
1
|
import base64
import datetime
import json
from typing import Callable, Dict
import OpenSSL.crypto
import pytest
import swagger_client as saltedge_client
import swagger_client.rest
from budget.models import Account, Category, Connection, Transaction
from budget.views import verify_signature
from django.contrib.messages import get_messages
from django.test import Client
from django.urls import resolve, reverse
from pytest_mock import MockFixture
from swagger_client import ConnectSessionResponse, ConnectSessionResponseData
from users.models import Profile, User
from utils import get_url_path
def test_index_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("budget_index")
response = client.get(url)
assert response.status_code == 200
def test_index_view_get_not_logged_in(client: Client) -> None:
url = reverse("budget_index")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
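# Illustration (hedged): ``get_url_path`` is imported from the project-local
# ``utils`` module; the assertions only need the path component of the
# redirect target so it can be fed to resolve(). A sketch of what such a
# helper could look like -- an assumption, not the project's implementation:
#
#     from urllib.parse import urlparse
#
#     def get_url_path(response):
#         return urlparse(response.url).path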
def test_connection_list_view_get_single_connection(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["connection_list"]) == [connection_foo]
def test_connection_list_view_get_not_logged_in(client: Client) -> None:
url = reverse("connections:connection_list")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_connection_list_view_get_multiple_connections(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_factory: Callable[..., Connection],
) -> None:
login_user(user_foo)
connection_a = connection_factory("a")
connection_b = connection_factory("b")
url = reverse("connections:connection_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["connection_list"]) == [connection_a, connection_b]
def test_connection_list_view_get_ordered_by_provider(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_factory: Callable[..., Connection],
) -> None:
login_user(user_foo)
connection_b = connection_factory("b")
connection_a = connection_factory("a")
url = reverse("connections:connection_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["connection_list"]) == [connection_a, connection_b]
def test_connection_list_view_get_current_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
connection_factory: Callable[..., Connection],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
connection_a = connection_factory("a", user=user_a)
connection_factory("b", user=user_b)
url = reverse("connections:connection_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["connection_list"]) == [connection_a]
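# Illustration (hedged): factory fixtures like ``connection_factory`` above
# typically live in conftest.py and return a closure over the ORM. A sketch
# with assumed field names (kept as a comment so it does not shadow the real
# fixture):
#
#     @pytest.fixture
#     def connection_factory(user_foo):
#         def create(provider, user=None):
#             return Connection.objects.create(
#                 provider=provider, user=user or user_foo
#             )
#         return create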
def test_connection_create_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("connections:connection_create")
response = client.get(url)
assert response.status_code == 200
assert response.context["profile"] == user_foo.profile
def test_connection_create_view_get_not_logged_in(client: Client) -> None:
url = reverse("connections:connection_create")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_connection_create_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
mocker: MockFixture,
connect_sessions_api: saltedge_client.ConnectSessionsApi,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_create")
connect_sessions_api.connect_sessions_create_post.return_value = ConnectSessionResponse(
data=ConnectSessionResponseData(connect_url="example.com")
)
mocker.patch(
"budget.views.connect_sessions_api",
autospec=True,
return_value=connect_sessions_api,
)
response = client.post(url)
assert response.status_code == 302
assert response.url == "example.com"
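# Illustration (hedged): the patch above targets ``budget.views``, the module
# where ``connect_sessions_api`` is looked up rather than where it is defined
# (the usual mock rule), and ``autospec=True`` keeps the replacement's
# signature honest. With ``return_value=connect_sessions_api``, the view's
# call to that factory hands back the stub whose
# ``connect_sessions_create_post`` was primed above.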
def test_connection_update_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_update", kwargs={"pk": connection_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_connection_update_view_get_not_logged_in(
client: Client, connection_foo: Connection
) -> None:
url = reverse("connections:connection_update", kwargs={"pk": connection_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_connection_update_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_update", kwargs={"pk": connection_foo.pk})
response = client.post(url, data={})
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "connection_list"
def test_connection_update_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_update", kwargs={"pk": connection_foo.pk})
data = {
"provider": "bar",
}
response = client.post(url, data=data)
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Connection was updated successfully" in messages
def test_connection_update_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
connection_factory: Callable[..., Connection],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
connection_factory("a", user=user_a)
connection_b = connection_factory("b", user=user_b)
url = reverse("connections:connection_update", kwargs={"pk": connection_b.pk})
data = {"name": "bx", "connection_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 403
def test_connection_refresh_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_refresh", kwargs={"pk": connection_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_connection_refresh_view_get_not_logged_in(
client: Client, connection_foo: Connection
) -> None:
url = reverse("connections:connection_refresh", kwargs={"pk": connection_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_connection_refresh_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
mocker: MockFixture,
connect_sessions_api: saltedge_client.ConnectSessionsApi,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_refresh", kwargs={"pk": connection_foo.pk})
connect_sessions_api.connect_sessions_refresh_post.return_value = ConnectSessionResponse(
data=ConnectSessionResponseData(connect_url="example.com")
)
mocker.patch(
"budget.views.connect_sessions_api",
autospec=True,
return_value=connect_sessions_api,
)
response = client.post(url, data={})
assert response.status_code == 302
assert response.url == "example.com"
def test_connection_refresh_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
connection_factory: Callable[..., Connection],
mocker: MockFixture,
connect_sessions_api: saltedge_client.ConnectSessionsApi,
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
connection_factory("a", user=user_a)
connection_b = connection_factory("b", user=user_b)
url = reverse("connections:connection_refresh", kwargs={"pk": connection_b.pk})
data: Dict = {}
connect_sessions_api.connect_sessions_refresh_post.return_value = ConnectSessionResponse(
data=ConnectSessionResponseData(connect_url="example.com")
)
mocker.patch(
"budget.views.connect_sessions_api",
autospec=True,
return_value=connect_sessions_api,
)
response = client.post(url, data=data)
assert response.status_code == 403
def test_connection_delete_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_delete", kwargs={"pk": connection_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_connection_delete_view_get_not_logged_in(
client: Client, connection_foo: Connection
) -> None:
url = reverse("connections:connection_delete", kwargs={"pk": connection_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_connection_delete_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
mocker: MockFixture,
connections_api: saltedge_client.ConnectionsApi,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_delete", kwargs={"pk": connection_foo.pk})
mocker.patch(
"budget.views.connections_api", autospec=True, return_value=connections_api
)
response = client.post(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "connection_list"
def test_connection_delete_view_post_disconnect_accounts(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
account_foo_external: Account,
mocker: MockFixture,
connections_api: saltedge_client.ConnectionsApi,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_delete", kwargs={"pk": connection_foo.pk})
mocker.patch(
"budget.views.connections_api", autospec=True, return_value=connections_api
)
response = client.post(url)
assert response.status_code == 302
account_foo_external.refresh_from_db()
assert account_foo_external.external_id is None
def test_connection_delete_view_post_disconnect_transactions(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
transaction_foo_external: Transaction,
mocker: MockFixture,
connections_api: saltedge_client.ConnectionsApi,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_delete", kwargs={"pk": connection_foo.pk})
mocker.patch(
"budget.views.connections_api", autospec=True, return_value=connections_api
)
response = client.post(url)
assert response.status_code == 302
transaction_foo_external.refresh_from_db()
assert transaction_foo_external.external_id is None
def test_connection_delete_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
connection_foo: Connection,
mocker: MockFixture,
connections_api: saltedge_client.ConnectionsApi,
) -> None:
login_user(user_foo)
url = reverse("connections:connection_delete", kwargs={"pk": connection_foo.pk})
mocker.patch(
"budget.views.connections_api", autospec=True, return_value=connections_api
)
response = client.post(url)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Connection was deleted successfully" in messages
def test_connection_delete_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
connection_factory: Callable[..., Connection],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
connection_factory("a", user=user_a)
connection_b = connection_factory("b", user=user_b)
url = reverse("connections:connection_delete", kwargs={"pk": connection_b.pk})
response = client.post(url)
assert response.status_code == 403
def test_account_list_view_get_single_account(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["account_list"]) == [account_foo]
def test_account_list_view_get_not_logged_in(client: Client) -> None:
url = reverse("accounts:account_list")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_account_list_view_get_multiple_accounts(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_factory: Callable[..., Account],
) -> None:
login_user(user_foo)
account_a = account_factory("a")
account_b = account_factory("b")
url = reverse("accounts:account_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["account_list"]) == [account_a, account_b]
def test_account_list_view_get_ordered_by_name(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_factory: Callable[..., Account],
) -> None:
login_user(user_foo)
account_b = account_factory("b")
account_a = account_factory("a")
url = reverse("accounts:account_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["account_list"]) == [account_a, account_b]
def test_account_list_view_get_current_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
account_factory: Callable[..., Account],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
account_a = account_factory("a", user=user_a)
account_factory("b", user=user_b)
url = reverse("accounts:account_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["account_list"]) == [account_a]
def test_account_list_view_get_form_in_context(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("accounts:account_list")
response = client.get(url)
assert response.status_code == 200
assert "form" in response.context
def test_account_list_view_get_query_string_in_context(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
query_string = "name=foo&alias=bar&account_types=A&connections=1&connections=2"
url = reverse("accounts:account_list") + "?" + query_string
response = client.get(url)
assert response.status_code == 200
assert response.context["query_string"] == query_string
def test_account_create_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("accounts:account_create")
response = client.get(url)
assert response.status_code == 200
def test_account_create_view_get_not_logged_in(client: Client) -> None:
url = reverse("accounts:account_create")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_account_create_view_post_redirect(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("accounts:account_create")
data = {"name": "a", "account_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "account_list"
def test_account_create_view_post_message(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("accounts:account_create")
data = {"name": "a", "account_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Account was created successfully" in messages
def test_account_update_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_update", kwargs={"pk": account_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_account_update_view_get_not_logged_in(
client: Client, account_foo: Account
) -> None:
url = reverse("accounts:account_update", kwargs={"pk": account_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_account_update_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_update", kwargs={"pk": account_foo.pk})
data = {"name": "bar", "account_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "account_list"
def test_account_update_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_update", kwargs={"pk": account_foo.pk})
data = {"name": "bar", "account_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Account was updated successfully" in messages
def test_account_update_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
account_factory: Callable[..., Account],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
account_factory("a", user=user_a)
account_b = account_factory("b", user=user_b)
url = reverse("accounts:account_update", kwargs={"pk": account_b.pk})
data = {"name": "bx", "account_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 403
def test_account_delete_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_delete", kwargs={"pk": account_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_account_delete_view_get_not_logged_in(
client: Client, account_foo: Account
) -> None:
url = reverse("accounts:account_delete", kwargs={"pk": account_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_account_delete_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_delete", kwargs={"pk": account_foo.pk})
response = client.post(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "account_list"
def test_account_delete_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("accounts:account_delete", kwargs={"pk": account_foo.pk})
response = client.post(url)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Account was deleted successfully" in messages
def test_account_delete_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
account_factory: Callable[..., Account],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
account_factory("a", user=user_a)
account_b = account_factory("b", user=user_b)
url = reverse("accounts:account_delete", kwargs={"pk": account_b.pk})
response = client.post(url)
assert response.status_code == 403
def test_transaction_list_view_get_single_transaction(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["transaction_list"]) == [transaction_foo]
def test_transaction_list_view_get_not_logged_in(client: Client) -> None:
url = reverse("transactions:transaction_list")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_transaction_list_view_get_multiple_transactions(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_factory: Callable[..., Transaction],
) -> None:
login_user(user_foo)
transaction_a = transaction_factory()
transaction_b = transaction_factory()
url = reverse("transactions:transaction_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["transaction_list"]) == [transaction_a, transaction_b]
def test_transaction_list_view_get_ordered_by_date(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_factory: Callable[..., Transaction],
) -> None:
login_user(user_foo)
transaction_b = transaction_factory(
datetime.date.today() - datetime.timedelta(days=1)
)
transaction_a = transaction_factory(datetime.date.today())
url = reverse("transactions:transaction_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["transaction_list"]) == [transaction_a, transaction_b]
def test_transaction_list_view_get_current_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
transaction_factory: Callable[..., Transaction],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
transaction_a = transaction_factory(user=user_a)
transaction_factory(user=user_b)
url = reverse("transactions:transaction_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["transaction_list"]) == [transaction_a]
def test_transaction_list_view_get_form_in_context(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_list")
response = client.get(url)
assert response.status_code == 200
assert "form" in response.context
def test_transaction_list_view_get_query_string_in_context(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
query_string = "min_amount=100&max_amount=200&accounts=1&accounts=2"
url = reverse("transactions:transaction_list") + "?" + query_string
response = client.get(url)
assert response.status_code == 200
assert response.context["query_string"] == query_string
def test_transaction_create_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_create")
response = client.get(url)
assert response.status_code == 200
def test_transaction_create_view_get_not_logged_in(client: Client) -> None:
url = reverse("transactions:transaction_create")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_transaction_create_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_create")
data = {
"date": datetime.date.today(),
"amount": 100.0,
"category": category_foo.id,
"account": account_foo.id,
}
response = client.post(url, data=data)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "transaction_list"
def test_transaction_create_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_create")
data = {
"date": datetime.date.today(),
"amount": 100.0,
"category": category_foo.id,
"account": account_foo.id,
}
response = client.post(url, data=data)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Transaction was created successfully" in messages
def test_transaction_update_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_update", kwargs={"pk": transaction_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_transaction_update_view_get_not_logged_in(
client: Client, transaction_foo: Transaction
) -> None:
url = reverse("transactions:transaction_update", kwargs={"pk": transaction_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_transaction_update_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
account_foo: Account,
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_update", kwargs={"pk": transaction_foo.pk})
data = {
"date": datetime.date.today(),
"amount": 100.0,
"category": category_foo.id,
"account": account_foo.id,
}
response = client.post(url, data=data)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "transaction_list"
def test_transaction_update_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
account_foo: Account,
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_update", kwargs={"pk": transaction_foo.pk})
data = {
"date": datetime.date.today(),
"amount": 100.0,
"category": category_foo.id,
"account": account_foo.id,
}
response = client.post(url, data=data)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Transaction was updated successfully" in messages
def test_transaction_update_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
transaction_factory: Callable[..., Transaction],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
transaction_factory(user=user_a)
transaction_b = transaction_factory(user=user_b)
url = reverse("transactions:transaction_update", kwargs={"pk": transaction_b.pk})
data = {
"description": "bx",
}
response = client.post(url, data=data)
assert response.status_code == 403
def test_transaction_delete_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_delete", kwargs={"pk": transaction_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_transaction_delete_view_get_not_logged_in(
client: Client, transaction_foo: Transaction
) -> None:
url = reverse("transactions:transaction_delete", kwargs={"pk": transaction_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_transaction_delete_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_delete", kwargs={"pk": transaction_foo.pk})
response = client.post(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "transaction_list"
def test_transaction_delete_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
transaction_foo: Transaction,
) -> None:
login_user(user_foo)
url = reverse("transactions:transaction_delete", kwargs={"pk": transaction_foo.pk})
response = client.post(url)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Transaction was deleted successfully" in messages
def test_transaction_delete_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
    transaction_factory: Callable[..., Transaction],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
transaction_factory(user=user_a)
transaction_b = transaction_factory(user=user_b)
url = reverse("transactions:transaction_delete", kwargs={"pk": transaction_b.pk})
response = client.post(url)
assert response.status_code == 403
def test_category_list_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_factory: Callable[..., Category],
) -> None:
login_user(user_foo)
categories = [
category_factory("a"),
category_factory("b"),
category_factory("c"),
]
url = reverse("categories:category_list")
response = client.get(url)
assert response.status_code == 200
assert len(response.context["category_list"]) == len(categories)
def test_category_list_view_get_not_logged_in(client: Client) -> None:
url = reverse("categories:category_list")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_category_list_view_get_ordered_by_name(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_factory: Callable[..., Category],
) -> None:
login_user(user_foo)
category_factory("b")
category_factory("a")
url = reverse("categories:category_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["category_list"]) == list(
Category.objects.filter(user=user_foo).order_by("name")
)
def test_category_list_view_get_current_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
category_factory: Callable[..., Category],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
category_factory("a", user=user_a)
category_factory("b", user=user_b)
url = reverse("categories:category_list")
response = client.get(url)
assert response.status_code == 200
assert list(response.context["category_list"]) == list(
Category.objects.filter(user=user_a).order_by("name")
)
def test_category_create_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("categories:category_create")
response = client.get(url)
assert response.status_code == 200
def test_category_create_view_get_not_logged_in(client: Client) -> None:
url = reverse("categories:category_create")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_category_create_view_post_redirect(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("categories:category_create")
data = {
"name": "a",
}
response = client.post(url, data=data)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "category_list"
def test_category_create_view_post_message(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("categories:category_create")
data = {
"name": "a",
}
response = client.post(url, data=data)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Category was created successfully" in messages
def test_category_update_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("categories:category_update", kwargs={"pk": category_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_category_update_view_get_not_logged_in(
client: Client, category_foo: Category
) -> None:
url = reverse("categories:category_update", kwargs={"pk": category_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_category_update_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("categories:category_update", kwargs={"pk": category_foo.pk})
data = {
"name": "bar",
}
response = client.post(url, data=data)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "category_list"
def test_category_update_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("categories:category_update", kwargs={"pk": category_foo.pk})
data = {
"name": "bar",
}
response = client.post(url, data=data)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Category was updated successfully" in messages
def test_category_update_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
category_factory: Callable[..., Category],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
category_factory("a", user=user_a)
category_b = category_factory("b", user=user_b)
url = reverse("categories:category_update", kwargs={"pk": category_b.pk})
data = {"name": "bx", "category_type": Account.AccountType.ACCOUNT}
response = client.post(url, data=data)
assert response.status_code == 403
def test_category_delete_view_get(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("categories:category_delete", kwargs={"pk": category_foo.pk})
response = client.get(url)
assert response.status_code == 200
def test_category_delete_view_get_not_logged_in(
client: Client, category_foo: Category
) -> None:
url = reverse("categories:category_delete", kwargs={"pk": category_foo.pk})
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_category_delete_view_post_redirect(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("categories:category_delete", kwargs={"pk": category_foo.pk})
response = client.post(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "category_list"
def test_category_delete_view_post_message(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("categories:category_delete", kwargs={"pk": category_foo.pk})
response = client.post(url)
assert response.status_code == 302
messages = [m.message for m in get_messages(response.wsgi_request)]
assert "Category was deleted successfully" in messages
def test_category_delete_view_post_different_user(
client: Client,
user_factory: Callable[..., User],
login_user: Callable[[User], None],
category_factory: Callable[..., Category],
) -> None:
user_a = user_factory("a")
user_b = user_factory("b")
login_user(user_a)
category_factory("a", user=user_a)
category_b = category_factory("b", user=user_b)
url = reverse("categories:category_delete", kwargs={"pk": category_b.pk})
response = client.post(url)
assert response.status_code == 403
def test_report_income_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("reports:report_income")
response = client.get(url)
assert response.status_code == 200
def test_report_income_view_get_not_logged_in(client: Client) -> None:
url = reverse("reports:report_income")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_report_income_view_get_with_parameters(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("reports:report_income")
data = {
"accounts": [account_foo.pk],
"from_date": datetime.date.today(),
"to_date": datetime.date.today(),
"excluded_categories": [category_foo],
}
response = client.get(url, data)
assert response.status_code == 200
def test_report_balance_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("reports:report_balance")
response = client.get(url)
assert response.status_code == 200
def test_report_balance_view_get_not_logged_in(client: Client) -> None:
url = reverse("reports:report_balance")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_report_balance_view_get_with_parameters(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
) -> None:
login_user(user_foo)
url = reverse("reports:report_balance")
data = {
"accounts": [account_foo.pk],
"from_date": datetime.date.today(),
"to_date": datetime.date.today(),
}
response = client.get(url, data)
assert response.status_code == 200
def test_report_category_balance_view_get(
client: Client, user_foo: User, login_user: Callable[[User], None]
) -> None:
login_user(user_foo)
url = reverse("reports:report_category_balance")
response = client.get(url)
assert response.status_code == 200
def test_report_category_balance_view_get_not_logged_in(client: Client) -> None:
url = reverse("reports:report_category_balance")
response = client.get(url)
assert response.status_code == 302
assert resolve(get_url_path(response)).url_name == "login"
def test_report_category_balance_view_get_with_parameters(
client: Client,
user_foo: User,
login_user: Callable[[User], None],
account_foo: Account,
category_foo: Category,
) -> None:
login_user(user_foo)
url = reverse("reports:report_category_balance")
data = {
"accounts": [account_foo.pk],
"from_date": datetime.date.today(),
"to_date": datetime.date.today(),
"categories": [category_foo],
}
response = client.get(url, data)
assert response.status_code == 200
def test_callback_success_new_connection(
client: Client,
profile_foo_external: Profile,
mocker: MockFixture,
saltedge_connection: saltedge_client.Connection,
) -> None:
mocker.patch(
"budget.views.get_connection", autospec=True, return_value=saltedge_connection
)
mocker.patch("budget.views.get_accounts", autospec=True, return_value=[])
mocker.patch("budget.views.get_transactions", autospec=True, return_value=[])
mocker.patch(
"budget.views.get_pending_transactions", autospec=True, return_value=[]
)
mocker.patch("budget.views.verify_signature", autospec=True)
url = reverse("callbacks:callback_success")
data = {
"data": {
"connection_id": saltedge_connection.id,
"customer_id": str(profile_foo_external.external_id),
"custom_fields": {"key": "value"},
},
"meta": {"version": "5", "time": "2020-11-12T12:31:01.588Z"},
}
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
assert Connection.objects.filter(external_id=int(saltedge_connection.id)).exists()
def test_callback_success_new_account(
client: Client,
profile_foo_external: Profile,
mocker: MockFixture,
saltedge_connection: saltedge_client.Connection,
saltedge_account: saltedge_client.Account,
) -> None:
mocker.patch(
"budget.views.get_connection", autospec=True, return_value=saltedge_connection
)
mocker.patch(
"budget.views.get_accounts", autospec=True, return_value=[saltedge_account]
)
mocker.patch("budget.views.get_transactions", autospec=True, return_value=[])
mocker.patch(
"budget.views.get_pending_transactions", autospec=True, return_value=[]
)
mocker.patch("budget.views.verify_signature", autospec=True)
url = reverse("callbacks:callback_success")
data = {
"data": {
"connection_id": saltedge_connection.id,
"customer_id": str(profile_foo_external.external_id),
"custom_fields": {"key": "value"},
},
"meta": {"version": "5", "time": "2020-11-12T12:31:01.588Z"},
}
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
assert Account.objects.filter(external_id=int(saltedge_account.id)).exists()
def test_callback_success_new_transaction(
client: Client,
profile_foo_external: Profile,
mocker: MockFixture,
saltedge_connection: saltedge_client.Connection,
saltedge_account: saltedge_client.Account,
saltedge_transaction: saltedge_client.Transaction,
) -> None:
mocker.patch(
"budget.views.get_connection", autospec=True, return_value=saltedge_connection
)
mocker.patch(
"budget.views.get_accounts", autospec=True, return_value=[saltedge_account]
)
mocker.patch(
"budget.views.get_transactions",
autospec=True,
return_value=[saltedge_transaction],
)
mocker.patch(
"budget.views.get_pending_transactions", autospec=True, return_value=[]
)
mocker.patch("budget.views.verify_signature", autospec=True)
url = reverse("callbacks:callback_success")
data = {
"data": {
"connection_id": saltedge_connection.id,
"customer_id": str(profile_foo_external.external_id),
"custom_fields": {"key": "value"},
},
"meta": {"version": "5", "time": "2020-11-12T12:31:01.588Z"},
}
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
assert Transaction.objects.filter(external_id=int(saltedge_transaction.id)).exists()
def test_callback_success_initial_balance(
client: Client,
profile_foo_external: Profile,
mocker: MockFixture,
saltedge_connection: saltedge_client.Connection,
saltedge_account: saltedge_client.Account,
saltedge_transaction: saltedge_client.Transaction,
) -> None:
mocker.patch(
"budget.views.get_connection", autospec=True, return_value=saltedge_connection
)
mocker.patch(
"budget.views.get_accounts", autospec=True, return_value=[saltedge_account]
)
mocker.patch(
"budget.views.get_transactions",
autospec=True,
return_value=[saltedge_transaction],
)
mocker.patch(
"budget.views.get_pending_transactions",
autospec=True,
return_value=[saltedge_transaction],
)
mocker.patch("budget.views.verify_signature", autospec=True)
url = reverse("callbacks:callback_success")
data = {
"data": {
"connection_id": saltedge_connection.id,
"customer_id": str(profile_foo_external.external_id),
"custom_fields": {"key": "value"},
},
"meta": {"version": "5", "time": "2020-11-12T12:31:01.588Z"},
}
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
assert Transaction.objects.filter(description="Initial balance").exists()
@pytest.mark.django_db
def test_callback_success_invalid_customer(client: Client, mocker: MockFixture) -> None:
url = reverse("callbacks:callback_success")
data = {
"data": {
"connection_id": "1234",
"customer_id": "5678",
"custom_fields": {"key": "value"},
},
"meta": {"version": "5", "time": "2020-11-12T12:31:01.588Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 400
def test_callback_fail(client: Client, mocker: MockFixture) -> None:
url = reverse("callbacks:callback_fail")
data = {
"data": {
"connection_id": "111111111111111111",
"customer_id": "222222222222222222",
"custom_fields": {"key": "value"},
"error_class": "InvalidCredentials",
"error_message": "Invalid credentials.",
},
"meta": {"version": "5", "time": "2020-11-12T12:31:01.606Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
mocker.patch("budget.views.get_accounts", autospec=True, return_value=[])
mocker.patch("budget.views.remove_connection_from_saltedge", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
def test_callback_destroy(
client: Client,
profile_foo_external: Profile,
connection_foo: Connection,
mocker: MockFixture,
) -> None:
url = reverse("callbacks:callback_destroy")
data = {
"data": {
"connection_id": str(connection_foo.external_id),
"customer_id": str(profile_foo_external.external_id),
},
"meta": {"version": "5", "time": "2020-11-11T12:31:01Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
assert not Connection.objects.filter(pk=connection_foo.pk).exists()
def test_callback_destroy_invalid_customer(
client: Client, connection_foo: Connection, mocker: MockFixture
) -> None:
url = reverse("callbacks:callback_destroy")
data = {
"data": {
"connection_id": str(connection_foo.external_id),
"customer_id": "1234",
},
"meta": {"version": "5", "time": "2020-11-11T12:31:01Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 400
def test_callback_destroy_invalid_connection(
client: Client, profile_foo_external: Profile, mocker: MockFixture
) -> None:
url = reverse("callbacks:callback_destroy")
data = {
"data": {
"connection_id": "1234",
"customer_id": str(profile_foo_external.external_id),
},
"meta": {"version": "5", "time": "2020-11-11T12:31:01Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 400
def test_callback_notify(client: Client, mocker: MockFixture) -> None:
url = reverse("callbacks:callback_notify")
data = {
"data": {
"connection_id": "111111111111111111",
"customer_id": "222222222222222222",
"custom_fields": {"key": "value"},
"stage": "start",
},
"meta": {"version": "5", "time": "2020-11-11T12:31:01Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
def test_callback_service(client: Client, mocker: MockFixture) -> None:
url = reverse("callbacks:callback_service")
data = {
"data": {
"connection_id": "111111111111111111",
"customer_id": "222222222222222222",
"custom_fields": {"key": "value"},
"reason": "updated",
},
"meta": {"version": "5", "time": "2020-11-11T12:31:01Z"},
}
mocker.patch("budget.views.verify_signature", autospec=True)
response = client.post(
url, json.dumps(data), content_type="application/json", HTTP_SIGNATURE="TODO"
)
assert response.status_code == 204
def test_verify_signature_success() -> None:
public_key_pem = """
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvL/Xxdmj7/cpZgvDMvxr
nTTU/vkHGM/qkJ0Q+rmfYLru0Z/rSWthPDEK3orY5BTa0sAe2wUV5Fes677X6+Ib
roCF8nODW5hSVTrqWcrQ55I7InpFkpTxyMkiFN8XPS7qmYXl/xofbYq0olcwE/aw
9lfHlZD7iwOpVJqTsYiXzSMRu92ZdECV895kYS/ggymSEtoMSW3405dQ6OfnK53x
7AJPdkAp0Wa2Lk4BNBMd24uu2tasO1bTYBsHpxonwbA+o8BXffdTEloloJgW7pV+
TWvxB/Uxil4yhZZJaFmvTCefxWFovyzLdjn2aSAEI7D1y4IYOdByMOPYQ6Mn7J9A
9wIDAQAB
-----END PUBLIC KEY-----
"""
signature_base64 = "LhW+IftaENhUedrIsWp//ySu55XUs+e0seaJq7dFkiIGJH8XBF+z4yMYWCrr54MDIwwQV3WQ3BlJ6zq5SMiSt5cD72UFtV7dhMndfbKE51ItfpdAaGn47xXab3Nd5kAImNiOse6PUHknFh1mS/lSTF6jIePm6Gv5/BhVm8Y9O+ZBCy/A/GWXE49o6Ai+9StkTXj+6NAwNjvhyMEEBxJIB1d9MmfcrPvHhGV5F7WJxTHb3mNafapkkXO7Lp4dfa1902CzJUQUBt8kBd6dEZyk4NbUKQPOfi6I4HDpt4u+iELgI9M+vwzv8fwWzBpnvTfht1xbklKC3cYFMlaiQO54JQ=="
data = 'http://budget-supervisor-stage.herokuapp.com/callbacks/success/|{"data":{"connection_id":"349600516445047165","customer_id":"345935467172071692","custom_fields":{}},"meta":{"version":"5","time":"2020-11-12T21:00:19.000Z"}}'
verify_signature(public_key_pem, signature_base64, data)
def test_verify_signature_invalid_signature() -> None:
public_key_pem = """
-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvL/Xxdmj7/cpZgvDMvxr
nTTU/vkHGM/qkJ0Q+rmfYLru0Z/rSWthPDEK3orY5BTa0sAe2wUV5Fes677X6+Ib
roCF8nODW5hSVTrqWcrQ55I7InpFkpTxyMkiFN8XPS7qmYXl/xofbYq0olcwE/aw
9lfHlZD7iwOpVJqTsYiXzSMRu92ZdECV895kYS/ggymSEtoMSW3405dQ6OfnK53x
7AJPdkAp0Wa2Lk4BNBMd24uu2tasO1bTYBsHpxonwbA+o8BXffdTEloloJgW7pV+
TWvxB/Uxil4yhZZJaFmvTCefxWFovyzLdjn2aSAEI7D1y4IYOdByMOPYQ6Mn7J9A
9wIDAQAB
-----END PUBLIC KEY-----
"""
signature_base64 = base64.b64encode(b"xyz").decode("ascii")
data = 'http://budget-supervisor-stage.herokuapp.com/callbacks/success/|{"data":{"connection_id":"349600516445047165","customer_id":"345935467172071692","custom_fields":{}},"meta":{"version":"5","time":"2020-11-12T21:00:19.000Z"}}'
with pytest.raises(OpenSSL.crypto.Error):
verify_signature(public_key_pem, signature_base64, data)
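# A minimal sketch, not the project's actual implementation: one way a
# verify_signature with the call shape used in the tests above could be
# built with pyOpenSSL (base64 and OpenSSL are already imported by this
# module; the helper name and the "sha256" digest are assumptions).
# It mirrors only the behaviour the tests rely on: verification passes
# silently, and OpenSSL.crypto.Error is raised when the signature is bad.
def _verify_signature_sketch(public_key_pem: str, signature_base64: str, data: str) -> None:
    public_key = OpenSSL.crypto.load_publickey(OpenSSL.crypto.FILETYPE_PEM, public_key_pem)
    # verify() expects a certificate object, so wrap the bare public key in one.
    certificate = OpenSSL.crypto.X509()
    certificate.set_pubkey(public_key)
    signature = base64.b64decode(signature_base64)
    # Returns None on success; raises OpenSSL.crypto.Error on mismatch.
    OpenSSL.crypto.verify(certificate, signature, data.encode("utf-8"), "sha256")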
|
onitake/ansible
|
refs/heads/devel
|
lib/ansible/modules/clustering/consul_acl.py
|
32
|
#!/usr/bin/python
#
# (c) 2015, Steve Gargan <steve.gargan@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: consul_acl
short_description: Manipulate Consul ACL keys and rules
description:
- Allows the addition, modification and deletion of ACL keys and associated
rules in a consul cluster via the agent. For more details on using and
configuring ACLs, see https://www.consul.io/docs/guides/acl.html.
version_added: "2.0"
author:
- Steve Gargan (@sgargan)
- Colin Nolan (@colin-nolan)
options:
mgmt_token:
description:
- a management token is required to manipulate the acl lists
state:
description:
- whether the ACL pair should be present or absent
required: false
choices: ['present', 'absent']
default: present
token_type:
description:
- the type of token that should be created, either management or client
choices: ['client', 'management']
default: client
name:
description:
      - the name that should be associated with the acl key; this is opaque
        to Consul
required: false
token:
description:
      - the token key identifying an ACL rule set. If generated by consul,
        this will be a UUID
required: false
rules:
description:
- a list of the rules that should be associated with a given token
required: false
host:
description:
      - host of the consul agent; defaults to localhost
required: false
default: localhost
port:
description:
- the port on which the consul agent is running
required: false
default: 8500
scheme:
description:
- the protocol scheme on which the consul agent is running
required: false
default: http
version_added: "2.1"
validate_certs:
description:
- whether to verify the tls certificate of the consul agent
required: false
default: True
version_added: "2.1"
requirements:
- "python >= 2.6"
- python-consul
- pyhcl
- requests
"""
EXAMPLES = """
- name: create an ACL with rules
consul_acl:
host: consul1.example.com
mgmt_token: some_management_acl
name: Foo access
rules:
- key: "foo"
policy: read
- key: "private/foo"
policy: deny
- name: create an ACL with a specific token
consul_acl:
host: consul1.example.com
mgmt_token: some_management_acl
name: Foo access
token: my-token
rules:
- key: "foo"
policy: read
- name: update the rules associated with an ACL token
consul_acl:
host: consul1.example.com
mgmt_token: some_management_acl
name: Foo access
token: some_client_token
rules:
- event: "bbq"
policy: write
- key: "foo"
policy: read
- key: "private"
policy: deny
- keyring: write
- node: "hgs4"
policy: write
- operator: read
- query: ""
policy: write
- service: "consul"
policy: write
- session: "standup"
policy: write
- name: remove a token
consul_acl:
host: consul1.example.com
mgmt_token: some_management_acl
token: 172bd5c8-9fe9-11e4-b1b0-3c15c2c9fd5e
state: absent
"""
RETURN = """
token:
    description: the token associated with the ACL (the ACL's ID)
returned: success
type: string
sample: a2ec332f-04cf-6fba-e8b8-acf62444d3da
rules:
    description: the HCL JSON representation of the rules associated with the ACL, in the format described in the
                 Consul documentation (https://www.consul.io/docs/guides/acl.html#rule-specification).
returned: I(status) == "present"
type: string
sample: {
"key": {
"foo": {
"policy": "write"
},
"bar": {
"policy": "deny"
}
}
}
operation:
description: the operation performed on the ACL
returned: changed
type: string
sample: update
"""
try:
import consul
python_consul_installed = True
except ImportError:
python_consul_installed = False
try:
import hcl
pyhcl_installed = True
except ImportError:
pyhcl_installed = False
try:
from requests.exceptions import ConnectionError
has_requests = True
except ImportError:
has_requests = False
from collections import defaultdict
from ansible.module_utils.basic import to_text, AnsibleModule
RULE_SCOPES = ["agent", "event", "key", "keyring", "node", "operator", "query", "service", "session"]
MANAGEMENT_PARAMETER_NAME = "mgmt_token"
HOST_PARAMETER_NAME = "host"
SCHEME_PARAMETER_NAME = "scheme"
VALIDATE_CERTS_PARAMETER_NAME = "validate_certs"
NAME_PARAMETER_NAME = "name"
PORT_PARAMETER_NAME = "port"
RULES_PARAMETER_NAME = "rules"
STATE_PARAMETER_NAME = "state"
TOKEN_PARAMETER_NAME = "token"
TOKEN_TYPE_PARAMETER_NAME = "token_type"
PRESENT_STATE_VALUE = "present"
ABSENT_STATE_VALUE = "absent"
CLIENT_TOKEN_TYPE_VALUE = "client"
MANAGEMENT_TOKEN_TYPE_VALUE = "management"
REMOVE_OPERATION = "remove"
UPDATE_OPERATION = "update"
CREATE_OPERATION = "create"
_POLICY_JSON_PROPERTY = "policy"
_RULES_JSON_PROPERTY = "Rules"
_TOKEN_JSON_PROPERTY = "ID"
_TOKEN_TYPE_JSON_PROPERTY = "Type"
_NAME_JSON_PROPERTY = "Name"
_POLICY_YML_PROPERTY = "policy"
_POLICY_HCL_PROPERTY = "policy"
_ARGUMENT_SPEC = {
MANAGEMENT_PARAMETER_NAME: dict(required=True, no_log=True),
HOST_PARAMETER_NAME: dict(default='localhost'),
SCHEME_PARAMETER_NAME: dict(required=False, default='http'),
VALIDATE_CERTS_PARAMETER_NAME: dict(required=False, type='bool', default=True),
NAME_PARAMETER_NAME: dict(required=False),
PORT_PARAMETER_NAME: dict(default=8500, type='int'),
RULES_PARAMETER_NAME: dict(default=None, required=False, type='list'),
STATE_PARAMETER_NAME: dict(default=PRESENT_STATE_VALUE, choices=[PRESENT_STATE_VALUE, ABSENT_STATE_VALUE]),
TOKEN_PARAMETER_NAME: dict(required=False),
TOKEN_TYPE_PARAMETER_NAME: dict(required=False, choices=[CLIENT_TOKEN_TYPE_VALUE, MANAGEMENT_TOKEN_TYPE_VALUE],
default=CLIENT_TOKEN_TYPE_VALUE)
}
def set_acl(consul_client, configuration):
"""
Sets an ACL based on the given configuration.
:param consul_client: the consul client
:param configuration: the run configuration
:return: the output of setting the ACL
"""
acls_as_json = decode_acls_as_json(consul_client.acl.list())
existing_acls_mapped_by_name = dict((acl.name, acl) for acl in acls_as_json if acl.name is not None)
existing_acls_mapped_by_token = dict((acl.token, acl) for acl in acls_as_json)
if None in existing_acls_mapped_by_token:
raise AssertionError("expecting ACL list to be associated to a token: %s" %
existing_acls_mapped_by_token[None])
if configuration.token is None and configuration.name and configuration.name in existing_acls_mapped_by_name:
# No token but name given so can get token from name
configuration.token = existing_acls_mapped_by_name[configuration.name].token
if configuration.token and configuration.token in existing_acls_mapped_by_token:
return update_acl(consul_client, configuration)
else:
if configuration.token in existing_acls_mapped_by_token:
raise AssertionError()
if configuration.name in existing_acls_mapped_by_name:
raise AssertionError()
return create_acl(consul_client, configuration)
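# Illustrative decision flow of set_acl (derived from the code above):
#   - token given and it already exists            -> update_acl
#   - only name given and the name already exists  -> resolve token from name, then update_acl
#   - otherwise                                    -> create_acl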
def update_acl(consul_client, configuration):
"""
Updates an ACL.
:param consul_client: the consul client
:param configuration: the run configuration
:return: the output of the update
"""
existing_acl = load_acl_with_token(consul_client, configuration.token)
changed = existing_acl.rules != configuration.rules
if changed:
name = configuration.name if configuration.name is not None else existing_acl.name
rules_as_hcl = encode_rules_as_hcl_string(configuration.rules)
updated_token = consul_client.acl.update(
configuration.token, name=name, type=configuration.token_type, rules=rules_as_hcl)
if updated_token != configuration.token:
raise AssertionError()
return Output(changed=changed, token=configuration.token, rules=configuration.rules, operation=UPDATE_OPERATION)
def create_acl(consul_client, configuration):
"""
Creates an ACL.
:param consul_client: the consul client
:param configuration: the run configuration
:return: the output of the creation
"""
rules_as_hcl = encode_rules_as_hcl_string(configuration.rules) if len(configuration.rules) > 0 else None
token = consul_client.acl.create(
name=configuration.name, type=configuration.token_type, rules=rules_as_hcl, acl_id=configuration.token)
rules = configuration.rules
return Output(changed=True, token=token, rules=rules, operation=CREATE_OPERATION)
def remove_acl(consul, configuration):
"""
Removes an ACL.
:param consul: the consul client
:param configuration: the run configuration
:return: the output of the removal
"""
token = configuration.token
changed = consul.acl.info(token) is not None
if changed:
consul.acl.destroy(token)
return Output(changed=changed, token=token, operation=REMOVE_OPERATION)
def load_acl_with_token(consul, token):
"""
Loads the ACL with the given token (token == rule ID).
:param consul: the consul client
:param token: the ACL "token"/ID (not name)
:return: the ACL associated to the given token
    :exception ConsulACLNotFoundException: raised if the given token does not exist
"""
acl_as_json = consul.acl.info(token)
if acl_as_json is None:
raise ConsulACLNotFoundException(token)
return decode_acl_as_json(acl_as_json)
def encode_rules_as_hcl_string(rules):
"""
Converts the given rules into the equivalent HCL (string) representation.
:param rules: the rules
    :return: the equivalent HCL (string) representation of the rules. Will be None if there are no rules (see internal
    note for justification)
"""
if len(rules) == 0:
        # Note: an empty string is not valid HCL according to `hcl.load`; however, the ACL `Rules` property will be an
        # empty string if there are no rules...
return None
rules_as_hcl = ""
for rule in rules:
rules_as_hcl += encode_rule_as_hcl_string(rule)
return rules_as_hcl
def encode_rule_as_hcl_string(rule):
"""
Converts the given rule into the equivalent HCL (string) representation.
:param rule: the rule
:return: the equivalent HCL (string) representation of the rule
"""
if rule.pattern is not None:
return '%s "%s" {\n %s = "%s"\n}\n' % (rule.scope, rule.pattern, _POLICY_HCL_PROPERTY, rule.policy)
else:
return '%s = "%s"\n' % (rule.scope, rule.policy)
def decode_rules_as_hcl_string(rules_as_hcl):
"""
Converts the given HCL (string) representation of rules into a list of rule domain models.
:param rules_as_hcl: the HCL (string) representation of a collection of rules
:return: the equivalent domain model to the given rules
"""
rules_as_hcl = to_text(rules_as_hcl)
rules_as_json = hcl.loads(rules_as_hcl)
return decode_rules_as_json(rules_as_json)
def decode_rules_as_json(rules_as_json):
"""
Converts the given JSON representation of rules into a list of rule domain models.
:param rules_as_json: the JSON representation of a collection of rules
:return: the equivalent domain model to the given rules
"""
rules = RuleCollection()
for scope in rules_as_json:
if not isinstance(rules_as_json[scope], dict):
rules.add(Rule(scope, rules_as_json[scope]))
else:
for pattern, policy in rules_as_json[scope].items():
rules.add(Rule(scope, policy[_POLICY_JSON_PROPERTY], pattern))
return rules
def encode_rules_as_json(rules):
"""
Converts the given rules into the equivalent JSON representation according to the documentation:
https://www.consul.io/docs/guides/acl.html#rule-specification.
:param rules: the rules
:return: JSON representation of the given rules
"""
rules_as_json = defaultdict(dict)
for rule in rules:
if rule.pattern is not None:
if rule.pattern in rules_as_json[rule.scope]:
raise AssertionError()
rules_as_json[rule.scope][rule.pattern] = {
_POLICY_JSON_PROPERTY: rule.policy
}
else:
if rule.scope in rules_as_json:
raise AssertionError()
rules_as_json[rule.scope] = rule.policy
return rules_as_json
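# Illustrative output for [Rule("key", "write", "foo"), Rule("operator", "read")]:
#   {"key": {"foo": {"policy": "write"}}, "operator": "read"}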
def decode_rules_as_yml(rules_as_yml):
"""
Converts the given YAML representation of rules into a list of rule domain models.
:param rules_as_yml: the YAML representation of a collection of rules
:return: the equivalent domain model to the given rules
"""
rules = RuleCollection()
if rules_as_yml:
for rule_as_yml in rules_as_yml:
rule_added = False
for scope in RULE_SCOPES:
if scope in rule_as_yml:
if rule_as_yml[scope] is None:
raise ValueError("Rule for '%s' does not have a value associated to the scope" % scope)
policy = rule_as_yml[_POLICY_YML_PROPERTY] if _POLICY_YML_PROPERTY in rule_as_yml \
else rule_as_yml[scope]
pattern = rule_as_yml[scope] if _POLICY_YML_PROPERTY in rule_as_yml else None
rules.add(Rule(scope, policy, pattern))
rule_added = True
break
if not rule_added:
raise ValueError("A rule requires one of %s and a policy." % ('/'.join(RULE_SCOPES)))
return rules
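# Illustrative mappings (one per branch above):
#   {"key": "foo", "policy": "read"} -> Rule(scope="key", policy="read", pattern="foo")
#   {"operator": "read"}             -> Rule(scope="operator", policy="read", pattern=None)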
def decode_acl_as_json(acl_as_json):
"""
Converts the given JSON representation of an ACL into the equivalent domain model.
:param acl_as_json: the JSON representation of an ACL
:return: the equivalent domain model to the given ACL
"""
rules_as_hcl = acl_as_json[_RULES_JSON_PROPERTY]
rules = decode_rules_as_hcl_string(acl_as_json[_RULES_JSON_PROPERTY]) if rules_as_hcl.strip() != "" \
else RuleCollection()
return ACL(
rules=rules,
token_type=acl_as_json[_TOKEN_TYPE_JSON_PROPERTY],
token=acl_as_json[_TOKEN_JSON_PROPERTY],
name=acl_as_json[_NAME_JSON_PROPERTY]
)
def decode_acls_as_json(acls_as_json):
"""
Converts the given JSON representation of ACLs into a list of ACL domain models.
:param acls_as_json: the JSON representation of a collection of ACLs
:return: list of equivalent domain models for the given ACLs (order not guaranteed to be the same)
"""
return [decode_acl_as_json(acl_as_json) for acl_as_json in acls_as_json]
class ConsulACLNotFoundException(Exception):
"""
    Exception raised if an ACL with the given token is not found.
"""
class Configuration:
"""
Configuration for this module.
"""
def __init__(self, management_token=None, host=None, scheme=None, validate_certs=None, name=None, port=None,
rules=None, state=None, token=None, token_type=None):
self.management_token = management_token # type: str
self.host = host # type: str
self.scheme = scheme # type: str
self.validate_certs = validate_certs # type: bool
self.name = name # type: str
        self.port = port  # type: int
self.rules = rules # type: RuleCollection
self.state = state # type: str
self.token = token # type: str
self.token_type = token_type # type: str
class Output:
"""
Output of an action of this module.
"""
def __init__(self, changed=None, token=None, rules=None, operation=None):
self.changed = changed # type: bool
self.token = token # type: str
self.rules = rules # type: RuleCollection
self.operation = operation # type: str
class ACL:
"""
Consul ACL. See: https://www.consul.io/docs/guides/acl.html.
"""
def __init__(self, rules, token_type, token, name):
self.rules = rules
self.token_type = token_type
self.token = token
self.name = name
def __eq__(self, other):
return other \
and isinstance(other, self.__class__) \
and self.rules == other.rules \
and self.token_type == other.token_type \
and self.token == other.token \
and self.name == other.name
def __hash__(self):
return hash(self.rules) ^ hash(self.token_type) ^ hash(self.token) ^ hash(self.name)
class Rule:
"""
ACL rule. See: https://www.consul.io/docs/guides/acl.html#acl-rules-and-scope.
"""
def __init__(self, scope, policy, pattern=None):
self.scope = scope
self.policy = policy
self.pattern = pattern
def __eq__(self, other):
return isinstance(other, self.__class__) \
and self.scope == other.scope \
and self.policy == other.policy \
and self.pattern == other.pattern
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return (hash(self.scope) ^ hash(self.policy)) ^ hash(self.pattern)
def __str__(self):
return encode_rule_as_hcl_string(self)
class RuleCollection:
"""
Collection of ACL rules, which are part of a Consul ACL.
"""
def __init__(self):
self._rules = {}
for scope in RULE_SCOPES:
self._rules[scope] = {}
def __iter__(self):
all_rules = []
for scope, pattern_keyed_rules in self._rules.items():
for pattern, rule in pattern_keyed_rules.items():
all_rules.append(rule)
return iter(all_rules)
def __len__(self):
count = 0
for scope in RULE_SCOPES:
count += len(self._rules[scope])
return count
def __eq__(self, other):
return isinstance(other, self.__class__) \
and set(self) == set(other)
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return encode_rules_as_hcl_string(self)
def add(self, rule):
"""
Adds the given rule to this collection.
:param rule: model of a rule
:raises ValueError: raised if there already exists a rule for a given scope and pattern
"""
if rule.pattern in self._rules[rule.scope]:
            pattern_info = " and pattern '%s'" % rule.pattern if rule.pattern is not None else ""
            raise ValueError("Duplicate rule for scope '%s'%s" % (rule.scope, pattern_info))
self._rules[rule.scope][rule.pattern] = rule
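    # Illustrative behaviour of add():
    #   rules = RuleCollection()
    #   rules.add(Rule("key", "read", "foo"))
    #   rules.add(Rule("key", "deny", "foo"))  # raises ValueError (same scope and pattern)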
def get_consul_client(configuration):
"""
Gets a Consul client for the given configuration.
Does not check if the Consul client can connect.
:param configuration: the run configuration
:return: Consul client
"""
token = configuration.management_token
if token is None:
token = configuration.token
if token is None:
raise AssertionError("Expecting the management token to always be set")
return consul.Consul(host=configuration.host, port=configuration.port, scheme=configuration.scheme,
verify=configuration.validate_certs, token=token)
def check_dependencies():
"""
Checks that the required dependencies have been imported.
    :exception ImportError: if it is detected that any of the required dependencies have not been imported
"""
if not python_consul_installed:
raise ImportError("python-consul required for this module. "
"See: https://python-consul.readthedocs.io/en/latest/#installation")
if not pyhcl_installed:
raise ImportError("pyhcl required for this module. "
"See: https://pypi.org/project/pyhcl/")
if not has_requests:
raise ImportError("requests required for this module. See https://pypi.org/project/requests/")
def main():
"""
Main method.
"""
module = AnsibleModule(_ARGUMENT_SPEC, supports_check_mode=False)
try:
check_dependencies()
except ImportError as e:
module.fail_json(msg=str(e))
configuration = Configuration(
management_token=module.params.get(MANAGEMENT_PARAMETER_NAME),
host=module.params.get(HOST_PARAMETER_NAME),
scheme=module.params.get(SCHEME_PARAMETER_NAME),
validate_certs=module.params.get(VALIDATE_CERTS_PARAMETER_NAME),
name=module.params.get(NAME_PARAMETER_NAME),
port=module.params.get(PORT_PARAMETER_NAME),
rules=decode_rules_as_yml(module.params.get(RULES_PARAMETER_NAME)),
state=module.params.get(STATE_PARAMETER_NAME),
token=module.params.get(TOKEN_PARAMETER_NAME),
token_type=module.params.get(TOKEN_TYPE_PARAMETER_NAME)
)
consul_client = get_consul_client(configuration)
try:
if configuration.state == PRESENT_STATE_VALUE:
output = set_acl(consul_client, configuration)
else:
output = remove_acl(consul_client, configuration)
except ConnectionError as e:
module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
configuration.host, configuration.port, str(e)))
raise
return_values = dict(changed=output.changed, token=output.token, operation=output.operation)
if output.rules is not None:
return_values["rules"] = encode_rules_as_json(output.rules)
module.exit_json(**return_values)
if __name__ == "__main__":
main()
|
edcast-inc/edx-platform-edcast
|
refs/heads/master
|
lms/djangoapps/instructor_analytics/tests/test_basic.py
|
32
|
"""
Tests for instructor.basic
"""
import json
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from django.core.urlresolvers import reverse
from mock import patch
from student.roles import CourseSalesAdminRole
from student.tests.factories import UserFactory, CourseModeFactory
from shoppingcart.models import (
CourseRegistrationCode, RegistrationCodeRedemption, Order,
Invoice, Coupon, CourseRegCodeItem, CouponRedemption, CourseRegistrationCodeInvoiceItem
)
from course_modes.models import CourseMode
from instructor_analytics.basic import (
sale_record_features, sale_order_record_features, enrolled_students_features,
course_registration_features, coupon_codes_features, list_may_enroll,
AVAILABLE_FEATURES, STUDENT_FEATURES, PROFILE_FEATURES
)
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from courseware.tests.factories import InstructorFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
import datetime
from django.db.models import Q
import pytz
class TestAnalyticsBasic(ModuleStoreTestCase):
""" Test basic analytics functions. """
def setUp(self):
super(TestAnalyticsBasic, self).setUp()
self.course_key = self.store.make_course_key('robot', 'course', 'id')
self.users = tuple(UserFactory() for _ in xrange(30))
self.ces = tuple(CourseEnrollment.enroll(user, self.course_key)
for user in self.users)
self.instructor = InstructorFactory(course_key=self.course_key)
for user in self.users:
user.profile.meta = json.dumps({
"position": "edX expert {}".format(user.id),
"company": "Open edX Inc {}".format(user.id),
})
user.profile.save()
self.students_who_may_enroll = list(self.users) + [UserFactory() for _ in range(5)]
for student in self.students_who_may_enroll:
CourseEnrollmentAllowed.objects.create(
email=student.email, course_id=self.course_key
)
def test_enrolled_students_features_username(self):
self.assertIn('username', AVAILABLE_FEATURES)
userreports = enrolled_students_features(self.course_key, ['username'])
self.assertEqual(len(userreports), len(self.users))
for userreport in userreports:
self.assertEqual(userreport.keys(), ['username'])
self.assertIn(userreport['username'], [user.username for user in self.users])
def test_enrolled_students_features_keys(self):
query_features = ('username', 'name', 'email')
for feature in query_features:
self.assertIn(feature, AVAILABLE_FEATURES)
with self.assertNumQueries(1):
userreports = enrolled_students_features(self.course_key, query_features)
self.assertEqual(len(userreports), len(self.users))
for userreport in userreports:
self.assertEqual(set(userreport.keys()), set(query_features))
self.assertIn(userreport['username'], [user.username for user in self.users])
self.assertIn(userreport['email'], [user.email for user in self.users])
self.assertIn(userreport['name'], [user.profile.name for user in self.users])
def test_enrolled_students_meta_features_keys(self):
"""
Assert that we can query individual fields in the 'meta' field in the UserProfile
"""
query_features = ('meta.position', 'meta.company')
with self.assertNumQueries(1):
userreports = enrolled_students_features(self.course_key, query_features)
self.assertEqual(len(userreports), len(self.users))
for userreport in userreports:
self.assertEqual(set(userreport.keys()), set(query_features))
self.assertIn(userreport['meta.position'], ["edX expert {}".format(user.id) for user in self.users])
self.assertIn(userreport['meta.company'], ["Open edX Inc {}".format(user.id) for user in self.users])
def test_enrolled_students_features_keys_cohorted(self):
course = CourseFactory.create(org="test", course="course1", display_name="run1")
course.cohort_config = {'cohorted': True, 'auto_cohort': True, 'auto_cohort_groups': ['cohort']}
self.store.update_item(course, self.instructor.id)
cohort = CohortFactory.create(name='cohort', course_id=course.id)
cohorted_students = [UserFactory.create() for _ in xrange(10)]
cohorted_usernames = [student.username for student in cohorted_students]
non_cohorted_student = UserFactory.create()
for student in cohorted_students:
cohort.users.add(student)
CourseEnrollment.enroll(student, course.id)
CourseEnrollment.enroll(non_cohorted_student, course.id)
instructor = InstructorFactory(course_key=course.id)
self.client.login(username=instructor.username, password='test')
query_features = ('username', 'cohort')
# There should be a constant of 2 SQL queries when calling
# enrolled_students_features. The first query comes from the call to
# User.objects.filter(...), and the second comes from
# prefetch_related('course_groups').
with self.assertNumQueries(2):
userreports = enrolled_students_features(course.id, query_features)
self.assertEqual(len([r for r in userreports if r['username'] in cohorted_usernames]), len(cohorted_students))
self.assertEqual(len([r for r in userreports if r['username'] == non_cohorted_student.username]), 1)
for report in userreports:
self.assertEqual(set(report.keys()), set(query_features))
if report['username'] in cohorted_usernames:
self.assertEqual(report['cohort'], cohort.name)
else:
self.assertEqual(report['cohort'], '[unassigned]')
def test_available_features(self):
self.assertEqual(len(AVAILABLE_FEATURES), len(STUDENT_FEATURES + PROFILE_FEATURES))
self.assertEqual(set(AVAILABLE_FEATURES), set(STUDENT_FEATURES + PROFILE_FEATURES))
def test_list_may_enroll(self):
may_enroll = list_may_enroll(self.course_key, ['email'])
self.assertEqual(len(may_enroll), len(self.students_who_may_enroll) - len(self.users))
        email_addresses = [student.email for student in self.students_who_may_enroll]
for student in may_enroll:
self.assertEqual(student.keys(), ['email'])
            self.assertIn(student['email'], email_addresses)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': True})
class TestCourseSaleRecordsAnalyticsBasic(ModuleStoreTestCase):
""" Test basic course sale records analytics functions. """
def setUp(self):
"""
Fixtures.
"""
super(TestCourseSaleRecordsAnalyticsBasic, self).setUp()
self.course = CourseFactory.create()
self.cost = 40
self.course_mode = CourseMode(
course_id=self.course.id, mode_slug="honor",
mode_display_name="honor cert", min_price=self.cost
)
self.course_mode.save()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
def test_course_sale_features(self):
query_features = [
'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
'total_amount', 'created_at', 'customer_reference_number', 'recipient_name', 'recipient_email',
'created_by', 'internal_reference', 'invoice_number', 'codes', 'course_id'
]
        # create invoice
sale_invoice = Invoice.objects.create(
total_amount=1234.32, company_name='Test1', company_contact_name='TestName',
company_contact_email='test@company.com', recipient_name='Testw_1', recipient_email='test2@test.com',
customer_reference_number='2Fwe23S', internal_reference="ABC", course_id=self.course.id
)
invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
invoice=sale_invoice,
qty=1,
unit_price=1234.32,
course_id=self.course.id
)
for i in range(5):
course_code = CourseRegistrationCode(
code="test_code{}".format(i), course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor, invoice=sale_invoice, invoice_item=invoice_item, mode_slug='honor'
)
course_code.save()
course_sale_records_list = sale_record_features(self.course.id, query_features)
for sale_record in course_sale_records_list:
self.assertEqual(sale_record['total_amount'], sale_invoice.total_amount)
self.assertEqual(sale_record['recipient_email'], sale_invoice.recipient_email)
self.assertEqual(sale_record['recipient_name'], sale_invoice.recipient_name)
self.assertEqual(sale_record['company_name'], sale_invoice.company_name)
self.assertEqual(sale_record['company_contact_name'], sale_invoice.company_contact_name)
self.assertEqual(sale_record['company_contact_email'], sale_invoice.company_contact_email)
self.assertEqual(sale_record['internal_reference'], sale_invoice.internal_reference)
self.assertEqual(sale_record['customer_reference_number'], sale_invoice.customer_reference_number)
self.assertEqual(sale_record['invoice_number'], sale_invoice.id)
self.assertEqual(sale_record['created_by'], self.instructor)
self.assertEqual(sale_record['total_used_codes'], 0)
self.assertEqual(sale_record['total_codes'], 5)
def test_sale_order_features_with_discount(self):
"""
Test Order Sales Report CSV
"""
query_features = [
('id', 'Order Id'),
('company_name', 'Company Name'),
('company_contact_name', 'Company Contact Name'),
('company_contact_email', 'Company Contact Email'),
('total_amount', 'Total Amount'),
('total_codes', 'Total Codes'),
('total_used_codes', 'Total Used Codes'),
('logged_in_username', 'Login Username'),
('logged_in_email', 'Login User Email'),
('purchase_time', 'Date of Sale'),
('customer_reference_number', 'Customer Reference Number'),
('recipient_name', 'Recipient Name'),
('recipient_email', 'Recipient Email'),
('bill_to_street1', 'Street 1'),
('bill_to_street2', 'Street 2'),
('bill_to_city', 'City'),
('bill_to_state', 'State'),
('bill_to_postalcode', 'Postal Code'),
('bill_to_country', 'Country'),
('order_type', 'Order Type'),
('status', 'Order Item Status'),
('coupon_code', 'Coupon Code'),
('unit_cost', 'Unit Price'),
('list_price', 'List Price'),
('codes', 'Registration Codes'),
('course_id', 'Course Id')
]
# add the coupon code for the course
coupon = Coupon(
code='test_code',
description='test_description',
course_id=self.course.id,
percentage_discount='10',
created_by=self.instructor,
is_active=True
)
coupon.save()
order = Order.get_cart_for_user(self.instructor)
order.order_type = 'business'
order.save()
order.add_billing_details(
company_name='Test Company',
company_contact_name='Test',
company_contact_email='test@123',
recipient_name='R1', recipient_email='',
customer_reference_number='PO#23'
)
CourseRegCodeItem.add_to_order(order, self.course.id, 4)
# apply the coupon code to the item in the cart
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': coupon.code})
self.assertEqual(resp.status_code, 200)
order.purchase()
# get the updated item
item = order.orderitem_set.all().select_subclasses()[0]
# get the redeemed coupon information
coupon_redemption = CouponRedemption.objects.select_related('coupon').filter(order=order)
db_columns = [x[0] for x in query_features]
sale_order_records_list = sale_order_record_features(self.course.id, db_columns)
for sale_order_record in sale_order_records_list:
self.assertEqual(sale_order_record['recipient_email'], order.recipient_email)
self.assertEqual(sale_order_record['recipient_name'], order.recipient_name)
self.assertEqual(sale_order_record['company_name'], order.company_name)
self.assertEqual(sale_order_record['company_contact_name'], order.company_contact_name)
self.assertEqual(sale_order_record['company_contact_email'], order.company_contact_email)
self.assertEqual(sale_order_record['customer_reference_number'], order.customer_reference_number)
self.assertEqual(sale_order_record['unit_cost'], item.unit_cost)
self.assertEqual(sale_order_record['list_price'], item.list_price)
self.assertEqual(sale_order_record['status'], item.status)
self.assertEqual(sale_order_record['coupon_code'], coupon_redemption[0].coupon.code)
def test_sale_order_features_without_discount(self):
"""
Test Order Sales Report CSV
"""
query_features = [
('id', 'Order Id'),
('company_name', 'Company Name'),
('company_contact_name', 'Company Contact Name'),
('company_contact_email', 'Company Contact Email'),
('total_amount', 'Total Amount'),
('total_codes', 'Total Codes'),
('total_used_codes', 'Total Used Codes'),
('logged_in_username', 'Login Username'),
('logged_in_email', 'Login User Email'),
('purchase_time', 'Date of Sale'),
('customer_reference_number', 'Customer Reference Number'),
('recipient_name', 'Recipient Name'),
('recipient_email', 'Recipient Email'),
('bill_to_street1', 'Street 1'),
('bill_to_street2', 'Street 2'),
('bill_to_city', 'City'),
('bill_to_state', 'State'),
('bill_to_postalcode', 'Postal Code'),
('bill_to_country', 'Country'),
('order_type', 'Order Type'),
('status', 'Order Item Status'),
('coupon_code', 'Coupon Code'),
('unit_cost', 'Unit Price'),
('list_price', 'List Price'),
('codes', 'Registration Codes'),
('course_id', 'Course Id'),
('quantity', 'Quantity'),
('total_discount', 'Total Discount'),
('total_amount', 'Total Amount Paid'),
]
# add the coupon code for the course
order = Order.get_cart_for_user(self.instructor)
order.order_type = 'business'
order.save()
order.add_billing_details(
company_name='Test Company',
company_contact_name='Test',
company_contact_email='test@123',
recipient_name='R1', recipient_email='',
customer_reference_number='PO#23'
)
CourseRegCodeItem.add_to_order(order, self.course.id, 4)
order.purchase()
# get the updated item
item = order.orderitem_set.all().select_subclasses()[0]
db_columns = [x[0] for x in query_features]
sale_order_records_list = sale_order_record_features(self.course.id, db_columns)
for sale_order_record in sale_order_records_list:
self.assertEqual(sale_order_record['recipient_email'], order.recipient_email)
self.assertEqual(sale_order_record['recipient_name'], order.recipient_name)
self.assertEqual(sale_order_record['company_name'], order.company_name)
self.assertEqual(sale_order_record['company_contact_name'], order.company_contact_name)
self.assertEqual(sale_order_record['company_contact_email'], order.company_contact_email)
self.assertEqual(sale_order_record['customer_reference_number'], order.customer_reference_number)
self.assertEqual(sale_order_record['unit_cost'], item.unit_cost)
# Make sure list price is not None and matches the unit price since no discount was applied.
self.assertIsNotNone(sale_order_record['list_price'])
self.assertEqual(sale_order_record['list_price'], item.unit_cost)
self.assertEqual(sale_order_record['status'], item.status)
self.assertEqual(sale_order_record['coupon_code'], 'N/A')
self.assertEqual(sale_order_record['total_amount'], item.unit_cost * item.qty)
self.assertEqual(sale_order_record['total_discount'], 0)
self.assertEqual(sale_order_record['quantity'], item.qty)
class TestCourseRegistrationCodeAnalyticsBasic(ModuleStoreTestCase):
""" Test basic course registration codes analytics functions. """
def setUp(self):
"""
Fixtures.
"""
super(TestCourseRegistrationCodeAnalyticsBasic, self).setUp()
self.course = CourseFactory.create()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
CourseSalesAdminRole(self.course.id).add_users(self.instructor)
# Create a paid course mode.
mode = CourseModeFactory.create()
mode.course_id = self.course.id
mode.min_price = 1
mode.save()
url = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'total_registration_codes': 12, 'company_name': 'Test Group', 'unit_price': 122.45,
'company_contact_name': 'TestName', 'company_contact_email': 'test@company.com', 'recipient_name': 'Test123',
'recipient_email': 'test@123.com', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
def test_course_registration_features(self):
query_features = [
'code', 'redeem_code_url', 'course_id', 'company_name', 'created_by',
'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference'
]
order = Order(user=self.instructor, status='purchased')
order.save()
registration_code_redemption = RegistrationCodeRedemption(
registration_code_id=1, redeemed_by=self.instructor
)
registration_code_redemption.save()
registration_codes = CourseRegistrationCode.objects.all()
course_registration_list = course_registration_features(query_features, registration_codes, csv_type='download')
self.assertEqual(len(course_registration_list), len(registration_codes))
for course_registration in course_registration_list:
self.assertEqual(set(course_registration.keys()), set(query_features))
self.assertIn(course_registration['code'], [registration_code.code for registration_code in registration_codes])
self.assertIn(
course_registration['course_id'],
[registration_code.course_id.to_deprecated_string() for registration_code in registration_codes]
)
self.assertIn(
course_registration['company_name'],
[
getattr(registration_code.invoice_item.invoice, 'company_name')
for registration_code in registration_codes
]
)
self.assertIn(
course_registration['invoice_id'],
[
registration_code.invoice_item.invoice_id
for registration_code in registration_codes
]
)
def test_coupon_codes_features(self):
query_features = [
'course_id', 'percentage_discount', 'code_redeemed_count', 'description', 'expiration_date',
'total_discounted_amount', 'total_discounted_seats'
]
for i in range(10):
coupon = Coupon(
code='test_code{0}'.format(i),
description='test_description',
course_id=self.course.id, percentage_discount='{0}'.format(i),
created_by=self.instructor,
is_active=True
)
coupon.save()
        # now create coupons with expiration dates
for i in range(5):
coupon = Coupon(
code='coupon{0}'.format(i), description='test_description', course_id=self.course.id,
percentage_discount='{0}'.format(i), created_by=self.instructor, is_active=True,
expiration_date=datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=2)
)
coupon.save()
active_coupons = Coupon.objects.filter(
Q(course_id=self.course.id),
Q(is_active=True),
Q(expiration_date__gt=datetime.datetime.now(pytz.UTC)) |
Q(expiration_date__isnull=True)
)
active_coupons_list = coupon_codes_features(query_features, active_coupons, self.course.id)
self.assertEqual(len(active_coupons_list), len(active_coupons))
for active_coupon in active_coupons_list:
self.assertEqual(set(active_coupon.keys()), set(query_features))
self.assertIn(active_coupon['percentage_discount'], [coupon.percentage_discount for coupon in active_coupons])
self.assertIn(active_coupon['description'], [coupon.description for coupon in active_coupons])
if active_coupon['expiration_date']:
self.assertIn(active_coupon['expiration_date'], [coupon.display_expiry_date for coupon in active_coupons])
self.assertIn(
active_coupon['course_id'],
[coupon.course_id.to_deprecated_string() for coupon in active_coupons]
)
|
xaviercobain88/framework-python
|
refs/heads/master
|
build/lib.linux-i686-2.7/openerp/addons/l10n_ve/__openerp__.py
|
51
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
##############################################################################
#    Module programmed and financed by:
#    Vauxoo, C.A. (<http://vauxoo.com>).
#    Our community team maintains this module:
# https://launchpad.net/~openerp-venezuela
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Venezuela - Accounting',
'version': '1.0',
'author': ['OpenERP SA', 'Vauxoo'],
'category': 'Localization/Account Charts',
'description':
"""
Chart of Accounts for Venezuela.
================================
Venezuela doesn't have a chart of accounts mandated by law, but the default
proposed in OpenERP complies with accepted best practices in Venezuela, and
this chart follows those practices.
This module has been tested as a base for more than 1000 companies, because
it is based on a mixture of the most common accounting software in the
Venezuelan market, which should make accountants' first steps with OpenERP
more comfortable.
This module doesn't claim to be the complete localization for Venezuela,
but it will help you start really quickly with OpenERP in this country.
This module gives you:
----------------------
- Basic taxes for Venezuela.
- Basic data to run tests with the community localization.
- A starting point for a new company whose accounting needs are basic.
We recommend installing account_anglo_saxon if you want your stock valued
as it is done in Venezuela, without invoices.
If you install this module and select the custom chart, a basic chart will be
proposed, but you will need to set the account defaults for taxes manually.
""",
'depends': ['account',
'base_vat',
'account_chart'
],
'demo': [],
'data': ['data/account_tax_code.xml',
'data/account_user_types.xml',
'data/account_chart.xml',
'data/account_tax.xml',
'data/l10n_chart_ve_wizard.xml'
],
'auto_install': False,
'installable': True,
'images': ['images/config_chart_l10n_ve.jpeg',
'images/l10n_ve_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
elbeardmorez/quodlibet
|
refs/heads/widgetbars_all
|
quodlibet/quodlibet/ext/playlist/shuffle.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2014,2016 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from quodlibet import _
from quodlibet.plugins.playlist import PlaylistPlugin
from quodlibet.qltk import Icons
class Shuffle(PlaylistPlugin):
PLUGIN_ID = "Shuffle Playlist"
PLUGIN_NAME = _("Shuffle Playlist")
PLUGIN_DESC = _("Randomly shuffles a playlist.")
PLUGIN_ICON = Icons.MEDIA_PLAYLIST_SHUFFLE
def plugin_playlist(self, playlist):
playlist.shuffle()
return True
def plugin_handles(self, playlists):
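        # Only handle a single playlist that has more than one song.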
return len(playlists) == 1 and len(playlists[0].songs) > 1
|
Moe-Team/MysteryOnline
|
refs/heads/master
|
MysteryOnline/__init__.py
|
1
|
dev = False
PREFIX = "v"
MAJOR = 1
MINOR = 3
REVISION = 4
SUFFIX = " stable"
def set_dev(value: bool):
global dev
dev = value
def get_dev() -> bool:
return dev
def get_version() -> str:
return "{0}{1}.{2}.{3}{4}".format(PREFIX, MAJOR, MINOR, REVISION, SUFFIX)
|
umglurf/juleol
|
refs/heads/master
|
migrations/versions/c8fd9f1d6ba9_add_tasting_lock.py
|
1
|
# SPDX-FileCopyrightText: 2020 Håvard Moen <post@haavard.name>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""add tasting lock
Revision ID: c8fd9f1d6ba9
Revises: 8eb7162afee7
Create Date: 2020-11-08 10:13:30.894263
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c8fd9f1d6ba9'
down_revision = '8eb7162afee7'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('tastings', sa.Column('locked', sa.Boolean(), nullable=False, default=False))
def downgrade():
op.drop_column('tastings', 'locked')
|
pim89/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/roxwel.py
|
73
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import unified_strdate, determine_ext
class RoxwelIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?roxwel\.com/player/(?P<filename>.+?)(\.|\?|$)'
_TEST = {
'url': 'http://www.roxwel.com/player/passionpittakeawalklive.html',
'info_dict': {
'id': 'passionpittakeawalklive',
'ext': 'flv',
'title': 'Take A Walk (live)',
'uploader': 'Passion Pit',
'uploader_id': 'passionpit',
'upload_date': '20120928',
'description': 'Passion Pit performs "Take A Walk\" live at The Backyard in Austin, Texas. ',
},
'params': {
# rtmp download
'skip_download': True,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
filename = mobj.group('filename')
info_url = 'http://www.roxwel.com/api/videos/%s' % filename
info = self._download_json(info_url, filename)
rtmp_rates = sorted([int(r.replace('flv_', '')) for r in info['media_rates'] if r.startswith('flv_')])
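        # e.g. media_rates ['flv_300', 'flv_1000'] -> [300, 1000]; the highest wins.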
best_rate = rtmp_rates[-1]
url_page_url = 'http://roxwel.com/pl_one_time.php?filename=%s&quality=%s' % (filename, best_rate)
rtmp_url = self._download_webpage(url_page_url, filename, 'Downloading video url')
ext = determine_ext(rtmp_url)
if ext == 'f4v':
rtmp_url = rtmp_url.replace(filename, 'mp4:%s' % filename)
return {
'id': filename,
'title': info['title'],
'url': rtmp_url,
'ext': 'flv',
'description': info['description'],
'thumbnail': info.get('player_image_url') or info.get('image_url_large'),
'uploader': info['artist'],
'uploader_id': info['artistname'],
'upload_date': unified_strdate(info['dbdate']),
}
|
aavanian/bokeh
|
refs/heads/master
|
sphinx/source/docs/user_guide/examples/interaction_toggle_button.py
|
16
|
from bokeh.io import output_file, show
from bokeh.layouts import widgetbox
from bokeh.models.widgets import Toggle
output_file("toggle.html")
toggle = Toggle(label="Foo", button_type="success")
show(widgetbox(toggle))
|
wang1352083/pythontool
|
refs/heads/master
|
python-2.7.12-lib/cProfile.py
|
169
|
#! /usr/bin/env python
"""Python interface for the 'lsprof' profiler.
Compatible with the 'profile' module.
"""
__all__ = ["run", "runctx", "help", "Profile"]
import _lsprof
# ____________________________________________________________
# Simple interface
def run(statement, filename=None, sort=-1):
"""Run statement under profiler optionally saving results in filename
This function takes a single argument that can be passed to the
"exec" statement, and an optional file name. In all cases this
routine attempts to "exec" its first argument and gather profiling
statistics from the execution. If no file name is present, then this
function automatically prints a simple profiling report, sorted by the
standard name string (file/line/function-name) that is presented in
each line.
"""
prof = Profile()
result = None
try:
try:
prof = prof.run(statement)
except SystemExit:
pass
finally:
if filename is not None:
prof.dump_stats(filename)
else:
result = prof.print_stats(sort)
return result
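# Illustrative usage (not part of the module):
#   import cProfile
#   cProfile.run("sum(range(1000000))")              # print a report to stdout
#   cProfile.run("sum(range(1000000))", "out.prof")  # save stats to out.prof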
def runctx(statement, globals, locals, filename=None, sort=-1):
"""Run statement under profiler, supplying your own globals and locals,
optionally saving results in filename.
statement and filename have the same semantics as profile.run
"""
prof = Profile()
result = None
try:
try:
prof = prof.runctx(statement, globals, locals)
except SystemExit:
pass
finally:
if filename is not None:
prof.dump_stats(filename)
else:
result = prof.print_stats(sort)
return result
# Backwards compatibility.
def help():
print "Documentation for the profile/cProfile modules can be found "
print "in the Python Library Reference, section 'The Python Profiler'."
# ____________________________________________________________
class Profile(_lsprof.Profiler):
"""Profile(custom_timer=None, time_unit=None, subcalls=True, builtins=True)
Builds a profiler object using the specified timer function.
The default timer is a fast built-in one based on real time.
For custom timer functions returning integers, time_unit can
be a float specifying a scale (i.e. how long each integer unit
is, in seconds).
"""
# Most of the functionality is in the base class.
# This subclass only adds convenient and backward-compatible methods.
def print_stats(self, sort=-1):
import pstats
pstats.Stats(self).strip_dirs().sort_stats(sort).print_stats()
def dump_stats(self, file):
import marshal
f = open(file, 'wb')
self.create_stats()
marshal.dump(self.stats, f)
f.close()
def create_stats(self):
self.disable()
self.snapshot_stats()
def snapshot_stats(self):
entries = self.getstats()
self.stats = {}
callersdicts = {}
# call information
for entry in entries:
func = label(entry.code)
nc = entry.callcount # ncalls column of pstats (before '/')
cc = nc - entry.reccallcount # ncalls column of pstats (after '/')
tt = entry.inlinetime # tottime column of pstats
ct = entry.totaltime # cumtime column of pstats
callers = {}
callersdicts[id(entry.code)] = callers
self.stats[func] = cc, nc, tt, ct, callers
# subcall information
for entry in entries:
if entry.calls:
func = label(entry.code)
for subentry in entry.calls:
try:
callers = callersdicts[id(subentry.code)]
except KeyError:
continue
nc = subentry.callcount
cc = nc - subentry.reccallcount
tt = subentry.inlinetime
ct = subentry.totaltime
if func in callers:
prev = callers[func]
nc += prev[0]
cc += prev[1]
tt += prev[2]
ct += prev[3]
callers[func] = nc, cc, tt, ct
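    # The result: self.stats maps each (filename, lineno, funcname) label
    # to a (cc, nc, tt, ct, callers) tuple, which is exactly the layout
    # that pstats.Stats expects to load.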
# The following two methods can be called by clients to use
# a profiler to profile a statement, given as a string.
def run(self, cmd):
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
self.enable()
try:
exec cmd in globals, locals
finally:
self.disable()
return self
# This method is more useful to profile a single function call.
def runcall(self, func, *args, **kw):
self.enable()
try:
return func(*args, **kw)
finally:
self.disable()
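    # Illustrative sketch of the runcall path (some_function and its
    # arguments are placeholders, not defined here):
    #
    #   prof = Profile()
    #   result = prof.runcall(some_function, 1, 2, key='value')
    #   prof.print_stats()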
# ____________________________________________________________
def label(code):
if isinstance(code, str):
return ('~', 0, code) # built-in functions ('~' sorts at the end)
else:
return (code.co_filename, code.co_firstlineno, code.co_name)
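# For example, a code object compiled from myscript.py line 12 becomes
# ('myscript.py', 12, 'main'), while a built-in (recorded as a bare
# string) becomes ('~', 0, <that string>), so built-ins sort last.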
# ____________________________________________________________
def main():
import os, sys
from optparse import OptionParser
usage = "cProfile.py [-o output_file_path] [-s sort] scriptfile [arg] ..."
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-o', '--outfile', dest="outfile",
help="Save stats to <outfile>", default=None)
parser.add_option('-s', '--sort', dest="sort",
help="Sort order when printing to stdout, based on pstats.Stats class",
default=-1)
if not sys.argv[1:]:
parser.print_usage()
sys.exit(2)
(options, args) = parser.parse_args()
sys.argv[:] = args
if len(args) > 0:
progname = args[0]
sys.path.insert(0, os.path.dirname(progname))
with open(progname, 'rb') as fp:
code = compile(fp.read(), progname, 'exec')
globs = {
'__file__': progname,
'__name__': '__main__',
'__package__': None,
}
runctx(code, globs, None, options.outfile, options.sort)
else:
parser.print_usage()
return parser
# When invoked as main program, invoke the profiler on a script
if __name__ == '__main__':
main()
|
caphrim007/ansible
|
refs/heads/devel
|
test/units/modules/network/netscaler/test_netscaler_lb_monitor.py
|
18
|
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible.compat.tests.mock import patch, Mock, MagicMock, call
from units.modules.utils import set_module_args
from .netscaler_module import TestModule, nitro_base_patcher
import sys
if sys.version_info[:2] != (2, 6):
import requests
class TestNetscalerLBMonitorModule(TestModule):
@classmethod
def setUpClass(cls):
class MockException(Exception):
pass
cls.MockException = MockException
m = MagicMock()
nssrc_modules_mock = {
'nssrc.com.citrix.netscaler.nitro.resource.config.lb': m,
'nssrc.com.citrix.netscaler.nitro.resource.config.lb.lbmonitor': m,
'nssrc.com.citrix.netscaler.nitro.resource.config.lb.lbmonitor.lbvmonitor': m,
}
cls.nitro_specific_patcher = patch.dict(sys.modules, nssrc_modules_mock)
cls.nitro_base_patcher = nitro_base_patcher
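        # patch.dict over sys.modules makes imports of the dotted nssrc
        # paths above resolve to the MagicMock, so the module under test
        # imports cleanly without the real nitro SDK installed.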
@classmethod
def tearDownClass(cls):
cls.nitro_base_patcher.stop()
cls.nitro_specific_patcher.stop()
def setUp(self):
        super(TestNetscalerLBMonitorModule, self).setUp()
self.nitro_base_patcher.start()
self.nitro_specific_patcher.start()
# Setup minimal required arguments to pass AnsibleModule argument parsing
def tearDown(self):
        super(TestNetscalerLBMonitorModule, self).tearDown()
self.nitro_base_patcher.stop()
self.nitro_specific_patcher.stop()
def test_graceful_nitro_api_import_error(self):
# Stop nitro api patching to cause ImportError
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
self.nitro_base_patcher.stop()
self.nitro_specific_patcher.stop()
from ansible.modules.network.netscaler import netscaler_lb_monitor
self.module = netscaler_lb_monitor
result = self.failed()
self.assertEqual(result['msg'], 'Could not load nitro python sdk')
def test_graceful_nitro_error_on_login(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
class MockException(Exception):
def __init__(self, *args, **kwargs):
self.errorcode = 0
self.message = ''
client_mock = Mock()
client_mock.login = Mock(side_effect=MockException)
m = Mock(return_value=client_mock)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
nitro_exception=MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
self.assertTrue(result['msg'].startswith('nitro exception'), msg='nitro exception during login not handled properly')
def test_graceful_no_connection_error(self):
if sys.version_info[:2] == (2, 6):
self.skipTest('requests library not available under python2.6')
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
attrs = {'login.side_effect': requests.exceptions.ConnectionError}
client_mock.configure_mock(**attrs)
m = Mock(return_value=client_mock)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
nitro_exception=self.MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
self.assertTrue(result['msg'].startswith('Connection error'), msg='Connection error was not handled gracefully')
def test_graceful_login_error(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
if sys.version_info[:2] == (2, 6):
self.skipTest('requests library not available under python2.6')
client_mock = Mock()
attrs = {'login.side_effect': requests.exceptions.SSLError}
client_mock.configure_mock(**attrs)
m = Mock(return_value=client_mock)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
nitro_exception=self.MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
self.assertTrue(result['msg'].startswith('SSL Error'), msg='SSL Error was not handled gracefully')
def test_save_config_called_on_state_present(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
m = Mock(return_value=client_mock)
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
lbmonitor_exists=Mock(side_effect=[False, True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
):
self.module = netscaler_lb_monitor
self.exited()
self.assertIn(call.save_config(), client_mock.mock_calls)
def test_save_config_called_on_state_absent(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='absent',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
m = Mock(return_value=client_mock)
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
lbmonitor_exists=Mock(side_effect=[True, False]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
):
self.module = netscaler_lb_monitor
self.exited()
self.assertIn(call.save_config(), client_mock.mock_calls)
def test_save_config_not_called_on_state_present(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
save_config=False,
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
m = Mock(return_value=client_mock)
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
lbmonitor_exists=Mock(side_effect=[False, True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
):
self.module = netscaler_lb_monitor
self.exited()
self.assertNotIn(call.save_config(), client_mock.mock_calls)
def test_save_config_not_called_on_state_absent(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='absent',
save_config=False,
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
m = Mock(return_value=client_mock)
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=m,
lbmonitor_exists=Mock(side_effect=[True, False]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
):
self.module = netscaler_lb_monitor
self.exited()
self.assertNotIn(call.save_config(), client_mock.mock_calls)
def test_ensure_feature_is_enabled_called(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
save_config=False,
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
feature_mock = Mock()
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
lbmonitor_exists=Mock(side_effect=[True, True]),
lbmonitor_identical=Mock(side_effect=[True, True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=feature_mock,
):
self.module = netscaler_lb_monitor
self.exited()
feature_mock.assert_called_with(client_mock, 'LB')
def test_ensure_feature_is_enabled_nitro_exception_caught(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
save_config=False,
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
client_mock = Mock()
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
errorcode = 10
message = 'mock error'
class MockException(Exception):
def __init__(self):
self.errorcode = errorcode
self.message = message
feature_mock = Mock(side_effect=MockException)
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
lbmonitor_exists=Mock(side_effect=[True, True]),
lbmonitor_identical=Mock(side_effect=[True, True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=feature_mock,
nitro_exception=MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
expected_msg = 'nitro exception errorcode=%s, message=%s' % (errorcode, message)
self.assertEqual(result['msg'], expected_msg, 'Failed to handle nitro exception')
def test_create_new_lb_monitor_workflow(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
save_config=False,
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=Mock()),
lbmonitor_exists=Mock(side_effect=[False, True]),
lbmonitor_identical=Mock(side_effect=[True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
):
self.module = netscaler_lb_monitor
result = self.exited()
lb_monitor_proxy_mock.assert_has_calls([call.add()])
self.assertTrue(result['changed'])
def test_update_lb_monitor_workflow(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
save_config=False,
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=Mock()),
lbmonitor_exists=Mock(side_effect=[True, True]),
lbmonitor_identical=Mock(side_effect=[False, True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
get_immutables_intersection=Mock(return_value=[]),
diff_list=Mock(return_value={}),
):
self.module = netscaler_lb_monitor
result = self.exited()
lb_monitor_proxy_mock.assert_has_calls([call.update()])
self.assertTrue(result['changed'])
def test_lb_monitor_exists_sanity_check(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
client_mock = Mock()
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
lbmonitor_exists=Mock(side_effect=[False, False]),
lbmonitor_identical=Mock(side_effect=[False, True]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
nitro_exception=self.MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
self.assertEqual(result['msg'], 'lb monitor does not exist')
def test_lb_monitor_identical_sanity_check(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
client_mock = Mock()
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
lbmonitor_exists=Mock(side_effect=[True, True]),
lbmonitor_identical=Mock(side_effect=[False, False]),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
            get_immutables_intersection=Mock(return_value=[]),
nitro_exception=self.MockException,
diff_list=Mock(return_value={}),
):
self.module = netscaler_lb_monitor
result = self.failed()
self.assertEqual(result['msg'], 'lb monitor is not configured correctly')
def test_absent_state_workflow(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='absent',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
client_mock = Mock()
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
lbmonitor_exists=Mock(side_effect=[True, False]),
):
self.module = netscaler_lb_monitor
result = self.exited()
lb_monitor_proxy_mock.assert_has_calls([call.delete()])
self.assertTrue(result['changed'])
def test_absent_state_sanity_check(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='absent',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
client_mock = Mock()
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(return_value=True),
lbmonitor_exists=Mock(side_effect=[True, True]),
nitro_exception=self.MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
lb_monitor_proxy_mock.assert_has_calls([call.delete()])
self.assertEqual(result['msg'], 'lb monitor still exists')
def test_get_immutables_failure(self):
set_module_args(dict(
nitro_user='user',
nitro_pass='pass',
nsip='192.0.2.1',
state='present',
))
from ansible.modules.network.netscaler import netscaler_lb_monitor
lb_monitor_proxy_mock = Mock(diff_object=Mock(return_value={}))
client_mock = Mock()
m = Mock(return_value=['some'])
with patch.multiple(
'ansible.modules.network.netscaler.netscaler_lb_monitor',
get_nitro_client=Mock(return_value=client_mock),
ConfigProxy=Mock(return_value=lb_monitor_proxy_mock),
ensure_feature_is_enabled=Mock(),
lbmonitor_exists=Mock(side_effect=[True, True]),
lbmonitor_identical=Mock(side_effect=[False, True]),
get_immutables_intersection=m,
diff_list=Mock(return_value={}),
nitro_exception=self.MockException,
):
self.module = netscaler_lb_monitor
result = self.failed()
self.assertTrue(
result['msg'].startswith('Cannot update immutable attributes'),
msg='Did not handle immutables error correctly',
)
|
sbktechnology/trufil-frappe
|
refs/heads/develop
|
frappe/utils/make_random.py
|
30
|
import frappe, random
settings = frappe._dict(
prob = {
"default": { "make": 0.6, "qty": (1,5) },
}
)
def add_random_children(doc, fieldname, rows, randomize, unique=None):
nrows = rows
if rows > 1:
nrows = random.randrange(1, rows)
for i in xrange(nrows):
d = {}
for key, val in randomize.items():
if isinstance(val[0], basestring):
d[key] = get_random(*val)
else:
d[key] = random.randrange(*val)
if unique:
if not doc.get(fieldname, {unique:d[unique]}):
doc.append(fieldname, d)
else:
doc.append(fieldname, d)
def get_random(doctype, filters=None):
condition = []
if filters:
for key, val in filters.items():
            condition.append("%s='%s'" % (key, str(val).replace("'", "\\'")))
if condition:
condition = " where " + " and ".join(condition)
else:
condition = ""
out = frappe.db.sql("""select name from `tab%s` %s
order by RAND() limit 0,1""" % (doctype, condition))
return out and out[0][0] or None
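# Example (illustrative; the DocType and filter values are placeholders):
#
#   customer = get_random("Customer", {"territory": "Rest Of The World"})
#
# returns the name of one randomly ordered matching document, or None
# when no row matches.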
def can_make(doctype):
return random.random() < settings.prob.get(doctype, settings.prob["default"])["make"]
def how_many(doctype):
return random.randrange(*settings.prob.get(doctype, settings.prob["default"])["qty"])
|
ccrook/Quantum-GIS
|
refs/heads/master
|
python/plugins/processing/algs/grass7/ext/r_li_edgedensity.py
|
5
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_edgedensity.py
-------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .r_li import checkMovingWindow, configFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context)
def processCommand(alg, parameters, context):
configFile(alg, parameters, context)
|
KaranToor/MA450
|
refs/heads/master
|
google-cloud-sdk/.install/.backup/lib/surface/bigtable/__init__.py
|
2
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main command group for bigtable."""
from googlecloudsdk.calliope import base
from googlecloudsdk.core import apis
from googlecloudsdk.core import properties
from googlecloudsdk.core import resolvers
from googlecloudsdk.core import resources
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class BigtableV2(base.Group):
"""Manage your Cloud Bigtable storage."""
def Filter(self, context, args):
project = properties.VALUES.core.project
resolver = resolvers.FromProperty(project)
resources.REGISTRY.SetParamDefault(
'bigtableadmin', collection=None, param='projectsId', resolver=resolver)
|
robclark/chromium
|
refs/heads/master
|
third_party/tlslite/tlslite/SessionCache.py
|
358
|
"""Class for caching TLS sessions."""
import thread
import time
class SessionCache:
"""This class is used by the server to cache TLS sessions.
Caching sessions allows the client to use TLS session resumption
and avoid the expense of a full handshake. To use this class,
simply pass a SessionCache instance into the server handshake
function.
This class is thread-safe.
"""
#References to these instances
#are also held by the caller, who may change the 'resumable'
#flag, so the SessionCache must return the same instances
#it was passed in.
def __init__(self, maxEntries=10000, maxAge=14400):
"""Create a new SessionCache.
@type maxEntries: int
@param maxEntries: The maximum size of the cache. When this
limit is reached, the oldest sessions will be deleted as
necessary to make room for new ones. The default is 10000.
@type maxAge: int
@param maxAge: The number of seconds before a session expires
from the cache. The default is 14400 (i.e. 4 hours)."""
self.lock = thread.allocate_lock()
# Maps sessionIDs to sessions
self.entriesDict = {}
#Circular list of (sessionID, timestamp) pairs
self.entriesList = [(None,None)] * maxEntries
self.firstIndex = 0
self.lastIndex = 0
self.maxAge = maxAge
def __getitem__(self, sessionID):
self.lock.acquire()
try:
            self._purge() #Delete expired entries first, so anything we return is still fresh
session = self.entriesDict[sessionID]
#When we add sessions they're resumable, but it's possible
#for the session to be invalidated later on (if a fatal alert
#is returned), so we have to check for resumability before
#returning the session.
if session.valid():
return session
else:
raise KeyError()
finally:
self.lock.release()
def __setitem__(self, sessionID, session):
self.lock.acquire()
try:
#Add the new element
self.entriesDict[sessionID] = session
self.entriesList[self.lastIndex] = (sessionID, time.time())
self.lastIndex = (self.lastIndex+1) % len(self.entriesList)
#If the cache is full, we delete the oldest element to make an
#empty space
if self.lastIndex == self.firstIndex:
del(self.entriesDict[self.entriesList[self.firstIndex][0]])
self.firstIndex = (self.firstIndex+1) % len(self.entriesList)
finally:
self.lock.release()
#Delete expired items
def _purge(self):
currentTime = time.time()
#Search through the circular list, deleting expired elements until
#we reach a non-expired element. Since elements in list are
#ordered in time, we can break once we reach the first non-expired
#element
index = self.firstIndex
while index != self.lastIndex:
if currentTime - self.entriesList[index][1] > self.maxAge:
del(self.entriesDict[self.entriesList[index][0]])
index = (index+1) % len(self.entriesList)
else:
break
self.firstIndex = index
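# Illustrative server-side usage (a sketch; handshakeServer and its
# arguments belong to tlslite's TLSConnection API, not this module):
#
#   sessionCache = SessionCache(maxEntries=5000, maxAge=3600)
#   connection.handshakeServer(certChain=certChain, privateKey=privateKey,
#                              sessionCache=sessionCache)
#
# Connections sharing the cache let returning clients resume a session
# instead of paying for a full handshake.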
def _test():
import doctest, SessionCache
return doctest.testmod(SessionCache)
if __name__ == "__main__":
_test()
|
treeform/pystorm
|
refs/heads/master
|
tests/basic/for_step.py
|
5
|
for x in xrange(19,342,13):
print x
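# Prints 19, 32, 45, ... 331: every 13th integer starting at 19 and
# strictly below 342 (25 values in all).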
|