| repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, ⌀ = nullable) |
|---|---|---|---|---|
BeDjango/intef-openedx
|
refs/heads/master
|
lms/djangoapps/mobile_api/__init__.py
|
218
|
"""
Mobile API
"""
|
MFoster/breeze
|
refs/heads/master
|
django/contrib/formtools/tests/wizard/wizardtests/forms.py
|
313
|
import os
import tempfile
from django import forms
from django.contrib.auth.models import User
from django.core.files.storage import FileSystemStorage
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory
from django.http import HttpResponse
from django.template import Template, Context
from django.contrib.formtools.wizard.views import WizardView
temp_storage_location = tempfile.mkdtemp(dir=os.environ.get('DJANGO_TEST_TEMP_DIR'))
temp_storage = FileSystemStorage(location=temp_storage_location)
class Page1(forms.Form):
name = forms.CharField(max_length=100)
user = forms.ModelChoiceField(queryset=User.objects.all())
thirsty = forms.NullBooleanField()
class Page2(forms.Form):
address1 = forms.CharField(max_length=100)
address2 = forms.CharField(max_length=100)
file1 = forms.FileField()
class Page3(forms.Form):
random_crap = forms.CharField(max_length=100)
Page4 = formset_factory(Page3, extra=2)
class ContactWizard(WizardView):
file_storage = temp_storage
def done(self, form_list, **kwargs):
c = Context({
'form_list': [x.cleaned_data for x in form_list],
'all_cleaned_data': self.get_all_cleaned_data(),
})
for form in self.form_list.keys():
c[form] = self.get_cleaned_data_for_step(form)
c['this_will_fail'] = self.get_cleaned_data_for_step('this_will_fail')
return HttpResponse(Template('').render(c))
def get_context_data(self, form, **kwargs):
context = super(ContactWizard, self).get_context_data(form, **kwargs)
if self.storage.current_step == 'form2':
context.update({'another_var': True})
return context
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ('username', 'email')
UserFormSet = modelformset_factory(User, form=UserForm)
class SessionContactWizard(ContactWizard):
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieContactWizard(ContactWizard):
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
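# Editor's sketch (not part of the original test module): the wizard classes
# above are typically wired into a URLconf via WizardView.as_view(); the URL
# pattern below is hypothetical.
#
#     from django.conf.urls import url
#     urlpatterns = [
#         url(r'^wizard/$', SessionContactWizard.as_view(
#             [Page1, Page2, Page3, Page4])),
#     ]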
|
ycaihua/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/test/test_strptime.py
|
84
|
"""PyUnit testing against strptime"""
import unittest
import time
import locale
import re
import sys
from test import support
from datetime import date as datetime_date
import _strptime
class getlang_Tests(unittest.TestCase):
"""Test _getlang"""
def test_basic(self):
self.assertEqual(_strptime._getlang(), locale.getlocale(locale.LC_TIME))
class LocaleTime_Tests(unittest.TestCase):
"""Tests for _strptime.LocaleTime.
All values are lower-cased when stored in LocaleTime, so make sure to
compare values after running ``lower`` on them.
"""
def setUp(self):
"""Create time tuple based on current time."""
self.time_tuple = time.localtime()
self.LT_ins = _strptime.LocaleTime()
def compare_against_time(self, testing, directive, tuple_position,
error_msg):
"""Helper method that tests testing against directive based on the
tuple_position of time_tuple. Uses error_msg as error message.
"""
strftime_output = time.strftime(directive, self.time_tuple).lower()
comparison = testing[self.time_tuple[tuple_position]]
self.assertIn(strftime_output, testing,
"%s: not found in tuple" % error_msg)
self.assertEqual(comparison, strftime_output,
"%s: position within tuple incorrect; %s != %s" %
(error_msg, comparison, strftime_output))
def test_weekday(self):
# Make sure that full and abbreviated weekday names are correct in
# both string and position with tuple
self.compare_against_time(self.LT_ins.f_weekday, '%A', 6,
"Testing of full weekday name failed")
self.compare_against_time(self.LT_ins.a_weekday, '%a', 6,
"Testing of abbreviated weekday name failed")
def test_month(self):
# Test full and abbreviated month names; both string and position
# within the tuple
self.compare_against_time(self.LT_ins.f_month, '%B', 1,
"Testing against full month name failed")
self.compare_against_time(self.LT_ins.a_month, '%b', 1,
"Testing against abbreviated month name failed")
def test_am_pm(self):
# Make sure the AM/PM representation is done properly
strftime_output = time.strftime("%p", self.time_tuple).lower()
self.assertIn(strftime_output, self.LT_ins.am_pm,
"AM/PM representation not in tuple")
if self.time_tuple[3] < 12: position = 0
else: position = 1
self.assertEqual(self.LT_ins.am_pm[position], strftime_output,
"AM/PM representation in the wrong position within the tuple")
def test_timezone(self):
# Make sure timezone is correct
timezone = time.strftime("%Z", self.time_tuple).lower()
if timezone:
self.assertTrue(timezone in self.LT_ins.timezone[0] or
timezone in self.LT_ins.timezone[1],
"timezone %s not found in %s" %
(timezone, self.LT_ins.timezone))
def test_date_time(self):
# Check that LC_date_time, LC_date, and LC_time are correct
# The magic date is used so as to not have issues with %c when the day of
# the month is a single digit and has a leading space. This is not an
# issue since strptime still parses it correctly. The problem is
# testing these directives for correctness by comparing strftime
# output.
magic_date = (1999, 3, 17, 22, 44, 55, 2, 76, 0)
strftime_output = time.strftime("%c", magic_date)
self.assertEqual(time.strftime(self.LT_ins.LC_date_time, magic_date),
strftime_output, "LC_date_time incorrect")
strftime_output = time.strftime("%x", magic_date)
self.assertEqual(time.strftime(self.LT_ins.LC_date, magic_date),
strftime_output, "LC_date incorrect")
strftime_output = time.strftime("%X", magic_date)
self.assertEqual(time.strftime(self.LT_ins.LC_time, magic_date),
strftime_output, "LC_time incorrect")
LT = _strptime.LocaleTime()
LT.am_pm = ('', '')
self.assertTrue(LT.LC_time, "LocaleTime's LC directives cannot handle "
"empty strings")
def test_lang(self):
# Make sure lang is set to what _getlang() returns
# Assuming locale has not changed between now and when self.LT_ins was created
self.assertEqual(self.LT_ins.lang, _strptime._getlang())
class TimeRETests(unittest.TestCase):
"""Tests for TimeRE."""
def setUp(self):
"""Construct generic TimeRE object."""
self.time_re = _strptime.TimeRE()
self.locale_time = _strptime.LocaleTime()
def test_pattern(self):
# Test TimeRE.pattern
pattern_string = self.time_re.pattern(r"%a %A %d")
self.assertTrue(pattern_string.find(self.locale_time.a_weekday[2]) != -1,
"did not find abbreviated weekday in pattern string '%s'" %
pattern_string)
self.assertTrue(pattern_string.find(self.locale_time.f_weekday[4]) != -1,
"did not find full weekday in pattern string '%s'" %
pattern_string)
self.assertTrue(pattern_string.find(self.time_re['d']) != -1,
"did not find 'd' directive pattern string '%s'" %
pattern_string)
def test_pattern_escaping(self):
# Make sure any characters in the format string that might be taken as
# regex syntax are escaped.
pattern_string = self.time_re.pattern(r"\d+")
self.assertIn(r"\\d\+", pattern_string,
"%s does not have re characters escaped properly" %
pattern_string)
def test_compile(self):
# Check that compiled regex is correct
found = self.time_re.compile(r"%A").match(self.locale_time.f_weekday[6])
self.assertTrue(found and found.group('A') == self.locale_time.f_weekday[6],
"re object for '%A' failed")
compiled = self.time_re.compile(r"%a %b")
found = compiled.match("%s %s" % (self.locale_time.a_weekday[4],
self.locale_time.a_month[4]))
self.assertTrue(found,
"Match failed with '%s' regex and '%s' string" %
(compiled.pattern, "%s %s" % (self.locale_time.a_weekday[4],
self.locale_time.a_month[4])))
self.assertTrue(found.group('a') == self.locale_time.a_weekday[4] and
found.group('b') == self.locale_time.a_month[4],
"re object couldn't find the abbreviated weekday month in "
"'%s' using '%s'; group 'a' = '%s', group 'b' = %s'" %
(found.string, found.re.pattern, found.group('a'),
found.group('b')))
for directive in ('a','A','b','B','c','d','H','I','j','m','M','p','S',
'U','w','W','x','X','y','Y','Z','%'):
compiled = self.time_re.compile("%" + directive)
found = compiled.match(time.strftime("%" + directive))
self.assertTrue(found, "Matching failed on '%s' using '%s' regex" %
(time.strftime("%" + directive),
compiled.pattern))
def test_blankpattern(self):
# Make sure that when a locale tuple has no values, no regex is generated.
# Fixes bug #661354
test_locale = _strptime.LocaleTime()
test_locale.timezone = (frozenset(), frozenset())
self.assertEqual(_strptime.TimeRE(test_locale).pattern("%Z"), '',
"with timezone == ('',''), TimeRE().pattern('%Z') != ''")
def test_matching_with_escapes(self):
# Make sure a format that requires escaping of characters works
compiled_re = self.time_re.compile(r"\w+ %m")
found = compiled_re.match(r"\w+ 10")
self.assertTrue(found, r"Escaping of format '\w+ 10' failed")
def test_locale_data_w_regex_metacharacters(self):
# Check that if locale data contains regex metacharacters they are
# escaped properly.
# Discovered by bug #1039270 .
locale_time = _strptime.LocaleTime()
locale_time.timezone = (frozenset(("utc", "gmt",
"Tokyo (standard time)")),
frozenset("Tokyo (daylight time)"))
time_re = _strptime.TimeRE(locale_time)
self.assertTrue(time_re.compile("%Z").match("Tokyo (standard time)"),
"locale data that contains regex metacharacters is not"
" properly escaped")
def test_whitespace_substitution(self):
# When pattern contains whitespace, make sure it is taken into account
# so as to not allow subpatterns to end up next to each other and
# "steal" characters from each other.
pattern = self.time_re.pattern('%j %H')
self.assertFalse(re.match(pattern, "180"))
self.assertTrue(re.match(pattern, "18 0"))
class StrptimeTests(unittest.TestCase):
"""Tests for _strptime.strptime."""
def setUp(self):
"""Create testing time tuple."""
self.time_tuple = time.gmtime()
def test_ValueError(self):
# Make sure ValueError is raised when match fails or format is bad
self.assertRaises(ValueError, _strptime._strptime_time, data_string="%d",
format="%A")
for bad_format in ("%", "% ", "%e"):
try:
_strptime._strptime_time("2005", bad_format)
except ValueError:
continue
except Exception as err:
self.fail("'%s' raised %s, not ValueError" %
(bad_format, err.__class__.__name__))
else:
self.fail("'%s' did not raise ValueError" % bad_format)
def test_strptime_exception_context(self):
# check that this doesn't chain exceptions needlessly (see #17572)
with self.assertRaises(ValueError) as e:
_strptime._strptime_time('', '%D')
self.assertIs(e.exception.__suppress_context__, True)
# additional check for IndexError branch (issue #19545)
with self.assertRaises(ValueError) as e:
_strptime._strptime_time('19', '%Y %')
self.assertIs(e.exception.__suppress_context__, True)
def test_unconverteddata(self):
# Check ValueError is raised when there is unconverted data
self.assertRaises(ValueError, _strptime._strptime_time, "10 12", "%m")
def helper(self, directive, position):
"""Helper fxn in testing."""
strf_output = time.strftime("%" + directive, self.time_tuple)
strp_output = _strptime._strptime_time(strf_output, "%" + directive)
self.assertTrue(strp_output[position] == self.time_tuple[position],
"testing of '%s' directive failed; '%s' -> %s != %s" %
(directive, strf_output, strp_output[position],
self.time_tuple[position]))
def test_year(self):
# Test that the year is handled properly
for directive in ('y', 'Y'):
self.helper(directive, 0)
# Must also make sure %y values are correct for bounds set by Open Group
for century, bounds in ((1900, ('69', '99')), (2000, ('00', '68'))):
for bound in bounds:
strp_output = _strptime._strptime_time(bound, '%y')
expected_result = century + int(bound)
self.assertTrue(strp_output[0] == expected_result,
"'y' test failed; passed in '%s' "
"and returned '%s'" % (bound, strp_output[0]))
def test_month(self):
# Test for month directives
for directive in ('B', 'b', 'm'):
self.helper(directive, 1)
def test_day(self):
# Test for day directives
self.helper('d', 2)
def test_hour(self):
# Test hour directives
self.helper('H', 3)
strf_output = time.strftime("%I %p", self.time_tuple)
strp_output = _strptime._strptime_time(strf_output, "%I %p")
self.assertTrue(strp_output[3] == self.time_tuple[3],
"testing of '%%I %%p' directive failed; '%s' -> %s != %s" %
(strf_output, strp_output[3], self.time_tuple[3]))
def test_minute(self):
# Test minute directives
self.helper('M', 4)
def test_second(self):
# Test second directives
self.helper('S', 5)
def test_fraction(self):
# Test microseconds
import datetime
d = datetime.datetime(2012, 12, 20, 12, 34, 56, 78987)
tup, frac = _strptime._strptime(str(d), format="%Y-%m-%d %H:%M:%S.%f")
self.assertEqual(frac, d.microsecond)
def test_weekday(self):
# Test weekday directives
for directive in ('A', 'a', 'w'):
self.helper(directive,6)
def test_julian(self):
# Test julian directives
self.helper('j', 7)
def test_timezone(self):
# Test timezone directives.
# When gmtime() is used with %Z, the entire result of strftime() is empty.
# Check for equal timezone names deals with bad locale info when this
# occurs; first found in FreeBSD 4.4.
strp_output = _strptime._strptime_time("UTC", "%Z")
self.assertEqual(strp_output.tm_isdst, 0)
strp_output = _strptime._strptime_time("GMT", "%Z")
self.assertEqual(strp_output.tm_isdst, 0)
time_tuple = time.localtime()
strf_output = time.strftime("%Z") #UTC does not have a timezone
strp_output = _strptime._strptime_time(strf_output, "%Z")
locale_time = _strptime.LocaleTime()
if time.tzname[0] != time.tzname[1] or not time.daylight:
self.assertTrue(strp_output[8] == time_tuple[8],
"timezone check failed; '%s' -> %s != %s" %
(strf_output, strp_output[8], time_tuple[8]))
else:
self.assertTrue(strp_output[8] == -1,
"LocaleTime().timezone has duplicate values and "
"time.daylight but timezone value not set to -1")
def test_bad_timezone(self):
# Explicitly test possibility of bad timezone;
# when time.tzname[0] == time.tzname[1] and time.daylight
tz_name = time.tzname[0]
if tz_name.upper() in ("UTC", "GMT"):
self.skipTest('need non-UTC/GMT timezone')
try:
original_tzname = time.tzname
original_daylight = time.daylight
time.tzname = (tz_name, tz_name)
time.daylight = 1
tz_value = _strptime._strptime_time(tz_name, "%Z")[8]
self.assertEqual(tz_value, -1,
"%s lead to a timezone value of %s instead of -1 when "
"time.daylight set to %s and passing in %s" %
(time.tzname, tz_value, time.daylight, tz_name))
finally:
time.tzname = original_tzname
time.daylight = original_daylight
def test_date_time(self):
# Test %c directive
for position in range(6):
self.helper('c', position)
def test_date(self):
# Test %x directive
for position in range(0,3):
self.helper('x', position)
def test_time(self):
# Test %X directive
for position in range(3,6):
self.helper('X', position)
def test_percent(self):
# Make sure % signs are handled properly
strf_output = time.strftime("%m %% %Y", self.time_tuple)
strp_output = _strptime._strptime_time(strf_output, "%m %% %Y")
self.assertTrue(strp_output[0] == self.time_tuple[0] and
strp_output[1] == self.time_tuple[1],
"handling of percent sign failed")
def test_caseinsensitive(self):
# Should handle names case-insensitively.
strf_output = time.strftime("%B", self.time_tuple)
self.assertTrue(_strptime._strptime_time(strf_output.upper(), "%B"),
"strptime does not handle ALL-CAPS names properly")
self.assertTrue(_strptime._strptime_time(strf_output.lower(), "%B"),
"strptime does not handle lowercase names properly")
self.assertTrue(_strptime._strptime_time(strf_output.capitalize(), "%B"),
"strptime does not handle capword names properly")
def test_defaults(self):
# Default return value should be (1900, 1, 1, 0, 0, 0, 0, 1, -1)
defaults = (1900, 1, 1, 0, 0, 0, 0, 1, -1)
strp_output = _strptime._strptime_time('1', '%m')
self.assertTrue(strp_output == defaults,
"Default values for strptime() are incorrect;"
" %s != %s" % (strp_output, defaults))
def test_escaping(self):
# Make sure all characters that have regex significance are escaped.
# Parentheses are in a purposeful order; will cause an error of
# unbalanced parentheses when the regex is compiled if they are not
# escaped.
# Test instigated by bug #796149 .
need_escaping = r".^$*+?{}\[]|)("
self.assertTrue(_strptime._strptime_time(need_escaping, need_escaping))
def test_feb29_on_leap_year_without_year(self):
time.strptime("Feb 29", "%b %d")
def test_mar1_comes_after_feb29_even_when_omitting_the_year(self):
self.assertLess(
time.strptime("Feb 29", "%b %d"),
time.strptime("Mar 1", "%b %d"))
class Strptime12AMPMTests(unittest.TestCase):
"""Test a _strptime regression in '%I %p' at 12 noon (12 PM)"""
def test_twelve_noon_midnight(self):
eq = self.assertEqual
eq(time.strptime('12 PM', '%I %p')[3], 12)
eq(time.strptime('12 AM', '%I %p')[3], 0)
eq(_strptime._strptime_time('12 PM', '%I %p')[3], 12)
eq(_strptime._strptime_time('12 AM', '%I %p')[3], 0)
class JulianTests(unittest.TestCase):
"""Test a _strptime regression that all julian (1-366) are accepted"""
def test_all_julian_days(self):
eq = self.assertEqual
for i in range(1, 367):
# use 2004, since it is a leap year and thus has 366 days
eq(_strptime._strptime_time('%d 2004' % i, '%j %Y')[7], i)
class CalculationTests(unittest.TestCase):
"""Test that strptime() fills in missing info correctly"""
def setUp(self):
self.time_tuple = time.gmtime()
def test_julian_calculation(self):
# Make sure that when the Julian day is missing, it is calculated
format_string = "%Y %m %d %H %M %S %w %Z"
result = _strptime._strptime_time(time.strftime(format_string, self.time_tuple),
format_string)
self.assertTrue(result.tm_yday == self.time_tuple.tm_yday,
"Calculation of tm_yday failed; %s != %s" %
(result.tm_yday, self.time_tuple.tm_yday))
def test_gregorian_calculation(self):
# Test that Gregorian date can be calculated from Julian day
format_string = "%Y %H %M %S %w %j %Z"
result = _strptime._strptime_time(time.strftime(format_string, self.time_tuple),
format_string)
self.assertTrue(result.tm_year == self.time_tuple.tm_year and
result.tm_mon == self.time_tuple.tm_mon and
result.tm_mday == self.time_tuple.tm_mday,
"Calculation of Gregorian date failed;"
"%s-%s-%s != %s-%s-%s" %
(result.tm_year, result.tm_mon, result.tm_mday,
self.time_tuple.tm_year, self.time_tuple.tm_mon,
self.time_tuple.tm_mday))
def test_day_of_week_calculation(self):
# Test that the day of the week is calculated as needed
format_string = "%Y %m %d %H %S %j %Z"
result = _strptime._strptime_time(time.strftime(format_string, self.time_tuple),
format_string)
self.assertTrue(result.tm_wday == self.time_tuple.tm_wday,
"Calculation of day of the week failed;"
"%s != %s" % (result.tm_wday, self.time_tuple.tm_wday))
def test_week_of_year_and_day_of_week_calculation(self):
# Should be able to infer date if given year, week of year (%U or %W)
# and day of the week
def test_helper(ymd_tuple, test_reason):
for directive in ('W', 'U'):
format_string = "%%Y %%%s %%w" % directive
dt_date = datetime_date(*ymd_tuple)
strp_input = dt_date.strftime(format_string)
strp_output = _strptime._strptime_time(strp_input, format_string)
self.assertTrue(strp_output[:3] == ymd_tuple,
"%s(%s) test failed w/ '%s': %s != %s (%s != %s)" %
(test_reason, directive, strp_input,
strp_output[:3], ymd_tuple,
strp_output[7], dt_date.timetuple()[7]))
test_helper((1901, 1, 3), "week 0")
test_helper((1901, 1, 8), "common case")
test_helper((1901, 1, 13), "day on Sunday")
test_helper((1901, 1, 14), "day on Monday")
test_helper((1905, 1, 1), "Jan 1 on Sunday")
test_helper((1906, 1, 1), "Jan 1 on Monday")
test_helper((1906, 1, 7), "first Sunday in a year starting on Monday")
test_helper((1905, 12, 31), "Dec 31 on Sunday")
test_helper((1906, 12, 31), "Dec 31 on Monday")
test_helper((2008, 12, 29), "Monday in the last week of the year")
test_helper((2008, 12, 22), "Monday in the second-to-last week of the "
"year")
test_helper((1978, 10, 23), "randomly chosen date")
test_helper((2004, 12, 18), "randomly chosen date")
test_helper((1978, 10, 23), "year starting and ending on Monday while "
"date not on Sunday or Monday")
test_helper((1917, 12, 17), "year starting and ending on Monday with "
"a Monday not at the beginning or end "
"of the year")
test_helper((1917, 12, 31), "Dec 31 on Monday with year starting and "
"ending on Monday")
test_helper((2007, 1, 7), "First Sunday of 2007")
test_helper((2007, 1, 14), "Second Sunday of 2007")
test_helper((2006, 12, 31), "Last Sunday of 2006")
test_helper((2006, 12, 24), "Second to last Sunday of 2006")
class CacheTests(unittest.TestCase):
"""Test that caching works properly."""
def test_time_re_recreation(self):
# Make sure cache is recreated when current locale does not match what
# cached object was created with.
_strptime._strptime_time("10", "%d")
_strptime._strptime_time("2005", "%Y")
_strptime._TimeRE_cache.locale_time.lang = "Ni"
original_time_re = _strptime._TimeRE_cache
_strptime._strptime_time("10", "%d")
self.assertIsNot(original_time_re, _strptime._TimeRE_cache)
self.assertEqual(len(_strptime._regex_cache), 1)
def test_regex_cleanup(self):
# Make sure cached regexes are discarded when cache becomes "full".
try:
del _strptime._regex_cache['%d']
except KeyError:
pass
bogus_key = 0
while len(_strptime._regex_cache) <= _strptime._CACHE_MAX_SIZE:
_strptime._regex_cache[bogus_key] = None
bogus_key += 1
_strptime._strptime_time("10", "%d")
self.assertEqual(len(_strptime._regex_cache), 1)
def test_new_localetime(self):
# A new LocaleTime instance should be created when a new TimeRE object
# is created.
locale_time_id = _strptime._TimeRE_cache.locale_time
_strptime._TimeRE_cache.locale_time.lang = "Ni"
_strptime._strptime_time("10", "%d")
self.assertIsNot(locale_time_id, _strptime._TimeRE_cache.locale_time)
def test_TimeRE_recreation(self):
# The TimeRE instance should be recreated upon changing the locale.
locale_info = locale.getlocale(locale.LC_TIME)
try:
locale.setlocale(locale.LC_TIME, ('en_US', 'UTF8'))
except locale.Error:
self.skipTest('test needs en_US.UTF8 locale')
try:
_strptime._strptime_time('10', '%d')
# Get id of current cache object.
first_time_re = _strptime._TimeRE_cache
try:
# Change the locale and force a recreation of the cache.
locale.setlocale(locale.LC_TIME, ('de_DE', 'UTF8'))
_strptime._strptime_time('10', '%d')
# Get the new cache object's id.
second_time_re = _strptime._TimeRE_cache
# They should not be equal.
self.assertIsNot(first_time_re, second_time_re)
# Possible test locale is not supported while initial locale is.
# If this is the case just suppress the exception and fall-through
# to the resetting to the original locale.
except locale.Error:
self.skipTest('test needs de_DE.UTF8 locale')
# Make sure we don't trample on the locale setting once we leave the
# test.
finally:
locale.setlocale(locale.LC_TIME, locale_info)
def test_main():
support.run_unittest(
getlang_Tests,
LocaleTime_Tests,
TimeRETests,
StrptimeTests,
Strptime12AMPMTests,
JulianTests,
CalculationTests,
CacheTests
)
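# Editor's sketch (not part of the original test suite): a standalone check
# of the Open Group %y century rule that StrptimeTests.test_year verifies:
# two-digit years 69-99 map to 1969-1999, and 00-68 map to 2000-2068.
def _century_rule_demo():
    assert _strptime._strptime_time('69', '%y')[0] == 1969
    assert _strptime._strptime_time('99', '%y')[0] == 1999
    assert _strptime._strptime_time('00', '%y')[0] == 2000
    assert _strptime._strptime_time('68', '%y')[0] == 2068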
if __name__ == '__main__':
test_main()
|
drmrd/ansible
|
refs/heads/devel
|
lib/ansible/playbook/role/definition.py
|
47
|
# (c) 2014 Michael DeHaan, <michael@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.module_utils.six import iteritems, string_types
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.template import Templar
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['RoleDefinition']
class RoleDefinition(Base, Become, Conditional, Taggable):
_role = FieldAttribute(isa='string')
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
super(RoleDefinition, self).__init__()
self._play = play
self._variable_manager = variable_manager
self._loader = loader
self._role_path = None
self._role_basedir = role_basedir
self._role_params = dict()
# def __repr__(self):
# return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')
@staticmethod
def load(data, variable_manager=None, loader=None):
raise AnsibleError("not implemented")
def preprocess_data(self, ds):
# role names that are simply numbers can be parsed by PyYAML
# as integers even when quoted, so turn it into a string type
if isinstance(ds, int):
ds = "%s" % ds
if not isinstance(ds, dict) and not isinstance(ds, string_types) and not isinstance(ds, AnsibleBaseYAMLObject):
raise AnsibleAssertionError()
if isinstance(ds, dict):
ds = super(RoleDefinition, self).preprocess_data(ds)
# save the original ds for use later
self._ds = ds
# we create a new data structure here, using the same
# object used internally by the YAML parsing code so we
# can preserve file:line:column information if it exists
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# first we pull the role name out of the data structure,
# and then use that to determine the role path (which may
# result in a new role name, if it was a file path)
role_name = self._load_role_name(ds)
(role_name, role_path) = self._load_role_path(role_name)
# next, we split the role params out from the valid role
# attributes and update the new data structure with that
# result and the role name
if isinstance(ds, dict):
(new_role_def, role_params) = self._split_role_params(ds)
new_ds.update(new_role_def)
self._role_params = role_params
# set the role name in the new ds
new_ds['role'] = role_name
# we store the role path internally
self._role_path = role_path
# and return the cleaned-up data structure
return new_ds
def _load_role_name(self, ds):
'''
Returns the role name (either the role: or name: field) from
the role definition, or (when the role definition is a simple
string), just that string
'''
if isinstance(ds, string_types):
return ds
role_name = ds.get('role', ds.get('name'))
if not role_name or not isinstance(role_name, string_types):
raise AnsibleError('role definitions must contain a role name', obj=ds)
# if we have the required data structures, and if the role_name
# contains a variable, try and template it now
if self._variable_manager:
all_vars = self._variable_manager.get_vars(play=self._play)
templar = Templar(loader=self._loader, variables=all_vars)
if templar._contains_vars(role_name):
role_name = templar.template(role_name)
return role_name
def _load_role_path(self, role_name):
'''
the 'role', as specified in the ds (or as a bare string), can either
be a simple name or a full path. If it is a full path, we use the
basename as the role name, otherwise we take the name as-given and
append it to the default role path
'''
# we always start the search for roles in the base directory of the playbook
role_search_paths = [
os.path.join(self._loader.get_basedir(), u'roles'),
]
# also search in the configured roles path
if C.DEFAULT_ROLES_PATH:
role_search_paths.extend(C.DEFAULT_ROLES_PATH)
# next, append the roles basedir, if it was set, so we can
# search relative to that directory for dependent roles
if self._role_basedir:
role_search_paths.append(self._role_basedir)
# finally as a last resort we look in the current basedir as set
# in the loader (which should be the playbook dir itself) but without
# the roles/ dir appended
role_search_paths.append(self._loader.get_basedir())
# create a templar class to template the dependency names, in
# case they contain variables
if self._variable_manager is not None:
all_vars = self._variable_manager.get_vars(play=self._play)
else:
all_vars = dict()
templar = Templar(loader=self._loader, variables=all_vars)
role_name = templar.template(role_name)
# now iterate through the possible paths and return the first one we find
for path in role_search_paths:
path = templar.template(path)
role_path = unfrackpath(os.path.join(path, role_name))
if self._loader.path_exists(role_path):
return (role_name, role_path)
# if not found elsewhere try to extract path from name
role_path = unfrackpath(role_name)
if self._loader.path_exists(role_path):
role_name = os.path.basename(role_name)
return (role_name, role_path)
raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)), obj=self._ds)
def _split_role_params(self, ds):
'''
Splits any random role params off from the role spec and stores
them in a dictionary of params for parsing later
'''
role_def = dict()
role_params = dict()
base_attribute_names = frozenset(self._valid_attrs.keys())
for (key, value) in iteritems(ds):
# use the list of FieldAttribute values to determine what is and is not
# an extra parameter for this role (or sub-class of this role)
# FIXME: hard-coded list of exception key names here corresponds to the
# connection fields in the Base class. There may need to be some
# other mechanism where we exclude certain kinds of field attributes,
# or make this list more automatic in some way so we don't have to
# remember to update it manually.
if key not in base_attribute_names or key in ('connection', 'port', 'remote_user'):
if key in ('connection', 'port', 'remote_user'):
display.deprecated("Using '%s' as a role param has been deprecated. " % key +
"In the future, these values should be entered in the `vars:` " +
"section for roles, but for now we'll store it as both a param and an attribute.", version="2.7")
role_def[key] = value
# this key does not match a field attribute, so it must be a role param
role_params[key] = value
else:
# this is a field attribute, so copy it over directly
role_def[key] = value
return (role_def, role_params)
def get_role_params(self):
return self._role_params.copy()
def get_role_path(self):
return self._role_path
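# Editor's sketch (hypothetical helper, not part of Ansible): the effective
# search order built by _load_role_path() above, spelled out as a standalone
# function for illustration.
def _example_role_search_order(playbook_dir, configured_roles_path=(), role_basedir=None):
    paths = [os.path.join(playbook_dir, u'roles')]  # 1. playbook's roles/ dir
    paths.extend(configured_roles_path)             # 2. configured roles path
    if role_basedir:
        paths.append(role_basedir)                  # 3. basedir for dependent roles
    paths.append(playbook_dir)                      # 4. playbook dir itself, as a last resort
    return paths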
|
almarklein/visvis.dev
|
refs/heads/master
|
wobjects/sliceTextures.py
|
5
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein
#
# Visvis is distributed under the terms of the (new) BSD License.
# The full license can be found in 'license.txt'.
""" Module sliceTextures
Defines texture wobjects for visualizing slices in 3D volumes.
"""
import OpenGL.GL as gl
import OpenGL.GLU as glu
from visvis.pypoints import Point
from visvis import Wobject, Colormapable
from visvis.core.misc import Property, PropWithDraw, getColor
from visvis.core import shaders
from visvis.wobjects.textures import BaseTexture, TextureObjectToVisualize
class SliceTexture(BaseTexture):
""" SliceTexture
A slice texture is a 2D texture of a 3D data volume. It enables
visualizing 3D data without the need for GLSL rendering (and can
therefore be used on older systems).
"""
def __init__(self, parent, data, axis=0, index=0):
BaseTexture.__init__(self, parent, data)
self._ndim = 3
# Init parameters
self._axis = axis
self._index = index
# create texture
self._texture1 = TextureObjectToVisualize(2, data)
# init shader
self._InitShader()
# set data (data to textureToV. only for min/max)
self.SetData(data)
# init interpolation
self._texture1._interpolate = True
# For edge
self._edgeColor = None
self._edgeColor2 = getColor('g')
self._edgeWidth = 3.0
# For interaction
self._interact_over = False
self._interact_down = False
self._screenVec = None
self._refPos = (0,0)
self._refIndex = 0
#
self.hitTest = True
#
self.eventEnter.Bind(self._OnMouseEnter)
self.eventLeave.Bind(self._OnMouseLeave)
self.eventMouseDown.Bind(self._OnMouseDown)
self.eventMouseUp.Bind(self._OnMouseUp)
self.eventMotion.Bind(self._OnMouseMotion)
def _InitShader(self):
# Add components of shaders
self.shader.vertex.Clear()
self.shader.fragment.Clear()
self.shader.fragment.AddPart(shaders.SH_2F_BASE)
self.shader.fragment.AddPart(shaders.SH_2F_AASTEPS_0)
self.shader.fragment.AddPart(shaders.SH_COLOR_SCALAR)
def uniform_shape():
shape = self._texture1._shape[:2] # as in opengl
return [float(s) for s in reversed(list(shape))]
def uniform_extent():
data = self._texture1._dataRef # as original array
shape = reversed(data.shape[:2])
if hasattr(data, 'sampling'):
sampling = reversed(data.sampling[:2])
else:
sampling = [1.0 for s in range(2)]
del data
return [s1*s2 for s1, s2 in zip(shape, sampling)]
# Set some uniforms
self.shader.SetStaticUniform('colormap', self._colormap)
self.shader.SetStaticUniform('shape', uniform_shape)
self.shader.SetStaticUniform('scaleBias', self._texture1._ScaleBias_get)
self.shader.SetStaticUniform('extent', uniform_extent)
self.shader.SetStaticUniform('aakernel', [1.0, 0, 0, 0])
def _SetData(self, data):
""" _SetData(data)
Give reference to the raw data. For internal use. Inheriting
classes can override this to store data in their own way and
update the OpenGL textures accordingly.
"""
# Store data
self._dataRef3D = data
# Slice it
i = self._index
if self._axis == 0:
slice_data = self._dataRef3D[i]
elif self._axis == 1:
slice_data = self._dataRef3D[:,i]
elif self._axis == 2:
slice_data = self._dataRef3D[:,:,i]
# Update texture
self._texture1.SetData(slice_data)
def _GetData(self):
""" _GetData()
Get a reference to the raw data. For internal use.
"""
return self._dataRef3D
def _GetLimits(self):
""" Get the limits in world coordinates between which the object exists.
"""
# Obtain untransformed coords
shape = self._dataRef3D.shape
x1, x2 = -0.5, shape[2]-0.5
y1, y2 = -0.5, shape[1]-0.5
z1, z2 = -0.5, shape[0]-0.5
# There we are
return Wobject._GetLimits(self, x1, x2, y1, y2, z1, z2)
def OnDestroy(self):
# Clear normally, and also remove reference to data
BaseTexture.OnDestroy(self)
self._dataRef3D = None
def OnDrawShape(self, clr):
# Implementation of the OnDrawShape method.
gl.glColor(clr[0], clr[1], clr[2], 1.0)
self._DrawQuads()
def OnDraw(self, fast=False):
# Draw the texture.
# set color to white, otherwise with no shading, there is odd scaling
gl.glColor3f(1.0,1.0,1.0)
# Enable texture, so that it has a corresponding OpenGl texture.
# Binding is done by the shader
self._texture1.Enable(-1)
self.shader.SetUniform('texture', self._texture1)
# _texture._shape is a good indicator of a valid texture
if not self._texture1._shape:
return
if self.shader.isUsable and self.shader.hasCode:
# fragment shader on -> anti-aliasing
self.shader.Enable()
else:
# Fixed function pipeline
self.shader.EnableTextureOnly('texture')
# do the drawing!
self._DrawQuads()
gl.glFlush()
# clean up
self.shader.Disable()
# Draw outline?
clr = self._edgeColor
if self._interact_down or self._interact_over:
clr = self._edgeColor2
if clr:
self._DrawQuads(clr)
# Get screen vector?
if self._screenVec is None:
pos1 = [int(s/2) for s in self._dataRef3D.shape]
pos2 = [s for s in pos1]
pos2[self._axis] += 1
#
screen1 = glu.gluProject(pos1[2], pos1[1], pos1[0])
screen2 = glu.gluProject(pos2[2], pos2[1], pos2[0])
#
self._screenVec = screen2[0]-screen1[0], screen1[1]-screen2[1]
def _DrawQuads(self, clr=None):
""" Draw the quads of the texture.
This is done in a separate method to reuse code in
OnDraw() and OnDrawShape().
"""
if not self._texture1._shape:
return
# The -0.5 offset is to center pixels/voxels. This works correctly
# for anisotropic data.
x1, x2 = -0.5, self._dataRef3D.shape[2]-0.5
y2, y1 = -0.5, self._dataRef3D.shape[1]-0.5
z2, z1 = -0.5, self._dataRef3D.shape[0]-0.5
# Calculate quads
i = self._index
if self._axis == 0:
quads = [ (x1, y2, i),
(x2, y2, i),
(x2, y1, i),
(x1, y1, i), ]
elif self._axis == 1:
quads = [ (x1, i, z2),
(x2, i, z2),
(x2, i, z1),
(x1, i, z1), ]
elif self._axis == 2:
quads = [ (i, y2, z2),
(i, y1, z2),
(i, y1, z1),
(i, y2, z1), ]
if clr:
# Draw lines
gl.glColor(clr[0], clr[1], clr[2], 1.0)
gl.glLineWidth(self._edgeWidth)
gl.glBegin(gl.GL_LINE_STRIP)
for i in [0,1,2,3,0]:
gl.glVertex3d(*quads[i])
gl.glEnd()
else:
# Draw texture
gl.glBegin(gl.GL_QUADS)
gl.glTexCoord2f(0,0); gl.glVertex3d(*quads[0])
gl.glTexCoord2f(1,0); gl.glVertex3d(*quads[1])
gl.glTexCoord2f(1,1); gl.glVertex3d(*quads[2])
gl.glTexCoord2f(0,1); gl.glVertex3d(*quads[3])
gl.glEnd()
## Interaction
def _OnMouseEnter(self, event):
self._interact_over = True
self.Draw()
def _OnMouseLeave(self, event):
self._interact_over = False
self.Draw()
def _OnMouseDown(self, event):
if event.button == 1:
# Signal that it's down
self._interact_down = True
# Make the screen vector be calculated on the next draw
self._screenVec = None
# Store position and index for reference
self._refPos = event.x, event.y
self._refIndex = self._index
# Redraw
self.Draw()
# Handle the event
return True
def _OnMouseUp(self, event):
self._interact_down = False
self.Draw()
def _OnMouseMotion(self, event):
# Handle or pass?
if not (self._interact_down and self._screenVec):
return
# Get vector relative to reference position
refPos = Point(self._refPos)
pos = Point(event.x, event.y)
vec = pos - refPos
# Length of reference vector, and its normalized version
screenVec = Point(self._screenVec)
L = screenVec.norm()
V = screenVec.normalize()
# Number of indexes to change
n = vec.dot(V) / L
# Apply!
self.index = int(self._refIndex + n)
## Properties
@PropWithDraw
def index():
""" The index of the slice in the volume to display.
"""
def fget(self):
return self._index
def fset(self, value):
# Check value
if value < 0:
value = 0
maxIndex = self._dataRef3D.shape[self._axis] - 1
if value > maxIndex:
value = maxIndex
# Set and update
self._index = value
self._SetData(self._dataRef3D)
return locals()
@PropWithDraw
def axis():
""" The axis of the slice in the volume to display.
"""
def fget(self):
return self._axis
def fset(self, value):
# Check value
if value < 0 or value >= 3:
raise ValueError('Invalid axis.')
# Set and update index (it may now be out of bounds).
self._axis = value
self.index = self.index
return locals()
@PropWithDraw
def edgeColor():
""" The color of the edge of the slice (can be None).
"""
def fget(self):
return self._edgeColor
def fset(self, value):
self._edgeColor = getColor(value)
return locals()
@PropWithDraw
def edgeColor2():
""" The color of the edge of the slice when interacting.
"""
def fget(self):
return self._edgeColor2
def fset(self, value):
self._edgeColor2 = getColor(value)
return locals()
class SliceTextureProxy(Wobject, Colormapable):
""" SliceTextureProxy(*sliceTextures)
A proxy class for multiple SliceTexture instances. By making them
children of an instance of this class, their properties can be
changed simultaneously.
This makes it possible to call volshow() and stay agnostic of how
the volume is visualized (using 3D rendering, or with 3 slice
textures); all public texture-specific methods and properties are
transferred to all children automatically.
"""
def SetData(self, *args, **kwargs):
for s in self.children:
s.SetData(*args, **kwargs)
def Refresh(self, *args, **kwargs):
for s in self.children:
s.Refresh(*args, **kwargs)
def SetClim(self, *args, **kwargs):
for s in self.children:
s.SetClim(*args, **kwargs)
def _GetColormap(self):
return self.children[0].colormap
def _SetColormap(self, value):
for s in self.children:
s._SetColormap(value)
def _EnableColormap(self, texUnit=0):
return self.children[0]._EnableColormap(texUnit)
def _DisableColormap(self):
return self.children[0]._DisableColormap()
def _GetClim(self):
return self.children[0].clim
def _SetClim(self, value):
for s in self.children:
s._SetClim(value)
@Property
def renderStyle():
""" Not available for SliceTextures. This
property is implemented to be able to produce a warning when
it is used.
"""
def fget(self):
return 'None'
def fset(self, value):
print('Warning: SliceTexture instances have no renderStyle.')
return locals()
@Property
def isoThreshold():
""" Not available for SliceTextures. This
property is implemented to be able to produce a warning when
it is used.
"""
def fget(self):
return 0.0
def fset(self, value):
print('Warning: SliceTexture instances have no isoThreshold.')
return locals()
@Property
def interpolate():
""" Get/Set whether to interpolate the image when zooming in
(using linear interpolation).
"""
def fget(self):
return self.children[0].interpolate
def fset(self, value):
for s in self.children:
s.interpolate = value
return locals()
@Property
def index():
""" The index of the slice in the volume to display.
"""
def fget(self):
return self.children[0].index
def fset(self, value):
for s in self.children:
s.index = value
return locals()
@Property
def axis():
""" The axis of the slice in the volume to display.
"""
def fget(self):
return self.children[0].axis
def fset(self, value):
for s in self.children:
s.axis = value
return locals()
@Property
def edgeColor():
""" The color of the edge of the slice (can be None).
"""
def fget(self):
return self.children[0].edgeColor
def fset(self, value):
for s in self.children:
s.edgeColor = value
return locals()
@Property
def edgeColor2():
""" The color of the edge of the slice when interacting.
"""
def fget(self):
return self.children[0].edgeColor2
def fset(self, value):
for s in self.children:
s.edgeColor2 = value
return locals()
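# Editor's sketch (hypothetical setup): one proxy fanning property changes
# out to three slice textures, one per axis.
#
#     proxy = SliceTextureProxy(parent_axes)
#     for d in range(3):
#         SliceTexture(proxy, volume_data, axis=d, index=0)
#     proxy.index = 10  # sets the index on all three children at once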
|
MaplePlan/djwp
|
refs/heads/master
|
django/contrib/gis/geometry/backend/__init__.py
|
104
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
geom_backend = getattr(settings, 'GEOMETRY_BACKEND', 'geos')
try:
module = import_module('django.contrib.gis.geometry.backend.%s' % geom_backend)
except ImportError:
try:
module = import_module(geom_backend)
except ImportError:
raise ImproperlyConfigured('Could not import user-defined GEOMETRY_BACKEND '
'"%s".' % geom_backend)
try:
Geometry = module.Geometry
GeometryException = module.GeometryException
except AttributeError:
raise ImproperlyConfigured('Cannot import Geometry from the "%s" '
'geometry backend.' % geom_backend)
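# Editor's sketch: a user-defined backend module (hypothetical name
# 'mybackend', selected with GEOMETRY_BACKEND = 'mybackend' in settings)
# only needs to expose the two attributes imported above:
#
#     # mybackend.py
#     class Geometry(object):
#         def __init__(self, wkt):
#             self.wkt = wkt
#
#     class GeometryException(Exception):
#         pass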
|
M4gn4tor/mastercard-api-python
|
refs/heads/master
|
Services/restaurants/services/countrieslocalfavoritesservice.py
|
1
|
from common import connector
from common import environment
from common import xmlutil
from services.restaurants.domain.countries import country
from services.restaurants.domain.countries import countries
import xml.etree.ElementTree as ET
SANDBOX_URL = 'https://sandbox.api.mastercard.com/restaurants/v1/country?Format=XML'
PRODUCTION_URL = 'https://api.mastercard.com/restaurants/v1/country?Format=XML'
class CountriesLocalFavoritesService(connector.Connector):
def __init__(self, consumer_key, private_key, environment):
super().__init__(consumer_key, private_key)
self.environment = environment
def get_countries(self):
url = self.get_url()
xml_response = ET.fromstring(self.do_request(url, 'GET'))
return self.generate_return_object(xml_response)
def get_url(self):
url = SANDBOX_URL
if self.environment == environment.Environment.PRODUCTION:
url = PRODUCTION_URL
return url
def generate_return_object(self, xml_response):
none_check = xmlutil.XMLUtil()
country_list = list()
for xml_country in xml_response.findall('Country'):
tmp_country = country.Country(
none_check.verify_not_none(xml_country.find('Name')),
none_check.verify_not_none(xml_country.find('Code'))
)
tmp_country.geo_coding = none_check.verify_not_none(xml_country.find('GeoCoding'))
country_list.append(tmp_country)
countries_ = countries.Countries(country_list)
return countries_
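# Editor's sketch (hypothetical credentials and key path): driving the
# service defined above.
#
#     service = CountriesLocalFavoritesService(
#         consumer_key, private_key, environment.Environment.PRODUCTION)
#     countries_result = service.get_countries()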
|
leeseuljeong/leeseulstack_neutron
|
refs/heads/master
|
neutron/plugins/ml2/drivers/l2pop/db.py
|
8
|
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.serialization import jsonutils
from oslo.utils import timeutils
from sqlalchemy import sql
from neutron.common import constants as const
from neutron.db import agents_db
from neutron.db import common_db_mixin as base_db
from neutron.db import models_v2
from neutron.plugins.ml2.drivers.l2pop import constants as l2_const
from neutron.plugins.ml2 import models as ml2_models
class L2populationDbMixin(base_db.CommonDbMixin):
def get_agent_ip_by_host(self, session, agent_host):
agent = self.get_agent_by_host(session, agent_host)
if agent:
return self.get_agent_ip(agent)
def get_agent_ip(self, agent):
configuration = jsonutils.loads(agent.configurations)
return configuration.get('tunneling_ip')
def get_agent_uptime(self, agent):
return timeutils.delta_seconds(agent.started_at,
agent.heartbeat_timestamp)
def get_agent_tunnel_types(self, agent):
configuration = jsonutils.loads(agent.configurations)
return configuration.get('tunnel_types')
def get_agent_l2pop_network_types(self, agent):
configuration = jsonutils.loads(agent.configurations)
return configuration.get('l2pop_network_types')
def get_agent_by_host(self, session, agent_host):
with session.begin(subtransactions=True):
query = session.query(agents_db.Agent)
query = query.filter(agents_db.Agent.host == agent_host,
agents_db.Agent.agent_type.in_(
l2_const.SUPPORTED_AGENT_TYPES))
return query.first()
def get_network_ports(self, session, network_id):
with session.begin(subtransactions=True):
query = session.query(ml2_models.PortBinding,
agents_db.Agent)
query = query.join(agents_db.Agent,
agents_db.Agent.host ==
ml2_models.PortBinding.host)
query = query.join(models_v2.Port)
query = query.filter(models_v2.Port.network_id == network_id,
models_v2.Port.admin_state_up == sql.true(),
agents_db.Agent.agent_type.in_(
l2_const.SUPPORTED_AGENT_TYPES))
return query
def get_nondvr_network_ports(self, session, network_id):
query = self.get_network_ports(session, network_id)
return query.filter(models_v2.Port.device_owner !=
const.DEVICE_OWNER_DVR_INTERFACE)
def get_dvr_network_ports(self, session, network_id):
with session.begin(subtransactions=True):
query = session.query(ml2_models.DVRPortBinding,
agents_db.Agent)
query = query.join(agents_db.Agent,
agents_db.Agent.host ==
ml2_models.DVRPortBinding.host)
query = query.join(models_v2.Port)
query = query.filter(models_v2.Port.network_id == network_id,
models_v2.Port.admin_state_up == sql.true(),
models_v2.Port.device_owner ==
const.DEVICE_OWNER_DVR_INTERFACE,
agents_db.Agent.agent_type.in_(
l2_const.SUPPORTED_AGENT_TYPES))
return query
def get_agent_network_active_port_count(self, session, agent_host,
network_id):
with session.begin(subtransactions=True):
query = session.query(models_v2.Port)
query1 = query.join(ml2_models.PortBinding)
query1 = query1.filter(models_v2.Port.network_id == network_id,
models_v2.Port.status ==
const.PORT_STATUS_ACTIVE,
models_v2.Port.device_owner !=
const.DEVICE_OWNER_DVR_INTERFACE,
ml2_models.PortBinding.host == agent_host)
query2 = query.join(ml2_models.DVRPortBinding)
query2 = query2.filter(models_v2.Port.network_id == network_id,
ml2_models.DVRPortBinding.status ==
const.PORT_STATUS_ACTIVE,
models_v2.Port.device_owner ==
const.DEVICE_OWNER_DVR_INTERFACE,
ml2_models.DVRPortBinding.host ==
agent_host)
return (query1.count() + query2.count())
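# Editor's sketch (illustrative values only): the shape of the JSON
# 'configurations' blob that the get_agent_* helpers above parse.
class _ExampleAgent(object):
    configurations = jsonutils.dumps({
        'tunneling_ip': '192.168.0.10',    # read by get_agent_ip()
        'tunnel_types': ['vxlan'],         # read by get_agent_tunnel_types()
        'l2pop_network_types': ['vxlan'],  # read by get_agent_l2pop_network_types()
    })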
|
bikong2/django
|
refs/heads/master
|
django/contrib/gis/db/backends/mysql/schema.py
|
448
|
import logging
from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.mysql.schema import DatabaseSchemaEditor
from django.db.utils import OperationalError
logger = logging.getLogger('django.contrib.gis')
class MySQLGISSchemaEditor(DatabaseSchemaEditor):
sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)'
sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s'
def __init__(self, *args, **kwargs):
super(MySQLGISSchemaEditor, self).__init__(*args, **kwargs)
self.geometry_sql = []
def skip_default(self, field):
return (
super(MySQLGISSchemaEditor, self).skip_default(field) or
# Geometry fields are stored as BLOB/TEXT and can't have defaults.
isinstance(field, GeometryField)
)
def column_sql(self, model, field, include_default=False):
column_sql = super(MySQLGISSchemaEditor, self).column_sql(model, field, include_default)
# MySQL doesn't support spatial indexes on NULL columns
if isinstance(field, GeometryField) and field.spatial_index and not field.null:
qn = self.connection.ops.quote_name
db_table = model._meta.db_table
self.geometry_sql.append(
self.sql_add_spatial_index % {
'index': qn(self._create_spatial_index_name(model, field)),
'table': qn(db_table),
'column': qn(field.column),
}
)
return column_sql
def create_model(self, model):
super(MySQLGISSchemaEditor, self).create_model(model)
self.create_spatial_indexes()
def add_field(self, model, field):
super(MySQLGISSchemaEditor, self).add_field(model, field)
self.create_spatial_indexes()
def remove_field(self, model, field):
if isinstance(field, GeometryField) and field.spatial_index:
qn = self.connection.ops.quote_name
sql = self.sql_drop_spatial_index % {
'index': qn(self._create_spatial_index_name(model, field)),
'table': qn(model._meta.db_table),
}
try:
self.execute(sql)
except OperationalError:
logger.error(
"Couldn't remove spatial index: %s (may be expected "
"if your storage engine doesn't support them)." % sql
)
super(MySQLGISSchemaEditor, self).remove_field(model, field)
def _create_spatial_index_name(self, model, field):
return '%s_%s_id' % (model._meta.db_table, field.column)
def create_spatial_indexes(self):
for sql in self.geometry_sql:
try:
self.execute(sql)
except OperationalError:
logger.error(
"Cannot create SPATIAL INDEX %s. Only MyISAM and (as of "
"MySQL 5.7.5) InnoDB support them." % sql
)
self.geometry_sql = []
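# Editor's sketch (hypothetical table/column names): what the two SQL
# templates above render to for a table 'city' with geometry column
# 'location'.
def _example_spatial_index_sql():
    params = {'index': '`city_location_id`', 'table': '`city`', 'column': '`location`'}
    add_sql = MySQLGISSchemaEditor.sql_add_spatial_index % params
    # -> CREATE SPATIAL INDEX `city_location_id` ON `city`(`location`)
    drop_sql = MySQLGISSchemaEditor.sql_drop_spatial_index % params
    # -> DROP INDEX `city_location_id` ON `city`
    return add_sql, drop_sql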
|
Peddle/hue
|
refs/heads/master
|
desktop/core/ext-py/boto-2.38.0/boto/elasticache/layer1.py
|
150
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.compat import json
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
class ElastiCacheConnection(AWSQueryConnection):
"""
Amazon ElastiCache
Amazon ElastiCache is a web service that makes it easier to set
up, operate, and scale a distributed cache in the cloud.
With ElastiCache, customers gain all of the benefits of a high-
performance, in-memory cache with far less of the administrative
burden of launching and managing a distributed cache. The service
makes set-up, scaling, and cluster failure handling much simpler
than in a self-managed cache deployment.
In addition, through integration with Amazon CloudWatch, customers
get enhanced visibility into the key performance statistics
associated with their cache and can receive alarms if a part of
their cache runs hot.
"""
APIVersion = "2013-06-15"
DefaultRegionName = "us-east-1"
DefaultRegionEndpoint = "elasticache.us-east-1.amazonaws.com"
def __init__(self, **kwargs):
region = kwargs.get('region')
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
else:
del kwargs['region']
kwargs['host'] = region.endpoint
super(ElastiCacheConnection, self).__init__(**kwargs)
self.region = region
def _required_auth_capability(self):
return ['hmac-v4']
def authorize_cache_security_group_ingress(self,
cache_security_group_name,
ec2_security_group_name,
ec2_security_group_owner_id):
"""
The AuthorizeCacheSecurityGroupIngress operation allows
network ingress to a cache security group. Applications using
ElastiCache must be running on Amazon EC2, and Amazon EC2
security groups are used as the authorization mechanism.
You cannot authorize ingress from an Amazon EC2 security group
in one Region to an ElastiCache cluster in another Region.
:type cache_security_group_name: string
:param cache_security_group_name: The cache security group which will
allow network ingress.
:type ec2_security_group_name: string
:param ec2_security_group_name: The Amazon EC2 security group to be
authorized for ingress to the cache security group.
:type ec2_security_group_owner_id: string
:param ec2_security_group_owner_id: The AWS account number of the
Amazon EC2 security group owner. Note that this is not the same
thing as an AWS access key ID - you must provide a valid AWS
account number for this parameter.
"""
params = {
'CacheSecurityGroupName': cache_security_group_name,
'EC2SecurityGroupName': ec2_security_group_name,
'EC2SecurityGroupOwnerId': ec2_security_group_owner_id,
}
return self._make_request(
action='AuthorizeCacheSecurityGroupIngress',
verb='POST',
path='/', params=params)
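# Editor's sketch (hypothetical identifiers): a typical call to the method
# above, assuming boto can find AWS credentials in its usual places.
#
#     conn = ElastiCacheConnection()
#     conn.authorize_cache_security_group_ingress(
#         'my-cache-sg', 'my-ec2-sg', '123456789012')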
def create_cache_cluster(self, cache_cluster_id, num_cache_nodes=None,
cache_node_type=None, engine=None,
replication_group_id=None, engine_version=None,
cache_parameter_group_name=None,
cache_subnet_group_name=None,
cache_security_group_names=None,
security_group_ids=None, snapshot_arns=None,
preferred_availability_zone=None,
preferred_maintenance_window=None, port=None,
notification_topic_arn=None,
auto_minor_version_upgrade=None):
"""
The CreateCacheCluster operation creates a new cache cluster.
All nodes in the cache cluster run the same protocol-compliant
cache engine software - either Memcached or Redis.
:type cache_cluster_id: string
:param cache_cluster_id:
The cache cluster identifier. This parameter is stored as a lowercase
string.
Constraints:
+ Must contain from 1 to 20 alphanumeric characters or hyphens.
+ First character must be a letter.
+ Cannot end with a hyphen or contain two consecutive hyphens.
:type replication_group_id: string
:param replication_group_id: The replication group to which this cache
cluster should belong. If this parameter is specified, the cache
cluster will be added to the specified replication group as a read
replica; otherwise, the cache cluster will be a standalone primary
that is not part of any replication group.
:type num_cache_nodes: integer
:param num_cache_nodes: The initial number of cache nodes that the
cache cluster will have.
For a Memcached cluster, valid values are between 1 and 20. If you need
to exceed this limit, please fill out the ElastiCache Limit
Increase Request form at ``_ .
For Redis, only single-node cache clusters are supported at this time,
so the value for this parameter must be 1.
:type cache_node_type: string
:param cache_node_type: The compute and memory capacity of the nodes in
the cache cluster.
Valid values for Memcached:
`cache.t1.micro` | `cache.m1.small` | `cache.m1.medium` |
`cache.m1.large` | `cache.m1.xlarge` | `cache.m3.xlarge` |
`cache.m3.2xlarge` | `cache.m2.xlarge` | `cache.m2.2xlarge` |
`cache.m2.4xlarge` | `cache.c1.xlarge`
Valid values for Redis:
`cache.t1.micro` | `cache.m1.small` | `cache.m1.medium` |
`cache.m1.large` | `cache.m1.xlarge` | `cache.m2.xlarge` |
`cache.m2.2xlarge` | `cache.m2.4xlarge` | `cache.c1.xlarge`
For a complete listing of cache node types and specifications, see `.
:type engine: string
:param engine: The name of the cache engine to be used for this cache
cluster.
Valid values for this parameter are:
`memcached` | `redis`
:type engine_version: string
:param engine_version: The version number of the cache engine to be
used for this cluster. To view the supported cache engine versions,
use the DescribeCacheEngineVersions operation.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
group to associate with this cache cluster. If this argument is
omitted, the default cache parameter group for the specified engine
will be used.
:type cache_subnet_group_name: string
:param cache_subnet_group_name: The name of the cache subnet group to
be used for the cache cluster.
Use this parameter only when you are creating a cluster in an Amazon
Virtual Private Cloud (VPC).
:type cache_security_group_names: list
:param cache_security_group_names: A list of cache security group names
to associate with this cache cluster.
Use this parameter only when you are creating a cluster outside of an
Amazon Virtual Private Cloud (VPC).
:type security_group_ids: list
:param security_group_ids: One or more VPC security groups associated
with the cache cluster.
Use this parameter only when you are creating a cluster in an Amazon
Virtual Private Cloud (VPC).
:type snapshot_arns: list
:param snapshot_arns: A single-element string list containing an Amazon
Resource Name (ARN) that uniquely identifies a Redis RDB snapshot
file stored in Amazon S3. The snapshot file will be used to
populate the Redis cache in the new cache cluster. The Amazon S3
object name in the ARN cannot contain any commas.
Here is an example of an Amazon S3 ARN:
`arn:aws:s3:::my_bucket/snapshot1.rdb`
**Note:** This parameter is only valid if the `Engine` parameter is
`redis`.
:type preferred_availability_zone: string
:param preferred_availability_zone: The EC2 Availability Zone in which
the cache cluster will be created.
All cache nodes belonging to a cache cluster are placed in the
preferred availability zone.
Default: System chosen availability zone.
:type preferred_maintenance_window: string
:param preferred_maintenance_window: The weekly time range (in UTC)
during which system maintenance can occur.
Example: `sun:05:00-sun:09:00`
:type port: integer
:param port: The port number on which each of the cache nodes will
accept connections.
:type notification_topic_arn: string
:param notification_topic_arn:
The Amazon Resource Name (ARN) of the Amazon Simple Notification
Service (SNS) topic to which notifications will be sent.
The Amazon SNS topic owner must be the same as the cache cluster owner.
:type auto_minor_version_upgrade: boolean
:param auto_minor_version_upgrade: Determines whether minor engine
upgrades will be applied automatically to the cache cluster during
the maintenance window. A value of `True` allows these upgrades to
occur; `False` disables automatic upgrades.
Default: `True`
"""
params = {
'CacheClusterId': cache_cluster_id,
}
if num_cache_nodes is not None:
params['NumCacheNodes'] = num_cache_nodes
if cache_node_type is not None:
params['CacheNodeType'] = cache_node_type
if engine is not None:
params['Engine'] = engine
if replication_group_id is not None:
params['ReplicationGroupId'] = replication_group_id
if engine_version is not None:
params['EngineVersion'] = engine_version
if cache_parameter_group_name is not None:
params['CacheParameterGroupName'] = cache_parameter_group_name
if cache_subnet_group_name is not None:
params['CacheSubnetGroupName'] = cache_subnet_group_name
if cache_security_group_names is not None:
self.build_list_params(params,
cache_security_group_names,
'CacheSecurityGroupNames.member')
if security_group_ids is not None:
self.build_list_params(params,
security_group_ids,
'SecurityGroupIds.member')
if snapshot_arns is not None:
self.build_list_params(params,
snapshot_arns,
'SnapshotArns.member')
if preferred_availability_zone is not None:
params['PreferredAvailabilityZone'] = preferred_availability_zone
if preferred_maintenance_window is not None:
params['PreferredMaintenanceWindow'] = preferred_maintenance_window
if port is not None:
params['Port'] = port
if notification_topic_arn is not None:
params['NotificationTopicArn'] = notification_topic_arn
if auto_minor_version_upgrade is not None:
params['AutoMinorVersionUpgrade'] = str(
auto_minor_version_upgrade).lower()
return self._make_request(
action='CreateCacheCluster',
verb='POST',
path='/', params=params)
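    # Hedged usage sketches for the two engines described above; `conn` and
    # all identifiers are placeholders, not values mandated by this library:
    #
    #     # Two-node Memcached cluster outside a VPC:
    #     conn.create_cache_cluster(
    #         'my-memcached', num_cache_nodes=2,
    #         cache_node_type='cache.m1.small', engine='memcached',
    #         cache_security_group_names=['mysecuritygroup'])
    #
    #     # Single-node Redis cluster seeded from an S3 RDB snapshot:
    #     conn.create_cache_cluster(
    #         'my-redis', num_cache_nodes=1,
    #         cache_node_type='cache.m1.small', engine='redis',
    #         snapshot_arns=['arn:aws:s3:::my_bucket/snapshot1.rdb'])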
def create_cache_parameter_group(self, cache_parameter_group_name,
cache_parameter_group_family,
description):
"""
The CreateCacheParameterGroup operation creates a new cache
parameter group. A cache parameter group is a collection of
parameters that you apply to all of the nodes in a cache
cluster.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: A user-specified name for the cache
parameter group.
:type cache_parameter_group_family: string
:param cache_parameter_group_family: The name of the cache parameter
group family the cache parameter group can be used with.
Valid values are: `memcached1.4` | `redis2.6`
:type description: string
:param description: A user-specified description for the cache
parameter group.
"""
params = {
'CacheParameterGroupName': cache_parameter_group_name,
'CacheParameterGroupFamily': cache_parameter_group_family,
'Description': description,
}
return self._make_request(
action='CreateCacheParameterGroup',
verb='POST',
path='/', params=params)
def create_cache_security_group(self, cache_security_group_name,
description):
"""
The CreateCacheSecurityGroup operation creates a new cache
security group. Use a cache security group to control access
to one or more cache clusters.
Cache security groups are only used when you are creating a
cluster outside of an Amazon Virtual Private Cloud (VPC). If
you are creating a cluster inside of a VPC, use a cache subnet
group instead. For more information, see
CreateCacheSubnetGroup .
:type cache_security_group_name: string
:param cache_security_group_name: A name for the cache security group.
This value is stored as a lowercase string.
Constraints: Must contain no more than 255 alphanumeric characters.
Must not be the word "Default".
Example: `mysecuritygroup`
:type description: string
:param description: A description for the cache security group.
"""
params = {
'CacheSecurityGroupName': cache_security_group_name,
'Description': description,
}
return self._make_request(
action='CreateCacheSecurityGroup',
verb='POST',
path='/', params=params)
def create_cache_subnet_group(self, cache_subnet_group_name,
cache_subnet_group_description, subnet_ids):
"""
The CreateCacheSubnetGroup operation creates a new cache
subnet group.
Use this parameter only when you are creating a cluster in an
Amazon Virtual Private Cloud (VPC).
:type cache_subnet_group_name: string
:param cache_subnet_group_name: A name for the cache subnet group. This
value is stored as a lowercase string.
Constraints: Must contain no more than 255 alphanumeric characters or
hyphens.
Example: `mysubnetgroup`
:type cache_subnet_group_description: string
:param cache_subnet_group_description: A description for the cache
subnet group.
:type subnet_ids: list
:param subnet_ids: A list of VPC subnet IDs for the cache subnet group.
"""
params = {
'CacheSubnetGroupName': cache_subnet_group_name,
'CacheSubnetGroupDescription': cache_subnet_group_description,
}
self.build_list_params(params,
subnet_ids,
'SubnetIds.member')
return self._make_request(
action='CreateCacheSubnetGroup',
verb='POST',
path='/', params=params)
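    # A minimal sketch (the subnet IDs are hypothetical placeholders):
    #
    #     conn.create_cache_subnet_group(
    #         'mysubnetgroup', 'Subnets for my VPC cache clusters',
    #         ['subnet-11111111', 'subnet-22222222'])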
def create_replication_group(self, replication_group_id,
primary_cluster_id,
replication_group_description):
"""
The CreateReplicationGroup operation creates a replication
group. A replication group is a collection of cache clusters,
where one of the clusters is a read/write primary and the
other clusters are read-only replicas. Writes to the primary
are automatically propagated to the replicas.
When you create a replication group, you must specify an
existing cache cluster that is in the primary role. When the
        replication group has been successfully created, you can add
        one or more read replicas to it, up to a total of five
        read replicas.
:type replication_group_id: string
:param replication_group_id:
The replication group identifier. This parameter is stored as a
lowercase string.
Constraints:
+ Must contain from 1 to 20 alphanumeric characters or hyphens.
+ First character must be a letter.
+ Cannot end with a hyphen or contain two consecutive hyphens.
:type primary_cluster_id: string
:param primary_cluster_id: The identifier of the cache cluster that
will serve as the primary for this replication group. This cache
            cluster must already exist and have a status of `available`.
:type replication_group_description: string
:param replication_group_description: A user-specified description for
the replication group.
"""
params = {
'ReplicationGroupId': replication_group_id,
'PrimaryClusterId': primary_cluster_id,
'ReplicationGroupDescription': replication_group_description,
}
return self._make_request(
action='CreateReplicationGroup',
verb='POST',
path='/', params=params)
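    # A minimal sketch; per the docstring, the primary cache cluster must
    # already exist and be `available` (all identifiers are placeholders):
    #
    #     conn.create_replication_group(
    #         'my-repl-group', 'my-redis',
    #         'Replication group fronting my-redis')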
def delete_cache_cluster(self, cache_cluster_id):
"""
The DeleteCacheCluster operation deletes a previously
provisioned cache cluster. DeleteCacheCluster deletes all
associated cache nodes, node endpoints and the cache cluster
itself. When you receive a successful response from this
operation, Amazon ElastiCache immediately begins deleting the
cache cluster; you cannot cancel or revert this operation.
:type cache_cluster_id: string
:param cache_cluster_id: The cache cluster identifier for the cluster
to be deleted. This parameter is not case sensitive.
"""
params = {'CacheClusterId': cache_cluster_id, }
return self._make_request(
action='DeleteCacheCluster',
verb='POST',
path='/', params=params)
def delete_cache_parameter_group(self, cache_parameter_group_name):
"""
The DeleteCacheParameterGroup operation deletes the specified
cache parameter group. You cannot delete a cache parameter
group if it is associated with any cache clusters.
:type cache_parameter_group_name: string
:param cache_parameter_group_name:
The name of the cache parameter group to delete.
        The specified cache parameter group must not be associated with any
        cache clusters.
"""
params = {
'CacheParameterGroupName': cache_parameter_group_name,
}
return self._make_request(
action='DeleteCacheParameterGroup',
verb='POST',
path='/', params=params)
def delete_cache_security_group(self, cache_security_group_name):
"""
The DeleteCacheSecurityGroup operation deletes a cache
security group.
You cannot delete a cache security group if it is associated
with any cache clusters.
:type cache_security_group_name: string
:param cache_security_group_name:
The name of the cache security group to delete.
You cannot delete the default security group.
"""
params = {
'CacheSecurityGroupName': cache_security_group_name,
}
return self._make_request(
action='DeleteCacheSecurityGroup',
verb='POST',
path='/', params=params)
def delete_cache_subnet_group(self, cache_subnet_group_name):
"""
The DeleteCacheSubnetGroup operation deletes a cache subnet
group.
You cannot delete a cache subnet group if it is associated
with any cache clusters.
:type cache_subnet_group_name: string
:param cache_subnet_group_name: The name of the cache subnet group to
delete.
Constraints: Must contain no more than 255 alphanumeric characters or
hyphens.
"""
params = {'CacheSubnetGroupName': cache_subnet_group_name, }
return self._make_request(
action='DeleteCacheSubnetGroup',
verb='POST',
path='/', params=params)
def delete_replication_group(self, replication_group_id):
"""
The DeleteReplicationGroup operation deletes an existing
replication group. DeleteReplicationGroup deletes the primary
cache cluster and all of the read replicas in the replication
group. When you receive a successful response from this
operation, Amazon ElastiCache immediately begins deleting the
entire replication group; you cannot cancel or revert this
operation.
:type replication_group_id: string
:param replication_group_id: The identifier for the replication group
to be deleted. This parameter is not case sensitive.
"""
params = {'ReplicationGroupId': replication_group_id, }
return self._make_request(
action='DeleteReplicationGroup',
verb='POST',
path='/', params=params)
def describe_cache_clusters(self, cache_cluster_id=None,
max_records=None, marker=None,
show_cache_node_info=None):
"""
The DescribeCacheClusters operation returns information about
all provisioned cache clusters if no cache cluster identifier
is specified, or about a specific cache cluster if a cache
cluster identifier is supplied.
        By default, abbreviated information about the cache
        cluster(s) will be returned. You can use the optional
        ShowCacheNodeInfo flag to retrieve detailed information about the
        cache nodes associated with the cache clusters. These details
include the DNS address and port for the cache node endpoint.
If the cluster is in the CREATING state, only cluster level
information will be displayed until all of the nodes are
successfully provisioned.
If the cluster is in the DELETING state, only cluster level
information will be displayed.
If cache nodes are currently being added to the cache cluster,
node endpoint information and creation time for the additional
nodes will not be displayed until they are completely
        provisioned. When the cache cluster state is `available`, the
cluster is ready for use.
If cache nodes are currently being removed from the cache
cluster, no endpoint information for the removed nodes is
displayed.
:type cache_cluster_id: string
:param cache_cluster_id: The user-supplied cluster identifier. If this
parameter is specified, only information about that specific cache
cluster is returned. This parameter isn't case sensitive.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
:type show_cache_node_info: boolean
:param show_cache_node_info: An optional flag that can be included in
            the DescribeCacheClusters request to retrieve information about the
individual cache nodes.
"""
params = {}
if cache_cluster_id is not None:
params['CacheClusterId'] = cache_cluster_id
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
if show_cache_node_info is not None:
params['ShowCacheNodeInfo'] = str(
show_cache_node_info).lower()
return self._make_request(
action='DescribeCacheClusters',
verb='POST',
path='/', params=params)
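    # A hedged sketch of the marker-based pagination shared by all of the
    # Describe* operations in this class. The response-path keys below follow
    # the usual AWS JSON envelope for this API but are an assumption; verify
    # them against a live response:
    #
    #     marker = None
    #     while True:
    #         page = conn.describe_cache_clusters(max_records=100,
    #                                             marker=marker)
    #         result = page['DescribeCacheClustersResponse'][
    #             'DescribeCacheClustersResult']
    #         for cluster in result['CacheClusters']:
    #             print(cluster['CacheClusterId'])
    #         marker = result.get('Marker')
    #         if not marker:
    #             break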
def describe_cache_engine_versions(self, engine=None,
engine_version=None,
cache_parameter_group_family=None,
max_records=None, marker=None,
default_only=None):
"""
The DescribeCacheEngineVersions operation returns a list of
the available cache engines and their versions.
:type engine: string
:param engine: The cache engine to return. Valid values: `memcached` |
`redis`
:type engine_version: string
:param engine_version: The cache engine version to return.
Example: `1.4.14`
:type cache_parameter_group_family: string
:param cache_parameter_group_family:
The name of a specific cache parameter group family to return details
for.
Constraints:
+ Must be 1 to 255 alphanumeric characters
+ First character must be a letter
+ Cannot end with a hyphen or contain two consecutive hyphens
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
:type default_only: boolean
        :param default_only: If `True`, specifies that only the default version
of the specified engine or engine and major version combination is
to be returned.
"""
params = {}
if engine is not None:
params['Engine'] = engine
if engine_version is not None:
params['EngineVersion'] = engine_version
if cache_parameter_group_family is not None:
params['CacheParameterGroupFamily'] = cache_parameter_group_family
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
if default_only is not None:
params['DefaultOnly'] = str(
default_only).lower()
return self._make_request(
action='DescribeCacheEngineVersions',
verb='POST',
path='/', params=params)
def describe_cache_parameter_groups(self,
cache_parameter_group_name=None,
max_records=None, marker=None):
"""
The DescribeCacheParameterGroups operation returns a list of
cache parameter group descriptions. If a cache parameter group
name is specified, the list will contain only the descriptions
for that group.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of a specific cache
parameter group to return details for.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if cache_parameter_group_name is not None:
params['CacheParameterGroupName'] = cache_parameter_group_name
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeCacheParameterGroups',
verb='POST',
path='/', params=params)
def describe_cache_parameters(self, cache_parameter_group_name,
source=None, max_records=None, marker=None):
"""
The DescribeCacheParameters operation returns the detailed
parameter list for a particular cache parameter group.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of a specific cache
parameter group to return details for.
:type source: string
:param source: The parameter types to return.
Valid values: `user` | `system` | `engine-default`
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {
'CacheParameterGroupName': cache_parameter_group_name,
}
if source is not None:
params['Source'] = source
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeCacheParameters',
verb='POST',
path='/', params=params)
def describe_cache_security_groups(self, cache_security_group_name=None,
max_records=None, marker=None):
"""
The DescribeCacheSecurityGroups operation returns a list of
cache security group descriptions. If a cache security group
name is specified, the list will contain only the description
of that group.
:type cache_security_group_name: string
:param cache_security_group_name: The name of the cache security group
to return details for.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if cache_security_group_name is not None:
params['CacheSecurityGroupName'] = cache_security_group_name
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeCacheSecurityGroups',
verb='POST',
path='/', params=params)
def describe_cache_subnet_groups(self, cache_subnet_group_name=None,
max_records=None, marker=None):
"""
The DescribeCacheSubnetGroups operation returns a list of
cache subnet group descriptions. If a subnet group name is
specified, the list will contain only the description of that
group.
:type cache_subnet_group_name: string
:param cache_subnet_group_name: The name of the cache subnet group to
return details for.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if cache_subnet_group_name is not None:
params['CacheSubnetGroupName'] = cache_subnet_group_name
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeCacheSubnetGroups',
verb='POST',
path='/', params=params)
def describe_engine_default_parameters(self,
cache_parameter_group_family,
max_records=None, marker=None):
"""
The DescribeEngineDefaultParameters operation returns the
default engine and system parameter information for the
specified cache engine.
:type cache_parameter_group_family: string
:param cache_parameter_group_family: The name of the cache parameter
group family. Valid values are: `memcached1.4` | `redis2.6`
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {
'CacheParameterGroupFamily': cache_parameter_group_family,
}
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeEngineDefaultParameters',
verb='POST',
path='/', params=params)
def describe_events(self, source_identifier=None, source_type=None,
start_time=None, end_time=None, duration=None,
max_records=None, marker=None):
"""
The DescribeEvents operation returns events related to cache
clusters, cache security groups, and cache parameter groups.
You can obtain events specific to a particular cache cluster,
cache security group, or cache parameter group by providing
the name as a parameter.
By default, only the events occurring within the last hour are
returned; however, you can retrieve up to 14 days' worth of
events if necessary.
:type source_identifier: string
:param source_identifier: The identifier of the event source for which
events will be returned. If not specified, then all sources are
included in the response.
:type source_type: string
:param source_type: The event source to retrieve events for. If no
value is specified, all events are returned.
        Valid values are: `cache-cluster` | `cache-parameter-group` |
        `cache-security-group` | `cache-subnet-group`
:type start_time: timestamp
:param start_time: The beginning of the time interval to retrieve
events for, specified in ISO 8601 format.
:type end_time: timestamp
:param end_time: The end of the time interval for which to retrieve
events, specified in ISO 8601 format.
:type duration: integer
:param duration: The number of minutes' worth of events to retrieve.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if source_identifier is not None:
params['SourceIdentifier'] = source_identifier
if source_type is not None:
params['SourceType'] = source_type
if start_time is not None:
params['StartTime'] = start_time
if end_time is not None:
params['EndTime'] = end_time
if duration is not None:
params['Duration'] = duration
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeEvents',
verb='POST',
path='/', params=params)
def describe_replication_groups(self, replication_group_id=None,
max_records=None, marker=None):
"""
The DescribeReplicationGroups operation returns information
about a particular replication group. If no identifier is
specified, DescribeReplicationGroups returns information about
all replication groups.
:type replication_group_id: string
:param replication_group_id: The identifier for the replication group
to be described. This parameter is not case sensitive.
If you do not specify this parameter, information about all replication
groups is returned.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if replication_group_id is not None:
params['ReplicationGroupId'] = replication_group_id
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeReplicationGroups',
verb='POST',
path='/', params=params)
def describe_reserved_cache_nodes(self, reserved_cache_node_id=None,
reserved_cache_nodes_offering_id=None,
cache_node_type=None, duration=None,
product_description=None,
offering_type=None, max_records=None,
marker=None):
"""
The DescribeReservedCacheNodes operation returns information
about reserved cache nodes for this account, or about a
specified reserved cache node.
:type reserved_cache_node_id: string
:param reserved_cache_node_id: The reserved cache node identifier
filter value. Use this parameter to show only the reservation that
matches the specified reservation ID.
:type reserved_cache_nodes_offering_id: string
:param reserved_cache_nodes_offering_id: The offering identifier filter
value. Use this parameter to show only purchased reservations
matching the specified offering identifier.
:type cache_node_type: string
:param cache_node_type: The cache node type filter value. Use this
parameter to show only those reservations matching the specified
cache node type.
:type duration: string
:param duration: The duration filter value, specified in years or
seconds. Use this parameter to show only reservations for this
duration.
Valid Values: `1 | 3 | 31536000 | 94608000`
:type product_description: string
:param product_description: The product description filter value. Use
this parameter to show only those reservations matching the
specified product description.
:type offering_type: string
:param offering_type: The offering type filter value. Use this
parameter to show only the available offerings matching the
specified offering type.
Valid values: `"Light Utilization" | "Medium Utilization" | "Heavy
Utilization" `
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if reserved_cache_node_id is not None:
params['ReservedCacheNodeId'] = reserved_cache_node_id
if reserved_cache_nodes_offering_id is not None:
params['ReservedCacheNodesOfferingId'] = reserved_cache_nodes_offering_id
if cache_node_type is not None:
params['CacheNodeType'] = cache_node_type
if duration is not None:
params['Duration'] = duration
if product_description is not None:
params['ProductDescription'] = product_description
if offering_type is not None:
params['OfferingType'] = offering_type
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeReservedCacheNodes',
verb='POST',
path='/', params=params)
def describe_reserved_cache_nodes_offerings(self,
reserved_cache_nodes_offering_id=None,
cache_node_type=None,
duration=None,
product_description=None,
offering_type=None,
max_records=None,
marker=None):
"""
The DescribeReservedCacheNodesOfferings operation lists
available reserved cache node offerings.
:type reserved_cache_nodes_offering_id: string
:param reserved_cache_nodes_offering_id: The offering identifier filter
value. Use this parameter to show only the available offering that
matches the specified reservation identifier.
Example: `438012d3-4052-4cc7-b2e3-8d3372e0e706`
:type cache_node_type: string
:param cache_node_type: The cache node type filter value. Use this
parameter to show only the available offerings matching the
specified cache node type.
:type duration: string
:param duration: Duration filter value, specified in years or seconds.
Use this parameter to show only reservations for a given duration.
Valid Values: `1 | 3 | 31536000 | 94608000`
:type product_description: string
:param product_description: The product description filter value. Use
this parameter to show only the available offerings matching the
specified product description.
:type offering_type: string
:param offering_type: The offering type filter value. Use this
parameter to show only the available offerings matching the
specified offering type.
Valid Values: `"Light Utilization" | "Medium Utilization" | "Heavy
Utilization" `
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
results can be retrieved.
Default: 100
Constraints: minimum 20; maximum 100.
:type marker: string
:param marker: An optional marker returned from a prior request. Use
this marker for pagination of results from this operation. If this
parameter is specified, the response includes only records beyond
            the marker, up to the value specified by `MaxRecords`.
"""
params = {}
if reserved_cache_nodes_offering_id is not None:
params['ReservedCacheNodesOfferingId'] = reserved_cache_nodes_offering_id
if cache_node_type is not None:
params['CacheNodeType'] = cache_node_type
if duration is not None:
params['Duration'] = duration
if product_description is not None:
params['ProductDescription'] = product_description
if offering_type is not None:
params['OfferingType'] = offering_type
if max_records is not None:
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
return self._make_request(
action='DescribeReservedCacheNodesOfferings',
verb='POST',
path='/', params=params)
def modify_cache_cluster(self, cache_cluster_id, num_cache_nodes=None,
cache_node_ids_to_remove=None,
cache_security_group_names=None,
security_group_ids=None,
preferred_maintenance_window=None,
notification_topic_arn=None,
cache_parameter_group_name=None,
notification_topic_status=None,
apply_immediately=None, engine_version=None,
auto_minor_version_upgrade=None):
"""
The ModifyCacheCluster operation modifies the settings for a
cache cluster. You can use this operation to change one or
more cluster configuration parameters by specifying the
parameters and the new values.
:type cache_cluster_id: string
:param cache_cluster_id: The cache cluster identifier. This value is
stored as a lowercase string.
:type num_cache_nodes: integer
:param num_cache_nodes: The number of cache nodes that the cache
cluster should have. If the value for NumCacheNodes is greater than
the existing number of cache nodes, then more nodes will be added.
If the value is less than the existing number of cache nodes, then
cache nodes will be removed.
If you are removing cache nodes, you must use the CacheNodeIdsToRemove
parameter to provide the IDs of the specific cache nodes to be
removed.
:type cache_node_ids_to_remove: list
:param cache_node_ids_to_remove: A list of cache node IDs to be
removed. A node ID is a numeric identifier (0001, 0002, etc.). This
parameter is only valid when NumCacheNodes is less than the
existing number of cache nodes. The number of cache node IDs
supplied in this parameter must match the difference between the
existing number of cache nodes in the cluster and the value of
NumCacheNodes in the request.
:type cache_security_group_names: list
:param cache_security_group_names: A list of cache security group names
to authorize on this cache cluster. This change is asynchronously
applied as soon as possible.
This parameter can be used only with clusters that are created outside
of an Amazon Virtual Private Cloud (VPC).
Constraints: Must contain no more than 255 alphanumeric characters.
Must not be "Default".
:type security_group_ids: list
:param security_group_ids: Specifies the VPC Security Groups associated
with the cache cluster.
This parameter can be used only with clusters that are created in an
Amazon Virtual Private Cloud (VPC).
:type preferred_maintenance_window: string
:param preferred_maintenance_window: The weekly time range (in UTC)
during which system maintenance can occur. Note that system
maintenance may result in an outage. This change is made
immediately. If you are moving this window to the current time,
there must be at least 120 minutes between the current time and end
of the window to ensure that pending changes are applied.
:type notification_topic_arn: string
:param notification_topic_arn:
The Amazon Resource Name (ARN) of the SNS topic to which notifications
will be sent.
        The SNS topic owner must be the same as the cache cluster owner.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
group to apply to this cache cluster. This change is asynchronously
applied as soon as possible for parameters when the
ApplyImmediately parameter is specified as true for this request.
:type notification_topic_status: string
:param notification_topic_status: The status of the Amazon SNS
notification topic. Notifications are sent only if the status is
            `active`.
Valid values: `active` | `inactive`
:type apply_immediately: boolean
:param apply_immediately: If `True`, this parameter causes the
modifications in this request and any pending modifications to be
applied, asynchronously and as soon as possible, regardless of the
PreferredMaintenanceWindow setting for the cache cluster.
If `False`, then changes to the cache cluster are applied on the next
maintenance reboot, or the next failure reboot, whichever occurs
first.
Valid values: `True` | `False`
Default: `False`
:type engine_version: string
:param engine_version: The upgraded version of the cache engine to be
run on the cache cluster nodes.
:type auto_minor_version_upgrade: boolean
:param auto_minor_version_upgrade: If `True`, then minor engine
upgrades will be applied automatically to the cache cluster during
the maintenance window.
Valid values: `True` | `False`
Default: `True`
"""
params = {'CacheClusterId': cache_cluster_id, }
if num_cache_nodes is not None:
params['NumCacheNodes'] = num_cache_nodes
if cache_node_ids_to_remove is not None:
self.build_list_params(params,
cache_node_ids_to_remove,
'CacheNodeIdsToRemove.member')
if cache_security_group_names is not None:
self.build_list_params(params,
cache_security_group_names,
'CacheSecurityGroupNames.member')
if security_group_ids is not None:
self.build_list_params(params,
security_group_ids,
'SecurityGroupIds.member')
if preferred_maintenance_window is not None:
params['PreferredMaintenanceWindow'] = preferred_maintenance_window
if notification_topic_arn is not None:
params['NotificationTopicArn'] = notification_topic_arn
if cache_parameter_group_name is not None:
params['CacheParameterGroupName'] = cache_parameter_group_name
if notification_topic_status is not None:
params['NotificationTopicStatus'] = notification_topic_status
if apply_immediately is not None:
params['ApplyImmediately'] = str(
apply_immediately).lower()
if engine_version is not None:
params['EngineVersion'] = engine_version
if auto_minor_version_upgrade is not None:
params['AutoMinorVersionUpgrade'] = str(
auto_minor_version_upgrade).lower()
return self._make_request(
action='ModifyCacheCluster',
verb='POST',
path='/', params=params)
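    # A sketch of the node-removal constraint documented above: shrinking a
    # cluster from three nodes to one requires exactly two node IDs (all
    # identifiers are placeholders):
    #
    #     conn.modify_cache_cluster(
    #         'my-memcached', num_cache_nodes=1,
    #         cache_node_ids_to_remove=['0002', '0003'],
    #         apply_immediately=True)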
def modify_cache_parameter_group(self, cache_parameter_group_name,
parameter_name_values):
"""
The ModifyCacheParameterGroup operation modifies the
parameters of a cache parameter group. You can modify up to 20
parameters in a single request by submitting a list parameter
name and value pairs.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
group to modify.
:type parameter_name_values: list
:param parameter_name_values: An array of parameter names and values
for the parameter update. You must supply at least one parameter
name and value; subsequent arguments are optional. A maximum of 20
parameters may be modified per request.
"""
params = {
'CacheParameterGroupName': cache_parameter_group_name,
}
self.build_complex_list_params(
params, parameter_name_values,
'ParameterNameValues.member',
('ParameterName', 'ParameterValue'))
return self._make_request(
action='ModifyCacheParameterGroup',
verb='POST',
path='/', params=params)
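    # `build_complex_list_params` zips each item against the
    # ('ParameterName', 'ParameterValue') labels above, so
    # `parameter_name_values` is naturally a list of (name, value) pairs.
    # A hedged sketch with placeholder Memcached parameter names:
    #
    #     conn.modify_cache_parameter_group(
    #         'myparamgroup',
    #         [('max_item_size', '10485760'), ('chunk_size', '96')])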
def modify_cache_subnet_group(self, cache_subnet_group_name,
cache_subnet_group_description=None,
subnet_ids=None):
"""
The ModifyCacheSubnetGroup operation modifies an existing
cache subnet group.
:type cache_subnet_group_name: string
:param cache_subnet_group_name: The name for the cache subnet group.
This value is stored as a lowercase string.
Constraints: Must contain no more than 255 alphanumeric characters or
hyphens.
Example: `mysubnetgroup`
:type cache_subnet_group_description: string
:param cache_subnet_group_description: A description for the cache
subnet group.
:type subnet_ids: list
:param subnet_ids: The EC2 subnet IDs for the cache subnet group.
"""
params = {'CacheSubnetGroupName': cache_subnet_group_name, }
if cache_subnet_group_description is not None:
params['CacheSubnetGroupDescription'] = cache_subnet_group_description
if subnet_ids is not None:
self.build_list_params(params,
subnet_ids,
'SubnetIds.member')
return self._make_request(
action='ModifyCacheSubnetGroup',
verb='POST',
path='/', params=params)
def modify_replication_group(self, replication_group_id,
replication_group_description=None,
cache_security_group_names=None,
security_group_ids=None,
preferred_maintenance_window=None,
notification_topic_arn=None,
cache_parameter_group_name=None,
notification_topic_status=None,
apply_immediately=None, engine_version=None,
auto_minor_version_upgrade=None,
primary_cluster_id=None):
"""
The ModifyReplicationGroup operation modifies the settings for
a replication group.
:type replication_group_id: string
:param replication_group_id: The identifier of the replication group to
modify.
:type replication_group_description: string
:param replication_group_description: A description for the replication
group. Maximum length is 255 characters.
:type cache_security_group_names: list
:param cache_security_group_names: A list of cache security group names
to authorize for the clusters in this replication group. This
change is asynchronously applied as soon as possible.
This parameter can be used only with replication groups containing
cache clusters running outside of an Amazon Virtual Private Cloud
(VPC).
Constraints: Must contain no more than 255 alphanumeric characters.
Must not be "Default".
:type security_group_ids: list
:param security_group_ids: Specifies the VPC Security Groups associated
with the cache clusters in the replication group.
This parameter can be used only with replication groups containing
cache clusters running in an Amazon Virtual Private Cloud (VPC).
:type preferred_maintenance_window: string
:param preferred_maintenance_window: The weekly time range (in UTC)
during which replication group system maintenance can occur. Note
that system maintenance may result in an outage. This change is
made immediately. If you are moving this window to the current
time, there must be at least 120 minutes between the current time
and end of the window to ensure that pending changes are applied.
:type notification_topic_arn: string
:param notification_topic_arn:
The Amazon Resource Name (ARN) of the SNS topic to which notifications
will be sent.
        The SNS topic owner must be the same as the replication group owner.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
group to apply to all of the cache nodes in this replication group.
This change is asynchronously applied as soon as possible for
parameters when the ApplyImmediately parameter is specified as true
for this request.
:type notification_topic_status: string
:param notification_topic_status: The status of the Amazon SNS
notification topic for the replication group. Notifications are
            sent only if the status is `active`.
Valid values: `active` | `inactive`
:type apply_immediately: boolean
:param apply_immediately: If `True`, this parameter causes the
modifications in this request and any pending modifications to be
applied, asynchronously and as soon as possible, regardless of the
PreferredMaintenanceWindow setting for the replication group.
If `False`, then changes to the nodes in the replication group are
applied on the next maintenance reboot, or the next failure reboot,
whichever occurs first.
Valid values: `True` | `False`
Default: `False`
:type engine_version: string
:param engine_version: The upgraded version of the cache engine to be
            run on the nodes in the replication group.
:type auto_minor_version_upgrade: boolean
:param auto_minor_version_upgrade: Determines whether minor engine
upgrades will be applied automatically to all of the cache nodes in
the replication group during the maintenance window. A value of
`True` allows these upgrades to occur; `False` disables automatic
upgrades.
:type primary_cluster_id: string
:param primary_cluster_id: If this parameter is specified, ElastiCache
will promote each of the nodes in the specified cache cluster to
the primary role. The nodes of all other clusters in the
replication group will be read replicas.
"""
params = {'ReplicationGroupId': replication_group_id, }
if replication_group_description is not None:
params['ReplicationGroupDescription'] = replication_group_description
if cache_security_group_names is not None:
self.build_list_params(params,
cache_security_group_names,
'CacheSecurityGroupNames.member')
if security_group_ids is not None:
self.build_list_params(params,
security_group_ids,
'SecurityGroupIds.member')
if preferred_maintenance_window is not None:
params['PreferredMaintenanceWindow'] = preferred_maintenance_window
if notification_topic_arn is not None:
params['NotificationTopicArn'] = notification_topic_arn
if cache_parameter_group_name is not None:
params['CacheParameterGroupName'] = cache_parameter_group_name
if notification_topic_status is not None:
params['NotificationTopicStatus'] = notification_topic_status
if apply_immediately is not None:
params['ApplyImmediately'] = str(
apply_immediately).lower()
if engine_version is not None:
params['EngineVersion'] = engine_version
if auto_minor_version_upgrade is not None:
params['AutoMinorVersionUpgrade'] = str(
auto_minor_version_upgrade).lower()
if primary_cluster_id is not None:
params['PrimaryClusterId'] = primary_cluster_id
return self._make_request(
action='ModifyReplicationGroup',
verb='POST',
path='/', params=params)
def purchase_reserved_cache_nodes_offering(self,
reserved_cache_nodes_offering_id,
reserved_cache_node_id=None,
cache_node_count=None):
"""
The PurchaseReservedCacheNodesOffering operation allows you to
purchase a reserved cache node offering.
:type reserved_cache_nodes_offering_id: string
:param reserved_cache_nodes_offering_id: The ID of the reserved cache
node offering to purchase.
Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
:type reserved_cache_node_id: string
:param reserved_cache_node_id: A customer-specified identifier to track
this reservation.
Example: myreservationID
:type cache_node_count: integer
:param cache_node_count: The number of cache node instances to reserve.
Default: `1`
"""
params = {
'ReservedCacheNodesOfferingId': reserved_cache_nodes_offering_id,
}
if reserved_cache_node_id is not None:
params['ReservedCacheNodeId'] = reserved_cache_node_id
if cache_node_count is not None:
params['CacheNodeCount'] = cache_node_count
return self._make_request(
action='PurchaseReservedCacheNodesOffering',
verb='POST',
path='/', params=params)
def reboot_cache_cluster(self, cache_cluster_id,
cache_node_ids_to_reboot):
"""
The RebootCacheCluster operation reboots some, or all, of the
cache cluster nodes within a provisioned cache cluster. This
API will apply any modified cache parameter groups to the
cache cluster. The reboot action takes place as soon as
possible, and results in a momentary outage to the cache
cluster. During the reboot, the cache cluster status is set to
REBOOTING.
The reboot causes the contents of the cache (for each cache
cluster node being rebooted) to be lost.
When the reboot is complete, a cache cluster event is created.
:type cache_cluster_id: string
:param cache_cluster_id: The cache cluster identifier. This parameter
is stored as a lowercase string.
:type cache_node_ids_to_reboot: list
:param cache_node_ids_to_reboot: A list of cache cluster node IDs to
reboot. A node ID is a numeric identifier (0001, 0002, etc.). To
reboot an entire cache cluster, specify all of the cache cluster
node IDs.
"""
params = {'CacheClusterId': cache_cluster_id, }
self.build_list_params(params,
cache_node_ids_to_reboot,
'CacheNodeIdsToReboot.member')
return self._make_request(
action='RebootCacheCluster',
verb='POST',
path='/', params=params)
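    # A minimal sketch: per the docstring, rebooting an entire two-node
    # cluster means listing every node ID (identifiers are placeholders):
    #
    #     conn.reboot_cache_cluster('my-memcached', ['0001', '0002'])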
def reset_cache_parameter_group(self, cache_parameter_group_name,
parameter_name_values,
reset_all_parameters=None):
"""
The ResetCacheParameterGroup operation modifies the parameters
of a cache parameter group to the engine or system default
value. You can reset specific parameters by submitting a list
of parameter names. To reset the entire cache parameter group,
specify the ResetAllParameters and CacheParameterGroupName
parameters.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
group to reset.
:type reset_all_parameters: boolean
        :param reset_all_parameters: If `True`, all parameters in the cache
            parameter group will be reset to default values. If `False`, no such
            action occurs.
Valid values: `True` | `False`
:type parameter_name_values: list
:param parameter_name_values: An array of parameter names to be reset.
If you are not resetting the entire cache parameter group, you must
specify at least one parameter name.
"""
params = {
'CacheParameterGroupName': cache_parameter_group_name,
}
self.build_complex_list_params(
params, parameter_name_values,
'ParameterNameValues.member',
('ParameterName', 'ParameterValue'))
if reset_all_parameters is not None:
params['ResetAllParameters'] = str(
reset_all_parameters).lower()
return self._make_request(
action='ResetCacheParameterGroup',
verb='POST',
path='/', params=params)
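    # A hedged sketch of the two reset modes described above. Because this
    # method always serializes `parameter_name_values`, pass one-tuples when
    # resetting named parameters (only ParameterName is zipped in) and an
    # empty list when resetting everything (names are placeholders):
    #
    #     conn.reset_cache_parameter_group('myparamgroup',
    #                                      [('max_item_size',)])
    #     conn.reset_cache_parameter_group('myparamgroup', [],
    #                                      reset_all_parameters=True)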
def revoke_cache_security_group_ingress(self, cache_security_group_name,
ec2_security_group_name,
ec2_security_group_owner_id):
"""
The RevokeCacheSecurityGroupIngress operation revokes ingress
from a cache security group. Use this operation to disallow
access from an Amazon EC2 security group that had been
previously authorized.
:type cache_security_group_name: string
:param cache_security_group_name: The name of the cache security group
to revoke ingress from.
:type ec2_security_group_name: string
:param ec2_security_group_name: The name of the Amazon EC2 security
group to revoke access from.
:type ec2_security_group_owner_id: string
:param ec2_security_group_owner_id: The AWS account number of the
Amazon EC2 security group owner. Note that this is not the same
thing as an AWS access key ID - you must provide a valid AWS
account number for this parameter.
"""
params = {
'CacheSecurityGroupName': cache_security_group_name,
'EC2SecurityGroupName': ec2_security_group_name,
'EC2SecurityGroupOwnerId': ec2_security_group_owner_id,
}
return self._make_request(
action='RevokeCacheSecurityGroupIngress',
verb='POST',
path='/', params=params)
def _make_request(self, action, verb, path, params):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb='POST',
path='/', params=params)
body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
else:
raise self.ResponseError(response.status, response.reason, body)
|
matejcik/osc
|
refs/heads/osclib
|
osclib/util.py
|
1
|
from collections import namedtuple
from . import package
class PackagePath (namedtuple("PackagePath", "project, name")):
"""
This is horrible.
It must be changed.
We can't have inconsistency between things that return PackagePath
and Package. That's pure bullshit.
Why am I even doing this.
"""
__slots__ = ()
def package(self, api):
return package.Package(api, self.project, self.name)
# TODO look for usages of split_package, make sure we don't *need* them
# or convert them all to take an api argument and make them a full Package
# Or change Package to not need API?
def split_package(arg, brg=None):
if isinstance(arg, package.Package) or isinstance(arg, PackagePath):
return arg
if hasattr(arg, '__iter__'):
# list, tuple, generator
return PackagePath(*arg)
if brg is not None:
return PackagePath(arg, brg)
components = arg.split('/')
if not len(components) == 2:
raise ValueError("'{}' is not a package string".format(arg))
return PackagePath(*components)
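# A few illustrative calls (hypothetical project/package names); all three
# forms below yield PackagePath('home:user', 'mypackage'):
#
#     split_package('home:user/mypackage')
#     split_package('home:user', 'mypackage')
#     split_package(('home:user', 'mypackage'))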
|
yephper/django
|
refs/heads/master
|
tests/migrations/test_state.py
|
1
|
from django.apps.registry import Apps
from django.contrib.contenttypes.fields import GenericForeignKey
from django.db import models
from django.db.migrations.exceptions import InvalidBasesError
from django.db.migrations.operations import (
AddField, AlterField, DeleteModel, RemoveField,
)
from django.db.migrations.state import (
ModelState, ProjectState, get_related_models_recursive,
)
from django.test import SimpleTestCase, override_settings
from django.utils import six
from .models import (
FoodManager, FoodQuerySet, ModelWithCustomBase, NoMigrationFoodManager,
UnicodeModel,
)
class StateTests(SimpleTestCase):
"""
Tests state construction, rendering and modification by operations.
"""
def test_create(self):
"""
Tests making a ProjectState from an Apps
"""
new_apps = Apps(["migrations"])
class Author(models.Model):
name = models.CharField(max_length=255)
bio = models.TextField()
age = models.IntegerField(blank=True, null=True)
class Meta:
app_label = "migrations"
apps = new_apps
unique_together = ["name", "bio"]
index_together = ["bio", "age"]
class AuthorProxy(Author):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
ordering = ["name"]
class SubAuthor(Author):
width = models.FloatField(null=True)
class Meta:
app_label = "migrations"
apps = new_apps
class Book(models.Model):
title = models.CharField(max_length=1000)
author = models.ForeignKey(Author, models.CASCADE)
contributors = models.ManyToManyField(Author)
class Meta:
app_label = "migrations"
apps = new_apps
verbose_name = "tome"
db_table = "test_tome"
class Food(models.Model):
food_mgr = FoodManager('a', 'b')
food_qs = FoodQuerySet.as_manager()
food_no_mgr = NoMigrationFoodManager('x', 'y')
class Meta:
app_label = "migrations"
apps = new_apps
class FoodNoManagers(models.Model):
class Meta:
app_label = "migrations"
apps = new_apps
class FoodNoDefaultManager(models.Model):
food_no_mgr = NoMigrationFoodManager('x', 'y')
food_mgr = FoodManager('a', 'b')
food_qs = FoodQuerySet.as_manager()
class Meta:
app_label = "migrations"
apps = new_apps
mgr1 = FoodManager('a', 'b')
mgr2 = FoodManager('x', 'y', c=3, d=4)
class FoodOrderedManagers(models.Model):
# The managers on this model should be ordered by their creation
# counter and not by the order in model body
food_no_mgr = NoMigrationFoodManager('x', 'y')
food_mgr2 = mgr2
food_mgr1 = mgr1
class Meta:
app_label = "migrations"
apps = new_apps
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
author_proxy_state = project_state.models['migrations', 'authorproxy']
sub_author_state = project_state.models['migrations', 'subauthor']
book_state = project_state.models['migrations', 'book']
food_state = project_state.models['migrations', 'food']
food_no_managers_state = project_state.models['migrations', 'foodnomanagers']
food_no_default_manager_state = project_state.models['migrations', 'foodnodefaultmanager']
food_order_manager_state = project_state.models['migrations', 'foodorderedmanagers']
self.assertEqual(author_state.app_label, "migrations")
self.assertEqual(author_state.name, "Author")
self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"])
self.assertEqual(author_state.fields[1][1].max_length, 255)
self.assertEqual(author_state.fields[2][1].null, False)
self.assertEqual(author_state.fields[3][1].null, True)
self.assertEqual(
author_state.options,
{"unique_together": {("name", "bio")}, "index_together": {("bio", "age")}}
)
self.assertEqual(author_state.bases, (models.Model, ))
self.assertEqual(book_state.app_label, "migrations")
self.assertEqual(book_state.name, "Book")
self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author", "contributors"])
self.assertEqual(book_state.fields[1][1].max_length, 1000)
self.assertEqual(book_state.fields[2][1].null, False)
self.assertEqual(book_state.fields[3][1].__class__.__name__, "ManyToManyField")
self.assertEqual(book_state.options, {"verbose_name": "tome", "db_table": "test_tome"})
self.assertEqual(book_state.bases, (models.Model, ))
self.assertEqual(author_proxy_state.app_label, "migrations")
self.assertEqual(author_proxy_state.name, "AuthorProxy")
self.assertEqual(author_proxy_state.fields, [])
self.assertEqual(author_proxy_state.options, {"proxy": True, "ordering": ["name"]})
self.assertEqual(author_proxy_state.bases, ("migrations.author", ))
self.assertEqual(sub_author_state.app_label, "migrations")
self.assertEqual(sub_author_state.name, "SubAuthor")
self.assertEqual(len(sub_author_state.fields), 2)
self.assertEqual(sub_author_state.bases, ("migrations.author", ))
# The default manager is used in migrations
self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr'])
self.assertTrue(all(isinstance(name, six.text_type) for name, mgr in food_state.managers))
self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2))
# No explicit managers defined. Migrations will fall back to the default
self.assertEqual(food_no_managers_state.managers, [])
# food_mgr is used in migration but isn't the default mgr, hence add the
# default
self.assertEqual([name for name, mgr in food_no_default_manager_state.managers],
['food_no_mgr', 'food_mgr'])
self.assertTrue(all(isinstance(name, six.text_type) for name, mgr in food_no_default_manager_state.managers))
self.assertEqual(food_no_default_manager_state.managers[0][1].__class__, models.Manager)
self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager)
self.assertEqual([name for name, mgr in food_order_manager_state.managers],
['food_mgr1', 'food_mgr2'])
self.assertTrue(all(isinstance(name, six.text_type) for name, mgr in food_order_manager_state.managers))
self.assertEqual([mgr.args for name, mgr in food_order_manager_state.managers],
[('a', 'b', 1, 2), ('x', 'y', 3, 4)])
def test_custom_default_manager_added_to_the_model_state(self):
"""
When the default manager of the model is a custom manager,
it needs to be added to the model state.
"""
new_apps = Apps(['migrations'])
custom_manager = models.Manager()
class Author(models.Model):
objects = models.TextField()
authors = custom_manager
class Meta:
app_label = 'migrations'
apps = new_apps
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
self.assertEqual(author_state.managers, [('authors', custom_manager)])
def test_apps_bulk_update(self):
"""
StateApps.bulk_update() should update apps.ready to False and reset
the value afterwards.
"""
project_state = ProjectState()
apps = project_state.apps
with apps.bulk_update():
self.assertFalse(apps.ready)
self.assertTrue(apps.ready)
with self.assertRaises(ValueError):
with apps.bulk_update():
self.assertFalse(apps.ready)
raise ValueError()
self.assertTrue(apps.ready)
def test_render(self):
"""
Tests rendering a ProjectState into an Apps.
"""
project_state = ProjectState()
project_state.add_model(ModelState(
app_label="migrations",
name="Tag",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("hidden", models.BooleanField()),
],
))
project_state.add_model(ModelState(
app_label="migrations",
name="SubTag",
fields=[
('tag_ptr', models.OneToOneField(
'migrations.Tag',
models.CASCADE,
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("awesome", models.BooleanField()),
],
bases=("migrations.Tag",),
))
base_mgr = models.Manager()
mgr1 = FoodManager('a', 'b')
mgr2 = FoodManager('x', 'y', c=3, d=4)
project_state.add_model(ModelState(
app_label="migrations",
name="Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
# The ordering we really want is default, food_mgr1, food_mgr2
('default', base_mgr),
('food_mgr2', mgr2),
(b'food_mgr1', mgr1),
]
))
new_apps = project_state.apps
self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length, 100)
self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null, False)
self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2)
Food = new_apps.get_model("migrations", "Food")
managers = sorted(Food._meta.managers)
self.assertEqual([mgr.name for _, mgr, _ in managers],
['default', 'food_mgr1', 'food_mgr2'])
self.assertTrue(all(isinstance(mgr.name, six.text_type) for _, mgr, _ in managers))
self.assertEqual([mgr.__class__ for _, mgr, _ in managers],
[models.Manager, FoodManager, FoodManager])
self.assertIs(managers[0][1], Food._default_manager)
def test_render_model_inheritance(self):
class Book(models.Model):
title = models.CharField(max_length=1000)
class Meta:
app_label = "migrations"
apps = Apps()
class Novel(Book):
class Meta:
app_label = "migrations"
apps = Apps()
# First, test rendering individually
apps = Apps(["migrations"])
# We shouldn't be able to render yet
ms = ModelState.from_model(Novel)
with self.assertRaises(InvalidBasesError):
ms.render(apps)
# Once the parent model is in the app registry, it should be fine
ModelState.from_model(Book).render(apps)
ModelState.from_model(Novel).render(apps)
def test_render_model_with_multiple_inheritance(self):
class Foo(models.Model):
class Meta:
app_label = "migrations"
apps = Apps()
class Bar(models.Model):
class Meta:
app_label = "migrations"
apps = Apps()
class FooBar(Foo, Bar):
class Meta:
app_label = "migrations"
apps = Apps()
class AbstractSubFooBar(FooBar):
class Meta:
abstract = True
apps = Apps()
class SubFooBar(AbstractSubFooBar):
class Meta:
app_label = "migrations"
apps = Apps()
apps = Apps(["migrations"])
# We shouldn't be able to render yet
ms = ModelState.from_model(FooBar)
with self.assertRaises(InvalidBasesError):
ms.render(apps)
# Once the parent models are in the app registry, it should be fine
ModelState.from_model(Foo).render(apps)
self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model])
ModelState.from_model(Bar).render(apps)
self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model])
ModelState.from_model(FooBar).render(apps)
self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar'])
ModelState.from_model(SubFooBar).render(apps)
self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar'])
def test_render_project_dependencies(self):
"""
Tests that the ProjectState render method correctly renders models
to account for inter-model base dependencies.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = "migrations"
apps = new_apps
class B(A):
class Meta:
app_label = "migrations"
apps = new_apps
class C(B):
class Meta:
app_label = "migrations"
apps = new_apps
class D(A):
class Meta:
app_label = "migrations"
apps = new_apps
class E(B):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
class F(D):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
# Make a ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
project_state.add_model(ModelState.from_model(C))
project_state.add_model(ModelState.from_model(D))
project_state.add_model(ModelState.from_model(E))
project_state.add_model(ModelState.from_model(F))
final_apps = project_state.apps
self.assertEqual(len(final_apps.get_models()), 6)
# Now make an invalid ProjectState and make sure it fails
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
project_state.add_model(ModelState.from_model(C))
project_state.add_model(ModelState.from_model(F))
with self.assertRaises(InvalidBasesError):
project_state.apps
def test_render_unique_app_labels(self):
"""
Tests that the ProjectState render method doesn't raise an
ImproperlyConfigured exception about unique labels if two dotted app
names have the same last part.
"""
class A(models.Model):
class Meta:
app_label = "django.contrib.auth"
class B(models.Model):
class Meta:
app_label = "vendor.auth"
# Make a ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
self.assertEqual(len(project_state.apps.get_models()), 2)
def test_add_relations(self):
"""
#24573 - Adding relations to existing models should reload the
referenced models too.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = 'something'
apps = new_apps
class B(A):
class Meta:
app_label = 'something'
apps = new_apps
class C(models.Model):
class Meta:
app_label = 'something'
apps = new_apps
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
project_state.add_model(ModelState.from_model(C))
project_state.apps # We need to work with rendered models
old_state = project_state.clone()
model_a_old = old_state.apps.get_model('something', 'A')
model_b_old = old_state.apps.get_model('something', 'B')
model_c_old = old_state.apps.get_model('something', 'C')
# Check that the relations between the old models are correct
self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old)
self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old)
operation = AddField('c', 'to_a', models.OneToOneField(
'something.A',
models.CASCADE,
related_name='from_c',
))
operation.state_forwards('something', project_state)
model_a_new = project_state.apps.get_model('something', 'A')
model_b_new = project_state.apps.get_model('something', 'B')
model_c_new = project_state.apps.get_model('something', 'C')
# Check that all models have changed
self.assertIsNot(model_a_old, model_a_new)
self.assertIsNot(model_b_old, model_b_new)
self.assertIsNot(model_c_old, model_c_new)
# Check that the relations between the old models still hold
self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old)
self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old)
# Check that the relations between the new models are correct
self.assertIs(model_a_new._meta.get_field('b').related_model, model_b_new)
self.assertIs(model_b_new._meta.get_field('a_ptr').related_model, model_a_new)
self.assertIs(model_a_new._meta.get_field('from_c').related_model, model_c_new)
self.assertIs(model_c_new._meta.get_field('to_a').related_model, model_a_new)
def test_remove_relations(self):
"""
#24225 - Tests that relations between models are updated while
preserving the relations and references of models in an old state.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = "something"
apps = new_apps
class B(models.Model):
to_a = models.ForeignKey(A, models.CASCADE)
class Meta:
app_label = "something"
apps = new_apps
def get_model_a(state):
return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0]
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1)
old_state = project_state.clone()
operation = RemoveField("b", "to_a")
operation.state_forwards("something", project_state)
# Tests that model from old_state still has the relation
model_a_old = get_model_a(old_state)
model_a_new = get_model_a(project_state)
self.assertIsNot(model_a_old, model_a_new)
self.assertEqual(len(model_a_old._meta.related_objects), 1)
self.assertEqual(len(model_a_new._meta.related_objects), 0)
# Same test for deleted model
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
old_state = project_state.clone()
operation = DeleteModel("b")
operation.state_forwards("something", project_state)
model_a_old = get_model_a(old_state)
model_a_new = get_model_a(project_state)
self.assertIsNot(model_a_old, model_a_new)
self.assertEqual(len(model_a_old._meta.related_objects), 1)
self.assertEqual(len(model_a_new._meta.related_objects), 0)
def test_self_relation(self):
"""
#24513 - Modifying an object pointing to itself would cause it to be
rendered twice, breaking its related M2M through objects.
"""
class A(models.Model):
to_a = models.ManyToManyField('something.A', symmetrical=False)
class Meta:
app_label = "something"
def get_model_a(state):
return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0]
project_state = ProjectState()
project_state.add_model((ModelState.from_model(A)))
self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1)
old_state = project_state.clone()
operation = AlterField(
model_name="a",
name="to_a",
field=models.ManyToManyField("something.A", symmetrical=False, blank=True)
)
# At this point the model would be rendered twice causing its related
# M2M through objects to point to an old copy and thus breaking their
# attribute lookup.
operation.state_forwards("something", project_state)
model_a_old = get_model_a(old_state)
model_a_new = get_model_a(project_state)
self.assertIsNot(model_a_old, model_a_new)
# Tests that the old model's _meta is still consistent
field_to_a_old = model_a_old._meta.get_field("to_a")
self.assertEqual(field_to_a_old.m2m_field_name(), "from_a")
self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a")
self.assertIs(field_to_a_old.related_model, model_a_old)
self.assertIs(field_to_a_old.remote_field.through._meta.get_field('to_a').related_model, model_a_old)
self.assertIs(field_to_a_old.remote_field.through._meta.get_field('from_a').related_model, model_a_old)
# Tests that the new model's _meta is still consistent
field_to_a_new = model_a_new._meta.get_field("to_a")
self.assertEqual(field_to_a_new.m2m_field_name(), "from_a")
self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a")
self.assertIs(field_to_a_new.related_model, model_a_new)
self.assertIs(field_to_a_new.remote_field.through._meta.get_field('to_a').related_model, model_a_new)
self.assertIs(field_to_a_new.remote_field.through._meta.get_field('from_a').related_model, model_a_new)
def test_equality(self):
"""
Tests that == and != are implemented correctly.
"""
# Test two things that should be equal
project_state = ProjectState()
project_state.add_model(ModelState(
"migrations",
"Tag",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("hidden", models.BooleanField()),
],
{},
None,
))
project_state.apps # Fill the apps cached property
other_state = project_state.clone()
self.assertEqual(project_state, project_state)
self.assertEqual(project_state, other_state)
self.assertEqual(project_state != project_state, False)
self.assertEqual(project_state != other_state, False)
self.assertNotEqual(project_state.apps, other_state.apps)
# Make a very small change (max_len 99) and see if that affects it
project_state = ProjectState()
project_state.add_model(ModelState(
"migrations",
"Tag",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=99)),
("hidden", models.BooleanField()),
],
{},
None,
))
self.assertNotEqual(project_state, other_state)
self.assertEqual(project_state == other_state, False)
def test_dangling_references_throw_error(self):
new_apps = Apps()
class Author(models.Model):
name = models.TextField()
class Meta:
app_label = "migrations"
apps = new_apps
class Publisher(models.Model):
name = models.TextField()
class Meta:
app_label = "migrations"
apps = new_apps
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
publisher = models.ForeignKey(Publisher, models.CASCADE)
class Meta:
app_label = "migrations"
apps = new_apps
class Magazine(models.Model):
authors = models.ManyToManyField(Author)
class Meta:
app_label = "migrations"
apps = new_apps
# Make a valid ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Author))
project_state.add_model(ModelState.from_model(Publisher))
project_state.add_model(ModelState.from_model(Book))
project_state.add_model(ModelState.from_model(Magazine))
self.assertEqual(len(project_state.apps.get_models()), 4)
# now make an invalid one with a ForeignKey
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Book))
msg = (
"Unhandled pending operations for models:\n"
" migrations.author (referred to by fields: migrations.Book.author)\n"
" migrations.publisher (referred to by fields: migrations.Book.publisher)"
)
with self.assertRaisesMessage(ValueError, msg):
project_state.apps
# And another with ManyToManyField.
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Magazine))
msg = (
"Unhandled pending operations for models:\n"
" migrations.author (referred to by fields: "
"migrations.Magazine.authors, migrations.Magazine_authors.author)"
)
with self.assertRaisesMessage(ValueError, msg):
project_state.apps
# And now with multiple models and multiple fields.
project_state.add_model(ModelState.from_model(Book))
msg = (
"Unhandled pending operations for models:\n"
" migrations.author (referred to by fields: migrations.Book.author, "
"migrations.Magazine.authors, migrations.Magazine_authors.author)\n"
" migrations.publisher (referred to by fields: migrations.Book.publisher)"
)
with self.assertRaisesMessage(ValueError, msg):
project_state.apps
def test_real_apps(self):
"""
Tests that including real apps can resolve dangling FK errors.
This test relies on the fact that contenttypes is always loaded.
"""
new_apps = Apps()
class TestModel(models.Model):
ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE)
class Meta:
app_label = "migrations"
apps = new_apps
# If we just stick it into an empty state it should fail
project_state = ProjectState()
project_state.add_model(ModelState.from_model(TestModel))
with self.assertRaises(ValueError):
project_state.apps
# If we include the real app it should succeed
project_state = ProjectState(real_apps=["contenttypes"])
project_state.add_model(ModelState.from_model(TestModel))
rendered_state = project_state.apps
self.assertEqual(
len([x for x in rendered_state.get_models() if x._meta.app_label == "migrations"]),
1,
)
def test_ignore_order_wrt(self):
"""
Makes sure ProjectState doesn't include OrderWrt fields when
making from existing models.
"""
new_apps = Apps()
class Author(models.Model):
name = models.TextField()
class Meta:
app_label = "migrations"
apps = new_apps
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
class Meta:
app_label = "migrations"
apps = new_apps
order_with_respect_to = "author"
# Make a valid ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Author))
project_state.add_model(ModelState.from_model(Book))
self.assertEqual(
[name for name, field in project_state.models["migrations", "book"].fields],
["id", "author"],
)
def test_manager_refer_correct_model_version(self):
"""
#24147 - Tests that managers refer to the correct version of a
historical model
"""
project_state = ProjectState()
project_state.add_model(ModelState(
app_label="migrations",
name="Tag",
fields=[
("id", models.AutoField(primary_key=True)),
("hidden", models.BooleanField()),
],
managers=[
('food_mgr', FoodManager('a', 'b')),
('food_qs', FoodQuerySet.as_manager()),
]
))
old_model = project_state.apps.get_model('migrations', 'tag')
new_state = project_state.clone()
operation = RemoveField("tag", "hidden")
operation.state_forwards("migrations", new_state)
new_model = new_state.apps.get_model('migrations', 'tag')
self.assertIsNot(old_model, new_model)
self.assertIs(old_model, old_model.food_mgr.model)
self.assertIs(old_model, old_model.food_qs.model)
self.assertIs(new_model, new_model.food_mgr.model)
self.assertIs(new_model, new_model.food_qs.model)
self.assertIsNot(old_model.food_mgr, new_model.food_mgr)
self.assertIsNot(old_model.food_qs, new_model.food_qs)
self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model)
self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model)
def test_choices_iterator(self):
"""
#24483 - ProjectState.from_apps should not destructively consume
Field.choices iterators.
"""
new_apps = Apps(["migrations"])
choices = [('a', 'A'), ('b', 'B')]
class Author(models.Model):
name = models.CharField(max_length=255)
choice = models.CharField(max_length=255, choices=iter(choices))
class Meta:
app_label = "migrations"
apps = new_apps
ProjectState.from_apps(new_apps)
choices_field = Author._meta.get_field('choice')
self.assertEqual(list(choices_field.choices), choices)
class ModelStateTests(SimpleTestCase):
def test_custom_model_base(self):
state = ModelState.from_model(ModelWithCustomBase)
self.assertEqual(state.bases, (models.Model,))
def test_bound_field_sanity_check(self):
field = models.CharField(max_length=1)
field.model = models.Model
with self.assertRaisesMessage(ValueError,
'ModelState.fields cannot be bound to a model - "field" is.'):
ModelState('app', 'Model', [('field', field)])
def test_sanity_check_to(self):
field = models.ForeignKey(UnicodeModel, models.CASCADE)
with self.assertRaisesMessage(ValueError,
'ModelState.fields cannot refer to a model class - "field.to" does. '
'Use a string reference instead.'):
ModelState('app', 'Model', [('field', field)])
def test_sanity_check_through(self):
field = models.ManyToManyField('UnicodeModel')
field.remote_field.through = UnicodeModel
with self.assertRaisesMessage(ValueError,
'ModelState.fields cannot refer to a model class - "field.through" does. '
'Use a string reference instead.'):
ModelState('app', 'Model', [('field', field)])
def test_fields_immutability(self):
"""
Tests that rendering a model state doesn't alter its internal fields.
"""
apps = Apps()
field = models.CharField(max_length=1)
state = ModelState('app', 'Model', [('name', field)])
Model = state.render(apps)
self.assertNotEqual(Model._meta.get_field('name'), field)
def test_repr(self):
field = models.CharField(max_length=1)
state = ModelState('app', 'Model', [('name', field)], bases=['app.A', 'app.B', 'app.C'])
self.assertEqual(repr(state), "<ModelState: 'app.Model'>")
project_state = ProjectState()
project_state.add_model(state)
with self.assertRaisesMessage(InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"):
project_state.apps
@override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
def test_create_swappable(self):
"""
Tests making a ProjectState from an Apps with a swappable model
"""
new_apps = Apps(['migrations'])
class Author(models.Model):
name = models.CharField(max_length=255)
bio = models.TextField()
age = models.IntegerField(blank=True, null=True)
class Meta:
app_label = 'migrations'
apps = new_apps
swappable = 'TEST_SWAPPABLE_MODEL'
author_state = ModelState.from_model(Author)
self.assertEqual(author_state.app_label, 'migrations')
self.assertEqual(author_state.name, 'Author')
self.assertEqual([x for x, y in author_state.fields], ['id', 'name', 'bio', 'age'])
self.assertEqual(author_state.fields[1][1].max_length, 255)
self.assertEqual(author_state.fields[2][1].null, False)
self.assertEqual(author_state.fields[3][1].null, True)
self.assertEqual(author_state.options, {'swappable': 'TEST_SWAPPABLE_MODEL'})
self.assertEqual(author_state.bases, (models.Model, ))
self.assertEqual(author_state.managers, [])
@override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
def test_custom_manager_swappable(self):
"""
Tests making a ProjectState from unused models with custom managers
"""
new_apps = Apps(['migrations'])
class Food(models.Model):
food_mgr = FoodManager('a', 'b')
food_qs = FoodQuerySet.as_manager()
food_no_mgr = NoMigrationFoodManager('x', 'y')
class Meta:
app_label = "migrations"
apps = new_apps
swappable = 'TEST_SWAPPABLE_MODEL'
food_state = ModelState.from_model(Food)
# The default manager is used in migrations
self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr'])
self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2))
class RelatedModelsTests(SimpleTestCase):
def setUp(self):
self.apps = Apps(['migrations.related_models_app'])
def create_model(self, name, foreign_keys=[], bases=(), abstract=False, proxy=False):
test_name = 'related_models_app'
assert not (abstract and proxy)
meta_contents = {
'abstract': abstract,
'app_label': test_name,
'apps': self.apps,
'proxy': proxy,
}
meta = type(str("Meta"), tuple(), meta_contents)
if not bases:
bases = (models.Model,)
body = {
'Meta': meta,
'__module__': "__fake__",
}
fname_base = fname = '%s_%%d' % name.lower()
for i, fk in enumerate(foreign_keys, 1):
fname = fname_base % i
body[fname] = fk
return type(name, bases, body)
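# Illustrative only (hypothetical call): create_model("A",
# foreign_keys=[models.ForeignKey('B', models.CASCADE)]) builds a model
# class named "A" in the test app whose FK field is auto-named "a_1"
# (from the fname_base template above), which is why the M2M tests
# below reference attributes like A.a_1.rel.through.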
def assertRelated(self, model, needle):
self.assertEqual(
get_related_models_recursive(model),
{(n._meta.app_label, n._meta.model_name) for n in needle},
)
def test_unrelated(self):
A = self.create_model("A")
B = self.create_model("B")
self.assertRelated(A, [])
self.assertRelated(B, [])
def test_direct_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B")
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_direct_hidden_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE, related_name='+')])
B = self.create_model("B")
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_fk_through_proxy(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
C = self.create_model("C", bases=(B,), proxy=True)
D = self.create_model("D", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
self.assertRelated(A, [B, C, D])
self.assertRelated(B, [A, C, D])
self.assertRelated(C, [A, B, D])
self.assertRelated(D, [A, B, C])
def test_nested_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
C = self.create_model("C")
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_two_sided(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B", foreign_keys=[models.ForeignKey('A', models.CASCADE)])
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_circle(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
C = self.create_model("C", foreign_keys=[models.ForeignKey('A', models.CASCADE)])
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,))
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_nested_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,))
C = self.create_model("C", bases=(B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_multiple_bases(self):
A = self.create_model("A")
B = self.create_model("B")
C = self.create_model("C", bases=(A, B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_multiple_nested_bases(self):
A = self.create_model("A")
B = self.create_model("B")
C = self.create_model("C", bases=(A, B,))
D = self.create_model("D")
E = self.create_model("E", bases=(D,))
F = self.create_model("F", bases=(C, E,))
Y = self.create_model("Y")
Z = self.create_model("Z", bases=(Y,))
self.assertRelated(A, [B, C, D, E, F])
self.assertRelated(B, [A, C, D, E, F])
self.assertRelated(C, [A, B, D, E, F])
self.assertRelated(D, [A, B, C, E, F])
self.assertRelated(E, [A, B, C, D, F])
self.assertRelated(F, [A, B, C, D, E])
self.assertRelated(Y, [Z])
self.assertRelated(Z, [Y])
def test_base_to_base_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('Y', models.CASCADE)])
B = self.create_model("B", bases=(A,))
Y = self.create_model("Y")
Z = self.create_model("Z", bases=(Y,))
self.assertRelated(A, [B, Y, Z])
self.assertRelated(B, [A, Y, Z])
self.assertRelated(Y, [A, B, Z])
self.assertRelated(Z, [A, B, Y])
def test_base_to_subclass_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('Z', models.CASCADE)])
B = self.create_model("B", bases=(A,))
Y = self.create_model("Y")
Z = self.create_model("Z", bases=(Y,))
self.assertRelated(A, [B, Y, Z])
self.assertRelated(B, [A, Y, Z])
self.assertRelated(Y, [A, B, Z])
self.assertRelated(Z, [A, B, Y])
def test_direct_m2m(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B')])
B = self.create_model("B")
self.assertRelated(A, [A.a_1.rel.through, B])
self.assertRelated(B, [A, A.a_1.rel.through])
def test_direct_m2m_self(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('A')])
self.assertRelated(A, [A.a_1.rel.through])
def test_intermediate_m2m_self(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('A', through='T')])
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('A', models.CASCADE),
])
self.assertRelated(A, [T])
self.assertRelated(T, [A])
def test_intermediate_m2m(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
B = self.create_model("B")
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('B', models.CASCADE),
])
self.assertRelated(A, [B, T])
self.assertRelated(B, [A, T])
self.assertRelated(T, [A, B])
def test_intermediate_m2m_extern_fk(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
B = self.create_model("B")
Z = self.create_model("Z")
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('B', models.CASCADE),
models.ForeignKey('Z', models.CASCADE),
])
self.assertRelated(A, [B, T, Z])
self.assertRelated(B, [A, T, Z])
self.assertRelated(T, [A, B, Z])
self.assertRelated(Z, [A, B, T])
def test_intermediate_m2m_base(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
B = self.create_model("B")
S = self.create_model("S")
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('B', models.CASCADE),
], bases=(S,))
self.assertRelated(A, [B, S, T])
self.assertRelated(B, [A, S, T])
self.assertRelated(S, [A, B, T])
self.assertRelated(T, [A, B, S])
def test_generic_fk(self):
A = self.create_model("A", foreign_keys=[
models.ForeignKey('B', models.CASCADE),
GenericForeignKey(),
])
B = self.create_model("B", foreign_keys=[
models.ForeignKey('C', models.CASCADE),
])
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_abstract_base(self):
A = self.create_model("A", abstract=True)
B = self.create_model("B", bases=(A,))
self.assertRelated(A, [B])
self.assertRelated(B, [])
def test_nested_abstract_base(self):
A = self.create_model("A", abstract=True)
B = self.create_model("B", bases=(A,), abstract=True)
C = self.create_model("C", bases=(B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [C])
self.assertRelated(C, [])
def test_proxy_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
self.assertRelated(A, [B])
self.assertRelated(B, [])
def test_nested_proxy_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
C = self.create_model("C", bases=(B,), proxy=True)
self.assertRelated(A, [B, C])
self.assertRelated(B, [C])
self.assertRelated(C, [])
def test_multiple_mixed_bases(self):
A = self.create_model("A", abstract=True)
M = self.create_model("M")
P = self.create_model("P")
Q = self.create_model("Q", bases=(P,), proxy=True)
Z = self.create_model("Z", bases=(A, M, Q))
# Z has a pointer O2O field p_ptr to P (via the Q proxy), linking M and P
self.assertRelated(A, [M, P, Q, Z])
self.assertRelated(M, [P, Q, Z])
self.assertRelated(P, [M, Q, Z])
self.assertRelated(Q, [M, P, Z])
self.assertRelated(Z, [M, P, Q])
|
Freso/beets
|
refs/heads/master
|
beets/util/confit.py
|
1
|
# -*- coding: utf-8 -*-
# This file is part of Confit.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Worry-free YAML configuration files.
"""
from __future__ import division, absolute_import, print_function
import platform
import os
import pkgutil
import sys
import yaml
import types
import collections
import re
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
UNIX_DIR_VAR = 'XDG_CONFIG_HOME'
UNIX_DIR_FALLBACK = '~/.config'
WINDOWS_DIR_VAR = 'APPDATA'
WINDOWS_DIR_FALLBACK = '~\\AppData\\Roaming'
MAC_DIR = '~/Library/Application Support'
CONFIG_FILENAME = 'config.yaml'
DEFAULT_FILENAME = 'config_default.yaml'
ROOT_NAME = 'root'
YAML_TAB_PROBLEM = "found character '\\t' that cannot start any token"
REDACTED_TOMBSTONE = 'REDACTED'
# Utilities.
PY3 = sys.version_info[0] == 3
STRING = str if PY3 else unicode
BASESTRING = str if PY3 else basestring
NUMERIC_TYPES = (int, float) if PY3 else (int, float, long)
TYPE_TYPES = (type,) if PY3 else (type, types.ClassType)
def iter_first(sequence):
"""Get the first element from an iterable or raise a ValueError if
the iterator generates no values.
"""
it = iter(sequence)
try:
if PY3:
return next(it)
else:
return it.next()
except StopIteration:
raise ValueError()
# Exceptions.
class ConfigError(Exception):
"""Base class for exceptions raised when querying a configuration.
"""
class NotFoundError(ConfigError):
"""A requested value could not be found in the configuration trees.
"""
class ConfigValueError(ConfigError):
"""The value in the configuration is illegal."""
class ConfigTypeError(ConfigValueError):
"""The value in the configuration did not match the expected type.
"""
class ConfigTemplateError(ConfigError):
"""Base class for exceptions raised because of an invalid template.
"""
class ConfigReadError(ConfigError):
"""A configuration file could not be read."""
def __init__(self, filename, reason=None):
self.filename = filename
self.reason = reason
message = u'file {0} could not be read'.format(filename)
if isinstance(reason, yaml.scanner.ScannerError) and \
reason.problem == YAML_TAB_PROBLEM:
# Special-case error message for tab indentation in YAML markup.
message += u': found tab character at line {0}, column {1}'.format(
reason.problem_mark.line + 1,
reason.problem_mark.column + 1,
)
elif reason:
# Generic error message uses exception's message.
message += u': {0}'.format(reason)
super(ConfigReadError, self).__init__(message)
# Views and sources.
class ConfigSource(dict):
"""A dictionary augmented with metadata about the source of the
configuration.
"""
def __init__(self, value, filename=None, default=False):
super(ConfigSource, self).__init__(value)
if filename is not None and not isinstance(filename, BASESTRING):
raise TypeError(u'filename must be a string or None')
self.filename = filename
self.default = default
def __repr__(self):
return 'ConfigSource({0!r}, {1!r}, {2!r})'.format(
super(ConfigSource, self),
self.filename,
self.default,
)
@classmethod
def of(cls, value):
"""Given either a dictionary or a `ConfigSource` object, return
a `ConfigSource` object. This lets a function accept either type
of object as an argument.
"""
if isinstance(value, ConfigSource):
return value
elif isinstance(value, dict):
return ConfigSource(value)
else:
raise TypeError(u'source value must be a dict')
class ConfigView(object):
"""A configuration "view" is a query into a program's configuration
data. A view represents a hypothetical location in the configuration
tree; to extract the data from the location, a client typically
calls the ``view.get()`` method. The client can access children in
the tree (subviews) by subscripting the parent view (i.e.,
``view[key]``).
"""
name = None
"""The name of the view, depicting the path taken through the
configuration in Python-like syntax (e.g., ``foo['bar'][42]``).
"""
def resolve(self):
"""The core (internal) data retrieval method. Generates (value,
source) pairs for each source that contains a value for this
view. May raise ConfigTypeError if a type error occurs while
traversing a source.
"""
raise NotImplementedError
def first(self):
"""Return a (value, source) pair for the first object found for
this view. This amounts to the first element returned by
`resolve`. If no values are available, a NotFoundError is
raised.
"""
pairs = self.resolve()
try:
return iter_first(pairs)
except ValueError:
raise NotFoundError(u"{0} not found".format(self.name))
def exists(self):
"""Determine whether the view has a setting in any source.
"""
try:
self.first()
except NotFoundError:
return False
return True
def add(self, value):
"""Set the *default* value for this configuration view. The
specified value is added as the lowest-priority configuration
data source.
"""
raise NotImplementedError
def set(self, value):
"""*Override* the value for this configuration view. The
specified value is added as the highest-priority configuration
data source.
"""
raise NotImplementedError
def root(self):
"""The RootView object from which this view is descended.
"""
raise NotImplementedError
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
def __iter__(self):
"""Iterate over the keys of a dictionary view or the *subviews*
of a list view.
"""
# Try getting the keys, if this is a dictionary view.
try:
keys = self.keys()
for key in keys:
yield key
except ConfigTypeError:
# Otherwise, try iterating over a list.
collection = self.get()
if not isinstance(collection, (list, tuple)):
raise ConfigTypeError(
u'{0} must be a dictionary or a list, not {1}'.format(
self.name, type(collection).__name__
)
)
# Yield all the indices in the list.
for index in range(len(collection)):
yield self[index]
def __getitem__(self, key):
"""Get a subview of this view."""
return Subview(self, key)
def __setitem__(self, key, value):
"""Create an overlay source to assign a given key under this
view.
"""
self.set({key: value})
def __contains__(self, key):
return self[key].exists()
def set_args(self, namespace):
"""Overlay parsed command-line arguments, generated by a library
like argparse or optparse, onto this view's value.
"""
args = {}
for key, value in namespace.__dict__.items():
if value is not None: # Avoid unset options.
args[key] = value
self.set(args)
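# For illustration, assuming an argparse parser exists elsewhere:
#
#     args = parser.parse_args()
#     config.set_args(args)  # options left at None are not overlaid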
# Magical conversions. These special methods make it possible to use
# View objects somewhat transparently in certain circumstances. For
# example, rather than using ``view.get(bool)``, it's possible to
# just say ``bool(view)`` or use ``view`` in a conditional.
def __str__(self):
"""Get the value for this view as a bytestring.
"""
if PY3:
return self.__unicode__()
else:
return bytes(self.get())
def __unicode__(self):
"""Get the value for this view as a Unicode string.
"""
return STRING(self.get())
def __nonzero__(self):
"""Gets the value for this view as a boolean. (Python 2 only.)
"""
return self.__bool__()
def __bool__(self):
"""Gets the value for this view as a boolean. (Python 3 only.)
"""
return bool(self.get())
# Dictionary emulation methods.
def keys(self):
"""Returns a list containing all the keys available as subviews
of the current view. This enumerates all the keys in *all*
dictionaries matching the current view, in contrast to
``view.get(dict).keys()``, which gets all the keys for the
*first* dict matching the view. If the object for this view in
any source is not a dict, then a ConfigTypeError is raised. The
keys are ordered according to how they appear in each source.
"""
keys = []
for dic, _ in self.resolve():
try:
cur_keys = dic.keys()
except AttributeError:
raise ConfigTypeError(
u'{0} must be a dict, not {1}'.format(
self.name, type(dic).__name__
)
)
for key in cur_keys:
if key not in keys:
keys.append(key)
return keys
def items(self):
"""Iterates over (key, subview) pairs contained in dictionaries
from *all* sources at this view. If the object for this view in
any source is not a dict, then a ConfigTypeError is raised.
"""
for key in self.keys():
yield key, self[key]
def values(self):
"""Iterates over all the subviews contained in dictionaries from
*all* sources at this view. If the object for this view in any
source is not a dict, then a ConfigTypeError is raised.
"""
for key in self.keys():
yield self[key]
# List/sequence emulation.
def all_contents(self):
"""Iterates over all subviews from collections at this view from
*all* sources. If the object for this view in any source is not
iterable, then a ConfigTypeError is raised. This method is
intended to be used when the view indicates a list; this method
will concatenate the contents of the list from all sources.
"""
for collection, _ in self.resolve():
try:
it = iter(collection)
except TypeError:
raise ConfigTypeError(
u'{0} must be an iterable, not {1}'.format(
self.name, type(collection).__name__
)
)
for value in it:
yield value
# Validation and conversion.
def flatten(self, redact=False):
"""Create a hierarchy of OrderedDicts containing the data from
this view, recursively reifying all views to get their
represented values.
If `redact` is set, then sensitive values are replaced with
the string "REDACTED".
"""
od = OrderedDict()
for key, view in self.items():
if redact and view.redact:
od[key] = REDACTED_TOMBSTONE
else:
try:
od[key] = view.flatten(redact=redact)
except ConfigTypeError:
od[key] = view.get()
return od
def get(self, template=None):
"""Retrieve the value for this view according to the template.
The `template` against which the values are checked can be
anything convertible to a `Template` using `as_template`. This
means you can pass in a default integer or string value, for
example, or a type to just check that something matches the type
you expect.
May raise a `ConfigValueError` (or its subclass,
`ConfigTypeError`) or a `NotFoundError` when the configuration
doesn't satisfy the template.
"""
return as_template(template).value(self, template)
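# Template shorthand sketch (key names hypothetical):
#
#     config['workers'].get(int)  # type check only
#     config['workers'].get(4)    # int check with a default of 4
#     config['mode'].get()        # no validation; first value wins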
# Old validation methods (deprecated).
def as_filename(self):
return self.get(Filename())
def as_choice(self, choices):
return self.get(Choice(choices))
def as_number(self):
return self.get(Number())
def as_str_seq(self):
return self.get(StrSeq())
# Redaction.
@property
def redact(self):
"""Whether the view contains sensitive information and should be
redacted from output.
"""
return () in self.get_redactions()
@redact.setter
def redact(self, flag):
self.set_redaction((), flag)
def set_redaction(self, path, flag):
"""Add or remove a redaction for a key path, which should be an
iterable of keys.
"""
raise NotImplementedError()
def get_redactions(self):
"""Get the set of currently-redacted sub-key-paths at this view.
"""
raise NotImplementedError()
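# Redaction sketch (key hypothetical): marking a subview sensitive makes
# flatten(redact=True) emit the REDACTED tombstone in its place.
#
#     config['api_key'].redact = True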
class RootView(ConfigView):
"""The base of a view hierarchy. This view keeps track of the
sources that may be accessed by subviews.
"""
def __init__(self, sources):
"""Create a configuration hierarchy for a list of sources. At
least one source must be provided. The first source in the list
has the highest priority.
"""
self.sources = list(sources)
self.name = ROOT_NAME
self.redactions = set()
def add(self, obj):
self.sources.append(ConfigSource.of(obj))
def set(self, value):
self.sources.insert(0, ConfigSource.of(value))
def resolve(self):
return ((dict(s), s) for s in self.sources)
def clear(self):
"""Remove all sources (and redactions) from this
configuration.
"""
del self.sources[:]
self.redactions.clear()
def root(self):
return self
def set_redaction(self, path, flag):
if flag:
self.redactions.add(path)
elif path in self.redactions:
self.redactions.remove(path)
def get_redactions(self):
return self.redactions
class Subview(ConfigView):
"""A subview accessed via a subscript of a parent view."""
def __init__(self, parent, key):
"""Make a subview of a parent view for a given subscript key.
"""
self.parent = parent
self.key = key
# Choose a human-readable name for this view.
if isinstance(self.parent, RootView):
self.name = ''
else:
self.name = self.parent.name
if not isinstance(self.key, int):
self.name += '.'
if isinstance(self.key, int):
self.name += u'#{0}'.format(self.key)
elif isinstance(self.key, BASESTRING):
if isinstance(self.key, bytes):
self.name += self.key.decode('utf8')
else:
self.name += self.key
else:
self.name += repr(self.key)
def resolve(self):
for collection, source in self.parent.resolve():
try:
value = collection[self.key]
except IndexError:
# List index out of bounds.
continue
except KeyError:
# Dict key does not exist.
continue
except TypeError:
# Not subscriptable.
raise ConfigTypeError(
u"{0} must be a collection, not {1}".format(
self.parent.name, type(collection).__name__
)
)
yield value, source
def set(self, value):
self.parent.set({self.key: value})
def add(self, value):
self.parent.add({self.key: value})
def root(self):
return self.parent.root()
def set_redaction(self, path, flag):
self.parent.set_redaction((self.key,) + path, flag)
def get_redactions(self):
return (kp[1:] for kp in self.parent.get_redactions()
if kp and kp[0] == self.key)
# Config file paths, including platform-specific paths and in-package
# defaults.
# Based on get_root_path from Flask by Armin Ronacher.
def _package_path(name):
"""Returns the path to the package containing the named module or
None if the path could not be identified (e.g., if
``name == "__main__"``).
"""
loader = pkgutil.get_loader(name)
if loader is None or name == '__main__':
return None
if hasattr(loader, 'get_filename'):
filepath = loader.get_filename(name)
else:
# Fall back to importing the specified module.
__import__(name)
filepath = sys.modules[name].__file__
return os.path.dirname(os.path.abspath(filepath))
def config_dirs():
"""Return a platform-specific list of candidates for user
configuration directories on the system.
The candidates are in order of priority, from highest to lowest. The
last element is the "fallback" location to be used when no
higher-priority config file exists.
"""
paths = []
if platform.system() == 'Darwin':
paths.append(MAC_DIR)
paths.append(UNIX_DIR_FALLBACK)
if UNIX_DIR_VAR in os.environ:
paths.append(os.environ[UNIX_DIR_VAR])
elif platform.system() == 'Windows':
paths.append(WINDOWS_DIR_FALLBACK)
if WINDOWS_DIR_VAR in os.environ:
paths.append(os.environ[WINDOWS_DIR_VAR])
else:
# Assume Unix.
paths.append(UNIX_DIR_FALLBACK)
if UNIX_DIR_VAR in os.environ:
paths.append(os.environ[UNIX_DIR_VAR])
# Expand and deduplicate paths.
out = []
for path in paths:
path = os.path.abspath(os.path.expanduser(path))
if path not in out:
out.append(path)
return out
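# On a typical Linux host with XDG_CONFIG_HOME unset, this yields a
# single candidate, the expanded UNIX_DIR_FALLBACK (home path hypothetical):
#
#     config_dirs()  # e.g. ['/home/user/.config']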
# YAML loading.
class Loader(yaml.SafeLoader):
"""A customized YAML loader. This loader deviates from the official
YAML spec in a few convenient ways:
- All strings are Unicode objects.
- All maps are OrderedDicts.
- Strings can begin with % without quotation.
"""
# All strings should be Unicode objects, regardless of contents.
def _construct_unicode(self, node):
return self.construct_scalar(node)
# Use ordered dictionaries for every YAML map.
# From https://gist.github.com/844388
def construct_yaml_map(self, node):
data = OrderedDict()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_mapping(self, node, deep=False):
if isinstance(node, yaml.MappingNode):
self.flatten_mapping(node)
else:
raise yaml.constructor.ConstructorError(
None, None,
u'expected a mapping node, but found %s' % node.id,
node.start_mark
)
mapping = OrderedDict()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError as exc:
raise yaml.constructor.ConstructorError(
u'while constructing a mapping',
node.start_mark, 'found unacceptable key (%s)' % exc,
key_node.start_mark
)
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
return mapping
# Allow bare strings to begin with %. Directives are still detected.
def check_plain(self):
plain = super(Loader, self).check_plain()
return plain or self.peek() == '%'
Loader.add_constructor('tag:yaml.org,2002:str', Loader._construct_unicode)
Loader.add_constructor('tag:yaml.org,2002:map', Loader.construct_yaml_map)
Loader.add_constructor('tag:yaml.org,2002:omap', Loader.construct_yaml_map)
def load_yaml(filename):
"""Read a YAML document from a file. If the file cannot be read or
parsed, a ConfigReadError is raised.
"""
try:
with open(filename, 'r') as f:
return yaml.load(f, Loader=Loader)
except (IOError, yaml.error.YAMLError) as exc:
raise ConfigReadError(filename, exc)
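# Minimal sketch (path hypothetical): IO errors and YAML parse errors
# both surface as ConfigReadError.
#
#     data = load_yaml('/etc/myapp/config.yaml')  # OrderedDict (or None)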
# YAML dumping.
class Dumper(yaml.SafeDumper):
"""A PyYAML Dumper that represents OrderedDicts as ordinary mappings
(in order, of course).
"""
# From http://pyyaml.org/attachment/ticket/161/use_ordered_dict.py
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = yaml.MappingNode(tag, value, flow_style=flow_style)
if self.alias_key is not None:
self.represented_objects[self.alias_key] = node
best_style = False
if hasattr(mapping, 'items'):
mapping = list(mapping.items())
for item_key, item_value in mapping:
node_key = self.represent_data(item_key)
node_value = self.represent_data(item_value)
if not (isinstance(node_key, yaml.ScalarNode) and
not node_key.style):
best_style = False
if not (isinstance(node_value, yaml.ScalarNode) and
not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
if self.default_flow_style is not None:
node.flow_style = self.default_flow_style
else:
node.flow_style = best_style
return node
def represent_list(self, data):
"""If a list has less than 4 items, represent it in inline style
(i.e. comma separated, within square brackets).
"""
node = super(Dumper, self).represent_list(data)
length = len(data)
if self.default_flow_style is None and length < 4:
node.flow_style = True
elif self.default_flow_style is None:
node.flow_style = False
return node
def represent_bool(self, data):
"""Represent bool as 'yes' or 'no' instead of 'true' or 'false'.
"""
if data:
value = u'yes'
else:
value = u'no'
return self.represent_scalar('tag:yaml.org,2002:bool', value)
def represent_none(self, data):
"""Represent a None value with nothing instead of 'none'.
"""
return self.represent_scalar('tag:yaml.org,2002:null', '')
Dumper.add_representer(OrderedDict, Dumper.represent_dict)
Dumper.add_representer(bool, Dumper.represent_bool)
Dumper.add_representer(type(None), Dumper.represent_none)
Dumper.add_representer(list, Dumper.represent_list)
def restore_yaml_comments(data, default_data):
"""Scan default_data for comments (we include empty lines in our
definition of comments) and place them before the same keys in data.
Only works with comments that are on lines of their own, i.e.
not inline next to a YAML mapping.
"""
comment_map = dict()
default_lines = iter(default_data.splitlines())
for line in default_lines:
if not line:
comment = "\n"
elif line.startswith("#"):
comment = "{0}\n".format(line)
else:
continue
while True:
line = next(default_lines)
if line and not line.startswith("#"):
break
comment += "{0}\n".format(line)
key = line.split(':')[0].strip()
comment_map[key] = comment
out_lines = iter(data.splitlines())
out_data = ""
for line in out_lines:
key = line.split(':')[0].strip()
if key in comment_map:
out_data += comment_map[key]
out_data += "{0}\n".format(line)
return out_data
# Main interface.
class Configuration(RootView):
def __init__(self, appname, modname=None, read=True):
"""Create a configuration object by reading the
automatically-discovered config files for the application for a
given name. If `modname` is specified, it should be the import
name of a module whose package will be searched for a default
config file. (Otherwise, no defaults are used.) Pass `False` for
`read` to disable automatic reading of all discovered
configuration files. Use this when creating a configuration
object at module load time and then call the `read` method
later.
"""
super(Configuration, self).__init__([])
self.appname = appname
self.modname = modname
self._env_var = '{0}DIR'.format(self.appname.upper())
if read:
self.read()
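# Typical construction (app and module names hypothetical): reading
# happens immediately unless read=False is passed.
#
#     config = Configuration('myapp', __name__)
#     config['verbose'].get(bool)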
def user_config_path(self):
"""Points to the location of the user configuration.
The file may not exist.
"""
return os.path.join(self.config_dir(), CONFIG_FILENAME)
def _add_user_source(self):
"""Add the configuration options from the YAML file in the
user's configuration directory (given by `config_dir`) if it
exists.
"""
filename = self.user_config_path()
if os.path.isfile(filename):
self.add(ConfigSource(load_yaml(filename) or {}, filename))
def _add_default_source(self):
"""Add the package's default configuration settings. This looks
for a YAML file located inside the package for the module
`modname` if it was given.
"""
if self.modname:
pkg_path = _package_path(self.modname)
if pkg_path:
filename = os.path.join(pkg_path, DEFAULT_FILENAME)
if os.path.isfile(filename):
self.add(ConfigSource(load_yaml(filename), filename, True))
def read(self, user=True, defaults=True):
"""Find and read the files for this configuration and set them
as the sources for this configuration. To disable either
discovered user configuration files or the in-package defaults,
set `user` or `defaults` to `False`.
"""
if user:
self._add_user_source()
if defaults:
self._add_default_source()
def config_dir(self):
"""Get the path to the user configuration directory. The
directory is guaranteed to exist as a postcondition (one is
created if none exists).
If the application's ``...DIR`` environment variable is set, it
is used as the configuration directory. Otherwise,
platform-specific standard configuration locations are searched
for a ``config.yaml`` file. If no configuration file is found, a
fallback path is used.
"""
# If environment variable is set, use it.
if self._env_var in os.environ:
appdir = os.environ[self._env_var]
appdir = os.path.abspath(os.path.expanduser(appdir))
if os.path.isfile(appdir):
raise ConfigError(u'{0} must be a directory'.format(
self._env_var
))
else:
# Search platform-specific locations. If no config file is
# found, fall back to the final directory in the list.
for confdir in config_dirs():
appdir = os.path.join(confdir, self.appname)
if os.path.isfile(os.path.join(appdir, CONFIG_FILENAME)):
break
# Ensure that the directory exists.
if not os.path.isdir(appdir):
os.makedirs(appdir)
return appdir
def set_file(self, filename):
"""Parses the file as YAML and inserts it into the configuration
sources with highest priority.
"""
filename = os.path.abspath(filename)
self.set(ConfigSource(load_yaml(filename), filename))
def dump(self, full=True, redact=False):
"""Dump the Configuration object to a YAML file.
The order of the keys is determined from the default
configuration file. All keys not in the default configuration
will be appended to the end of the file.
The YAML representation is returned as a string; no file is written.
:param full: Dump settings that don't differ from the defaults
as well
:param redact: Remove sensitive information (views with the `redact`
flag set) from the output
"""
if full:
out_dict = self.flatten(redact=redact)
else:
# Exclude defaults when flattening.
sources = [s for s in self.sources if not s.default]
temp_root = RootView(sources)
temp_root.redactions = self.redactions
out_dict = temp_root.flatten(redact=redact)
yaml_out = yaml.dump(out_dict, Dumper=Dumper,
default_flow_style=None, indent=4,
width=1000)
# Restore comments to the YAML text.
default_source = None
for source in self.sources:
if source.default:
default_source = source
break
if default_source and default_source.filename:
with open(default_source.filename, 'r') as fp:
default_data = fp.read()
yaml_out = restore_yaml_comments(yaml_out, default_data)
return yaml_out
class LazyConfig(Configuration):
"""A Configuration at reads files on demand when it is first
accessed. This is appropriate for using as a global config object at
the module level.
"""
def __init__(self, appname, modname=None):
super(LazyConfig, self).__init__(appname, modname, False)
self._materialized = False # Have we read the files yet?
self._lazy_prefix = [] # Pre-materialization calls to set().
self._lazy_suffix = [] # Calls to add().
def read(self, user=True, defaults=True):
self._materialized = True
super(LazyConfig, self).read(user, defaults)
def resolve(self):
if not self._materialized:
# Read files and unspool buffers.
self.read()
self.sources += self._lazy_suffix
self.sources[:0] = self._lazy_prefix
return super(LazyConfig, self).resolve()
def add(self, value):
super(LazyConfig, self).add(value)
if not self._materialized:
# Buffer additions to end.
self._lazy_suffix += self.sources
del self.sources[:]
def set(self, value):
super(LazyConfig, self).set(value)
if not self._materialized:
# Buffer additions to beginning.
self._lazy_prefix[:0] = self.sources
del self.sources[:]
def clear(self):
"""Remove all sources from this configuration."""
super(LazyConfig, self).clear()
self._lazy_suffix = []
self._lazy_prefix = []
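# A rough usage sketch for LazyConfig (hypothetical app name and key):
# file reads are deferred until the first value access, and set()/add()
# calls made before that point are buffered and replayed around the
# file sources:
#
#     config = LazyConfig('myapp')
#     config.set({'verbose': True})   # buffered; no files read yet
#     config['verbose'].get()         # triggers read(), then resolves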
# "Validated" configuration views: experimental!
REQUIRED = object()
"""A sentinel indicating that there is no default value and an exception
should be raised when the value is missing.
"""
class Template(object):
"""A value template for configuration fields.
The template works like a type and instructs Confit about how to
interpret a deserialized YAML value. This includes type conversions,
providing a default value, and validating for errors. For example, a
filepath type might expand tildes and check that the file exists.
"""
def __init__(self, default=REQUIRED):
"""Create a template with a given default value.
If `default` is the sentinel `REQUIRED` (as it is by default),
then an error will be raised when a value is missing. Otherwise,
missing values will instead return `default`.
"""
self.default = default
def __call__(self, view):
"""Invoking a template on a view gets the view's value according
to the template.
"""
return self.value(view, self)
def value(self, view, template=None):
"""Get the value for a `ConfigView`.
May raise a `NotFoundError` if the value is missing (and the
template requires it) or a `ConfigValueError` for invalid values.
"""
if view.exists():
value, _ = view.first()
return self.convert(value, view)
elif self.default is REQUIRED:
# Missing required value. This is an error.
raise NotFoundError(u"{0} not found".format(view.name))
else:
# Missing value, but not required.
return self.default
def convert(self, value, view):
"""Convert the YAML-deserialized value to a value of the desired
type.
Subclasses should override this to provide useful conversions.
May raise a `ConfigValueError` when the configuration is wrong.
"""
# Default implementation does no conversion.
return value
def fail(self, message, view, type_error=False):
"""Raise an exception indicating that a value cannot be
accepted.
`type_error` indicates whether the error is due to a type
mismatch rather than a malformed value. In this case, a more
specific exception is raised.
"""
exc_class = ConfigTypeError if type_error else ConfigValueError
raise exc_class(
u'{0}: {1}'.format(view.name, message)
)
def __repr__(self):
return '{0}({1})'.format(
type(self).__name__,
'' if self.default is REQUIRED else repr(self.default),
)
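# A hedged sketch of a custom template (the `Port` class is hypothetical,
# not part of this module): subclasses override `convert` and call
# `fail` on bad input.
#
#     class Port(Template):
#         def convert(self, value, view):
#             if isinstance(value, int) and 0 < value < 65536:
#                 return value
#             self.fail(u'must be a TCP port number', view, True)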
class Integer(Template):
"""An integer configuration value template.
"""
def convert(self, value, view):
"""Check that the value is an integer. Floats are rounded.
"""
if isinstance(value, int):
return value
elif isinstance(value, float):
return int(value)
else:
self.fail(u'must be a number', view, True)
class Number(Template):
"""A numeric type: either an integer or a floating-point number.
"""
def convert(self, value, view):
"""Check that the value is an int or a float.
"""
if isinstance(value, NUMERIC_TYPES):
return value
else:
self.fail(
u'must be numeric, not {0}'.format(type(value).__name__),
view,
True
)
class MappingTemplate(Template):
"""A template that uses a dictionary to specify other types for the
values for a set of keys and produce a validated `AttrDict`.
"""
def __init__(self, mapping):
"""Create a template according to a dict (mapping). The
mapping's values should themselves either be Types or
convertible to Types.
"""
subtemplates = {}
for key, typ in mapping.items():
subtemplates[key] = as_template(typ)
self.subtemplates = subtemplates
def value(self, view, template=None):
"""Get a dict with the same keys as the template and values
validated according to the value types.
"""
out = AttrDict()
for key, typ in self.subtemplates.items():
out[key] = typ.value(view[key], self)
return out
def __repr__(self):
return 'MappingTemplate({0})'.format(repr(self.subtemplates))
class String(Template):
"""A string configuration value template.
"""
def __init__(self, default=REQUIRED, pattern=None):
"""Create a template with the added optional `pattern` argument,
a regular expression string that the value should match.
"""
super(String, self).__init__(default)
self.pattern = pattern
if pattern:
self.regex = re.compile(pattern)
def __repr__(self):
args = []
if self.default is not REQUIRED:
args.append(repr(self.default))
if self.pattern is not None:
args.append('pattern=' + repr(self.pattern))
return 'String({0})'.format(', '.join(args))
def convert(self, value, view):
"""Check that the value is a string and matches the pattern.
"""
if isinstance(value, BASESTRING):
if self.pattern and not self.regex.match(value):
self.fail(
u"must match the pattern {0}".format(self.pattern),
view
)
return value
else:
self.fail(u'must be a string', view, True)
class Choice(Template):
"""A template that permits values from a sequence of choices.
"""
def __init__(self, choices):
"""Create a template that validates any of the values from the
iterable `choices`.
If `choices` is a map, then the corresponding value is emitted.
Otherwise, the value itself is emitted.
"""
self.choices = choices
def convert(self, value, view):
"""Ensure that the value is among the choices (and remap if the
choices are a mapping).
"""
if value not in self.choices:
self.fail(
u'must be one of {0}, not {1}'.format(
repr(list(self.choices)), repr(value)
),
view
)
if isinstance(self.choices, collections.Mapping):
return self.choices[value]
else:
return value
def __repr__(self):
return 'Choice({0!r})'.format(self.choices)
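# For example (hypothetical key): a mapping of choices both validates
# and remaps the configured value.
#
#     config['log_level'].get(Choice({'debug': 10, 'info': 20}))
#     # accepts only 'debug' or 'info' and returns 10 or 20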
class OneOf(Template):
"""A template that permits values complying to one of the given templates.
"""
def __init__(self, allowed, default=REQUIRED):
super(OneOf, self).__init__(default)
self.allowed = list(allowed)
def __repr__(self):
args = []
if self.allowed is not None:
args.append('allowed=' + repr(self.allowed))
if self.default is not REQUIRED:
args.append(repr(self.default))
return 'OneOf({0})'.format(', '.join(args))
def value(self, view, template):
self.template = template
return super(OneOf, self).value(view, template)
def convert(self, value, view):
"""Ensure that the value follows at least one template.
"""
is_mapping = isinstance(self.template, MappingTemplate)
for candidate in self.allowed:
try:
if is_mapping:
if isinstance(candidate, Filename) and \
candidate.relative_to:
next_template = candidate.template_with_relatives(
view,
self.template
)
next_template.subtemplates[view.key] = as_template(
candidate
)
else:
next_template = MappingTemplate({view.key: candidate})
return view.parent.get(next_template)[view.key]
else:
return view.get(candidate)
except ConfigTemplateError:
raise
except ConfigError:
pass
except ValueError as exc:
raise ConfigTemplateError(exc)
self.fail(
u'must be one of {0}, not {1}'.format(
repr(self.allowed), repr(value)
),
view
)
class StrSeq(Template):
"""A template for values that are lists of strings.
Validates both actual YAML string lists and single strings. Strings
can optionally be split on whitespace.
"""
def __init__(self, split=True):
"""Create a new template.
`split` indicates whether, when the underlying value is a single
string, it should be split on whitespace. Otherwise, the
resulting value is a list containing a single string.
"""
super(StrSeq, self).__init__()
self.split = split
def convert(self, value, view):
if isinstance(value, bytes):
value = value.decode('utf8', 'ignore')
if isinstance(value, STRING):
if self.split:
return value.split()
else:
return [value]
try:
value = list(value)
except TypeError:
self.fail(u'must be a whitespace-separated string or a list',
view, True)
def convert(x):
if isinstance(x, STRING):
return x
elif isinstance(x, bytes):
return x.decode('utf8', 'ignore')
else:
self.fail(u'must be a list of strings', view, True)
return list(map(convert, value))
class Filename(Template):
"""A template that validates strings as filenames.
Filenames are returned as absolute, tilde-free paths.
    Relative paths are resolved against the template's `cwd` argument
    when it is given; otherwise against the configuration directory
    (see the `config_dir` method) when the value comes from a file;
    and otherwise against the current working directory. This yields
    the expected behavior for paths given as command-line options.
"""
def __init__(self, default=REQUIRED, cwd=None, relative_to=None,
in_app_dir=False):
"""`relative_to` is the name of a sibling value that is
being validated at the same time.
`in_app_dir` indicates whether the path should be resolved
inside the application's config directory (even when the setting
does not come from a file).
"""
super(Filename, self).__init__(default)
self.cwd = cwd
self.relative_to = relative_to
self.in_app_dir = in_app_dir
def __repr__(self):
args = []
if self.default is not REQUIRED:
args.append(repr(self.default))
if self.cwd is not None:
args.append('cwd=' + repr(self.cwd))
if self.relative_to is not None:
args.append('relative_to=' + repr(self.relative_to))
if self.in_app_dir:
args.append('in_app_dir=True')
return 'Filename({0})'.format(', '.join(args))
def resolve_relative_to(self, view, template):
if not isinstance(template, (collections.Mapping, MappingTemplate)):
# disallow config.get(Filename(relative_to='foo'))
raise ConfigTemplateError(
u'relative_to may only be used when getting multiple values.'
)
elif self.relative_to == view.key:
raise ConfigTemplateError(
u'{0} is relative to itself'.format(view.name)
)
elif self.relative_to not in view.parent.keys():
# self.relative_to is not in the config
self.fail(
(
u'needs sibling value "{0}" to expand relative path'
).format(self.relative_to),
view
)
old_template = {}
old_template.update(template.subtemplates)
# save time by skipping MappingTemplate's init loop
next_template = MappingTemplate({})
next_relative = self.relative_to
# gather all the needed templates and nothing else
while next_relative is not None:
try:
# pop to avoid infinite loop because of recursive
# relative paths
rel_to_template = old_template.pop(next_relative)
except KeyError:
if next_relative in template.subtemplates:
# we encountered this config key previously
raise ConfigTemplateError((
u'{0} and {1} are recursively relative'
).format(view.name, self.relative_to))
else:
raise ConfigTemplateError((
                        u'missing template for {0}, needed to expand '
                        u'{1}\'s relative path'
).format(self.relative_to, view.name))
next_template.subtemplates[next_relative] = rel_to_template
next_relative = rel_to_template.relative_to
return view.parent.get(next_template)[self.relative_to]
def value(self, view, template=None):
path, source = view.first()
if not isinstance(path, BASESTRING):
self.fail(
u'must be a filename, not {0}'.format(type(path).__name__),
view,
True
)
path = os.path.expanduser(STRING(path))
if not os.path.isabs(path):
if self.cwd is not None:
# relative to the template's argument
path = os.path.join(self.cwd, path)
elif self.relative_to is not None:
path = os.path.join(
self.resolve_relative_to(view, template),
path,
)
elif source.filename or self.in_app_dir:
# From defaults: relative to the app's directory.
path = os.path.join(view.root().config_dir(), path)
return os.path.abspath(path)
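# A hedged example of `relative_to` (hypothetical keys): sibling values
# are validated together so one path can anchor another.
#
#     config.get({'base': Filename(), 'log': Filename(relative_to='base')})
#     # 'log' is joined onto the validated 'base' path before being
#     # made absolute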
class TypeTemplate(Template):
"""A simple template that checks that a value is an instance of a
desired Python type.
"""
def __init__(self, typ, default=REQUIRED):
"""Create a template that checks that the value is an instance
of `typ`.
"""
super(TypeTemplate, self).__init__(default)
self.typ = typ
def convert(self, value, view):
if not isinstance(value, self.typ):
self.fail(
u'must be a {0}, not {1}'.format(
self.typ.__name__,
type(value).__name__,
),
view,
True
)
return value
class AttrDict(dict):
"""A `dict` subclass that can be accessed via attributes (dot
notation) for convenience.
"""
def __getattr__(self, key):
if key in self:
return self[key]
else:
raise AttributeError(key)
def as_template(value):
"""Convert a simple "shorthand" Python value to a `Template`.
"""
if isinstance(value, Template):
# If it's already a Template, pass it through.
return value
elif isinstance(value, collections.Mapping):
# Dictionaries work as templates.
return MappingTemplate(value)
elif value is int:
return Integer()
elif isinstance(value, int):
return Integer(value)
elif isinstance(value, type) and issubclass(value, BASESTRING):
return String()
elif isinstance(value, BASESTRING):
return String(value)
elif isinstance(value, set):
# convert to list to avoid hash related problems
return Choice(list(value))
elif isinstance(value, list):
return OneOf(value)
elif value is float:
return Number()
elif value is None:
return Template()
elif value is dict:
return TypeTemplate(collections.Mapping)
elif value is list:
return TypeTemplate(collections.Sequence)
elif isinstance(value, type):
return TypeTemplate(value)
else:
raise ValueError(u'cannot convert to template: {0!r}'.format(value))
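# A minimal usage sketch (hypothetical app name and keys): as_template()
# lets plain Python values stand in for templates when reading views.
#
#     config = Configuration('myapp')
#     host = config['host'].get(str)                # String()
#     port = config['port'].get(8080)               # Integer(default=8080)
#     mode = config['mode'].get({'fast', 'slow'})   # Choice([...])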
|
Seinlin/nv7fire-external-protobuf
|
refs/heads/master
|
gtest/test/gtest_help_test.py
|
68
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the --help flag of Google C++ Testing Framework.
SYNOPSIS
gtest_help_test.py --gtest_build_dir=BUILD/DIR
# where BUILD/DIR contains the built gtest_help_test_ file.
gtest_help_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
FLAG_PREFIX = '--gtest_'
CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
# The help message must match this regex.
HELP_REGEX = re.compile(
FLAG_PREFIX + r'list_tests.*' +
FLAG_PREFIX + r'filter=.*' +
FLAG_PREFIX + r'also_run_disabled_tests.*' +
FLAG_PREFIX + r'repeat=.*' +
FLAG_PREFIX + r'shuffle.*' +
FLAG_PREFIX + r'random_seed=.*' +
FLAG_PREFIX + r'color=.*' +
FLAG_PREFIX + r'print_time.*' +
FLAG_PREFIX + r'output=.*' +
FLAG_PREFIX + r'break_on_failure.*' +
FLAG_PREFIX + r'throw_on_failure.*',
re.DOTALL)
def RunWithFlag(flag):
"""Runs gtest_help_test_ with the given flag.
Returns:
the exit code and the text output as a tuple.
Args:
flag: the command-line flag to pass to gtest_help_test_, or None.
"""
if flag is None:
command = [PROGRAM_PATH]
else:
command = [PROGRAM_PATH, flag]
child = gtest_test_utils.Subprocess(command)
return child.exit_code, child.output
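# For example, RunWithFlag('--help') is expected to return exit code 0
# with output matching HELP_REGEX, while RunWithFlag(None) runs the
# (intentionally failing) tests instead.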
class GTestHelpTest(gtest_test_utils.TestCase):
"""Tests the --help flag and its equivalent forms."""
def TestHelpFlag(self, flag):
"""Verifies that the right message is printed and the tests are
skipped when the given flag is specified."""
exit_code, output = RunWithFlag(flag)
self.assertEquals(0, exit_code)
self.assert_(HELP_REGEX.search(output), output)
if IS_WINDOWS:
self.assert_(CATCH_EXCEPTIONS_FLAG in output, output)
self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)
else:
self.assert_(CATCH_EXCEPTIONS_FLAG not in output, output)
self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
def testPrintsHelpWithFullFlag(self):
self.TestHelpFlag('--help')
def testPrintsHelpWithShortFlag(self):
self.TestHelpFlag('-h')
def testPrintsHelpWithQuestionFlag(self):
self.TestHelpFlag('-?')
def testPrintsHelpWithWindowsStyleQuestionFlag(self):
self.TestHelpFlag('/?')
def testRunsTestsWithoutHelpFlag(self):
"""Verifies that when no help flag is specified, the tests are run
and the help message is not printed."""
exit_code, output = RunWithFlag(None)
self.assert_(exit_code != 0)
self.assert_(not HELP_REGEX.search(output), output)
if __name__ == '__main__':
gtest_test_utils.Main()
|
Yong-Lee/django
|
refs/heads/master
|
tests/i18n/urls.py
|
205
|
from __future__ import unicode_literals
from django.conf.urls.i18n import i18n_patterns
from django.http import HttpResponse, StreamingHttpResponse
from django.test import ignore_warnings
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.translation import ugettext_lazy as _
# test deprecated version of i18n_patterns() function (with prefix). Remove it
# and convert to list of urls() in Django 1.10
i18n_patterns = ignore_warnings(category=RemovedInDjango110Warning)(i18n_patterns)
urlpatterns = i18n_patterns('',
(r'^simple/$', lambda r: HttpResponse()),
(r'^streaming/$', lambda r: StreamingHttpResponse([_("Yes"), "/", _("No")])),
)
|
lbybee/food_truck_project
|
refs/heads/master
|
food_truck_scraper.py
|
1
|
import requests
from BeautifulSoup import BeautifulSoup
def getPage(date):
"""gets all the food trucks, locations and dates for a page"""
url = "http://chicagofoodtruckfinder.com/weekly-schedule?date="
truck_data_list = []
soup = BeautifulSoup(requests.post(url + date).text)
table = soup.find("table").findAll("tr")
days = [d.text for d in table[0].findAll("th")[1:]]
print days
for location in table[1:]:
cols = location.findAll("td")
loc_name = cols[0].text
for i, c in enumerate(cols[1:]):
print i
print c
trucks = c.findAll("img")
if trucks is not None:
for t in trucks:
time_name = t["title"]
am_spt = time_name.split("AM")
pm_spt = time_name.split("PM")
                    # the truck name is whatever follows the last
                    # AM/PM time marker
                    if len(pm_spt) > 1:
                        name = pm_spt[-1]
                    elif len(am_spt) > 1:
                        name = am_spt[-1]
                    else:
                        name = time_name
time = time_name.replace(name, "")
truck_data_list.append({"name": name,
"time": time,
"week_day": days[i],
"date": date,
"location": loc_name})
return truck_data_list
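# a single parsed record looks roughly like this (values are
# illustrative, not real data):
#     {"name": " Big Star Truck", "time": "10AM-2PM", "week_day": "Mon",
#      "date": "20131214", "location": "600 W Chicago"}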
test_date = "20131214"
test = getPage(test_date)
|
laonawuli/addrest
|
refs/heads/master
|
web2py/gluon/contrib/qdb.py
|
43
|
#!/usr/bin/env python
# coding:utf-8
"Queues(Pipe)-based independent remote client-server Python Debugger"
__author__ = "Mariano Reingart (reingart@gmail.com)"
__copyright__ = "Copyright (C) 2011 Mariano Reingart"
__license__ = "LGPL 3.0"
__version__ = "1.01b"
# remote debugger queue-based (jsonrpc-like interface):
# - bidirectional communication (request - response calls in both ways)
# - request with id == null is a notification (do not send a response)
# - request with a value for id is a normal call, wait response
# based on idle, inspired by pythonwin implementation, taken many code from pdb
import bdb
import inspect
import linecache
import os
import sys
import traceback
import cmd
import pydoc
import threading
class Qdb(bdb.Bdb):
"Qdb Debugger Backend"
def __init__(self, pipe, redirect_stdio=True, allow_interruptions=False,
skip=[__name__]):
kwargs = {}
if sys.version_info > (2, 7):
kwargs['skip'] = skip
bdb.Bdb.__init__(self, **kwargs)
self.frame = None
self.i = 1 # sequential RPC call id
self.waiting = False
self.pipe = pipe # for communication
self._wait_for_mainpyfile = False
self._wait_for_breakpoint = False
self.mainpyfile = ""
        self._lineno = None  # last listed line number
# replace system standard input and output (send them thru the pipe)
if redirect_stdio:
sys.stdin = self
sys.stdout = self
sys.stderr = self
if allow_interruptions:
# fake breakpoint to prevent removing trace_dispatch on set_continue
self.breaks[None] = []
self.allow_interruptions = allow_interruptions
self.burst = 0 # do not send notifications ("burst" mode)
self.params = {} # optional parameters for interaction
def pull_actions(self):
# receive a remote procedure call from the frontend:
# returns True if action processed
# None when 'run' notification is received (see 'startup')
request = self.pipe.recv()
if request.get("method") == 'run':
return None
response = {'version': '1.1', 'id': request.get('id'),
'result': None,
'error': None}
try:
# dispatch message (JSON RPC like)
method = getattr(self, request['method'])
response['result'] = method.__call__(*request['args'],
**request.get('kwargs', {}))
except Exception, e:
response['error'] = {'code': 0, 'message': str(e)}
# send the result for normal method calls, not for notifications
if request.get('id'):
self.pipe.send(response)
return True
# Override Bdb methods
def trace_dispatch(self, frame, event, arg):
# check for non-interaction rpc (set_breakpoint, interrupt)
while self.allow_interruptions and self.pipe.poll():
self.pull_actions()
# process the frame (see Bdb.trace_dispatch)
if self.quitting:
return # None
if event == 'line':
return self.dispatch_line(frame)
if event == 'call':
return self.dispatch_call(frame, arg)
if event == 'return':
return self.dispatch_return(frame, arg)
if event == 'exception':
return self.dispatch_exception(frame, arg)
return self.trace_dispatch
def user_call(self, frame, argument_list):
"""This method is called when there is the remote possibility
that we ever need to stop in this function."""
if self._wait_for_mainpyfile or self._wait_for_breakpoint:
return
if self.stop_here(frame):
self.interaction(frame, None)
def user_line(self, frame):
"""This function is called when we stop or break at this line."""
if self._wait_for_mainpyfile:
if (not self.canonic(frame.f_code.co_filename).startswith(self.mainpyfile)
or frame.f_lineno <= 0):
return
self._wait_for_mainpyfile = 0
if self._wait_for_breakpoint:
if not self.break_here(frame):
return
self._wait_for_breakpoint = 0
self.interaction(frame)
def user_exception(self, frame, info):
"""This function is called if an exception occurs,
but only if we are to stop at or just below this level."""
if self._wait_for_mainpyfile or self._wait_for_breakpoint:
return
extype, exvalue, trace = info
        # pre-process stack trace as it isn't picklable (cannot be sent as-is)
msg = ''.join(traceback.format_exception(extype, exvalue, trace))
trace = traceback.extract_tb(trace)
title = traceback.format_exception_only(extype, exvalue)[0]
# send an Exception notification
msg = {'method': 'exception',
'args': (title, extype.__name__, exvalue, trace, msg),
'id': None}
self.pipe.send(msg)
self.interaction(frame, info)
def run(self, code, interp=None, *args, **kwargs):
try:
return bdb.Bdb.run(self, code, *args, **kwargs)
finally:
pass
def runcall(self, function, interp=None, *args, **kwargs):
try:
self.interp = interp
return bdb.Bdb.runcall(self, function, *args, **kwargs)
finally:
pass
def _runscript(self, filename):
# The script has to run in __main__ namespace (clear it)
import __main__
import imp
__main__.__dict__.clear()
__main__.__dict__.update({"__name__": "__main__",
"__file__": filename,
"__builtins__": __builtins__,
"imp": imp, # need for run
})
# avoid stopping before we reach the main script
self._wait_for_mainpyfile = 1
self.mainpyfile = self.canonic(filename)
self._user_requested_quit = 0
statement = 'imp.load_source("__main__", "%s")' % filename
# notify and wait frontend to set initial params and breakpoints
self.pipe.send({'method': 'startup', 'args': (__version__, )})
while self.pull_actions() is not None:
pass
self.run(statement)
# General interaction function
def interaction(self, frame, info=None):
        # cache frame locals so that modifications are not overwritten
self.frame_locals = frame and frame.f_locals or {}
# extract current filename and line number
code, lineno = frame.f_code, frame.f_lineno
filename = code.co_filename
basename = os.path.basename(filename)
message = "%s:%s" % (basename, lineno)
if code.co_name != "?":
message = "%s: %s()" % (message, code.co_name)
# wait user events
self.waiting = True
self.frame = frame
try:
while self.waiting:
# sync_source_line()
if frame and filename[:1] + filename[-1:] != "<>" and os.path.exists(filename):
line = linecache.getline(filename, self.frame.f_lineno,
self.frame.f_globals)
else:
line = ""
# send the notification (debug event) - DOESN'T WAIT RESPONSE
self.burst -= 1
if self.burst < 0:
kwargs = {}
if self.params.get('call_stack'):
kwargs['call_stack'] = self.do_where()
if self.params.get('environment'):
kwargs['environment'] = self.do_environment()
self.pipe.send({'method': 'interaction', 'id': None,
'args': (filename, self.frame.f_lineno, line),
'kwargs': kwargs})
self.pull_actions()
finally:
self.waiting = False
self.frame = None
def do_debug(self, mainpyfile=None, wait_breakpoint=1):
self.reset()
if not wait_breakpoint or mainpyfile:
self._wait_for_mainpyfile = 1
if not mainpyfile:
frame = sys._getframe().f_back
mainpyfile = frame.f_code.co_filename
self.mainpyfile = self.canonic(mainpyfile)
self._wait_for_breakpoint = wait_breakpoint
sys.settrace(self.trace_dispatch)
def set_trace(self, frame=None):
        # start debugger interaction immediately
if frame is None:
frame = sys._getframe().f_back
self._wait_for_mainpyfile = frame.f_code.co_filename
self._wait_for_breakpoint = 0
bdb.Bdb.set_trace(self, frame)
# Command definitions, called by interaction()
def do_continue(self):
self.set_continue()
self.waiting = False
def do_step(self):
self.set_step()
self.waiting = False
def do_return(self):
self.set_return(self.frame)
self.waiting = False
def do_next(self):
self.set_next(self.frame)
self.waiting = False
def interrupt(self):
self.set_step()
def do_quit(self):
self.set_quit()
self.waiting = False
def do_jump(self, lineno):
arg = int(lineno)
try:
self.frame.f_lineno = arg
return arg
except ValueError, e:
print '*** Jump failed:', e
return False
def do_list(self, arg):
last = None
if arg:
if isinstance(arg, tuple):
first, last = arg
else:
first = arg
elif not self._lineno:
first = max(1, self.frame.f_lineno - 5)
else:
first = self._lineno + 1
if last is None:
last = first + 10
filename = self.frame.f_code.co_filename
breaklist = self.get_file_breaks(filename)
lines = []
for lineno in range(first, last + 1):
line = linecache.getline(filename, lineno,
self.frame.f_globals)
if not line:
                lines.append((filename, lineno, '', '', "<EOF>\n"))
break
else:
breakpoint = "B" if lineno in breaklist else ""
current = "->" if self.frame.f_lineno == lineno else ""
lines.append((filename, lineno, breakpoint, current, line))
self._lineno = lineno
return lines
def do_read(self, filename):
return open(filename, "Ur").read()
def do_set_breakpoint(self, filename, lineno, temporary=0, cond=None):
return self.set_break(filename, int(lineno), temporary, cond)
def do_list_breakpoint(self):
breaks = []
if self.breaks: # There's at least one
for bp in bdb.Breakpoint.bpbynumber:
if bp:
breaks.append((bp.number, bp.file, bp.line,
bp.temporary, bp.enabled, bp.hits, bp.cond, ))
return breaks
def do_clear_breakpoint(self, filename, lineno):
self.clear_break(filename, lineno)
def do_clear_file_breakpoints(self, filename):
self.clear_all_file_breaks(filename)
def do_clear(self, arg):
# required by BDB to remove temp breakpoints!
err = self.clear_bpbynumber(arg)
if err:
print '*** DO_CLEAR failed', err
def do_eval(self, arg, safe=True):
ret = eval(arg, self.frame.f_globals,
self.frame_locals)
if safe:
ret = pydoc.cram(repr(ret), 255)
return ret
def do_exec(self, arg):
locals = self.frame_locals
globals = self.frame.f_globals
code = compile(arg + '\n', '<stdin>', 'single')
save_displayhook = sys.displayhook
self.displayhook_value = None
try:
sys.displayhook = self.displayhook
exec code in globals, locals
finally:
sys.displayhook = save_displayhook
return self.displayhook_value
def do_where(self):
"print_stack_trace"
stack, curindex = self.get_stack(self.frame, None)
lines = []
for frame, lineno in stack:
filename = frame.f_code.co_filename
line = linecache.getline(filename, lineno)
lines.append((filename, lineno, "", "", line, ))
return lines
def do_environment(self):
"return current frame local and global environment"
env = {'locals': {}, 'globals': {}}
# converts the frame global and locals to a short text representation:
if self.frame:
for name, value in self.frame_locals.items():
env['locals'][name] = pydoc.cram(repr(
value), 255), repr(type(value))
for name, value in self.frame.f_globals.items():
env['globals'][name] = pydoc.cram(repr(
value), 20), repr(type(value))
return env
def get_autocomplete_list(self, expression):
"Return list of auto-completion options for expression"
try:
obj = self.do_eval(expression)
except:
return []
else:
return dir(obj)
def get_call_tip(self, expression):
"Return list of auto-completion options for expression"
try:
obj = self.do_eval(expression)
except Exception, e:
return ('', '', str(e))
else:
name = ''
try:
name = obj.__name__
except AttributeError:
pass
argspec = ''
drop_self = 0
f = None
try:
if inspect.isbuiltin(obj):
pass
elif inspect.ismethod(obj):
# Get the function from the object
f = obj.im_func
drop_self = 1
elif inspect.isclass(obj):
# Get the __init__ method function for the class.
if hasattr(obj, '__init__'):
f = obj.__init__.im_func
else:
                        for base in obj.__bases__:
if hasattr(base, '__init__'):
f = base.__init__.im_func
break
if f is not None:
drop_self = 1
elif callable(obj):
# use the obj as a function by default
f = obj
# Get the __call__ method instead.
f = obj.__call__.im_func
drop_self = 0
except AttributeError:
pass
if f:
argspec = apply(inspect.formatargspec, inspect.getargspec(f))
doc = ''
if callable(obj):
try:
                    doc = inspect.getdoc(obj) or ''
except:
pass
return (name, argspec[1:-1], doc.strip())
def set_burst(self, val):
"Set burst mode -multiple command count- (shut up notifications)"
self.burst = val
def set_params(self, params):
"Set parameters for interaction"
self.params.update(params)
def displayhook(self, obj):
"""Custom displayhook for the do_exec which prevents
assignment of the _ variable in the builtins.
"""
self.displayhook_value = repr(obj)
def reset(self):
bdb.Bdb.reset(self)
self.waiting = False
self.frame = None
def post_mortem(self, t=None):
# handling the default
if t is None:
# sys.exc_info() returns (type, value, traceback) if an exception is
# being handled, otherwise it returns None
t = sys.exc_info()[2]
if t is None:
raise ValueError("A valid traceback must be passed if no "
"exception is being handled")
self.reset()
# get last frame:
while t is not None:
frame = t.tb_frame
t = t.tb_next
code, lineno = frame.f_code, frame.f_lineno
filename = code.co_filename
line = linecache.getline(filename, lineno)
#(filename, lineno, "", current, line, )}
self.interaction(frame)
# console file-like object emulation
def readline(self):
"Replacement for stdin.readline()"
msg = {'method': 'readline', 'args': (), 'id': self.i}
self.pipe.send(msg)
msg = self.pipe.recv()
self.i += 1
return msg['result']
def readlines(self):
"Replacement for stdin.readlines()"
lines = []
while lines[-1:] != ['\n']:
lines.append(self.readline())
return lines
def write(self, text):
"Replacement for stdout.write()"
msg = {'method': 'write', 'args': (text, ), 'id': None}
self.pipe.send(msg)
def writelines(self, l):
map(self.write, l)
def flush(self):
pass
def isatty(self):
return 0
class QueuePipe(object):
"Simulated pipe for threads (using two queues)"
def __init__(self, name, in_queue, out_queue):
self.__name = name
self.in_queue = in_queue
self.out_queue = out_queue
def send(self, data):
self.out_queue.put(data, block=True)
def recv(self, count=None, timeout=None):
data = self.in_queue.get(block=True, timeout=timeout)
return data
def poll(self, timeout=None):
return not self.in_queue.empty()
def close(self):
pass
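# A minimal wiring sketch: two QueuePipe objects sharing a pair of
# queues give bidirectional frontend/backend transport inside one
# process (this is how test() below wires a Thread-based debugger):
#
#     a, b = Queue(), Queue()
#     front_conn = QueuePipe('parent', a, b)   # recv from a, send to b
#     child_conn = QueuePipe('child', b, a)    # recv from b, send to a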
class RPCError(RuntimeError):
"Remote Error (not user exception)"
pass
class Frontend(object):
"Qdb generic Frontend interface"
def __init__(self, pipe):
self.i = 1
self.pipe = pipe
self.notifies = []
self.read_lock = threading.RLock()
self.write_lock = threading.RLock()
def recv(self):
self.read_lock.acquire()
try:
return self.pipe.recv()
finally:
self.read_lock.release()
def send(self, data):
self.write_lock.acquire()
try:
return self.pipe.send(data)
finally:
self.write_lock.release()
def startup(self):
self.send({'method': 'run', 'args': (), 'id': None})
    def interaction(self, filename, lineno, line, **kwargs):
        raise NotImplementedError
def exception(self, title, extype, exvalue, trace, request):
"Show a user_exception"
raise NotImplementedError
def write(self, text):
"Console output (print)"
raise NotImplementedError
def readline(self, text):
"Console input/rawinput"
raise NotImplementedError
def run(self):
"Main method dispatcher (infinite loop)"
if self.pipe:
if not self.notifies:
# wait for a message...
request = self.recv()
else:
            # process an asynchronous notification received earlier
request = self.notifies.pop(0)
return self.process_message(request)
def process_message(self, request):
if request:
result = None
if request.get("error"):
# it is not supposed to get an error here
# it should be raised by the method call
raise RPCError(res['error']['message'])
elif request.get('method') == 'interaction':
self.interaction(*request.get("args"), **request.get("kwargs"))
elif request.get('method') == 'startup':
self.startup()
elif request.get('method') == 'exception':
self.exception(*request['args'])
elif request.get('method') == 'write':
self.write(*request.get("args"))
elif request.get('method') == 'readline':
result = self.readline()
if result:
response = {'version': '1.1', 'id': request.get('id'),
'result': result,
'error': None}
self.send(response)
return True
def call(self, method, *args):
"Actually call the remote method (inside the thread)"
req = {'method': method, 'args': args, 'id': self.i}
self.send(req)
self.i += 1 # increment the id
while 1:
# wait until command acknowledge (response id match the request)
res = self.recv()
if 'id' not in res or not res['id']:
# nested notification received (i.e. write)! process it!
self.process_message(res)
elif 'result' not in res:
# nested request received (i.e. readline)! process it!
self.process_message(res)
elif long(req['id']) != long(res['id']):
print "DEBUGGER wrong packet received: expecting id", req[
'id'], res['id']
# protocol state is unknown
elif 'error' in res and res['error']:
raise RPCError(res['error']['message'])
else:
return res['result']
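    # wire-format sketch (matching Qdb.pull_actions): a request such as
    #     {'method': 'do_eval', 'args': ('x',), 'id': 3}
    # is answered with
    #     {'version': '1.1', 'id': 3, 'result': ..., 'error': None}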
def do_step(self, arg=None):
"Execute the current line, stop at the first possible occasion"
self.call('do_step')
def do_next(self, arg=None):
"Execute the current line, do not stop at function calls"
self.call('do_next')
def do_continue(self, arg=None):
"Continue execution, only stop when a breakpoint is encountered."
self.call('do_continue')
def do_return(self, arg=None):
"Continue execution until the current function returns"
self.call('do_return')
def do_jump(self, arg):
"Set the next line that will be executed."
res = self.call('do_jump', arg)
print res
def do_where(self, arg=None):
"Print a stack trace, with the most recent frame at the bottom."
return self.call('do_where')
def do_quit(self, arg=None):
"Quit from the debugger. The program being executed is aborted."
self.call('do_quit')
def do_eval(self, expr):
"Inspect the value of the expression"
return self.call('do_eval', expr)
def do_environment(self):
"List all the locals and globals variables (string representation)"
return self.call('do_environment')
def do_list(self, arg=None):
"List source code for the current file"
return self.call('do_list', arg)
def do_read(self, filename):
"Read and send a local filename"
return self.call('do_read', filename)
def do_set_breakpoint(self, filename, lineno, temporary=0, cond=None):
"Set a breakpoint at filename:breakpoint"
self.call('do_set_breakpoint', filename, lineno, temporary, cond)
def do_clear_breakpoint(self, filename, lineno):
"Remove a breakpoint at filename:breakpoint"
self.call('do_clear_breakpoint', filename, lineno)
    def do_clear_file_breakpoints(self, filename):
        "Remove all breakpoints at filename"
        self.call('do_clear_file_breakpoints', filename)
def do_list_breakpoint(self):
"List all breakpoints"
return self.call('do_list_breakpoint')
def do_exec(self, statement):
return self.call('do_exec', statement)
def get_autocomplete_list(self, expression):
return self.call('get_autocomplete_list', expression)
def get_call_tip(self, expression):
return self.call('get_call_tip', expression)
def interrupt(self):
"Immediately stop at the first possible occasion (outside interaction)"
# this is a notification!, do not expect a response
req = {'method': 'interrupt', 'args': ()}
self.send(req)
def set_burst(self, value):
req = {'method': 'set_burst', 'args': (value, )}
self.send(req)
def set_params(self, params):
req = {'method': 'set_params', 'args': (params, )}
self.send(req)
class Cli(Frontend, cmd.Cmd):
"Qdb Front-end command line interface"
def __init__(self, pipe, completekey='tab', stdin=None, stdout=None, skip=None):
cmd.Cmd.__init__(self, completekey, stdin, stdout)
Frontend.__init__(self, pipe)
# redefine Frontend methods:
def run(self):
while 1:
try:
Frontend.run(self)
except KeyboardInterrupt:
print "Interupting..."
self.interrupt()
def interaction(self, filename, lineno, line):
print "> %s(%d)\n-> %s" % (filename, lineno, line),
self.filename = filename
self.cmdloop()
def exception(self, title, extype, exvalue, trace, request):
print "=" * 80
print "Exception", title
print request
print "-" * 80
def write(self, text):
print text,
def readline(self):
return raw_input()
def postcmd(self, stop, line):
return not line.startswith("h") # stop
do_h = cmd.Cmd.do_help
do_s = Frontend.do_step
do_n = Frontend.do_next
do_c = Frontend.do_continue
do_r = Frontend.do_return
do_j = Frontend.do_jump
do_q = Frontend.do_quit
def do_eval(self, args):
"Inspect the value of the expression"
print Frontend.do_eval(self, args)
def do_list(self, args):
"List source code for the current file"
lines = Frontend.do_list(self, eval(args, {}, {}) if args else None)
self.print_lines(lines)
def do_where(self, args):
"Print a stack trace, with the most recent frame at the bottom."
lines = Frontend.do_where(self)
self.print_lines(lines)
def do_environment(self, args=None):
env = Frontend.do_environment(self)
for key in env:
print "=" * 78
print key.capitalize()
print "-" * 78
for name, value in env[key].items():
print "%-12s = %s" % (name, value)
def do_list_breakpoint(self, arg=None):
"List all breakpoints"
breaks = Frontend.do_list_breakpoint(self)
print "Num File Line Temp Enab Hits Cond"
for bp in breaks:
print '%-4d%-30s%4d %4s %4s %4d %s' % bp
print
def do_set_breakpoint(self, arg):
"Set a breakpoint at filename:breakpoint"
if arg:
if ':' in arg:
args = arg.split(":")
else:
args = (self.filename, arg)
Frontend.do_set_breakpoint(self, *args)
else:
self.do_list_breakpoint()
do_b = do_set_breakpoint
do_l = do_list
do_p = do_eval
do_w = do_where
do_e = do_environment
def default(self, line):
"Default command"
if line[:1] == '!':
print self.do_exec(line[1:])
else:
print "*** Unknown command: ", line
def print_lines(self, lines):
for filename, lineno, bp, current, source in lines:
print "%s:%4d%s%s\t%s" % (filename, lineno, bp, current, source),
print
def test():
def f(pipe):
print "creating debugger"
qdb = Qdb(pipe=pipe, redirect_stdio=False)
print "set trace"
my_var = "Mariano!"
qdb.set_trace()
print "hello world!"
print "good by!"
        saraza  # deliberate NameError to exercise the exception handler
if '--process' in sys.argv:
from multiprocessing import Process, Pipe
pipe, child_conn = Pipe()
p = Process(target=f, args=(child_conn,))
else:
from threading import Thread
from Queue import Queue
parent_queue, child_queue = Queue(), Queue()
front_conn = QueuePipe("parent", parent_queue, child_queue)
child_conn = QueuePipe("child", child_queue, parent_queue)
p = Thread(target=f, args=(child_conn,))
p.start()
import time
class Test(Frontend):
def interaction(self, *args):
print "interaction!", args
def exception(self, *args):
print "exception", args
#raise RuntimeError("exception %s" % repr(args))
qdb = Test(front_conn)
time.sleep(5)
while 1:
print "running..."
Frontend.run(qdb)
time.sleep(1)
print "do_next"
qdb.do_next()
p.join()
def connect(host="localhost", port=6000, authkey='secret password'):
"Connect to a running debugger backend"
address = (host, port)
from multiprocessing.connection import Client
print "qdb debugger fronted: waiting for connection to", address
conn = Client(address, authkey=authkey)
try:
Cli(conn).run()
except EOFError:
pass
finally:
conn.close()
def main(host='localhost', port=6000, authkey='secret password'):
"Debug a script and accept a remote frontend"
if not sys.argv[1:] or sys.argv[1] in ("--help", "-h"):
print "usage: pdb.py scriptfile [arg] ..."
sys.exit(2)
mainpyfile = sys.argv[1] # Get script filename
if not os.path.exists(mainpyfile):
print 'Error:', mainpyfile, 'does not exist'
sys.exit(1)
del sys.argv[0] # Hide "pdb.py" from argument list
# Replace pdb's dir with script's dir in front of module search path.
sys.path[0] = os.path.dirname(mainpyfile)
from multiprocessing.connection import Listener
address = (host, port) # family is deduced to be 'AF_INET'
listener = Listener(address, authkey=authkey)
print "qdb debugger backend: waiting for connection at", address
conn = listener.accept()
print 'qdb debugger backend: connected to', listener.last_accepted
# create the backend
qdb = Qdb(conn, redirect_stdio=True, allow_interruptions=True)
try:
print "running", mainpyfile
qdb._runscript(mainpyfile)
print "The program finished"
except SystemExit:
# In most cases SystemExit does not warrant a post-mortem session.
print "The program exited via sys.exit(). Exit status: ",
print sys.exc_info()[1]
raise
except:
raise
conn.close()
listener.close()
qdb = None
def set_trace(host='localhost', port=6000, authkey='secret password'):
"Simplified interface to debug running programs"
global qdb, listener, conn
from multiprocessing.connection import Listener
# only create it if not currently instantiated
if not qdb:
address = (host, port) # family is deduced to be 'AF_INET'
listener = Listener(address, authkey=authkey)
conn = listener.accept()
# create the backend
qdb = Qdb(conn)
# start debugger backend:
qdb.set_trace()
def quit():
"Remove trace and quit"
global qdb, listener, conn
if qdb:
sys.settrace(None)
qdb = None
if conn:
conn.close()
conn = None
if listener:
listener.close()
listener = None
if __name__ == '__main__':
# When invoked as main program:
if '--test' in sys.argv:
test()
# Check environment for configuration parameters:
kwargs = {}
for param in 'host', 'port', 'authkey':
if 'QDB_%s' % param.upper() in os.environ:
kwargs[param] = os.environ['QDB_%s' % param.upper()]
if not sys.argv[1:]:
        # connect to a remote debugger
connect(**kwargs)
else:
# start the debugger on a script
# reimport as global __main__ namespace is destroyed
import qdb
qdb.main(**kwargs)
|
crosswalk-project/crosswalk-android-extensions
|
refs/heads/master
|
build/idl-generator/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/breakpad/dump_reader_win.py
|
50
|
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os
import shlex
from webkitpy.layout_tests.breakpad.dump_reader import DumpReader
_log = logging.getLogger(__name__)
class DumpReaderWin(DumpReader):
"""DumpReader for windows breakpad."""
def __init__(self, host, build_dir):
super(DumpReaderWin, self).__init__(host, build_dir)
self._cdb_available = None
def check_is_functional(self):
return self._check_cdb_available()
def _file_extension(self):
return 'txt'
def _get_pid_from_dump(self, dump_file):
with self._host.filesystem.open_text_file_for_reading(dump_file) as f:
crash_keys = dict([l.split(':', 1) for l in f.read().splitlines()])
if 'pid' in crash_keys:
return crash_keys['pid']
return None
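    # e.g. a breakpad .txt dump containing the lines "pid:1234" and
    # "plat:win32" yields crash_keys == {'pid': '1234', 'plat': 'win32'},
    # from which '1234' is returned.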
def _get_stack_from_dump(self, dump_file):
minidump = dump_file[:-3] + 'dmp'
cmd = [self._cdb_path, '-y', self._build_dir, '-c', '.lines;.ecxr;k30;q', '-z', minidump]
try:
stack = self._host.executive.run_command(cmd)
except:
_log.warning('Failed to execute "%s"' % ' '.join(cmd))
else:
return stack
return None
def _find_depot_tools_path(self):
"""Attempt to find depot_tools location in PATH."""
for i in os.environ['PATH'].split(os.pathsep):
if os.path.isfile(os.path.join(i, 'gclient')):
return i
def _check_cdb_available(self):
"""Checks whether we can use cdb to symbolize minidumps."""
if self._cdb_available != None:
return self._cdb_available
CDB_LOCATION_TEMPLATES = [
'%s\\Debugging Tools For Windows',
'%s\\Debugging Tools For Windows (x86)',
'%s\\Debugging Tools For Windows (x64)',
'%s\\Windows Kits\\8.0\\Debuggers\\x86',
'%s\\Windows Kits\\8.0\\Debuggers\\x64',
'%s\\Windows Kits\\8.1\\Debuggers\\x86',
'%s\\Windows Kits\\8.1\\Debuggers\\x64',
]
program_files_directories = ['C:\\Program Files']
program_files = os.environ.get('ProgramFiles')
if program_files:
program_files_directories.append(program_files)
program_files = os.environ.get('ProgramFiles(x86)')
if program_files:
program_files_directories.append(program_files)
possible_cdb_locations = []
for template in CDB_LOCATION_TEMPLATES:
for program_files in program_files_directories:
possible_cdb_locations.append(template % program_files)
        gyp_defines = os.environ.get('GYP_DEFINES', '')
        if gyp_defines:
            # GYP_DEFINES is a whitespace-separated list of key=value
            # pairs; parse it into a dict so values can be looked up.
            gyp_defines = dict(
                d.split('=', 1) for d in shlex.split(gyp_defines) if '=' in d)
            if 'windows_sdk_path' in gyp_defines:
                possible_cdb_locations.extend([
                    '%s\\Debuggers\\x86' % gyp_defines['windows_sdk_path'],
                    '%s\\Debuggers\\x64' % gyp_defines['windows_sdk_path'],
                ])
# Look in depot_tools win_toolchain too.
depot_tools = self._find_depot_tools_path()
if depot_tools:
win8sdk = os.path.join(depot_tools, 'win_toolchain', 'vs2013_files', 'win8sdk')
possible_cdb_locations.extend([
'%s\\Debuggers\\x86' % win8sdk,
'%s\\Debuggers\\x64' % win8sdk,
])
for cdb_path in possible_cdb_locations:
cdb = self._host.filesystem.join(cdb_path, 'cdb.exe')
try:
_ = self._host.executive.run_command([cdb, '-version'])
except:
pass
else:
self._cdb_path = cdb
self._cdb_available = True
return self._cdb_available
_log.warning("CDB is not installed; can't symbolize minidumps.")
_log.warning('')
self._cdb_available = False
return self._cdb_available
|
tomalrussell/smif
|
refs/heads/master
|
src/smif/convert/register.py
|
3
|
"""Register, ResolutionSet abstract classes to contain metadata and generate conversion
coefficients.
NDimensionalRegister is used in :class:`smif.convert.interval.IntervalAdaptor` and
:class:`smif.convert.region.RegionAdaptor`.
"""
import logging
from abc import ABCMeta, abstractmethod
from collections import OrderedDict, defaultdict
from typing import Dict, List
import numpy as np # type: ignore
class ResolutionSet(metaclass=ABCMeta):
"""Abstract class which holds the Resolution definitions
"""
def __init__(self):
self.name = ''
self.description = ''
self._data = []
self.logger = logging.getLogger(__name__)
def as_dict(self):
"""Get a serialisable representation of the object
"""
return {'name': self.name,
'description': self.description}
def __iter__(self):
return iter(self.data)
def __len__(self):
return len(self.data)
@property
def data(self):
"""Resolution set data
Returns
-------
list
"""
return self._data
@data.setter
def data(self, data):
self._data = data
@abstractmethod
def get_entry_names(self):
"""Get the names of the entries in the ResolutionSet
Returns
-------
set
The set of names which identify each entry in the ResolutionSet
"""
raise NotImplementedError
@abstractmethod
def intersection(self, bounds):
"""Return the subset of entries intersecting with the bounds
"""
raise NotImplementedError
@abstractmethod
def get_proportion(self, entry_a, entry_b):
"""Calculate the proportion of `entry_a` and `entry_b`
Arguments
---------
entry_a : string
Name of an entry in `ResolutionSet`
entry_b : string
Name of an entry in `ResolutionSet`
Returns
-------
float
The proportion of `entry_a` and `entry_b`
"""
raise NotImplementedError
@property
@abstractmethod
def coverage(self):
raise NotImplementedError
@staticmethod
@abstractmethod
def get_bounds(entry):
"""Implement this helper method to return bounds from an entry in the register
Arguments
---------
entry
An entry from a ResolutionSet
Returns
-------
bounds
The bounds of the entry
"""
raise NotImplementedError
class LogMixin(object):
@property
def logger(self):
try:
logger = self._logger
except AttributeError:
name = '.'.join([__name__, self.__class__.__name__])
logger = logging.getLogger(name)
self._logger = logger
return self._logger
@logger.setter
def logger(self, logger):
self._logger = logger
class Register(LogMixin, metaclass=ABCMeta):
"""Abstract class which holds the ResolutionSets
Arguments
---------
axis : int, default=None
The axis over which operations on the data array are performed
"""
store = None
def __init__(self, axis=None):
self.axis = axis
@property
@abstractmethod
def names(self):
raise NotImplementedError
@abstractmethod
def register(self, resolution_set: ResolutionSet):
raise NotImplementedError
@abstractmethod
def get_coefficients(self, source: str, destination: str):
raise NotImplementedError
def convert(self, data: np.ndarray, from_set_name: str, to_set_name: str) -> np.ndarray:
"""Convert a list of data points for a given set to another set
.. deprecated
            Usage superseded by Adaptor.convert
Parameters
----------
data: numpy.ndarray
from_set_name: str
to_set_name: str
Returns
-------
numpy.ndarray
"""
coefficients = self.get_coefficients(from_set_name, to_set_name)
converted = Register.convert_with_coefficients(data, coefficients, self.axis)
self.logger.debug("Converting from %s to %s.", from_set_name, to_set_name)
self.logger.debug("Converted value from %s to %s", data.sum(), converted.sum())
return converted
@staticmethod
def convert_with_coefficients(data, coefficients: np.ndarray, axis=None) -> np.ndarray:
"""Convert an array of data using given coefficients, along a given axis
.. deprecated
            Usage superseded by Adaptor.convert
Parameters
----------
data: numpy.ndarray
coefficients: numpy.ndarray
axis: integer, optional
Returns
-------
numpy.ndarray
"""
if axis is not None:
data_count = data.shape[axis]
            if coefficients.shape[0] != data_count:
                msg = "Size of coefficient array does not match source " \
                      "resolution set from data matrix: %s != %s"
                raise ValueError(msg % (coefficients.shape[0], data_count))
if axis == 0:
converted = np.dot(coefficients.T, data)
elif axis == 1:
converted = np.dot(data, coefficients)
else:
converted = np.dot(data, coefficients)
return converted
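# A hedged numeric sketch of convert_with_coefficients: splitting the
# second of two source intervals evenly across two of three targets,
# with data along axis 0.
#
#     data = np.array([[10.], [20.]])        # (2 source intervals, 1 col)
#     coeff = np.array([[1.0, 0.0, 0.0],
#                       [0.0, 0.5, 0.5]])    # (2 source, 3 target)
#     Register.convert_with_coefficients(data, coeff, axis=0)
#     # -> array([[10.], [10.], [10.]])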
class NDimensionalRegister(Register):
"""Abstract class which holds N-Dimensional ResolutionSets
Arguments
---------
axis : int, default=None
The axis over which operations on the data array are performed
"""
def __init__(self, axis=None):
super().__init__(axis)
self._register = OrderedDict() # type: Dict[str, ResolutionSet]
self._conversions = defaultdict(dict)
def register(self, resolution_set: ResolutionSet):
"""Add a ResolutionSet to the register
Parameters
----------
resolution_set : :class:`smif.convert.ResolutionSet`
Raises
------
ValueError
If a ResolutionSet of the same name already exists in the register
"""
if resolution_set.name in self._register:
msg = "A ResolutionSet named {} has already been loaded"
raise ValueError(msg.format(resolution_set.name))
self.logger.info("Registering '%s' with %i items",
resolution_set.name,
len(resolution_set))
self._register[resolution_set.name] = resolution_set
@property
def names(self) -> List[str]:
"""Names of registered region sets
Returns
-------
sets: list[str]
"""
return list(self._register.keys())
def get_entry(self, name: str) -> ResolutionSet:
"""Returns the ResolutionSet of `name`
Arguments
---------
name : str
The unique identifier of a ResolutionSet in the register
Returns
-------
smif.convert.ResolutionSet
"""
if name not in self._register:
msg = "ResolutionSet '{}' not registered"
raise ValueError(msg.format(name))
return self._register[name]
def _write_coefficients(self, source, destination, data: np.ndarray):
if self.store:
self.store.write_coefficients(source, destination, data)
else:
msg = "Data interface not available to write coefficients"
self.logger.warning(msg)
def get_coefficients(self, source: str, destination: str) -> np.ndarray:
"""Get coefficients representing intersection of sets
Arguments
---------
source : string
The name of the source set
destination : string
The name of the destination set
Returns
-------
numpy.ndarray
"""
from_set = self.get_entry(source)
to_set = self.get_entry(destination)
if from_set.coverage != to_set.coverage:
log_msg = "Coverage for '%s' is %d and does not match coverage " \
"for '%s' which is %d"
self.logger.warning(log_msg, from_set.name, from_set.coverage,
to_set.name, to_set.coverage)
coefficients = self.generate_coefficients(from_set, to_set)
return coefficients
def generate_coefficients(
self, from_set: ResolutionSet, to_set: ResolutionSet) -> np.ndarray:
"""Generate coefficients for converting between two :class:`ResolutionSet`s
Coefficients for converting a single dimension will always be 2D, of shape
(len(from_set), len(to_set)).
Parameters
----------
from_set : ResolutionSet
to_set : ResolutionSet
Returns
-------
numpy.ndarray
"""
        coefficients = np.zeros((len(from_set), len(to_set)), dtype=float)
self.logger.debug("Coefficients array is of shape %s for %s to %s",
coefficients.shape, from_set.name, to_set.name)
from_names = from_set.get_entry_names()
for to_idx, to_entry in enumerate(to_set):
for from_idx in from_set.intersection(to_entry):
from_entry = from_set.data[from_idx]
proportion = from_set.get_proportion(from_idx, to_entry)
self.logger.debug("%i percent of %s (#%s) is in %s (#%s)",
proportion * 100,
to_entry.name, to_idx,
from_entry.name, from_idx)
from_idx = from_names.index(from_entry.name)
coefficients[from_idx, to_idx] = proportion
self.logger.debug("Generated %s", coefficients)
return coefficients
|
ChrisGoedhart/Uforia
|
refs/heads/master
|
source/testdirs.py
|
1
|
import os
top=os.getcwd()
for root, dirs, files in os.walk(top, topdown=False):
for name in files:
print os.path.join(root,name)
for name in dirs:
x=os.path.join(root,name)
print x
|
emanuelschuetze/OpenSlides
|
refs/heads/master
|
openslides/users/management/commands/createsuperuser.py
|
2
|
from django.core.management.base import BaseCommand
from ...models import User
class Command(BaseCommand):
"""
Command to create or reset the admin user.
"""
help = "Creates or resets the admin user."
def handle(self, *args, **options):
created = User.objects.create_or_reset_admin_user()
if created:
self.stdout.write("Admin user successfully created.")
else:
self.stdout.write("Admin user successfully reset.")
|
rouge8/pip
|
refs/heads/develop
|
src/pip/_vendor/packaging/utils.py
|
62
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import re
from .version import InvalidVersion, Version
_canonicalize_regex = re.compile(r"[-_.]+")
def canonicalize_name(name):
# This is taken from PEP 503.
return _canonicalize_regex.sub("-", name).lower()
def canonicalize_version(version):
"""
    This is very similar to Version.__str__, but has one subtle difference
    in the way it handles the release segment.
"""
try:
version = Version(version)
except InvalidVersion:
# Legacy versions cannot be normalized
return version
parts = []
# Epoch
if version.epoch != 0:
parts.append("{0}!".format(version.epoch))
# Release segment
# NB: This strips trailing '.0's to normalize
parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
# Pre-release
if version.pre is not None:
parts.append("".join(str(x) for x in version.pre))
# Post-release
if version.post is not None:
parts.append(".post{0}".format(version.post))
# Development release
if version.dev is not None:
parts.append(".dev{0}".format(version.dev))
# Local version segment
if version.local is not None:
parts.append("+{0}".format(version.local))
return "".join(parts)
|
lynxis/pyLogicSniffer
|
refs/heads/master
|
analyzer_tool_spi.py
|
2
|
# -*- coding: UTF-8 -*-
'''SPI analysis tool for pyLogicSniffer.
Copyright © 2011, Mel Wilson mwilson@melwilsonsoftware.ca
This file is part of pyLogicSniffer.
pyLogicSniffer is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pyLogicSniffer is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pyLogicSniffer. If not, see <http://www.gnu.org/licenses/>.
'''
import wx
import numpy as np
import itertools, time
import analyzer_tools
tool_menu_string = '&SPI' # recommended menu string
tool_title_string = 'SPI' # recommended title string
class AnalyzerDialog (wx.Dialog):
'''Edit settings for SPI tool.'''
def __init__ (self, parent, settings=None):
wx.Dialog.__init__ (self, parent, wx.ID_ANY, 'SPI Settings')
self.clock_ctrl = wx.TextCtrl (self, wx.ID_ANY, '', validator=SpiPinValidator())
self.mosi_ctrl = wx.TextCtrl (self, wx.ID_ANY, '', validator=SpiPinValidator())
self.miso_ctrl = wx.TextCtrl (self, wx.ID_ANY, '', validator=SpiPinValidator())
self.ssel_ctrl = wx.TextCtrl (self, wx.ID_ANY, '', validator=SpiPinValidator())
self.master_ctrl = wx.CheckBox(self, wx.ID_ANY, '')
self.leading_ctrl = wx.RadioBox (self, -1, 'Leading Bit', choices=['MSB', 'LSB'])
self.leading_ctrl.SetSelection (0)
self.cpol_ctrl = wx.RadioBox (self, -1, 'Leading Edge', choices=['Rising', 'Falling'])
self.cpol_ctrl.SetSelection (0)
self.cpha_ctrl = wx.RadioBox (self, -1, 'Leading Edge Action', choices=['Sample', 'Setup'])
self.cpha_ctrl.SetSelection (0)
if settings is not None:
self.SetValue (settings)
gs = wx.FlexGridSizer (7, 2)
gs.Add (wx.StaticText (self, wx.ID_ANY, 'SCK'), 0, wx.ALIGN_CENTER_VERTICAL)
gs.Add (self.clock_ctrl, 1, 0)
gs.Add (wx.StaticText (self, wx.ID_ANY, 'MOSI'), 0, wx.ALIGN_CENTER_VERTICAL)
gs.Add (self.mosi_ctrl, 1, 0)
gs.Add (wx.StaticText (self, wx.ID_ANY, 'MISO'), 0, wx.ALIGN_CENTER_VERTICAL)
gs.Add (self.miso_ctrl, 1, 0)
gs.Add (wx.StaticText (self, wx.ID_ANY, '/SS'), 0, wx.ALIGN_CENTER_VERTICAL)
gs.Add (self.ssel_ctrl, 1, 0)
gs.Add (wx.StaticText (self, wx.ID_ANY, 'Master'), 0, wx.ALIGN_CENTER_VERTICAL)
gs.Add (self.master_ctrl, 1, 0)
ts = wx.BoxSizer (wx.VERTICAL)
ts.Add (gs, 1, wx.ALIGN_CENTER)
ts.Add (self.leading_ctrl, 0, wx.EXPAND)
clock_box = wx.StaticBox (self, -1, 'Clock') # in Python, StaticBoxSizer can't create this
clock_box_sizer = wx.StaticBoxSizer (clock_box, wx.VERTICAL)
clock_box_sizer.Add (self.cpol_ctrl, 0, wx.EXPAND)
clock_box_sizer.Add (self.cpha_ctrl, 0, wx.EXPAND)
ts.Add (clock_box_sizer, 0, wx.EXPAND)
ts.Add (self.CreateButtonSizer (wx.OK|wx.CANCEL), 0, wx.EXPAND)
self.SetAutoLayout (True)
self.SetSizer (ts)
ts.Fit (self)
ts.SetSizeHints (self)
def SetValue (self, settings):
if 'sck' in settings: self.clock_ctrl.SetValue (str (settings['sck']))
if 'mosi' in settings: self.mosi_ctrl.SetValue (str (settings['mosi']))
if 'miso' in settings: self.miso_ctrl.SetValue (str (settings['miso']))
if 'nss' in settings: self.ssel_ctrl.SetValue (str (settings['nss']))
if 'master' in settings: self.master_ctrl.SetValue (settings['master'])
if 'leading' in settings: self.leading_ctrl.SetStringSelection (settings['leading'])
if 'mode' in settings:
self.cpol_ctrl.SetSelection (settings['mode'] >> 1)
self.cpha_ctrl.SetSelection (settings['mode'] & 1)
def GetValue (self):
return {
'sck': int (self.clock_ctrl.GetValue()),
'mosi': int (self.mosi_ctrl.GetValue()),
'miso': int (self.miso_ctrl.GetValue()),
'nss': int (self.ssel_ctrl.GetValue()),
'mode': (self.cpol_ctrl.GetSelection() << 1) | self.cpha_ctrl.GetSelection(),
'master': self.master_ctrl.IsChecked(),
'leading': self.leading_ctrl.GetStringSelection(),
'cpol': self.cpol_ctrl.GetSelection(), # optional
'cpha': self.cpha_ctrl.GetSelection(), # optional
}
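# Note (illustrative): GetValue packs the SPI mode as (CPOL << 1) | CPHA, so
# e.g. a 'Falling' leading edge (CPOL=1) with a 'Sample' action (CPHA=0)
# yields mode 2, which AnalyzerPanel.Analyze below unpacks back into pol/pha.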
class SpiPinValidator (analyzer_tools.SimpleValidator):
def Validate (self, parent):
return self.DoValidation (int, lambda v: 0 <= v <= 31, 'Pin number must be an integer from 0 to 31.')
#===========================================================
class AnalyzerPanel (wx.ScrolledWindow):
'''Display SPI tool analysis.'''
spi_settings = None
def __init__ (self, parent, settings, tracedata):
wx.ScrolledWindow.__init__ (self, parent, wx.ID_ANY)
self.settings = settings
self.tracedata = tracedata
dg = self.display_grid = wx.grid.Grid (self, -1)
dg.CreateGrid (0, 5)
dg.SetRowLabelSize (0)
dg.SetColLabelValue (0, '#')
dg.SetColLabelValue (1, 'μSeconds')
dg.SetColLabelValue (2, 'Status')
dg.SetColLabelValue (3, 'MOSI')
dg.SetColLabelValue (4, 'MISO')
dg.SetColFormatNumber (0)
dg.SetColFormatFloat (1)
self.Analyze()
dg.AutoSize()
ts = wx.BoxSizer (wx.VERTICAL)
ts.Add (dg, 1, wx.EXPAND)
self.SetAutoLayout (True)
self.SetSizer (ts)
ts.Fit (self)
def Analyze (self):
settings = self.settings
pol = (settings['mode'] >> 1) & 1 # clock polarity
pha = settings['mode'] & 1 # sample/setup phase
channel_data = self.tracedata.channel_data
spi_data = itertools.izip (
itertools.count(),
channel_data (settings['nss']),
channel_data (settings['sck']),
channel_data (settings['miso']),
channel_data (settings['mosi'])
)
stime, oldnss, oldsck, oldmiso, oldmosi = spi_data.next()
mosi_data = miso_data = 0
miso_bitcount = mosi_bitcount = 0
for stime, nss, sck, miso, mosi in spi_data:
if oldnss > nss: # SPI just became active
self._log_nss_enable (stime)
mosi_data = miso_data = 0
miso_bitcount = mosi_bitcount = 0
elif oldnss < nss: # SPI just became inactive
self._log_nss_disable (stime, mosi_bitcount, mosi_data, miso_bitcount,miso_data)
if not nss: # SPI is active
if oldsck^pol < sck^pol : # leading clock edge
if pha: # setup output level
mosi_data = (mosi_data << 1) | bool (mosi)
mosi_bitcount += 1
else: # sample input level
miso_data = (miso_data << 1) | bool (miso)
miso_bitcount += 1
elif oldsck^pol > sck^pol: # trailing clock edge
if pha: # sample input level
miso_data = (miso_data << 1) | bool (miso)
miso_bitcount += 1
else: # setup output level
mosi_data = (mosi_data << 1) | bool (mosi)
mosi_bitcount += 1
if miso_bitcount > 7:
self._log_data_byte (stime, None, miso_data)
miso_data = 0
miso_bitcount = 0
if mosi_bitcount > 7:
self._log_data_byte (stime, mosi_data, None)
mosi_data = 0
mosi_bitcount = 0
oldnss, oldsck, oldmiso, oldmosi = nss, sck, miso, mosi
# finished examining the trace data ..
if miso_bitcount > 0 or mosi_bitcount > 0:
dg, r = self._new_row()
self._log_header (dg, r, stime)
dg.SetCellValue (r, 2, 'End')
if mosi_bitcount > 0:
dg.SetCellValue (r, 3, partial_bits (mosi_bitcount, mosi_data))
if miso_bitcount > 0:
dg.SetCellValue (r, 4, partial_bits (miso_bitcount, miso_data))
def _log_header (self, dg, r, sample):
dg.SetCellValue (r, 0, str (sample))
dg.SetCellValue (r, 1, str (self._sample_time (sample)*1e6))
def _new_row (self):
dg = self.display_grid
r = dg.GetNumberRows()
dg.AppendRows (1)
return dg, r
def _log_nss_disable (self, sample, mosi_bitcount, mosi_data, miso_bitcount, miso_data):
dg, r = self._new_row ()
self._log_header (dg, r, sample)
dg.SetCellValue (r, 2, 'Disable')
if mosi_bitcount > 0:
dg.SetCellValue (r, 3, partial_bits (mosi_bitcount, mosi_data))
if miso_bitcount > 0:
dg.SetCellValue (r, 4, partial_bits (miso_bitcount, miso_data))
def _log_nss_enable (self, sample):
dg, r = self._new_row ()
self._log_header (dg, r, sample)
dg.SetCellValue (r, 2, 'Enable')
def _log_data_byte (self, sample, mosi, miso):
dg, r = self._new_row ()
self._log_header (dg, r, sample)
if mosi is not None:
dg.SetCellValue (r, 3, '0x%02x' % (mosi,))
if miso is not None:
dg.SetCellValue (r, 4, '0x%02x' % (miso,))
def _sample_time (self, sample):
d = self.tracedata
return float (sample - d.read_count + d.delay_count) / d.frequency
#===========================================================
class AnalyzerFrame (analyzer_tools.AnalyzerFrame):
'''Free-standing window to display SPI analyzer panel.'''
def CreatePanel (self, settings, tracedata):
'''Return an instance of the analysis panel to include in this window.'''
return AnalyzerPanel (self, settings, tracedata)
def SettingsDescription (self, settings):
'''Return a string describing specific settings.'''
return 'SCK:%(sck)d\tMOSI:%(mosi)d\tMISO:%(miso)d\tnSS:%(nss)d' % settings
def SetTitle (self, title):
'''Set the title for this window.'''
analyzer_tools.AnalyzerFrame.SetTitle (self, '%s - %s' % (title, tool_title_string))
#===========================================================
def partial_bits (bitcount, data, msbfirst=True):
'''String representing a byte of less than 8 bits, MSB first.'''
s = [str ((data >> i) & 1) for i in xrange (bitcount)]
if msbfirst:
return ''.join (s[::-1]) + 'x'*(8-bitcount)
else:
return 'x'*(8-bitcount) + ''.join(s)
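# Worked example (illustrative only): three received bits 1, 0, 1 render as
# '101xxxxx' MSB-first, or 'xxxxx101' LSB-first.
def _partial_bits_example():
    assert partial_bits (3, 0b101) == '101xxxxx'
    assert partial_bits (3, 0b101, msbfirst=False) == 'xxxxx101'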
# Test jig ...
if __name__ == '__main__':
from simple_test_frame import SimpleTestFrame
class MyTestFrame (SimpleTestFrame):
dialog_data = None
def OnTest (self, evt):
dlg = AnalyzerDialog (self, self.dialog_data)
if dlg.ShowModal () == wx.ID_OK:
if not dlg.Validate():
return
self.dialog_data = dlg.GetValue()
dlg.Destroy()
class MyApp (wx.App):
'''Application.'''
def OnInit (self):
frame = MyTestFrame ('AnalyzerDialog Test', 'About '+__file__, __doc__)
frame.Show (True)
self.SetTopWindow (frame)
return True
app = MyApp (0)
app.MainLoop()
|
ProfessionalIT/maxigenios-website
|
refs/heads/master
|
sdk/google_appengine/lib/django-1.2/django/views/decorators/__init__.py
|
12133432
| |
DmitryErmolchik/MediaCatalog
|
refs/heads/master
|
__init__.py
|
12133432
| |
MonsterNya/Starbound_RU
|
refs/heads/web-interface
|
tools/parser_settings.py
|
6
|
from re import compile as regex
foi = {
"*": [".*escription$","^(.+/)?[Tt]ext$","^(.+/)?[Dd]ialog/[^/]+/[^/]+/.*[0-9]+$",
"^(.+/)?(sub)?[tT]itle(/value)?$", "^(.+/)+caption$", "^(.+/)?label$",
"^(.+/)?message$", "^.+Name/value$", "^.*friendlyName$", ".*senderName$",
".*destinations/.+[Nn]ame$", ".+[lL]abel[a-zA-Z]*/(value|name)$",
"bookmarkName"],
".object": ["^chatOptions/.+$",],
".codex": ["contentPages/[0-9]+$"],
".matherial": [],
".config": ["^(.*/)?(?!generatedParameters)[a-zA-Z]+/displayName$",
"^(.+/)?lbl[a-zA-Z]+/value$", "^labels/.+$", "^otherDeed/.+$", "^.*Format$",
"^enclosedArea/.+$", "^tagCriteria/.+$", "^hail/.+$", "^converse/.+$",
"^follow/.+$", "^flee/.+$", "^encorage/.+$", "^severe/.+$", "^accuse/.+$",
"^tout/.+$", "^rent/.+$", "^alert/.+$", "^welcome/.+$", "^beacon/.+$",
"^safe/.+$", "^helpme/.+$", "^final/.+$", "^.+Text(/value)?$",
"^gui.+/value$", "^paneLayout/.+/value$", "areYouSure/value$",
"^blueprintUnlock$", "^blueprintAlreadyKnown$",
"^rotTimeDescriptions/.+/1$", "^[a-zA-Z]*[mM]essages/[a-zA-Z]+$",
".+[mM]essage$", "^.*(hint|regex|([a-zA-Z]*(Caption|[Tt]itle)))$",
"^defaultPetNameLabel$", ".*descriptions/[0-9]+$", "^(un)?trackLabel$",
"^modeTypeTextAndColor/[0-9]+/[0-9]+$"],
"themes.config":["^[0-9]+/1/[0-9]+/(0|(1/)?name)$"],
"placestation.config":["^.*Text/[^/]+$"],
"dungeonitems.config":["^[0-9]+/1/[0-9]+/(0|(1/)?name)$"],
"threats.config":["^[0-9]+/1/[0-9]+/(0|(1/)?name)$"],
"weapon.config":["^[0-9]+/1/[0-9]+/(0|(1/)?name)$"],
"monsters.config":["^[0-9]+/1/[0-9]+/(0|(1/)?name)$"],
"hatadjectives.config":["^[0-9]+/1/[0-9]+/(0|(1/)?name)$"],
"cockpit.config": ["^topLeftButtonTooltips/.*",
".+Caption/[^/]+", "^jumpDialog/[^/]+$", "^[a-zA-Z]+Names/[^/]+$",
"^clusterMoons/[^/]+$", "^worldTypeDescription/.+$",
"^visitableTypeDescription/.+$", "^terraformedPlanetDescription/.+$",
"^threatLevelText/[a-zA-Z]+/[0-9]+$", "^threatTextPrefix$",
"^objectThreatText/.*$", "^systemTooltip/exploredLabel/.+$"],
"statuses.config":["^statuses/.+$"],
"help.config": ["^[a-z]+Commands/.+$"],
"hunger.config": ["^.*$"],
"locations.config": [".*/name$"],
"namegen.config": ["^names/1/[0-9]+/[0-9]+$"],
"quests.config": ["^pronouns/.+$", "^objectiveDescriptions/.+"],
".species": ["^charGenTextLabels/[0-9]+$"],
".sbvn": ["^.+/options/[0-9]+/0$"],
".tech": [],
".cinematic": [],
".currency": [],
".liquid": [],
".biome": [],
".item": [],
".instrument": [],
".legs": [],
".chest": [],
".back": [],
".head": [],
".harvestingtool": [],
".flashlight": [],
".painttool": [],
".wiretool": [],
".tillingtool": [],
".miningtool": [],
".beamaxe": [],
".inspectiontool": ["^outOfRangeText/.+$", "^nothingThereText/.+$"],
".thrownitem": [],
".unlock": ["^unlockMessage$"],
".matitem": [],
".liqitem": [],
".augment": [],
".consumable": [],
".coinitem": [],
".activeitem": [ "^altAbility/name$"],
".namesource": ["^sourceNames/[0-9]+$"],
".particle": [],
".damage": [],
".statuseffect": [],
".stagehand": ["^radioMessages/[^/]+/(0|2)$"],
".material": [],
".matmod": [],
".npctype": ["^scriptConfig/crew/role/(name|field)$",
"^scriptConfig/crew/ranks/[0-9]+$", "^npcname$"],
".mat": [],
".radiomessages": [],
".bush": [],
".grass": [],
".monstertype": ["^(.+/)?dialog/.+$"],
".monsterskill": ["^label$"],
".aimission": [".*Text$"],
".questtemplate": ["^.+Text(/[^0-9]+([0-9]+/1)?/[0-9]+)?$",
"^scriptConfig/(descriptions|.+Note|parcel(Name|Description))/.+$",
"^.+/example/name$", "^scriptConfig/objectiveLists/[^/]+/[0-9]+/0$"],
".tooltip": [],
".itemdescription": [],
".weaponability": ["^ability/name$"],
"_metadata":[]
}
files_of_interest = dict()
for ext, poi in foi.items():
files_of_interest[ext] = list()
for p in poi:
#print(p)
files_of_interest[ext].append(regex(p))
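# Sanity check (illustrative): the compiled patterns behave like the raw
# strings above, e.g. a .codex content-page path is matched from the start.
assert any(p.match("contentPages/3") for p in files_of_interest[".codex"])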
|
nicecapj/crossplatfromMmorpgServer
|
refs/heads/master
|
ThirdParty/boost_1_61_0/tools/build/src/tools/types/obj.py
|
75
|
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from b2.build import type
def register ():
type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN'])
type.register_type ('OBJ', ['o'])
register ()
|
gvlproject/bioconda-recipes
|
refs/heads/master
|
recipes/peptide-shaker/1.16.16/peptide-shaker.py
|
45
|
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
# Program Parameters
#
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'PeptideShaker-1.16.16.jar'
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
"""Return the symlink-resolved, canonicalized directory-portion of path."""
return os.path.dirname(os.path.realpath(path))
def java_executable():
"""Return the executable name of the Java interpreter."""
java_home = getenv('JAVA_HOME')
java_bin = os.path.join('bin', 'java')
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
return 'java'
def jvm_opts(argv):
"""Construct list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
        (memory_options, prop_options, passthrough_options, exec_dir)
    where the first three elements are lists of strings and exec_dir is a
    path string or None.
"""
mem_opts = []
prop_opts = []
pass_args = []
exec_dir = None
for arg in argv:
if arg.startswith('-D'):
prop_opts.append(arg)
elif arg.startswith('-XX'):
prop_opts.append(arg)
elif arg.startswith('-Xm'):
mem_opts.append(arg)
elif arg.startswith('--exec_dir='):
exec_dir = arg.split('=')[1].strip('"').strip("'")
if not os.path.exists(exec_dir):
shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
else:
pass_args.append(arg)
# In the original shell script the test coded below read:
# if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
# To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
# in the second condition, so a null envar value counts as True!
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_jvm_mem_opts
return (mem_opts, prop_opts, pass_args, exec_dir)
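# Illustrative sketch of how a mixed argument list is split by jvm_opts()
# (assumes mem_opts ends up non-empty, so the defaults are not substituted).
def _jvm_opts_example():
    mem, props, rest, exec_dir = jvm_opts(['-Xmx2g', '-Dx=1', 'input.mgf'])
    assert mem == ['-Xmx2g']
    assert props == ['-Dx=1']
    assert rest == ['input.mgf']
    assert exec_dir is None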
def main():
    """
    PeptideShaker updates files relative to the path of the jar file.
    In a multiuser setting, the option --exec_dir="exec_dir"
    can be used as the location for the peptide-shaker distribution.
    If the exec_dir does not exist,
    we copy the jar file, lib, and resources to the exec_dir directory.
    """
    java = java_executable()
(mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])
if pass_args != [] and pass_args[0].startswith('eu'):
jar_arg = '-cp'
else:
jar_arg = '-jar'
jar_path = os.path.join(jar_dir, jar_file)
java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
|
jonsito/AgilityContest
|
refs/heads/master
|
applications/RaspBerryPi_NowRunning/NRMain.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2018-2021 by Juan Antonio Martinez ( juansgaviota at gmail dot com )
#
# This program is free software; you can redistribute it and/or modify it under the terms
# of the GNU General Public License as published by the Free Software Foundation;
# either version 2 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program;
# if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import time
import argparse
import threading
import sys
import os
import NRDisplay
import NRNetwork
import NROptions
import NRWeb
def isInteger(val):
try:
int(val)
return True
except ValueError:
return False
def restart(mode): # 0:exit 1:restart 2:shutdown
import getch
global displayHandler
global networkHandler
global webHandler
global displayName
msgoob = [ 'Exit','Restart','Shut down']
msgs = [ 'Exiting...','Restarting...','Shutting down...']
displayHandler.setOobMessage("Confirm "+msgoob[mode]+" +/-?",1)
time.sleep(2)
displayHandler.setMenuMessage('+/-?')
# get confirm. clear prompt
c= getch.getch()
displayHandler.setMenuMessage('')
if c!='+':
return True # continue loop
displayHandler.setOobMessage(msgs[mode],1)
time.sleep(2)
# start closing threads
networkHandler.stopNetwork()
displayHandler.stopDisplay()
webHandler.stopWeb()
if displayName == "pygame":
# do not restart nor shutdown on pygame, just stop
return False
else:
os._exit(mode)
def inputParser():
global displayHandler
global networkHandler
global menuHandler
loop = True
while loop==True:
data = sys.stdin.readline()
if (data == "\n") or (data == "+\n"):
displayHandler.setNextRunning()
elif data == "-\n":
displayHandler.setPrevRunning()
elif data == "*9\n":
loop=restart(0)
elif data == "*0\n":
print("Return to normal mode")
displayHandler.setCountDown(0)
displayHandler.setChronoMode(0)
displayHandler.setClockMode(False)
elif data == "*1\n":
print("Enter Course walk mode")
            # stop the clock, start the course-walk countdown
displayHandler.setClockMode(False)
displayHandler.setChronoMode(0)
displayHandler.setOobMessage("Course Walk",2)
displayHandler.setCountDown(menuHandler.getCountDown())
elif data == "*2\n":
print("Enter clock mode")
            # stop the course-walk countdown, start the clock
displayHandler.setOobMessage("Clock Mode",2)
displayHandler.setCountDown(0)
displayHandler.setClockMode(True)
displayHandler.setChronoMode(0)
elif data == "*3\n":
print("Enter chrono mode")
            # stop the course-walk countdown, start the chrono
displayHandler.setOobMessage("Chrono Mode",2)
displayHandler.setCountDown(0)
displayHandler.setClockMode(False)
displayHandler.setChronoMode(1)
elif data == "**\n":
print("Enter in menu")
            # stop both the clock and the course-walk countdown
displayHandler.setCountDown(0)
displayHandler.setClockMode(False)
displayHandler.setChronoMode(0)
res = menuHandler.runMenu(displayHandler,networkHandler)
if res > 0: # 1:stop 2:restart 3:shutdown
                loop=restart(res-1) # 0:stop 1:restart 2:shutdown
elif isInteger(data) == False:
print ("Unrecongnized data entry: '%s'" % (data))
else:
print ("received '"+data+"'")
displayHandler.setNowRunning(int(data))
# end def
print("inputLoopThread() exiting")
# end def
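# Keypad/stdin protocol handled by inputParser() above (summary):
#   '+' or an empty line -> next running dog; '-' -> previous
#   '*0' normal mode, '*1' course walk, '*2' clock, '*3' chrono, '**' menu
#   '*9' exit (with confirmation); a bare integer sets the "now running" number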
if __name__ == "__main__":
global displayName
global displayHandler
global networkHandler
global menuHandler
global webHandler
parser = argparse.ArgumentParser(description='SuTurno cmdline arguments',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--display','-d',type=str,default='hub08',help='Display mode "pygame", "max7219", or "hub08"')
parser.add_argument('--ring','-r', type=int, default=1, help='Ring to listen events from (1..4)')
parser.add_argument('--interface','-i', type=str, default='',help='Use specific network interface to look for server')
parser.add_argument('--port','-p', type=int, default=80, help='Port to attach Web server interface (0:disable)')
parser.add_argument('--cascaded', '-n', type=int, default=4, help='Number of cascaded MAX7219 LED matrices')
parser.add_argument('--block_orientation', type=int, default=-90, choices=[0, 90, -90], help='Corrects block orientation when wired vertically')
parser.add_argument('--rotate', type=int, default=2, choices=[0, 1, 2, 3], help='Rotate display 0=0°, 1=90°, 2=180°, 3=270°')
args = parser.parse_args()
displayName= args.display
try:
threads=[]
# init display handler
displayHandler = NRDisplay.NRDisplay(args.display,args.cascaded, args.block_orientation, args.rotate)
displayHandler.setRing(int(args.ring))
# search network for connection
networkHandler = NRNetwork.NRNetwork(args.interface,args.ring,displayHandler)
# start display threads
w = threading.Thread(target = displayHandler.setStdMessage) # setting of main message
threads.append(w)
w.start()
w = threading.Thread(target = displayHandler.displayLoop) # display message loop
threads.append(w)
w.start()
# create menu handler
menuHandler = NROptions.NROptions()
# start keyboard handler thread
w = threading.Thread(target=inputParser)
threads.append(w)
w.start()
# network event threads
w = threading.Thread(target = networkHandler.networkLoop) # network thread loop
threads.append(w)
w.start()
# web server event thread
if args.port != 0 :
webHandler = NRWeb.NRWeb(args.port,displayHandler,networkHandler,menuHandler)
w = threading.Thread(target = webHandler.webLoop) # network thread loop
threads.append(w)
w.start()
# wait for all threads to die
for x in threads:
x.join()
except KeyboardInterrupt:
networkHandler.stopNetwork()
displayHandler.stopDisplay()
webHandler.stopWeb()
pass
|
shail2810/nova
|
refs/heads/master
|
nova/network/__init__.py
|
63
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_config.cfg
from oslo_utils import importutils
_network_opts = [
oslo_config.cfg.StrOpt('network_api_class',
default='nova.network.api.API',
help='The full class name of the '
'network API class to use'),
]
oslo_config.cfg.CONF.register_opts(_network_opts)
def API(skip_policy_check=False):
network_api_class = oslo_config.cfg.CONF.network_api_class
if 'quantumv2' in network_api_class:
network_api_class = network_api_class.replace('quantumv2', 'neutronv2')
cls = importutils.import_class(network_api_class)
return cls(skip_policy_check=skip_policy_check)
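# For example, a legacy value such as 'nova.network.quantumv2.api.API' is
# rewritten to 'nova.network.neutronv2.api.API' before being imported, so
# configurations predating the Quantum -> Neutron rename keep working.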
|
danielneis/osf.io
|
refs/heads/develop
|
website/addons/box/tests/test_utils.py
|
8
|
# -*- coding: utf-8 -*-
"""Tests for website.addons.box.utils."""
import mock
from nose.tools import * # noqa (PEP8 asserts)
from framework.auth import Auth
from website.project.model import NodeLog
from tests.factories import ProjectFactory
from website.addons.box.tests.utils import BoxAddonTestCase
from website.addons.box import utils
from website.addons.box.serializer import BoxSerializer
from website.addons.box.model import BoxNodeSettings
class TestNodeLogger(BoxAddonTestCase):
def test_log_file_added(self):
logger = utils.BoxNodeLogger(
node=self.project,
auth=Auth(self.user),
)
logger.log(NodeLog.FILE_ADDED, save=True)
last_log = self.project.logs[-1]
assert_equal(last_log.action, "box_{0}".format(NodeLog.FILE_ADDED))
# Regression test for https://github.com/CenterForOpenScience/osf.io/issues/1557
def test_log_deauthorized_when_node_settings_are_deleted(self):
project = ProjectFactory()
project.add_addon('box', auth=Auth(project.creator))
dbox_settings = project.get_addon('box')
dbox_settings.delete(save=True)
# sanity check
assert_true(dbox_settings.deleted)
logger = utils.BoxNodeLogger(node=project, auth=Auth(self.user))
logger.log(action='node_deauthorized', save=True)
last_log = project.logs[-1]
assert_equal(last_log.action, 'box_node_deauthorized')
class TestBoxAddonFolder(BoxAddonTestCase):
@mock.patch.object(BoxNodeSettings, 'fetch_folder_name', lambda self: 'foo')
def test_works(self):
folder = utils.box_addon_folder(
self.node_settings, Auth(self.user))
assert_true(isinstance(folder, list))
assert_true(isinstance(folder[0], dict))
def test_returns_none_unconfigured(self):
self.node_settings.folder_id = None
assert_is(utils.box_addon_folder(
self.node_settings, Auth(self.user)), None)
|
britcey/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_snmp_location.py
|
55
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_snmp_location
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages SNMP location information.
description:
- Manages SNMP location configuration.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
options:
location:
description:
- Location information.
required: true
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure snmp location is configured
- nxos_snmp_location:
location: Test
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
# ensure snmp location is not configured
- nxos_snmp_location:
location: Test
state: absent
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"location": "New_Test"}
existing:
description: k/v pairs of existing snmp location
returned: always
type: dict
sample: {"location": "Test"}
end_state:
description: k/v pairs of location info after module execution
returned: always
type: dict
sample: {"location": "New_Test"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["snmp-server location New_Test"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
import re
def execute_show_command(command, module, command_type='cli_show'):
if module.params['transport'] == 'cli':
if 'show run' not in command:
command += ' | json'
cmds = [command]
body = run_commands(module, cmds)
elif module.params['transport'] == 'nxapi':
cmds = [command]
body = run_commands(module, cmds)
return body
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = str(value)
else:
new_dict[new_key] = value
return new_dict
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_snmp_location(module):
location = {}
    location_regex = r'.*snmp-server\slocation\s(?P<location>\S+).*'
command = 'show run snmp'
body = execute_show_command(command, module, command_type='cli_show_ascii')
try:
match_location = re.match(location_regex, body[0], re.DOTALL)
group_location = match_location.groupdict()
location['location'] = group_location["location"]
except (AttributeError, TypeError):
location = {}
return location
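# Illustrative parse (hypothetical device output): for a body[0] containing
# 'snmp-server location Lab-3' the regex above yields {'location': 'Lab-3'};
# if no location line is present, the except branch returns {}.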
def main():
argument_spec = dict(
location=dict(required=True, type='str'),
state=dict(choices=['absent', 'present'],
default='present')
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
location = module.params['location']
state = module.params['state']
existing = get_snmp_location(module)
changed = False
commands = []
proposed = dict(location=location)
end_state = existing
if state == 'absent':
if existing and existing['location'] == location:
commands.append('no snmp-server location')
elif state == 'present':
if not existing or existing['location'] != location:
commands.append('snmp-server location {0}'.format(location))
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
changed = True
load_config(module, cmds)
end_state = get_snmp_location(module)
if 'configure' in cmds:
cmds.pop(0)
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['end_state'] = end_state
results['updates'] = cmds
results['changed'] = changed
results['warnings'] = warnings
module.exit_json(**results)
if __name__ == "__main__":
main()
|
soltanmm-google/grpc
|
refs/heads/master
|
src/python/grpcio/grpc/framework/foundation/stream_util.py
|
29
|
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Helpful utilities related to the stream module."""
import logging
import threading
from grpc.framework.foundation import stream
_NO_VALUE = object()
class TransformingConsumer(stream.Consumer):
"""A stream.Consumer that passes a transformation of its input to another."""
def __init__(self, transformation, downstream):
self._transformation = transformation
self._downstream = downstream
def consume(self, value):
self._downstream.consume(self._transformation(value))
def terminate(self):
self._downstream.terminate()
def consume_and_terminate(self, value):
self._downstream.consume_and_terminate(self._transformation(value))
class IterableConsumer(stream.Consumer):
"""A Consumer that when iterated over emits the values it has consumed."""
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._active = True
def consume(self, stock_reply):
with self._condition:
if self._active:
self._values.append(stock_reply)
self._condition.notify()
def terminate(self):
with self._condition:
self._active = False
self._condition.notify()
def consume_and_terminate(self, stock_reply):
with self._condition:
if self._active:
self._values.append(stock_reply)
self._active = False
self._condition.notify()
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while self._active and not self._values:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
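# Minimal usage sketch (illustrative; not part of the public API): values fed
# to an IterableConsumer are yielded back, in order, when it is iterated.
def _iterable_consumer_example():
  consumer = IterableConsumer()
  consumer.consume('first')
  consumer.consume_and_terminate('second')
  assert list(consumer) == ['first', 'second']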
class ThreadSwitchingConsumer(stream.Consumer):
"""A Consumer decorator that affords serialization and asynchrony."""
def __init__(self, sink, pool):
self._lock = threading.Lock()
self._sink = sink
self._pool = pool
# True if self._spin has been submitted to the pool to be called once and
# that call has not yet returned, False otherwise.
self._spinning = False
self._values = []
self._active = True
def _spin(self, sink, value, terminate):
while True:
try:
if value is _NO_VALUE:
sink.terminate()
elif terminate:
sink.consume_and_terminate(value)
else:
sink.consume(value)
except Exception as e: # pylint:disable=broad-except
logging.exception(e)
with self._lock:
if terminate:
self._spinning = False
return
elif self._values:
value = self._values.pop(0)
terminate = not self._values and not self._active
elif not self._active:
value = _NO_VALUE
terminate = True
else:
self._spinning = False
return
def consume(self, value):
with self._lock:
if self._active:
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, False)
self._spinning = True
def terminate(self):
with self._lock:
if self._active:
self._active = False
if not self._spinning:
self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
self._spinning = True
def consume_and_terminate(self, value):
with self._lock:
if self._active:
self._active = False
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, True)
self._spinning = True
|
bootphon/abkhazia
|
refs/heads/master
|
abkhazia/utils/old/kaldi2features.py
|
1
|
# Copyright 2016 Thomas Schatz, Xuan-Nga Cao, Mathieu Bernard
#
# This file is part of abkhazia: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abkhazia is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with abkhazia. If not, see <http://www.gnu.org/licenses/>.
"""Export transcriptions and lattice posteriors to the h5features format"""
import codecs
import h5features
import numpy as np
import re
import abkhazia.core.kaldi2abkhazia as k2a
def get_phone_order(phonemap):
"""
Output an easily reproducible phone order from a phonemap
obtained by reading a phones.txt file with k2a.read_kaldi_phonemap
"""
# remove kaldi disambiguation symbols and <eps> from the phonemap,
# as those shouldn't be in the phone_order
codes = phonemap.keys()
for code in codes:
if re.match(u'#[0-9]+$|<eps>$', phonemap[code]):
del phonemap[code]
# order the phones in an easily reproducible way unique is needed
# since there can be several variants of each phone in the map
phone_order = list(np.unique(phonemap.values()))
phone_order.sort() # to guarantee reproducible ordering
return phone_order
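# Illustrative example: disambiguation symbols and <eps> are dropped, variants
# collapse via np.unique, and the result is sorted for reproducibility.
def _get_phone_order_example():
    phonemap = {'0': '<eps>', '1': 'a_B', '2': 'a_B', '3': 'b', '4': '#1'}
    assert get_phone_order(phonemap) == ['a_B', 'b']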
def transcription2features(phones_file, tra_file, out_file,
word_position_dependent=True):
"""Kaldi 1-best aligned transcription to h5features
format in h5features is frame by frame, as this allows both
frame-to-frame DTW distance and edit distance to be used (for
edit_distance the first step would be extracting the phone-level
sequence from the frame-level sequence, discarding segments that
have too few frames)
This avoids problems with long phones if coding only the
centerpoint of a phone (a long time interval within the phone, but
that does not include the centerpoint will have empty
representation). Allowing representations indexed by time
intervals instead of time points could be more elegant when one
wants to use edit_distance but this would require some
(substantial but not huge) recoding in h5features and
ABXpy.distances. One would need to check that the time-intervals
have no overlap and are consecutive and one would need to adapt
the features reading to provide the sequence of consecutive
feature vectors with their durations and for the first and last
their degree of overlap with the required time segment.
"""
phonemap = k2a.read_kaldi_phonemap(phones_file, word_position_dependent)
# get order used to encode the phones as integer in the features files
phone_order = get_phone_order(phonemap)
utt_ids = []
times = []
features = []
current_utt = None
utt_times = []
utt_features = []
i = 1
for utt_id, start, stop, phone in k2a.read_kaldi_alignment(
phonemap, tra_file):
print i
i = i+1
if current_utt is None:
current_utt = utt_id
if utt_id != current_utt:
utt_ids.append(current_utt)
times.append(np.array(utt_times))
nb_phones = len(utt_features)
# not sure how h5features handles 1-d arrays, so reshaping
features.append(np.array(utt_features).reshape((nb_phones, 1)))
current_utt = utt_id
utt_times = []
utt_features = []
else:
# expanding to frame by frame using ad hoc 10ms window spacing
# since start and stop are spaced by a multiple of 10ms due to
# standard window spacing used by kaldi
nframes = (stop-start)/0.01
assert np.abs(nframes-np.round(nframes)) < 1e-7 # ad hoc tolerance
nframes = int(np.round(nframes))
utt_features = utt_features + [phone_order.index(phone)]*nframes
frame_times = start + 0.01*np.arange(nframes)
utt_times = utt_times + list(frame_times)
h5features.write(out_file, 'features', utt_ids, times, features)
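# Worked example of the frame expansion above: a phone aligned from 0.10s to
# 0.13s spans nframes = (0.13 - 0.10) / 0.01 = 3 frames, with frame times
# 0.10, 0.11 and 0.12, each carrying that phone's index in phone_order.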
def lattice2features(phones_file, post_file, out_file,
word_position_dependent=True):
"""
kaldi lattice posteriors to h5features
this loads everything into memory, but it would be easy to write
an incremental version if this poses a problem
"""
phonemap = k2a.read_kaldi_phonemap(phones_file, word_position_dependent)
# get order in which phones will be represented in the dimensions
# of the posteriorgram
phone_order = get_phone_order(phonemap)
d = len(phone_order) # posteriorgram dimension
# below is basically a parser for kaldi matrix format for each line
# parse input text file
with codecs.open(post_file, mode='r', encoding='UTF-8') as inp:
        # readlines() rather than xreadlines(): len(lines) is needed below,
        # and xreadlines() returns an iterator with no length
        lines = inp.readlines()
# here would be nice to use sparse feature format (need to have it
# in h5features though) might want to begin by using sparse numpy
# matrix format
features = []
utt_ids = []
times = []
for index, line in enumerate(lines):
print("Processing line {0} / {1}".format(index+1, len(lines)))
tokens = line.strip().split(u" ")
utt_id, tokens = tokens[0], tokens[1:]
frames = []
inside = False
for token in tokens:
if token == u"[":
assert not(inside)
inside = True
frame = []
elif token == u"]":
assert inside
inside = False
frames.append(frame)
else:
assert inside
frame.append(token)
utt_features = np.zeros(shape=(len(frames), d), dtype=np.float64)
for f, frame in enumerate(frames):
assert len(frame) % 2 == 0
probas = [float(p) for p in frame[1::2]]
phones = [phonemap[code] for code in frame[::2]]
# optimisation 1 would be mapping directly a given code to
# a given posterior dim
for phone, proba in zip(phones, probas):
i = phone_order.index(phone)
# add to previous proba since different variants of a
# same phone will map to the same dimension i of the
# posteriorgram
utt_features[f, i] = utt_features[f, i] + proba
# normalize posteriorgrams to correct for rounding or
# thresholding errors by rescaling globally
total_proba = np.sum(utt_features, axis=1)
if np.max(np.abs(total_proba-1)) >= 1e-5: # ad hoc numerical tolerance
raise IOError(
"In utterance {0}, frame {1}: posteriorgram does not sum "
"to one, difference is {2}: ".format(
utt_id, f, np.max(np.abs(total_proba-1))))
utt_features = utt_features/np.tile(total_proba, (d, 1)).T
features.append(utt_features)
utt_ids.append(utt_id)
# as in kaldi2abkhazia, this is ad hoc and has not been
# checked formally
times.append(0.0125 + 0.01*np.arange(len(frames)))
h5features.write(out_file, 'features', utt_ids, times, features)
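# Input format parsed by lattice2features above (one utterance per line,
# illustrative):
#
#   utt_id [ code proba code proba ... ] [ code proba ... ] ...
#
# where each [ ... ] group is one frame; frame[::2] are phone codes from the
# phonemap and frame[1::2] their posterior probabilities.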
# TODO this function can be removed as utils.kaldi.{ark, scp}_to_h5f
# replace it
#
# def features2features(in_file, out_file):
# """
# kaldi input features (mfcc, etc.) to h5features
# this loads everything into memory, but it would be easy to write
# an incremental version if this poses a problem
# Input features must be in a single archive text format, that can be
# obtained using the 'copy-feats' kaldi utility
# """
# # below is basically a parser for kaldi vector format for each line
# # parse input text file
# outside_utt = True
# features = []
# utt_ids = []
# times = []
# with codecs.open(in_file, mode='r', encoding='UTF-8') as inp:
# for index, line in enumerate(inp):
# print("Processing line {0}".format(index+1))
# # / {1}".format(index+1, len(lines)))
#
# tokens = line.strip().split(u" ")
# if outside_utt:
# assert (len(tokens) == 3 and
# tokens[1] == u"" and
# tokens[2] == u"[")
#
# utt_id = tokens[0]
# outside_utt = False
# frames = []
# else:
# if tokens[-1] == u"]":
# # end of utterance
# outside_utt = True
# tokens = tokens[:-1]
# frames.append(np.array(tokens, dtype=np.float))
# if outside_utt:
# # end of utterance, continued
# features.append(np.row_stack(frames))
#
# # as in kaldi2abkhazia, this is ad hoc and has not
# # been checked formally
# times.append(0.0125 + 0.01*np.arange(len(frames)))
# utt_ids.append(utt_id)
# h5features.write(out_file, 'features', utt_ids, times, features)
|
xiandiancloud/edx-platform-Y
|
refs/heads/master
|
lms/djangoapps/courseware/tests/test_about.py
|
15
|
"""
Test the about xblock
"""
import mock
from mock import patch
import pytz
import datetime
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from .helpers import LoginEnrollmentTestCase
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from student.tests.factories import UserFactory, CourseEnrollmentAllowedFactory
# HTML for registration button
REG_STR = "<form id=\"class_enroll_form\" method=\"post\" data-remote=\"true\" action=\"/change_enrollment\">"
SHIB_ERROR_STR = "The currently logged-in user account does not have permission to enroll in this course."
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class AboutTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
def setUp(self):
self.course = CourseFactory.create()
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def test_logged_in(self):
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
def test_anonymous_user(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
# Check that registration button is present
self.assertIn(REG_STR, resp.content)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class AboutTestCaseXML(LoginEnrollmentTestCase, ModuleStoreTestCase):
# The following XML test course (which lives at common/test/data/2014)
# is closed; we're testing that an about page still appears when
# the course is already closed
xml_course_id = SlashSeparatedCourseKey('edX', 'detached_pages', '2014')
# this text appears in that course's about page
# common/test/data/2014/about/overview.html
xml_data = "about page 463139"
@mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_logged_in_xml(self):
self.setup_user()
url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(self.xml_data, resp.content)
@mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_anonymous_user_xml(self):
url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn(self.xml_data, resp.content)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class AboutWithCappedEnrollmentsTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
"""
This test case will check the About page when a course has a capped enrollment
"""
def setUp(self):
"""
Set up the tests
"""
self.course = CourseFactory.create(metadata={"max_student_enrollments_allowed": 1})
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def test_enrollment_cap(self):
"""
This test will make sure that enrollment caps are enforced
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn('<a href="#" class="register">', resp.content)
self.enroll(self.course, verify=True)
# create a new account since the first account is already registered for the course
self.email = 'foo_second@test.com'
self.password = 'bar'
self.username = 'test_second'
self.create_account(self.username,
self.email, self.password)
self.activate_user(self.email)
self.login(self.email, self.password)
# Get the about page again and make sure that the page says that the course is full
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Course is full", resp.content)
# Try to enroll as well
result = self.enroll(self.course)
self.assertFalse(result)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class AboutWithInvitationOnly(ModuleStoreTestCase):
"""
This test case will check the About page when a course is invitation only.
"""
def setUp(self):
self.course = CourseFactory.create(metadata={"invitation_only": True})
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
display_name="overview"
)
def test_invitation_only(self):
"""
Test for user not logged in, invitation only course.
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment in this course is by invitation only", resp.content)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
def test_invitation_only_but_allowed(self):
"""
Test for user logged in and allowed to enroll in invitation only course.
"""
# Course is invitation only, student is allowed to enroll and logged in
user = UserFactory.create(username='allowed_student', password='test', email='allowed_student@test.com')
CourseEnrollmentAllowedFactory(email=user.email, course_id=self.course.id)
self.client.login(username=user.username, password='test')
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Register for 999", resp.content)
# Check that registration button is present
self.assertIn(REG_STR, resp.content)
@patch.dict(settings.FEATURES, {'RESTRICT_ENROLL_BY_REG_METHOD': True})
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class AboutTestCaseShibCourse(LoginEnrollmentTestCase, ModuleStoreTestCase):
"""
Test cases covering about page behavior for courses that use shib enrollment domain ("shib courses")
"""
def setUp(self):
self.course = CourseFactory.create(enrollment_domain="shib:https://idp.stanford.edu/")
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
data="OOGIE BLOOGIE", display_name="overview"
)
def test_logged_in_shib_course(self):
"""
For shib courses, logged in users will see the register button, but get rejected once they click there
"""
self.setup_user()
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
self.assertIn("Register for 999", resp.content)
self.assertIn(SHIB_ERROR_STR, resp.content)
self.assertIn(REG_STR, resp.content)
def test_anonymous_user_shib_course(self):
"""
For shib courses, anonymous users will also see the register button
"""
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("OOGIE BLOOGIE", resp.content)
self.assertIn("Register for 999", resp.content)
self.assertIn(SHIB_ERROR_STR, resp.content)
self.assertIn(REG_STR, resp.content)
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class AboutWithClosedEnrollment(ModuleStoreTestCase):
"""
This test case will check the About page for a course that has enrollment start/end
set but it is currently outside of that period.
"""
def setUp(self):
super(AboutWithClosedEnrollment, self).setUp()
self.course = CourseFactory.create(metadata={"invitation_only": False})
# Setup enrollment period to be in future
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
nextday = tomorrow + datetime.timedelta(days=1)
self.course.enrollment_start = tomorrow
self.course.enrollment_end = nextday
self.course = self.update_course(self.course, self.user.id)
self.about = ItemFactory.create(
category="about", parent_location=self.course.location,
display_name="overview"
)
    def test_closed_enrollment(self):
url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertIn("Enrollment is Closed", resp.content)
# Check that registration button is not present
self.assertNotIn(REG_STR, resp.content)
|
alexdglover/shill-isms
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/chardistribution.py
|
2754
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3
class CharDistributionAnalysis:
def __init__(self):
# Mapping table to get frequency order from char order (get from
# GetOrder())
self._mCharToFreqOrder = None
self._mTableSize = None # Size of above table
# This is a constant value which varies from language to language,
# used in calculating confidence. See
# http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
# for further detail.
self._mTypicalDistributionRatio = None
self.reset()
def reset(self):
"""reset analyser, clear any state"""
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
self._mTotalChars = 0 # Total characters encountered
# The number of characters whose frequency order is less than 512
self._mFreqChars = 0
def feed(self, aBuf, aCharLen):
"""feed a character with known length"""
if aCharLen == 2:
# we only care about 2-bytes character in our distribution analysis
order = self.get_order(aBuf)
else:
order = -1
if order >= 0:
self._mTotalChars += 1
# order is valid
if order < self._mTableSize:
if 512 > self._mCharToFreqOrder[order]:
self._mFreqChars += 1
def get_confidence(self):
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
r = (self._mFreqChars / ((self._mTotalChars - self._mFreqChars)
* self._mTypicalDistributionRatio))
if r < SURE_YES:
return r
# normalize confidence (we don't want to be 100% sure)
return SURE_YES
def got_enough_data(self):
# It is not necessary to receive all data to draw conclusion.
# For charset detection, certain amount of data is enough
return self._mTotalChars > ENOUGH_DATA_THRESHOLD
def get_order(self, aBuf):
# We do not handle characters based on the original encoding string,
# but convert this encoding string to a number, here called order.
# This allows multiple encodings of a language to share one frequency
# table.
return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCTWCharToFreqOrder
self._mTableSize = EUCTW_TABLE_SIZE
self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
        # for euc-TW encoding, we are interested in:
# first byte range: 0xc4 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xC4:
return 94 * (first_char - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
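    # Example (illustrative): the EUC-TW byte pair 0xC5 0xA2 yields
    # order 94 * (0xC5 - 0xC4) + (0xA2 - 0xA1) = 95.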
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = EUCKRCharToFreqOrder
self._mTableSize = EUCKR_TABLE_SIZE
self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
        # for euc-KR encoding, we are interested in:
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = wrap_ord(aBuf[0])
if first_char >= 0xB0:
return 94 * (first_char - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
else:
return -1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = GB2312CharToFreqOrder
self._mTableSize = GB2312_TABLE_SIZE
self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
        # for GB2312 encoding, we are interested in:
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0xB0) and (second_char >= 0xA1):
return 94 * (first_char - 0xB0) + second_char - 0xA1
else:
return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = Big5CharToFreqOrder
self._mTableSize = BIG5_TABLE_SIZE
self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
        # for big5 encoding, we are interested in:
# first byte range: 0xa4 -- 0xfe
# second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if first_char >= 0xA4:
if second_char >= 0xA1:
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
else:
return 157 * (first_char - 0xA4) + second_char - 0x40
else:
return -1
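    # Example (illustrative): the Big5 pair 0xA5 0x40 yields order
    # 157 * 1 + (0x40 - 0x40) = 157, while 0xA5 0xA1 yields
    # 157 * 1 + 0 + 63 = 220; the +63 stitches the two second-byte ranges
    # together, since 0x40 -- 0x7E covers offsets 0 -- 62.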
class SJISDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
        # for sjis encoding, we are interested in:
        # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
        # second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
if (first_char >= 0x81) and (first_char <= 0x9F):
order = 188 * (first_char - 0x81)
elif (first_char >= 0xE0) and (first_char <= 0xEF):
order = 188 * (first_char - 0xE0 + 31)
else:
return -1
order = order + second_char - 0x40
if second_char > 0x7F:
order = -1
return order
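    # Example (illustrative): lead bytes 0x81 -- 0x9F give rows 0 -- 30 and
    # 0xE0 continues at row 31, so the pair 0xE0 0x40 yields order
    # 188 * 31 + 0 = 5828.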
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
CharDistributionAnalysis.__init__(self)
self._mCharToFreqOrder = JISCharToFreqOrder
self._mTableSize = JIS_TABLE_SIZE
self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, aBuf):
        # for euc-JP encoding, we are interested in:
# first byte range: 0xa0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
char = wrap_ord(aBuf[0])
if char >= 0xA0:
return 94 * (char - 0xA1) + wrap_ord(aBuf[1]) - 0xa1
else:
return -1
|
niloynibhochaudhury/ud858
|
refs/heads/master
|
Lesson_3/00_Conference_Central/conference.py
|
34
|
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.ext import ndb
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import TeeShirtSize
from settings import WEB_CLIENT_ID
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api( name='conference',
version='v1',
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
def _getProfileFromUser(self):
"""Return user Profile from datastore, creating new one if non-existent."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# TODO 1
# step 1. copy utils.py from additions folder to this folder
# and import getUserId from it
# step 2. get user id by calling getUserId(user)
# step 3. create a new key of kind Profile from the id
# TODO 3
# get the entity from datastore by using get() on the key
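        # A possible completion of the TODOs above (sketch; assumes the
        # course-provided getUserId(user) helper returns a stable string id):
        # user_id = getUserId(user)
        # p_key = ndb.Key(Profile, user_id)
        # profile = p_key.get()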
profile = None
if not profile:
profile = Profile(
key = None, # TODO 1 step 4. replace with the key from step 3
displayName = user.nickname(),
mainEmail= user.email(),
teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
)
# TODO 2
# save the profile to datastore
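        # e.g. (sketch): profile.put()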
return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
        # if saveProfile(), process user-modifiable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
# TODO 4
# put the modified profile to datastore
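        # e.g. (sketch): prof.put()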
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# registers API
api = endpoints.api_server([ConferenceApi])
|
rcbops/melange-buildpackage
|
refs/heads/master
|
melange/db/sqlalchemy/migrate_repo/__init__.py
|
73
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
mollstam/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Lib/encodings/utf_32_be.py
|
703
|
"""
Python 'utf-32-be' Codec
"""
import codecs
### Codec APIs
encode = codecs.utf_32_be_encode
def decode(input, errors='strict'):
return codecs.utf_32_be_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.utf_32_be_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
_buffer_decode = codecs.utf_32_be_decode
class StreamWriter(codecs.StreamWriter):
encode = codecs.utf_32_be_encode
class StreamReader(codecs.StreamReader):
decode = codecs.utf_32_be_decode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='utf-32-be',
encode=encode,
decode=decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
|
pk400/catering
|
refs/heads/master
|
myvenv/lib/python3.4/site-packages/pip/status_codes.py
|
408
|
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
|
jazkarta/edx-platform-for-isc
|
refs/heads/backport-auto-certification
|
common/lib/sandbox-packages/verifiers/tests_draganddrop.py
|
173
|
import unittest
import draganddrop
from .draganddrop import PositionsCompare
import json
class Test_PositionsCompare(unittest.TestCase):
""" describe"""
def test_nested_list_and_list1(self):
self.assertEqual(PositionsCompare([[1, 2], 40]), PositionsCompare([1, 3]))
def test_nested_list_and_list2(self):
self.assertNotEqual(PositionsCompare([1, 12]), PositionsCompare([1, 1]))
def test_list_and_list1(self):
self.assertNotEqual(PositionsCompare([[1, 2], 12]), PositionsCompare([1, 15]))
def test_list_and_list2(self):
self.assertEqual(PositionsCompare([1, 11]), PositionsCompare([1, 1]))
def test_numerical_list_and_string_list(self):
self.assertNotEqual(PositionsCompare([1, 2]), PositionsCompare(["1"]))
def test_string_and_string_list1(self):
self.assertEqual(PositionsCompare("1"), PositionsCompare(["1"]))
def test_string_and_string_list2(self):
self.assertEqual(PositionsCompare("abc"), PositionsCompare("abc"))
def test_string_and_string_list3(self):
self.assertNotEqual(PositionsCompare("abd"), PositionsCompare("abe"))
def test_float_and_string(self):
self.assertNotEqual(PositionsCompare([3.5, 5.7]), PositionsCompare(["1"]))
def test_floats_and_ints(self):
self.assertEqual(PositionsCompare([3.5, 4.5]), PositionsCompare([5, 7]))
class Test_DragAndDrop_Grade(unittest.TestCase):
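    # Rule semantics exercised below (as suggested by these tests; a sketch,
    # not authoritative documentation of the grader):
    #   'exact'           - placements must match the targets pairwise, in order
    #   'anyof'           - every user placement must be one of the targets
    #   'unordered_equal' - targets must be covered exactly, order ignored
    #   a '+number' suffix additionally makes the draggable counts significant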
def test_targets_are_draggable_1(self):
user_input = json.dumps([
{'p': 'p_l'},
{'up': {'first': {'p': 'p_l'}}}
])
correct_answer = [
{
'draggables': ['p'],
'targets': ['p_l', 'p_r'],
'rule': 'anyof'
},
{
'draggables': ['up'],
'targets': [
'p_l[p][first]'
],
'rule': 'anyof'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_targets_are_draggable_2(self):
user_input = json.dumps([
{'p': 'p_l'},
{'p': 'p_r'},
{'s': 's_l'},
{'s': 's_r'},
{'up': {'1': {'p': 'p_l'}}},
{'up': {'3': {'p': 'p_l'}}},
{'up': {'1': {'p': 'p_r'}}},
{'up': {'3': {'p': 'p_r'}}},
{'up_and_down': {'1': {'s': 's_l'}}},
{'up_and_down': {'1': {'s': 's_r'}}}
])
correct_answer = [
{
'draggables': ['p'],
'targets': ['p_l', 'p_r'],
'rule': 'unordered_equal'
},
{
'draggables': ['s'],
'targets': ['s_l', 's_r'],
'rule': 'unordered_equal'
},
{
'draggables': ['up_and_down'],
'targets': ['s_l[s][1]', 's_r[s][1]'],
'rule': 'unordered_equal'
},
{
'draggables': ['up'],
'targets': [
'p_l[p][1]',
'p_l[p][3]',
'p_r[p][1]',
'p_r[p][3]',
],
'rule': 'unordered_equal'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_targets_are_draggable_2_manual_parsing(self):
user_input = json.dumps([
{'up': 'p_l[p][1]'},
{'p': 'p_l'},
{'up': 'p_l[p][3]'},
{'up': 'p_r[p][1]'},
{'p': 'p_r'},
{'up': 'p_r[p][3]'},
{'up_and_down': 's_l[s][1]'},
{'s': 's_l'},
{'up_and_down': 's_r[s][1]'},
{'s': 's_r'}
])
correct_answer = [
{
'draggables': ['p'],
'targets': ['p_l', 'p_r'],
'rule': 'unordered_equal'
},
{
'draggables': ['s'],
'targets': ['s_l', 's_r'],
'rule': 'unordered_equal'
},
{
'draggables': ['up_and_down'],
'targets': ['s_l[s][1]', 's_r[s][1]'],
'rule': 'unordered_equal'
},
{
'draggables': ['up'],
'targets': [
'p_l[p][1]',
'p_l[p][3]',
'p_r[p][1]',
'p_r[p][3]',
],
'rule': 'unordered_equal'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_targets_are_draggable_3_nested(self):
user_input = json.dumps([
{'molecule': 'left_side_tagret'},
{'molecule': 'right_side_tagret'},
{'p': {'p_target': {'molecule': 'left_side_tagret'}}},
{'p': {'p_target': {'molecule': 'right_side_tagret'}}},
{'s': {'s_target': {'molecule': 'left_side_tagret'}}},
{'s': {'s_target': {'molecule': 'right_side_tagret'}}},
{'up': {'1': {'p': {'p_target': {'molecule': 'left_side_tagret'}}}}},
{'up': {'3': {'p': {'p_target': {'molecule': 'left_side_tagret'}}}}},
{'up': {'1': {'p': {'p_target': {'molecule': 'right_side_tagret'}}}}},
{'up': {'3': {'p': {'p_target': {'molecule': 'right_side_tagret'}}}}},
{'up_and_down': {'1': {'s': {'s_target': {'molecule': 'left_side_tagret'}}}}},
{'up_and_down': {'1': {'s': {'s_target': {'molecule': 'right_side_tagret'}}}}}
])
correct_answer = [
{
'draggables': ['molecule'],
'targets': ['left_side_tagret', 'right_side_tagret'],
'rule': 'unordered_equal'
},
{
'draggables': ['p'],
'targets': [
'left_side_tagret[molecule][p_target]',
'right_side_tagret[molecule][p_target]',
],
'rule': 'unordered_equal'
},
{
'draggables': ['s'],
'targets': [
'left_side_tagret[molecule][s_target]',
'right_side_tagret[molecule][s_target]',
],
'rule': 'unordered_equal'
},
{
'draggables': ['up_and_down'],
'targets': [
'left_side_tagret[molecule][s_target][s][1]',
'right_side_tagret[molecule][s_target][s][1]',
],
'rule': 'unordered_equal'
},
{
'draggables': ['up'],
'targets': [
'left_side_tagret[molecule][p_target][p][1]',
'left_side_tagret[molecule][p_target][p][3]',
'right_side_tagret[molecule][p_target][p][1]',
'right_side_tagret[molecule][p_target][p][3]',
],
'rule': 'unordered_equal'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_targets_are_draggable_4_real_example(self):
user_input = json.dumps([
{'single_draggable': 's_l'},
{'single_draggable': 's_r'},
{'single_draggable': 'p_sigma'},
{'single_draggable': 'p_sigma*'},
{'single_draggable': 's_sigma'},
{'single_draggable': 's_sigma*'},
{'double_draggable': 'p_pi*'},
{'double_draggable': 'p_pi'},
{'triple_draggable': 'p_l'},
{'triple_draggable': 'p_r'},
{'up': {'1': {'triple_draggable': 'p_l'}}},
{'up': {'2': {'triple_draggable': 'p_l'}}},
{'up': {'2': {'triple_draggable': 'p_r'}}},
{'up': {'3': {'triple_draggable': 'p_r'}}},
{'up_and_down': {'1': {'single_draggable': 's_l'}}},
{'up_and_down': {'1': {'single_draggable': 's_r'}}},
{'up_and_down': {'1': {'single_draggable': 's_sigma'}}},
{'up_and_down': {'1': {'single_draggable': 's_sigma*'}}},
{'up_and_down': {'1': {'double_draggable': 'p_pi'}}},
{'up_and_down': {'2': {'double_draggable': 'p_pi'}}}
])
# 10 targets:
# s_l, s_r, p_l, p_r, s_sigma, s_sigma*, p_pi, p_sigma, p_pi*, p_sigma*
#
# 3 draggable objects, which have targets (internal target ids - 1, 2, 3):
# single_draggable, double_draggable, triple_draggable
#
# 2 draggable objects:
# up, up_and_down
correct_answer = [
{
'draggables': ['triple_draggable'],
'targets': ['p_l', 'p_r'],
'rule': 'unordered_equal'
},
{
'draggables': ['double_draggable'],
'targets': ['p_pi', 'p_pi*'],
'rule': 'unordered_equal'
},
{
'draggables': ['single_draggable'],
'targets': ['s_l', 's_r', 's_sigma', 's_sigma*', 'p_sigma', 'p_sigma*'],
'rule': 'unordered_equal'
},
{
'draggables': ['up'],
'targets': [
'p_l[triple_draggable][1]',
'p_l[triple_draggable][2]',
'p_r[triple_draggable][2]',
'p_r[triple_draggable][3]',
],
'rule': 'unordered_equal'
},
{
'draggables': ['up_and_down'],
'targets': [
's_l[single_draggable][1]',
's_r[single_draggable][1]',
's_sigma[single_draggable][1]',
's_sigma*[single_draggable][1]',
'p_pi[double_draggable][1]',
'p_pi[double_draggable][2]',
],
'rule': 'unordered_equal'
},
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_targets_true(self):
user_input = '[{"1": "t1"}, \
{"name_with_icon": "t2"}]'
correct_answer = {'1': 't1', 'name_with_icon': 't2'}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_expect_no_actions_wrong(self):
user_input = '[{"1": "t1"}, \
{"name_with_icon": "t2"}]'
correct_answer = []
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_expect_no_actions_right(self):
user_input = '[]'
correct_answer = []
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_targets_false(self):
user_input = '[{"1": "t1"}, \
{"name_with_icon": "t2"}]'
correct_answer = {'1': 't3', 'name_with_icon': 't2'}
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_multiple_images_per_target_true(self):
user_input = '[{"1": "t1"}, {"name_with_icon": "t2"}, \
{"2": "t1"}]'
correct_answer = {'1': 't1', 'name_with_icon': 't2', '2': 't1'}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_multiple_images_per_target_false(self):
user_input = '[{"1": "t1"}, {"name_with_icon": "t2"}, \
{"2": "t1"}]'
correct_answer = {'1': 't2', 'name_with_icon': 't2', '2': 't1'}
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_targets_and_positions(self):
user_input = '[{"1": [10,10]}, \
{"name_with_icon": [[10,10],4]}]'
correct_answer = {'1': [10, 10], 'name_with_icon': [[10, 10], 4]}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_position_and_targets(self):
user_input = '[{"1": "t1"}, {"name_with_icon": "t2"}]'
correct_answer = {'1': 't1', 'name_with_icon': 't2'}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_positions_exact(self):
user_input = '[{"1": [10, 10]}, {"name_with_icon": [20, 20]}]'
correct_answer = {'1': [10, 10], 'name_with_icon': [20, 20]}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_positions_false(self):
user_input = '[{"1": [10, 10]}, {"name_with_icon": [20, 20]}]'
correct_answer = {'1': [25, 25], 'name_with_icon': [20, 20]}
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_positions_true_in_radius(self):
user_input = '[{"1": [10, 10]}, {"name_with_icon": [20, 20]}]'
correct_answer = {'1': [14, 14], 'name_with_icon': [20, 20]}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_positions_true_in_manual_radius(self):
user_input = '[{"1": [10, 10]}, {"name_with_icon": [20, 20]}]'
correct_answer = {'1': [[40, 10], 30], 'name_with_icon': [20, 20]}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_positions_false_in_manual_radius(self):
user_input = '[{"1": [10, 10]}, {"name_with_icon": [20, 20]}]'
correct_answer = {'1': [[40, 10], 29], 'name_with_icon': [20, 20]}
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_correct_answer_not_has_key_from_user_answer(self):
user_input = '[{"1": "t1"}, {"name_with_icon": "t2"}]'
correct_answer = {'3': 't3', 'name_with_icon': 't2'}
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_anywhere(self):
"""Draggables can be places anywhere on base image.
Place grass in the middle of the image and ant in the
right upper corner."""
user_input = '[{"ant":[610.5,57.449951171875]},\
{"grass":[322.5,199.449951171875]}]'
correct_answer = {'grass': [[300, 200], 200], 'ant': [[500, 0], 200]}
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_lcao_correct(self):
"""Describe carbon molecule in LCAO-MO"""
user_input = '[{"1":"s_left"}, \
{"5":"s_right"},{"4":"s_sigma"},{"6":"s_sigma_star"},{"7":"p_left_1"}, \
{"8":"p_left_2"},{"10":"p_right_1"},{"9":"p_right_2"}, \
{"2":"p_pi_1"},{"3":"p_pi_2"},{"11":"s_sigma_name"}, \
{"13":"s_sigma_star_name"},{"15":"p_pi_name"},{"16":"p_pi_star_name"}, \
{"12":"p_sigma_name"},{"14":"p_sigma_star_name"}]'
correct_answer = [{
'draggables': ['1', '2', '3', '4', '5', '6'],
'targets': [
's_left', 's_right', 's_sigma', 's_sigma_star', 'p_pi_1', 'p_pi_2'
],
'rule': 'anyof'
}, {
'draggables': ['7', '8', '9', '10'],
'targets': ['p_left_1', 'p_left_2', 'p_right_1', 'p_right_2'],
'rule': 'anyof'
}, {
'draggables': ['11', '12'],
'targets': ['s_sigma_name', 'p_sigma_name'],
'rule': 'anyof'
}, {
'draggables': ['13', '14'],
'targets': ['s_sigma_star_name', 'p_sigma_star_name'],
'rule': 'anyof'
}, {
'draggables': ['15'],
'targets': ['p_pi_name'],
'rule': 'anyof'
}, {
'draggables': ['16'],
'targets': ['p_pi_star_name'],
'rule': 'anyof'
}]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_lcao_extra_element_incorrect(self):
"""Describe carbon molecule in LCAO-MO"""
user_input = '[{"1":"s_left"}, \
{"5":"s_right"},{"4":"s_sigma"},{"6":"s_sigma_star"},{"7":"p_left_1"}, \
{"8":"p_left_2"},{"17":"p_left_3"},{"10":"p_right_1"},{"9":"p_right_2"}, \
{"2":"p_pi_1"},{"3":"p_pi_2"},{"11":"s_sigma_name"}, \
{"13":"s_sigma_star_name"},{"15":"p_pi_name"},{"16":"p_pi_star_name"}, \
{"12":"p_sigma_name"},{"14":"p_sigma_star_name"}]'
correct_answer = [{
'draggables': ['1', '2', '3', '4', '5', '6'],
'targets': [
's_left', 's_right', 's_sigma', 's_sigma_star', 'p_pi_1', 'p_pi_2'
],
'rule': 'anyof'
}, {
'draggables': ['7', '8', '9', '10'],
'targets': ['p_left_1', 'p_left_2', 'p_right_1', 'p_right_2'],
'rule': 'anyof'
}, {
'draggables': ['11', '12'],
'targets': ['s_sigma_name', 'p_sigma_name'],
'rule': 'anyof'
}, {
'draggables': ['13', '14'],
'targets': ['s_sigma_star_name', 'p_sigma_star_name'],
'rule': 'anyof'
}, {
'draggables': ['15'],
'targets': ['p_pi_name'],
'rule': 'anyof'
}, {
'draggables': ['16'],
'targets': ['p_pi_star_name'],
'rule': 'anyof'
}]
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_reuse_draggable_no_mupliples(self):
"""Test reusable draggables (no mupltiple draggables per target)"""
user_input = '[{"1":"target1"}, \
{"2":"target2"},{"1":"target3"},{"2":"target4"},{"2":"target5"}, \
{"3":"target6"}]'
correct_answer = [
{
'draggables': ['1'],
'targets': ['target1', 'target3'],
'rule': 'anyof'
},
{
'draggables': ['2'],
'targets': ['target2', 'target4', 'target5'],
'rule': 'anyof'
},
{
'draggables': ['3'],
'targets': ['target6'],
'rule': 'anyof'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_reuse_draggable_with_mupliples(self):
"""Test reusable draggables with mupltiple draggables per target"""
user_input = '[{"1":"target1"}, \
{"2":"target2"},{"1":"target1"},{"2":"target4"},{"2":"target4"}, \
{"3":"target6"}]'
correct_answer = [
{
'draggables': ['1'],
'targets': ['target1', 'target3'],
'rule': 'anyof'
},
{
'draggables': ['2'],
'targets': ['target2', 'target4'],
'rule': 'anyof'
},
{
'draggables': ['3'],
'targets': ['target6'],
'rule': 'anyof'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_reuse_many_draggable_with_mupliples(self):
"""Test reusable draggables with mupltiple draggables per target"""
user_input = '[{"1":"target1"}, \
{"2":"target2"},{"1":"target1"},{"2":"target4"},{"2":"target4"}, \
{"3":"target6"}, {"4": "target3"}, {"5": "target4"}, \
{"5": "target5"}, {"6": "target2"}]'
correct_answer = [
{
'draggables': ['1', '4'],
'targets': ['target1', 'target3'],
'rule': 'anyof'
},
{
'draggables': ['2', '6'],
'targets': ['target2', 'target4'],
'rule': 'anyof'
},
{
'draggables': ['5'],
'targets': ['target4', 'target5'],
'rule': 'anyof'
},
{
'draggables': ['3'],
'targets': ['target6'],
'rule': 'anyof'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_reuse_many_draggable_with_mupliples_wrong(self):
"""Test reusable draggables with mupltiple draggables per target"""
user_input = '[{"1":"target1"}, \
{"2":"target2"},{"1":"target1"}, \
{"2":"target3"}, \
{"2":"target4"}, \
{"3":"target6"}, {"4": "target3"}, {"5": "target4"}, \
{"5": "target5"}, {"6": "target2"}]'
correct_answer = [
{
'draggables': ['1', '4'],
'targets': ['target1', 'target3'],
'rule': 'anyof'
},
{
'draggables': ['2', '6'],
'targets': ['target2', 'target4'],
'rule': 'anyof'
},
{
'draggables': ['5'],
'targets': ['target4', 'target5'],
'rule': 'anyof'
},
{
'draggables': ['3'],
'targets': ['target6'],
'rule': 'anyof'
}]
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_label_10_targets_with_a_b_c_false(self):
"""Test reusable draggables (no mupltiple draggables per target)"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"},{"a":"target4"},{"b":"target5"}, \
{"c":"target6"}, {"a":"target7"},{"b":"target8"},{"c":"target9"}, \
{"a":"target1"}]'
correct_answer = [
{
'draggables': ['a'],
'targets': ['target1', 'target4', 'target7', 'target10'],
'rule': 'unordered_equal'
},
{
'draggables': ['b'],
'targets': ['target2', 'target5', 'target8'],
'rule': 'unordered_equal'
},
{
'draggables': ['c'],
'targets': ['target3', 'target6', 'target9'],
'rule': 'unordered_equal'
}
]
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_label_10_targets_with_a_b_c_(self):
"""Test reusable draggables (no mupltiple draggables per target)"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"},{"a":"target4"},{"b":"target5"}, \
{"c":"target6"}, {"a":"target7"},{"b":"target8"},{"c":"target9"}, \
{"a":"target10"}]'
correct_answer = [
{
'draggables': ['a'],
'targets': ['target1', 'target4', 'target7', 'target10'],
'rule': 'unordered_equal'
},
{
'draggables': ['b'],
'targets': ['target2', 'target5', 'target8'],
'rule': 'unordered_equal'
},
{
'draggables': ['c'],
'targets': ['target3', 'target6', 'target9'],
'rule': 'unordered_equal'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_label_10_targets_with_a_b_c_multiple(self):
"""Test reusable draggables (mupltiple draggables per target)"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"},{"b":"target5"}, \
{"c":"target6"}, {"a":"target7"},{"b":"target8"},{"c":"target9"}, \
{"a":"target1"}]'
correct_answer = [
{
'draggables': ['a', 'a', 'a'],
'targets': ['target1', 'target4', 'target7', 'target10'],
'rule': 'anyof+number'
},
{
'draggables': ['b', 'b', 'b'],
'targets': ['target2', 'target5', 'target8'],
'rule': 'anyof+number'
},
{
'draggables': ['c', 'c', 'c'],
'targets': ['target3', 'target6', 'target9'],
'rule': 'anyof+number'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_label_10_targets_with_a_b_c_multiple_false(self):
"""Test reusable draggables (mupltiple draggables per target)"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"},{"a":"target4"},{"b":"target5"}, \
{"c":"target6"}, {"a":"target7"},{"b":"target8"},{"c":"target9"}, \
{"a":"target1"}]'
correct_answer = [
{
'draggables': ['a', 'a', 'a'],
'targets': ['target1', 'target4', 'target7', 'target10'],
'rule': 'anyof+number'
},
{
'draggables': ['b', 'b', 'b'],
'targets': ['target2', 'target5', 'target8'],
'rule': 'anyof+number'
},
{
'draggables': ['c', 'c', 'c'],
'targets': ['target3', 'target6', 'target9'],
'rule': 'anyof+number'
}
]
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_label_10_targets_with_a_b_c_reused(self):
"""Test a b c in 10 labels reused"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"},{"b":"target5"}, \
{"c":"target6"}, {"b":"target8"},{"c":"target9"}, \
{"a":"target10"}]'
correct_answer = [
{
'draggables': ['a', 'a'],
'targets': ['target1', 'target10'],
'rule': 'unordered_equal+number'
},
{
'draggables': ['b', 'b', 'b'],
'targets': ['target2', 'target5', 'target8'],
'rule': 'unordered_equal+number'
},
{
'draggables': ['c', 'c', 'c'],
'targets': ['target3', 'target6', 'target9'],
'rule': 'unordered_equal+number'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_label_10_targets_with_a_b_c_reused_false(self):
"""Test a b c in 10 labels reused false"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"},{"b":"target5"}, {"a":"target8"},\
{"c":"target6"}, {"b":"target8"},{"c":"target9"}, \
{"a":"target10"}]'
correct_answer = [
{
'draggables': ['a', 'a'],
'targets': ['target1', 'target10'],
'rule': 'unordered_equal+number'
},
{
'draggables': ['b', 'b', 'b'],
'targets': ['target2', 'target5', 'target8'],
'rule': 'unordered_equal+number'
},
{
'draggables': ['c', 'c', 'c'],
'targets': ['target3', 'target6', 'target9'],
'rule': 'unordered_equal+number'
}
]
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_mixed_reuse_and_not_reuse(self):
"""Test reusable draggables """
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"}, {"a":"target4"},\
{"a":"target5"}]'
correct_answer = [
{
'draggables': ['a', 'b'],
'targets': ['target1', 'target2', 'target4', 'target5'],
'rule': 'anyof'
},
{
'draggables': ['c'],
'targets': ['target3'],
'rule': 'exact'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_mixed_reuse_and_not_reuse_number(self):
"""Test reusable draggables with number """
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"}, {"a":"target4"}]'
correct_answer = [
{
'draggables': ['a', 'a', 'b'],
'targets': ['target1', 'target2', 'target4'],
'rule': 'anyof+number'
},
{
'draggables': ['c'],
'targets': ['target3'],
'rule': 'exact'
}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
def test_mixed_reuse_and_not_reuse_number_false(self):
"""Test reusable draggables with numbers, but wrong"""
user_input = '[{"a":"target1"}, \
{"b":"target2"},{"c":"target3"}, {"a":"target4"}, {"a":"target10"}]'
correct_answer = [
{
'draggables': ['a', 'a', 'b'],
'targets': ['target1', 'target2', 'target4', 'target10'],
'rule': 'anyof_number'
},
{
'draggables': ['c'],
'targets': ['target3'],
'rule': 'exact'
}
]
self.assertFalse(draganddrop.grade(user_input, correct_answer))
def test_alternative_correct_answer(self):
user_input = '[{"name_with_icon":"t1"},\
{"name_with_icon":"t1"},{"name_with_icon":"t1"},{"name4":"t1"}, \
{"name4":"t1"}]'
correct_answer = [
{'draggables': ['name4'], 'targets': ['t1', 't1'], 'rule': 'exact'},
{'draggables': ['name_with_icon'], 'targets': ['t1', 't1', 't1'],
'rule': 'exact'}
]
self.assertTrue(draganddrop.grade(user_input, correct_answer))
class Test_DragAndDrop_Populate(unittest.TestCase):
def test_1(self):
correct_answer = {'1': [[40, 10], 29], 'name_with_icon': [20, 20]}
user_input = '[{"1": [10, 10]}, {"name_with_icon": [20, 20]}]'
dnd = draganddrop.DragAndDrop(correct_answer, user_input)
correct_groups = [['1'], ['name_with_icon']]
correct_positions = [{'exact': [[[40, 10], 29]]}, {'exact': [[20, 20]]}]
user_groups = [['1'], ['name_with_icon']]
user_positions = [{'user': [[10, 10]]}, {'user': [[20, 20]]}]
self.assertEqual(correct_groups, dnd.correct_groups)
self.assertEqual(correct_positions, dnd.correct_positions)
self.assertEqual(user_groups, dnd.user_groups)
self.assertEqual(user_positions, dnd.user_positions)
class Test_DragAndDrop_Compare_Positions(unittest.TestCase):
def test_1(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertTrue(dnd.compare_positions(correct=[[1, 1], [2, 3]],
user=[[2, 3], [1, 1]],
flag='anyof'))
def test_2a(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertTrue(dnd.compare_positions(correct=[[1, 1], [2, 3]],
user=[[2, 3], [1, 1]],
flag='exact'))
def test_2b(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertFalse(dnd.compare_positions(correct=[[1, 1], [2, 3]],
user=[[2, 13], [1, 1]],
flag='exact'))
def test_3(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertFalse(dnd.compare_positions(correct=["a", "b"],
user=["a", "b", "c"],
flag='anyof'))
def test_4(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertTrue(dnd.compare_positions(correct=["a", "b", "c"],
user=["a", "b"],
flag='anyof'))
def test_5(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertFalse(dnd.compare_positions(correct=["a", "b", "c"],
user=["a", "c", "b"],
flag='exact'))
def test_6(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertTrue(dnd.compare_positions(correct=["a", "b", "c"],
user=["a", "c", "b"],
flag='anyof'))
def test_7(self):
dnd = draganddrop.DragAndDrop({'1': 't1'}, '[{"1": "t1"}]')
self.assertFalse(dnd.compare_positions(correct=["a", "b", "b"],
user=["a", "c", "b"],
flag='anyof'))
def suite():
testcases = [Test_PositionsCompare,
Test_DragAndDrop_Populate,
Test_DragAndDrop_Grade,
                 Test_DragAndDrop_Compare_Positions]
suites = []
for testcase in testcases:
suites.append(unittest.TestLoader().loadTestsFromTestCase(testcase))
return unittest.TestSuite(suites)
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())
|
OlivierLarrieu/HYDV2_EFL
|
refs/heads/master
|
Xlib/keysymdef/__init__.py
|
6
|
# Xlib.keysymdef -- X keysym defs
#
# Copyright (C) 2001 Peter Liljenberg <petli@ctrl-c.liu.se>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
__all__ = [
'apl',
'arabic',
'cyrillic',
'greek',
'hebrew',
'katakana',
'korean',
'latin1',
'latin2',
'latin3',
'latin4',
'miscellany',
'publishing',
'special',
'technical',
'thai',
'xf86',
'xk3270',
'xkb',
]
|
chrisrburns/obstool
|
refs/heads/master
|
obstool/urls.py
|
1
|
"""obstool URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
from django.views.static import serve
from settings import MEDIA_ROOT,SITE_ROOT,BOKEH_JS,BOKEH_CSS
import navigator.urls
urlpatterns = [
url(r'^index.html$', TemplateView.as_view(template_name='main.html')),
url(r'^$', TemplateView.as_view(template_name='main.html')),
url(r'^navigator/', include('navigator.urls', namespace='navigator')),
url(r'^admin/', admin.site.urls),
url(r'^media/js/bokeh/(?P<path>.*)$', serve, {'document_root':BOKEH_JS}),
url(r'^media/css/bokeh/(?P<path>.*)$', serve, {'document_root':BOKEH_CSS}),
url(r'^media/(?P<path>.*)$', serve, {'document_root':MEDIA_ROOT}),
url(r'(?P<path>.*)$', serve, {'document_root':SITE_ROOT}),
]
|
FlymeOS/tools
|
refs/heads/marshmallow-6.0
|
__init__.py
|
12133432
| |
developerfm/zulip
|
refs/heads/master
|
analytics/management/commands/__init__.py
|
12133432
| |
Russell-IO/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/layer2/__init__.py
|
12133432
| |
Daksh/Colors
|
refs/heads/master
|
colorsc/linux32_26/__init__.py
|
12133432
| |
arokem/scipy
|
refs/heads/master
|
scipy/sparse/construct.py
|
1
|
"""Functions to construct sparse matrices
"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['spdiags', 'eye', 'identity', 'kron', 'kronsum',
'hstack', 'vstack', 'bmat', 'rand', 'random', 'diags', 'block_diag']
import numpy as np
from .sputils import upcast, get_index_dtype, isscalarlike
from .csr import csr_matrix
from .csc import csc_matrix
from .bsr import bsr_matrix
from .coo import coo_matrix
from .dia import dia_matrix
from .base import issparse
def spdiags(data, diags, m, n, format=None):
"""
Return a sparse matrix from diagonals.
Parameters
----------
data : array_like
matrix diagonals stored row-wise
diags : diagonals to set
- k = 0 the main diagonal
- k > 0 the k-th upper diagonal
- k < 0 the k-th lower diagonal
m, n : int
shape of the result
format : str, optional
Format of the result. By default (format=None) an appropriate sparse
matrix format is returned. This choice is subject to change.
See Also
--------
diags : more convenient form of this function
dia_matrix : the sparse DIAgonal format.
Examples
--------
>>> from scipy.sparse import spdiags
>>> data = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])
>>> diags = np.array([0, -1, 2])
>>> spdiags(data, diags, 4, 4).toarray()
array([[1, 0, 3, 0],
[1, 2, 0, 4],
[0, 2, 3, 0],
[0, 0, 3, 4]])
"""
return dia_matrix((data, diags), shape=(m,n)).asformat(format)
def diags(diagonals, offsets=0, shape=None, format=None, dtype=None):
"""
Construct a sparse matrix from diagonals.
Parameters
----------
diagonals : sequence of array_like
Sequence of arrays containing the matrix diagonals,
corresponding to `offsets`.
offsets : sequence of int or an int, optional
Diagonals to set:
- k = 0 the main diagonal (default)
- k > 0 the kth upper diagonal
- k < 0 the kth lower diagonal
shape : tuple of int, optional
Shape of the result. If omitted, a square matrix large enough
to contain the diagonals is returned.
format : {"dia", "csr", "csc", "lil", ...}, optional
Matrix format of the result. By default (format=None) an
appropriate sparse matrix format is returned. This choice is
subject to change.
dtype : dtype, optional
Data type of the matrix.
See Also
--------
spdiags : construct matrix from diagonals
Notes
-----
This function differs from `spdiags` in the way it handles
off-diagonals.
The result from `diags` is the sparse equivalent of::
np.diag(diagonals[0], offsets[0])
+ ...
+ np.diag(diagonals[k], offsets[k])
Repeated diagonal offsets are disallowed.
.. versionadded:: 0.11
Examples
--------
>>> from scipy.sparse import diags
>>> diagonals = [[1, 2, 3, 4], [1, 2, 3], [1, 2]]
>>> diags(diagonals, [0, -1, 2]).toarray()
array([[1, 0, 1, 0],
[1, 2, 0, 2],
[0, 2, 3, 0],
[0, 0, 3, 4]])
Broadcasting of scalars is supported (but shape needs to be
specified):
>>> diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)).toarray()
array([[-2., 1., 0., 0.],
[ 1., -2., 1., 0.],
[ 0., 1., -2., 1.],
[ 0., 0., 1., -2.]])
If only one diagonal is wanted (as in `numpy.diag`), the following
works as well:
>>> diags([1, 2, 3], 1).toarray()
array([[ 0., 1., 0., 0.],
[ 0., 0., 2., 0.],
[ 0., 0., 0., 3.],
[ 0., 0., 0., 0.]])
"""
# if offsets is not a sequence, assume that there's only one diagonal
if isscalarlike(offsets):
# now check that there's actually only one diagonal
if len(diagonals) == 0 or isscalarlike(diagonals[0]):
diagonals = [np.atleast_1d(diagonals)]
else:
raise ValueError("Different number of diagonals and offsets.")
else:
diagonals = list(map(np.atleast_1d, diagonals))
offsets = np.atleast_1d(offsets)
# Basic check
if len(diagonals) != len(offsets):
raise ValueError("Different number of diagonals and offsets.")
# Determine shape, if omitted
if shape is None:
m = len(diagonals[0]) + abs(int(offsets[0]))
shape = (m, m)
# Determine data type, if omitted
if dtype is None:
dtype = np.common_type(*diagonals)
# Construct data array
m, n = shape
M = max([min(m + offset, n - offset) + max(0, offset)
for offset in offsets])
M = max(0, M)
data_arr = np.zeros((len(offsets), M), dtype=dtype)
K = min(m, n)
for j, diagonal in enumerate(diagonals):
offset = offsets[j]
k = max(0, offset)
length = min(m + offset, n - offset, K)
if length < 0:
raise ValueError("Offset %d (index %d) out of bounds" % (offset, j))
try:
data_arr[j, k:k+length] = diagonal[...,:length]
except ValueError:
if len(diagonal) != length and len(diagonal) != 1:
raise ValueError(
"Diagonal length (index %d: %d at offset %d) does not "
"agree with matrix size (%d, %d)." % (
j, len(diagonal), offset, m, n))
raise
return dia_matrix((data_arr, offsets), shape=(m, n)).asformat(format)
def identity(n, dtype='d', format=None):
"""Identity matrix in sparse format
Returns an identity matrix with shape (n,n) using a given
sparse format and dtype.
Parameters
----------
n : int
Shape of the identity matrix.
dtype : dtype, optional
Data type of the matrix
format : str, optional
Sparse format of the result, e.g., format="csr", etc.
Examples
--------
>>> from scipy.sparse import identity
>>> identity(3).toarray()
array([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> identity(3, dtype='int8', format='dia')
<3x3 sparse matrix of type '<class 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
return eye(n, n, dtype=dtype, format=format)
def eye(m, n=None, k=0, dtype=float, format=None):
"""Sparse matrix with ones on diagonal
Returns a sparse (m x n) matrix where the kth diagonal
is all ones and everything else is zeros.
Parameters
----------
m : int
Number of rows in the matrix.
n : int, optional
Number of columns. Default: `m`.
k : int, optional
Diagonal to place ones on. Default: 0 (main diagonal).
dtype : dtype, optional
Data type of the matrix.
format : str, optional
Sparse format of the result, e.g., format="csr", etc.
Examples
--------
>>> from scipy import sparse
>>> sparse.eye(3).toarray()
array([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> sparse.eye(3, dtype=np.int8)
<3x3 sparse matrix of type '<class 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
if n is None:
n = m
m,n = int(m),int(n)
if m == n and k == 0:
# fast branch for special formats
if format in ['csr', 'csc']:
idx_dtype = get_index_dtype(maxval=n)
indptr = np.arange(n+1, dtype=idx_dtype)
indices = np.arange(n, dtype=idx_dtype)
data = np.ones(n, dtype=dtype)
cls = {'csr': csr_matrix, 'csc': csc_matrix}[format]
return cls((data,indices,indptr),(n,n))
elif format == 'coo':
idx_dtype = get_index_dtype(maxval=n)
row = np.arange(n, dtype=idx_dtype)
col = np.arange(n, dtype=idx_dtype)
data = np.ones(n, dtype=dtype)
return coo_matrix((data,(row,col)),(n,n))
diags = np.ones((1, max(0, min(m + k, n))), dtype=dtype)
return spdiags(diags, k, m, n).asformat(format)
def kron(A, B, format=None):
"""kronecker product of sparse matrices A and B
Parameters
----------
A : sparse or dense matrix
first matrix of the product
B : sparse or dense matrix
second matrix of the product
format : str, optional
format of the result (e.g. "csr")
Returns
-------
kronecker product in a sparse matrix format
Examples
--------
>>> from scipy import sparse
>>> A = sparse.csr_matrix(np.array([[0, 2], [5, 0]]))
>>> B = sparse.csr_matrix(np.array([[1, 2], [3, 4]]))
>>> sparse.kron(A, B).toarray()
array([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
>>> sparse.kron(A, [[1, 2], [3, 4]]).toarray()
array([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
"""
B = coo_matrix(B)
if (format is None or format == "bsr") and 2*B.nnz >= B.shape[0] * B.shape[1]:
# B is fairly dense, use BSR
A = csr_matrix(A,copy=True)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix(output_shape)
B = B.toarray()
data = A.data.repeat(B.size).reshape(-1,B.shape[0],B.shape[1])
data = data * B
return bsr_matrix((data,A.indices,A.indptr), shape=output_shape)
else:
# use COO
A = coo_matrix(A)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix(output_shape)
# expand entries of a into blocks
row = A.row.repeat(B.nnz)
col = A.col.repeat(B.nnz)
data = A.data.repeat(B.nnz)
row *= B.shape[0]
col *= B.shape[1]
# increment block indices
row,col = row.reshape(-1,B.nnz),col.reshape(-1,B.nnz)
row += B.row
col += B.col
row,col = row.reshape(-1),col.reshape(-1)
# compute block entries
data = data.reshape(-1,B.nnz) * B.data
data = data.reshape(-1)
return coo_matrix((data,(row,col)), shape=output_shape).asformat(format)
def kronsum(A, B, format=None):
"""kronecker sum of sparse matrices A and B
Kronecker sum of two sparse matrices is a sum of two Kronecker
products kron(I_n,A) + kron(B,I_m) where A has shape (m,m)
and B has shape (n,n) and I_m and I_n are identity matrices
of shape (m,m) and (n,n), respectively.
Parameters
----------
A
square matrix
B
square matrix
format : str
format of the result (e.g. "csr")
Returns
-------
kronecker sum in a sparse matrix format
Examples
--------
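    A small example (sketch) with a 2x2 `A` and a 1x1 `B`, where the result
    reduces to A + 5*I:

    >>> from scipy.sparse import coo_matrix, kronsum
    >>> A = coo_matrix([[1, 2], [3, 4]])
    >>> B = coo_matrix([[5]])
    >>> kronsum(A, B).toarray()
    array([[6, 2],
           [3, 9]])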
"""
A = coo_matrix(A)
B = coo_matrix(B)
if A.shape[0] != A.shape[1]:
raise ValueError('A is not square')
if B.shape[0] != B.shape[1]:
raise ValueError('B is not square')
dtype = upcast(A.dtype, B.dtype)
L = kron(eye(B.shape[0],dtype=dtype), A, format=format)
R = kron(B, eye(A.shape[0],dtype=dtype), format=format)
return (L+R).asformat(format) # since L + R is not always same format
def _compressed_sparse_stack(blocks, axis):
"""
Stacking fast path for CSR/CSC matrices
(i) vstack for CSR, (ii) hstack for CSC.
"""
other_axis = 1 if axis == 0 else 0
data = np.concatenate([b.data for b in blocks])
constant_dim = blocks[0].shape[other_axis]
idx_dtype = get_index_dtype(arrays=[b.indptr for b in blocks],
maxval=max(data.size, constant_dim))
indices = np.empty(data.size, dtype=idx_dtype)
indptr = np.empty(sum(b.shape[axis] for b in blocks) + 1, dtype=idx_dtype)
last_indptr = idx_dtype(0)
sum_dim = 0
sum_indices = 0
for b in blocks:
if b.shape[other_axis] != constant_dim:
raise ValueError('incompatible dimensions for axis %d' % other_axis)
indices[sum_indices:sum_indices+b.indices.size] = b.indices
sum_indices += b.indices.size
idxs = slice(sum_dim, sum_dim + b.shape[axis])
indptr[idxs] = b.indptr[:-1]
indptr[idxs] += last_indptr
sum_dim += b.shape[axis]
last_indptr += b.indptr[-1]
indptr[-1] = last_indptr
if axis == 0:
return csr_matrix((data, indices, indptr),
shape=(sum_dim, constant_dim))
else:
return csc_matrix((data, indices, indptr),
shape=(constant_dim, sum_dim))
def hstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices horizontally (column wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : str
sparse format of the result (e.g., "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
dtype : dtype, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
See Also
--------
vstack : stack sparse matrices vertically (row wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, hstack
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> hstack([A,B]).toarray()
array([[1, 2, 5],
[3, 4, 6]])
"""
return bmat([blocks], format=format, dtype=dtype)
def vstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices vertically (row wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : str, optional
sparse format of the result (e.g., "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
dtype : dtype, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
See Also
--------
hstack : stack sparse matrices horizontally (column wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, vstack
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5, 6]])
>>> vstack([A, B]).toarray()
array([[1, 2],
[3, 4],
[5, 6]])
"""
return bmat([[b] for b in blocks], format=format, dtype=dtype)
def bmat(blocks, format=None, dtype=None):
"""
Build a sparse matrix from sparse sub-blocks
Parameters
----------
blocks : array_like
Grid of sparse matrices with compatible shapes.
An entry of None implies an all-zero matrix.
format : {'bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'}, optional
The sparse format of the result (e.g. "csr"). By default an
appropriate sparse matrix format is returned.
This choice is subject to change.
dtype : dtype, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
Returns
-------
bmat : sparse matrix
See Also
--------
block_diag, diags
Examples
--------
>>> from scipy.sparse import coo_matrix, bmat
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> C = coo_matrix([[7]])
>>> bmat([[A, B], [None, C]]).toarray()
array([[1, 2, 5],
[3, 4, 6],
[0, 0, 7]])
>>> bmat([[A, None], [None, C]]).toarray()
array([[1, 2, 0],
[3, 4, 0],
[0, 0, 7]])
"""
blocks = np.asarray(blocks, dtype='object')
if blocks.ndim != 2:
raise ValueError('blocks must be 2-D')
M,N = blocks.shape
# check for fast path cases
if (N == 1 and format in (None, 'csr') and all(isinstance(b, csr_matrix)
for b in blocks.flat)):
A = _compressed_sparse_stack(blocks[:,0], 0)
if dtype is not None:
A = A.astype(dtype)
return A
elif (M == 1 and format in (None, 'csc')
and all(isinstance(b, csc_matrix) for b in blocks.flat)):
A = _compressed_sparse_stack(blocks[0,:], 1)
if dtype is not None:
A = A.astype(dtype)
return A
block_mask = np.zeros(blocks.shape, dtype=bool)
brow_lengths = np.zeros(M, dtype=np.int64)
bcol_lengths = np.zeros(N, dtype=np.int64)
# convert everything to COO format
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
A = coo_matrix(blocks[i,j])
blocks[i,j] = A
block_mask[i,j] = True
if brow_lengths[i] == 0:
brow_lengths[i] = A.shape[0]
elif brow_lengths[i] != A.shape[0]:
msg = ('blocks[{i},:] has incompatible row dimensions. '
'Got blocks[{i},{j}].shape[0] == {got}, '
'expected {exp}.'.format(i=i, j=j,
exp=brow_lengths[i],
got=A.shape[0]))
raise ValueError(msg)
if bcol_lengths[j] == 0:
bcol_lengths[j] = A.shape[1]
elif bcol_lengths[j] != A.shape[1]:
                    msg = ('blocks[:,{j}] has incompatible column dimensions. '
'Got blocks[{i},{j}].shape[1] == {got}, '
'expected {exp}.'.format(i=i, j=j,
exp=bcol_lengths[j],
got=A.shape[1]))
raise ValueError(msg)
nnz = sum(block.nnz for block in blocks[block_mask])
if dtype is None:
all_dtypes = [blk.dtype for blk in blocks[block_mask]]
dtype = upcast(*all_dtypes) if all_dtypes else None
row_offsets = np.append(0, np.cumsum(brow_lengths))
col_offsets = np.append(0, np.cumsum(bcol_lengths))
shape = (row_offsets[-1], col_offsets[-1])
data = np.empty(nnz, dtype=dtype)
idx_dtype = get_index_dtype(maxval=max(shape))
row = np.empty(nnz, dtype=idx_dtype)
col = np.empty(nnz, dtype=idx_dtype)
nnz = 0
ii, jj = np.nonzero(block_mask)
for i, j in zip(ii, jj):
B = blocks[i, j]
idx = slice(nnz, nnz + B.nnz)
data[idx] = B.data
row[idx] = B.row + row_offsets[i]
col[idx] = B.col + col_offsets[j]
nnz += B.nnz
return coo_matrix((data, (row, col)), shape=shape).asformat(format)
def block_diag(mats, format=None, dtype=None):
"""
Build a block diagonal sparse matrix from provided matrices.
Parameters
----------
mats : sequence of matrices
Input matrices.
format : str, optional
The sparse format of the result (e.g., "csr"). If not given, the matrix
is returned in "coo" format.
dtype : dtype specifier, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
Returns
-------
res : sparse matrix
Notes
-----
.. versionadded:: 0.11.0
See Also
--------
bmat, diags
Examples
--------
>>> from scipy.sparse import coo_matrix, block_diag
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> C = coo_matrix([[7]])
>>> block_diag((A, B, C)).toarray()
array([[1, 2, 0, 0],
[3, 4, 0, 0],
[0, 0, 5, 0],
[0, 0, 6, 0],
[0, 0, 0, 7]])
"""
nmat = len(mats)
rows = []
for ia, a in enumerate(mats):
row = [None]*nmat
if issparse(a):
row[ia] = a
else:
row[ia] = coo_matrix(a)
rows.append(row)
return bmat(rows, format=format, dtype=dtype)
def random(m, n, density=0.01, format='coo', dtype=None,
random_state=None, data_rvs=None):
"""Generate a sparse matrix of the given shape and density with randomly
distributed values.
Parameters
----------
m, n : int
shape of the matrix
density : real, optional
density of the generated matrix: density equal to one means a full
matrix, density of 0 means a matrix with no non-zero items.
format : str, optional
sparse matrix format.
dtype : dtype, optional
type of the returned matrix values.
random_state : {numpy.random.RandomState, int}, optional
Random number generator or random seed. If not given, the singleton
numpy.random will be used. This random state will be used
for sampling the sparsity structure, but not necessarily for sampling
the values of the structurally nonzero entries of the matrix.
data_rvs : callable, optional
Samples a requested number of random values.
This function should take a single argument specifying the length
of the ndarray that it will return. The structurally nonzero entries
of the sparse random matrix will be taken from the array sampled
by this function. By default, uniform [0, 1) random values will be
sampled using the same random state as is used for sampling
the sparsity structure.
Returns
-------
res : sparse matrix
Notes
-----
Only float types are supported for now.
Examples
--------
>>> from scipy.sparse import random
>>> from scipy import stats
>>> class CustomRandomState(np.random.RandomState):
... def randint(self, k):
... i = np.random.randint(k)
... return i - i % 2
>>> np.random.seed(12345)
>>> rs = CustomRandomState()
>>> rvs = stats.poisson(25, loc=10).rvs
>>> S = random(3, 4, density=0.25, random_state=rs, data_rvs=rvs)
>>> S.A
array([[ 36., 0., 33., 0.], # random
[ 0., 0., 0., 0.],
[ 0., 0., 36., 0.]])
>>> from scipy.sparse import random
>>> from scipy.stats import rv_continuous
>>> class CustomDistribution(rv_continuous):
... def _rvs(self, *args, **kwargs):
... return self._random_state.randn(*self._size)
>>> X = CustomDistribution(seed=2906)
>>> Y = X() # get a frozen version of the distribution
>>> S = random(3, 4, density=0.25, random_state=2906, data_rvs=Y.rvs)
>>> S.A
array([[ 0. , 0. , 0. , 0. ],
[ 0.13569738, 1.9467163 , -0.81205367, 0. ],
[ 0. , 0. , 0. , 0. ]])
"""
if density < 0 or density > 1:
raise ValueError("density expected to be 0 <= density <= 1")
dtype = np.dtype(dtype)
mn = m * n
tp = np.intc
if mn > np.iinfo(tp).max:
tp = np.int64
if mn > np.iinfo(tp).max:
msg = """\
Trying to generate a random sparse matrix such that the product of dimensions
is greater than %d - this is not supported on this machine
"""
raise ValueError(msg % np.iinfo(tp).max)
# Number of non zero values
k = int(density * m * n)
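    # e.g. m = 3, n = 4, density = 0.25 gives k = int(0.25 * 12) = 3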
if random_state is None:
random_state = np.random
elif isinstance(random_state, (int, np.integer)):
random_state = np.random.RandomState(random_state)
if data_rvs is None:
if np.issubdtype(dtype, np.integer):
randint = random_state.randint
def data_rvs(n):
return randint(np.iinfo(dtype).min, np.iinfo(dtype).max,
n, dtype=dtype)
elif np.issubdtype(dtype, np.complexfloating):
def data_rvs(n):
return random_state.rand(n) + random_state.rand(n) * 1j
else:
data_rvs = random_state.rand
ind = random_state.choice(mn, size=k, replace=False)
j = np.floor(ind * 1. / m).astype(tp, copy=False)
i = (ind - j * m).astype(tp, copy=False)
vals = data_rvs(k).astype(dtype, copy=False)
return coo_matrix((vals, (i, j)), shape=(m, n)).asformat(format,
copy=False)
def rand(m, n, density=0.01, format="coo", dtype=None, random_state=None):
"""Generate a sparse matrix of the given shape and density with uniformly
distributed values.
Parameters
----------
m, n : int
shape of the matrix
density : real, optional
density of the generated matrix: density equal to one means a full
matrix, density of 0 means a matrix with no non-zero items.
format : str, optional
sparse matrix format.
dtype : dtype, optional
type of the returned matrix values.
random_state : {numpy.random.RandomState, int}, optional
Random number generator or random seed. If not given, the singleton
numpy.random will be used.
Returns
-------
res : sparse matrix
Notes
-----
Only float types are supported for now.
See Also
--------
scipy.sparse.random : Similar function that allows a user-specified random
data source.
Examples
--------
>>> from scipy.sparse import rand
>>> matrix = rand(3, 4, density=0.25, format="csr", random_state=42)
>>> matrix
<3x4 sparse matrix of type '<class 'numpy.float64'>'
with 3 stored elements in Compressed Sparse Row format>
>>> matrix.todense()
matrix([[0.05641158, 0. , 0. , 0.65088847],
[0. , 0. , 0. , 0.14286682],
[0. , 0. , 0. , 0. ]])
"""
return random(m, n, density, format, dtype, random_state)
|
abircse06/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/stanfordoc.py
|
173
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
orderedSet,
unescapeHTML,
)
class StanfordOpenClassroomIE(InfoExtractor):
IE_NAME = 'stanfordoc'
IE_DESC = 'Stanford Open ClassRoom'
_VALID_URL = r'https?://openclassroom\.stanford\.edu(?P<path>/?|(/MainFolder/(?:HomePage|CoursePage|VideoPage)\.php([?]course=(?P<course>[^&]+)(&video=(?P<video>[^&]+))?(&.*)?)?))$'
_TEST = {
'url': 'http://openclassroom.stanford.edu/MainFolder/VideoPage.php?course=PracticalUnix&video=intro-environment&speed=100',
'md5': '544a9468546059d4e80d76265b0443b8',
'info_dict': {
'id': 'PracticalUnix_intro-environment',
'ext': 'mp4',
'title': 'Intro Environment',
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
if mobj.group('course') and mobj.group('video'): # A specific video
course = mobj.group('course')
video = mobj.group('video')
info = {
'id': course + '_' + video,
'uploader': None,
'upload_date': None,
}
baseUrl = 'http://openclassroom.stanford.edu/MainFolder/courses/' + course + '/videos/'
xmlUrl = baseUrl + video + '.xml'
mdoc = self._download_xml(xmlUrl, info['id'])
try:
info['title'] = mdoc.findall('./title')[0].text
info['url'] = baseUrl + mdoc.findall('./videoFile')[0].text
except IndexError:
raise ExtractorError('Invalid metadata XML file')
return info
elif mobj.group('course'): # A course page
course = mobj.group('course')
info = {
'id': course,
'_type': 'playlist',
'uploader': None,
'upload_date': None,
}
coursepage = self._download_webpage(
url, info['id'],
note='Downloading course info page',
errnote='Unable to download course info page')
info['title'] = self._html_search_regex(
r'<h1>([^<]+)</h1>', coursepage, 'title', default=info['id'])
info['description'] = self._html_search_regex(
r'(?s)<description>([^<]+)</description>',
coursepage, 'description', fatal=False)
            links = orderedSet(re.findall(r'<a href="(VideoPage\.php\?[^"]+)">', coursepage))
info['entries'] = [self.url_result(
'http://openclassroom.stanford.edu/MainFolder/%s' % unescapeHTML(l)
) for l in links]
return info
else: # Root page
info = {
'id': 'Stanford OpenClassroom',
'_type': 'playlist',
'uploader': None,
'upload_date': None,
}
info['title'] = info['id']
rootURL = 'http://openclassroom.stanford.edu/MainFolder/HomePage.php'
rootpage = self._download_webpage(rootURL, info['id'],
errnote='Unable to download course info page')
            links = orderedSet(re.findall(r'<a href="(CoursePage\.php\?[^"]+)">', rootpage))
info['entries'] = [self.url_result(
'http://openclassroom.stanford.edu/MainFolder/%s' % unescapeHTML(l)
) for l in links]
return info
|
redfin/react-server
|
refs/heads/master
|
packages/react-server-website/deployment/encrypt_credentials.py
|
6
|
import argparse
from base64 import b64decode, b64encode
import boto3
import json
"""
Use this script to generate a new encrypted data blob that contains secrets we
need on the production machine.
"""
def encrypt(args):
data = {
'asini-slack.api.token' : args.asini_slack_key,
'react-server-slack.api.token' : args.react_server_slack_key,
}
kms = boto3.client('kms', 'us-west-2')
data = b64encode(kms.encrypt(
KeyId='alias/react-server-key', Plaintext=json.dumps(data)
)['CiphertextBlob'])
print data
print kms.decrypt(CiphertextBlob=b64decode(data))['Plaintext']
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Encrypt credentials.')
parser.add_argument('--asini-slack-api-key',
dest='asini_slack_key',
help='Asini Slack API key', required=True)
parser.add_argument('--react-server-slack-api-key',
dest='react_server_slack_key',
help='React Server Slack API key', required=True)
args = parser.parse_args()
encrypt(args)
|
prutseltje/ansible
|
refs/heads/devel
|
test/units/plugins/lookup/test_password.py
|
71
|
# -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import passlib
from passlib.handlers import pbkdf2
from units.mock.loader import DictDataLoader
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
from ansible.plugins.loader import PluginLoader
from ansible.plugins.lookup import password
from ansible.utils import encrypt
DEFAULT_CHARS = sorted([u'ascii_letters', u'digits', u".,:-_"])
DEFAULT_CANDIDATE_CHARS = u'.,:-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
# Currently there isn't a new-style parameter syntax; all of these use the old style.
old_style_params_data = (
# Simple case
dict(
term=u'/path/to/file',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
# Special characters in path
dict(
term=u'/path/with/embedded spaces and/file',
filename=u'/path/with/embedded spaces and/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/equals/cn=com.ansible',
filename=u'/path/with/equals/cn=com.ansible',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/unicode/くらとみ/file',
filename=u'/path/with/unicode/くらとみ/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
# Mix several special chars
dict(
term=u'/path/with/utf 8 and spaces/くらとみ/file',
filename=u'/path/with/utf 8 and spaces/くらとみ/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/encoding=unicode/くらとみ/file',
filename=u'/path/with/encoding=unicode/くらとみ/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/with/encoding=unicode/くらとみ/and spaces file',
filename=u'/path/with/encoding=unicode/くらとみ/and spaces file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
# Simple parameters
dict(
term=u'/path/to/file length=42',
filename=u'/path/to/file',
params=dict(length=42, encrypt=None, chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/to/file encrypt=pbkdf2_sha256',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt='pbkdf2_sha256', chars=DEFAULT_CHARS),
candidate_chars=DEFAULT_CANDIDATE_CHARS,
),
dict(
term=u'/path/to/file chars=abcdefghijklmnop',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abcdefghijklmnop']),
candidate_chars=u'abcdefghijklmnop',
),
dict(
term=u'/path/to/file chars=digits,abc,def',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc', u'def'])),
candidate_chars=u'abcdef0123456789',
),
# Including comma in chars
dict(
term=u'/path/to/file chars=abcdefghijklmnop,,digits',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'abcdefghijklmnop', u',', u'digits'])),
candidate_chars=u',abcdefghijklmnop0123456789',
),
dict(
term=u'/path/to/file chars=,,',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u',']),
candidate_chars=u',',
),
# Including = in chars
dict(
term=u'/path/to/file chars=digits,=,,',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'=', u','])),
candidate_chars=u',=0123456789',
),
dict(
term=u'/path/to/file chars=digits,abc=def',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc=def'])),
candidate_chars=u'abc=def0123456789',
),
# Including unicode in chars
dict(
term=u'/path/to/file chars=digits,くらとみ,,',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'くらとみ', u','])),
candidate_chars=u',0123456789くらとみ',
),
# Including only unicode in chars
dict(
term=u'/path/to/file chars=くらとみ',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'くらとみ'])),
candidate_chars=u'くらとみ',
),
# Include ':' in path
dict(
term=u'/path/to/file_with:colon chars=ascii_letters,digits',
filename=u'/path/to/file_with:colon',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'ascii_letters', u'digits'])),
candidate_chars=u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
),
# Including special chars in both path and chars
# Special characters in path
dict(
term=u'/path/with/embedded spaces and/file chars=abc=def',
filename=u'/path/with/embedded spaces and/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
candidate_chars=u'abc=def',
),
dict(
term=u'/path/with/equals/cn=com.ansible chars=abc=def',
filename=u'/path/with/equals/cn=com.ansible',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
candidate_chars=u'abc=def',
),
dict(
term=u'/path/with/unicode/くらとみ/file chars=くらとみ',
filename=u'/path/with/unicode/くらとみ/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'くらとみ']),
candidate_chars=u'くらとみ',
),
)
class TestParseParameters(unittest.TestCase):
def test(self):
for testcase in old_style_params_data:
filename, params = password._parse_parameters(testcase['term'])
params['chars'].sort()
self.assertEqual(filename, testcase['filename'])
self.assertEqual(params, testcase['params'])
def test_unrecognized_value(self):
testcase = dict(term=u'/path/to/file chars=くらとみi sdfsdf',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'くらとみ']),
candidate_chars=u'くらとみ')
self.assertRaises(AnsibleError, password._parse_parameters, testcase['term'])
def test_invalid_params(self):
testcase = dict(term=u'/path/to/file chars=くらとみi somethign_invalid=123',
filename=u'/path/to/file',
params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'くらとみ']),
candidate_chars=u'くらとみ')
self.assertRaises(AnsibleError, password._parse_parameters, testcase['term'])
class TestReadPasswordFile(unittest.TestCase):
def setUp(self):
self.os_path_exists = password.os.path.exists
def tearDown(self):
password.os.path.exists = self.os_path_exists
def test_no_password_file(self):
password.os.path.exists = lambda x: False
self.assertEqual(password._read_password_file(b'/nonexistent'), None)
def test_with_password_file(self):
password.os.path.exists = lambda x: True
with patch.object(builtins, 'open', mock_open(read_data=b'Testing\n')) as m:
self.assertEqual(password._read_password_file(b'/etc/motd'), u'Testing')
class TestGenCandidateChars(unittest.TestCase):
def _assert_gen_candidate_chars(self, testcase):
expected_candidate_chars = testcase['candidate_chars']
params = testcase['params']
chars_spec = params['chars']
res = password._gen_candidate_chars(chars_spec)
self.assertEquals(res, expected_candidate_chars)
def test_gen_candidate_chars(self):
for testcase in old_style_params_data:
self._assert_gen_candidate_chars(testcase)
class TestRandomPassword(unittest.TestCase):
def _assert_valid_chars(self, res, chars):
for res_char in res:
self.assertIn(res_char, chars)
def test_default(self):
res = password.random_password()
self.assertEquals(len(res), password.DEFAULT_LENGTH)
self.assertTrue(isinstance(res, text_type))
self._assert_valid_chars(res, DEFAULT_CANDIDATE_CHARS)
def test_zero_length(self):
res = password.random_password(length=0)
self.assertEquals(len(res), 0)
self.assertTrue(isinstance(res, text_type))
self._assert_valid_chars(res, u',')
    def test_just_a_comma(self):
res = password.random_password(length=1, chars=u',')
self.assertEquals(len(res), 1)
self.assertEquals(res, u',')
def test_free_will(self):
# A Rush and Spinal Tap reference twofer
res = password.random_password(length=11, chars=u'a')
self.assertEquals(len(res), 11)
self.assertEquals(res, 'aaaaaaaaaaa')
self._assert_valid_chars(res, u'a')
def test_unicode(self):
res = password.random_password(length=11, chars=u'くらとみ')
self._assert_valid_chars(res, u'くらとみ')
self.assertEquals(len(res), 11)
def test_gen_password(self):
for testcase in old_style_params_data:
params = testcase['params']
candidate_chars = testcase['candidate_chars']
params_chars_spec = password._gen_candidate_chars(params['chars'])
password_string = password.random_password(length=params['length'],
chars=params_chars_spec)
self.assertEquals(len(password_string),
params['length'],
msg='generated password=%s has length (%s) instead of expected length (%s)' %
(password_string, len(password_string), params['length']))
for char in password_string:
self.assertIn(char, candidate_chars,
                              msg='%s not found in %s from chars spec %s' %
(char, candidate_chars, params['chars']))
class TestRandomSalt(unittest.TestCase):
def test(self):
res = password._random_salt()
expected_salt_candidate_chars = u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./'
self.assertEquals(len(res), 8)
for res_char in res:
self.assertIn(res_char, expected_salt_candidate_chars)
class TestParseContent(unittest.TestCase):
def test_empty_password_file(self):
plaintext_password, salt = password._parse_content(u'')
self.assertEquals(plaintext_password, u'')
self.assertEquals(salt, None)
def test(self):
expected_content = u'12345678'
file_content = expected_content
plaintext_password, salt = password._parse_content(file_content)
self.assertEquals(plaintext_password, expected_content)
self.assertEquals(salt, None)
def test_with_salt(self):
expected_content = u'12345678 salt=87654321'
file_content = expected_content
plaintext_password, salt = password._parse_content(file_content)
self.assertEquals(plaintext_password, u'12345678')
self.assertEquals(salt, u'87654321')
class TestFormatContent(unittest.TestCase):
def test_no_encrypt(self):
self.assertEqual(
password._format_content(password=u'hunter42',
salt=u'87654321',
encrypt=False),
u'hunter42 salt=87654321')
def test_no_encrypt_no_salt(self):
self.assertEqual(
password._format_content(password=u'hunter42',
salt=None,
encrypt=False),
u'hunter42')
def test_encrypt(self):
self.assertEqual(
password._format_content(password=u'hunter42',
salt=u'87654321',
encrypt='pbkdf2_sha256'),
u'hunter42 salt=87654321')
def test_encrypt_no_salt(self):
self.assertRaises(AssertionError, password._format_content, u'hunter42', None, 'pbkdf2_sha256')
class TestWritePasswordFile(unittest.TestCase):
def setUp(self):
self.makedirs_safe = password.makedirs_safe
self.os_chmod = password.os.chmod
password.makedirs_safe = lambda path, mode: None
password.os.chmod = lambda path, mode: None
def tearDown(self):
password.makedirs_safe = self.makedirs_safe
password.os.chmod = self.os_chmod
def test_content_written(self):
with patch.object(builtins, 'open', mock_open()) as m:
password._write_password_file(b'/this/is/a/test/caf\xc3\xa9', u'Testing Café')
m.assert_called_once_with(b'/this/is/a/test/caf\xc3\xa9', 'wb')
m().write.assert_called_once_with(u'Testing Café\n'.encode('utf-8'))
class TestLookupModule(unittest.TestCase):
def setUp(self):
self.fake_loader = DictDataLoader({'/path/to/somewhere': 'sdfsdf'})
self.password_lookup = password.LookupModule(loader=self.fake_loader)
self.os_path_exists = password.os.path.exists
# Different releases of passlib default to a different number of rounds
self.sha256 = passlib.registry.get_crypt_handler('pbkdf2_sha256')
sha256_for_tests = pbkdf2.create_pbkdf2_hash("sha256", 32, 20000)
passlib.registry.register_crypt_handler(sha256_for_tests, force=True)
def tearDown(self):
password.os.path.exists = self.os_path_exists
passlib.registry.register_crypt_handler(self.sha256, force=True)
@patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
    def test_no_encrypt(self, mock_write_file, mock_get_paths):
mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
results = self.password_lookup.run([u'/path/to/somewhere'], None)
# FIXME: assert something useful
for result in results:
self.assertEquals(len(result), password.DEFAULT_LENGTH)
self.assertIsInstance(result, text_type)
@patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
    def test_encrypt(self, mock_write_file, mock_get_paths):
mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
results = self.password_lookup.run([u'/path/to/somewhere encrypt=pbkdf2_sha256'], None)
# pbkdf2 format plus hash
expected_password_length = 76
for result in results:
self.assertEquals(len(result), expected_password_length)
# result should have 5 parts split by '$'
str_parts = result.split('$', 5)
# verify the result is parseable by the passlib
crypt_parts = passlib.hash.pbkdf2_sha256.parsehash(result)
# verify it used the right algo type
self.assertEquals(str_parts[1], 'pbkdf2-sha256')
self.assertEquals(len(str_parts), 5)
# verify the string and parsehash agree on the number of rounds
self.assertEquals(int(str_parts[2]), crypt_parts['rounds'])
self.assertIsInstance(result, text_type)
@patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
    def test_password_already_created_encrypt(self, mock_write_file, mock_get_paths):
mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
password.os.path.exists = lambda x: True
with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
results = self.password_lookup.run([u'/path/to/somewhere chars=anything encrypt=pbkdf2_sha256'], None)
for result in results:
self.assertEqual(result, u'$pbkdf2-sha256$20000$ODc2NTQzMjE$Uikde0cv0BKaRaAXMrUQB.zvG4GmnjClwjghwIRf2gU')
@patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
    def test_password_already_created_no_encrypt(self, mock_write_file, mock_get_paths):
mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
password.os.path.exists = lambda x: True
with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)
for result in results:
self.assertEqual(result, u'hunter42')
@patch.object(PluginLoader, '_get_paths')
@patch('ansible.plugins.lookup.password._write_password_file')
    def test_only_a(self, mock_write_file, mock_get_paths):
mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']
results = self.password_lookup.run([u'/path/to/somewhere chars=a'], None)
for result in results:
self.assertEquals(result, u'a' * password.DEFAULT_LENGTH)
|
trondhindenes/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_cluster.py
|
20
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_cluster
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of Cluster Avi RESTful Object
description:
    - This module is used to configure the Cluster object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
name:
description:
- Name of the object.
required: true
nodes:
description:
- List of clusternode.
rejoin_nodes_automatically:
description:
            - Re-join cluster nodes automatically in the event one of the nodes is reset to factory.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
type: bool
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
virtual_ip:
description:
- A virtual ip address.
- This ip address will be dynamically reconfigured so that it always is the ip of the cluster leader.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create Cluster object
avi_cluster:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_cluster
"""
RETURN = '''
obj:
description: Cluster (api/cluster) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
name=dict(type='str', required=True),
nodes=dict(type='list',),
rejoin_nodes_automatically=dict(type='bool',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
virtual_ip=dict(type='dict',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'cluster',
set([]))
if __name__ == '__main__':
main()
|
pombredanne/pushmanager
|
refs/heads/master
|
tests/test_bookmarklet.py
|
6
|
import contextlib
import mock
from pushmanager_main import CreateRequestBookmarkletHandler
from pushmanager_main import CheckSitesBookmarkletHandler
import testing as T
class BookmarkletTest(T.TestCase, T.AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
            # We'll get JavaScript as the body; just check some
            # variable names/strings that we know are there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
|
jfelectron/python-driver
|
refs/heads/master
|
tests/unit/io/test_eventletreactor.py
|
7
|
# Copyright 2013-2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from tests.unit.io.utils import submit_and_wait_for_completion, TimerCallback
from tests import is_eventlet_monkey_patched
import time
try:
from cassandra.io.eventletreactor import EventletConnection
except ImportError:
EventletConnection = None # noqa
class EventletTimerTest(unittest.TestCase):
def setUp(self):
if EventletConnection is None:
raise unittest.SkipTest("Eventlet libraries not available")
if not is_eventlet_monkey_patched():
raise unittest.SkipTest("Can't test eventlet without monkey patching")
EventletConnection.initialize_reactor()
def test_multi_timer_validation(self, *args):
"""
Verify that timer timeouts are honored appropriately
"""
# Tests timers submitted in order at various timeouts
submit_and_wait_for_completion(self, EventletConnection, 0, 100, 1, 100)
# Tests timers submitted in reverse order at various timeouts
submit_and_wait_for_completion(self, EventletConnection, 100, 0, -1, 100)
# Tests timers submitted in varying order at various timeouts
submit_and_wait_for_completion(self, EventletConnection, 0, 100, 1, 100, True)
def test_timer_cancellation(self):
"""
Verify that timer cancellation is honored
"""
# Various lists for tracking callback stage
timeout = .1
callback = TimerCallback(timeout)
timer = EventletConnection.create_timer(timeout, callback.invoke)
timer.cancel()
        # Release context to allow the timer thread to run.
time.sleep(.2)
timer_manager = EventletConnection._timers
# Assert that the cancellation was honored
self.assertFalse(timer_manager._queue)
self.assertFalse(timer_manager._new_timers)
self.assertFalse(callback.was_invoked())
|
xavierdutreilh/django-mailviews
|
refs/heads/master
|
mailviews/tests/manage.py
|
3
|
#!/usr/bin/env python
import logging
import sys
from mailviews.tests import settings
logging.basicConfig(level=logging.DEBUG)
if __name__ == "__main__":
try:
from django.core.management import execute_manager
execute_manager(settings)
except ImportError:
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
qq1012803704/zulip
|
refs/heads/master
|
zerver/__init__.py
|
12133432
| |
k3nnyfr/s2a_fr-nsis
|
refs/heads/master
|
s2a/Python/Lib/site-packages/serial/urlhandler/__init__.py
|
12133432
| |
softak/webfaction_demo
|
refs/heads/master
|
apps/pages/__init__.py
|
12133432
| |
nhenezi/kuma
|
refs/heads/master
|
vendor/packages/pylint/test/input/func_noerror_classes_meth_signature.py
|
6
|
# pylint: disable-msg=C0111,R0922,R0903
"""#2485
W0222 "Signature differs from overriden method" false positive
"""
__revision__ = 1
class Super(object):
def method(self, param):
raise NotImplementedError
class Sub(Super):
def method(self, param = 'abc'):
pass
|
pdellaert/ansible
|
refs/heads/devel
|
lib/ansible/modules/remote_management/oneview/oneview_fcoe_network_info.py
|
21
|
#!/usr/bin/python
# Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: oneview_fcoe_network_info
short_description: Retrieve the information about one or more of the OneView FCoE Networks
description:
- Retrieve the information about one or more of the FCoE Networks from OneView.
- This module was called C(oneview_fcoe_network_facts) before Ansible 2.9, returning C(ansible_facts).
Note that the M(oneview_fcoe_network_info) module no longer returns C(ansible_facts)!
version_added: "2.4"
requirements:
- hpOneView >= 2.0.1
author:
- Felipe Bulsoni (@fgbulsoni)
- Thiago Miotto (@tmiotto)
- Adriane Cardozo (@adriane-cardozo)
options:
name:
description:
- FCoE Network name.
extends_documentation_fragment:
- oneview
- oneview.factsparams
'''
EXAMPLES = '''
- name: Gather information about all FCoE Networks
oneview_fcoe_network_info:
config: /etc/oneview/oneview_config.json
delegate_to: localhost
register: result
- debug:
msg: "{{ result.fcoe_networks }}"
- name: Gather paginated, filtered and sorted information about FCoE Networks
oneview_fcoe_network_info:
config: /etc/oneview/oneview_config.json
params:
start: 0
count: 3
sort: 'name:descending'
filter: 'vlanId=2'
delegate_to: localhost
register: result
- debug:
msg: "{{ result.fcoe_networks }}"
- name: Gather information about a FCoE Network by name
oneview_fcoe_network_info:
config: /etc/oneview/oneview_config.json
name: Test FCoE Network Information
delegate_to: localhost
register: result
- debug:
msg: "{{ result.fcoe_networks }}"
'''
RETURN = '''
fcoe_networks:
description: Has all the OneView information about the FCoE Networks.
returned: Always, but can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModuleBase
class FcoeNetworkInfoModule(OneViewModuleBase):
def __init__(self):
argument_spec = dict(
name=dict(type='str'),
params=dict(type='dict'),
)
super(FcoeNetworkInfoModule, self).__init__(additional_arg_spec=argument_spec)
self.is_old_facts = self.module._name == 'oneview_fcoe_network_facts'
if self.is_old_facts:
self.module.deprecate("The 'oneview_fcoe_network_facts' module has been renamed to 'oneview_fcoe_network_info', "
"and the renamed one no longer returns ansible_facts", version='2.13')
def execute_module(self):
if self.module.params['name']:
fcoe_networks = self.oneview_client.fcoe_networks.get_by('name', self.module.params['name'])
else:
fcoe_networks = self.oneview_client.fcoe_networks.get_all(**self.facts_params)
if self.is_old_facts:
return dict(changed=False,
ansible_facts=dict(fcoe_networks=fcoe_networks))
else:
return dict(changed=False, fcoe_networks=fcoe_networks)
def main():
FcoeNetworkInfoModule().run()
if __name__ == '__main__':
main()
|
phlizik/xbmctorrent
|
refs/heads/master
|
resources/site-packages/html5lib/treebuilders/_base.py
|
715
|
from __future__ import absolute_import, division, unicode_literals
from six import text_type
from ..constants import scopingElements, tableInsertModeElements, namespaces
# The scope markers are inserted when entering object elements,
# marquees, table cells, and table captions, and are used to prevent formatting
# from "leaking" into tables, object elements, and marquees.
Marker = None
listElementsMap = {
None: (frozenset(scopingElements), False),
"button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False),
"list": (frozenset(scopingElements | set([(namespaces["html"], "ol"),
(namespaces["html"], "ul")])), False),
"table": (frozenset([(namespaces["html"], "html"),
(namespaces["html"], "table")]), False),
"select": (frozenset([(namespaces["html"], "optgroup"),
(namespaces["html"], "option")]), True)
}
class Node(object):
def __init__(self, name):
"""Node representing an item in the tree.
name - The tag name associated with the node
parent - The parent of the current node (or None for the document node)
value - The value of the current node (applies to text nodes and
        comments)
attributes - a dict holding name, value pairs for attributes of the node
childNodes - a list of child nodes of the current node. This must
include all elements but not necessarily other node types
_flags - A list of miscellaneous flags that can be set on the node
"""
self.name = name
self.parent = None
self.value = None
self.attributes = {}
self.childNodes = []
self._flags = []
def __str__(self):
attributesStr = " ".join(["%s=\"%s\"" % (name, value)
for name, value in
self.attributes.items()])
if attributesStr:
return "<%s %s>" % (self.name, attributesStr)
else:
return "<%s>" % (self.name)
def __repr__(self):
return "<%s>" % (self.name)
def appendChild(self, node):
"""Insert node as a child of the current node
"""
raise NotImplementedError
def insertText(self, data, insertBefore=None):
"""Insert data as text in the current node, positioned before the
start of node insertBefore or to the end of the node's text.
"""
raise NotImplementedError
def insertBefore(self, node, refNode):
"""Insert node as a child of the current node, before refNode in the
list of child nodes. Raises ValueError if refNode is not a child of
the current node"""
raise NotImplementedError
def removeChild(self, node):
"""Remove node from the children of the current node
"""
raise NotImplementedError
def reparentChildren(self, newParent):
"""Move all the children of the current node to newParent.
This is needed so that trees that don't store text as nodes move the
text in the correct way
"""
# XXX - should this method be made more general?
for child in self.childNodes:
newParent.appendChild(child)
self.childNodes = []
def cloneNode(self):
"""Return a shallow copy of the current node i.e. a node with the same
name and attributes but with no parent or child nodes
"""
raise NotImplementedError
def hasContent(self):
"""Return true if the node has children or text, false otherwise
"""
raise NotImplementedError
class ActiveFormattingElements(list):
def append(self, node):
equalCount = 0
if node != Marker:
for element in self[::-1]:
if element == Marker:
break
if self.nodesEqual(element, node):
equalCount += 1
if equalCount == 3:
self.remove(element)
break
list.append(self, node)
def nodesEqual(self, node1, node2):
if not node1.nameTuple == node2.nameTuple:
return False
if not node1.attributes == node2.attributes:
return False
return True
class TreeBuilder(object):
"""Base treebuilder implementation
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
"""
# Document class
documentClass = None
# The class to use for creating a node
elementClass = None
# The class to use for creating comments
commentClass = None
# The class to use for creating doctypes
doctypeClass = None
# Fragment class
fragmentClass = None
def __init__(self, namespaceHTMLElements):
if namespaceHTMLElements:
self.defaultNamespace = "http://www.w3.org/1999/xhtml"
else:
self.defaultNamespace = None
self.reset()
def reset(self):
self.openElements = []
self.activeFormattingElements = ActiveFormattingElements()
# XXX - rename these to headElement, formElement
self.headPointer = None
self.formPointer = None
self.insertFromTable = False
self.document = self.documentClass()
def elementInScope(self, target, variant=None):
        # If we pass a node in we match that. If we pass a string,
# match any node with that name
exactNode = hasattr(target, "nameTuple")
listElements, invert = listElementsMap[variant]
for node in reversed(self.openElements):
if (node.name == target and not exactNode or
node == target and exactNode):
return True
elif (invert ^ (node.nameTuple in listElements)):
return False
assert False # We should never reach this point
def reconstructActiveFormattingElements(self):
# Within this algorithm the order of steps described in the
# specification is not quite the same as the order of steps in the
# code. It should still do the same though.
# Step 1: stop the algorithm when there's nothing to do.
if not self.activeFormattingElements:
return
# Step 2 and step 3: we start with the last element. So i is -1.
i = len(self.activeFormattingElements) - 1
entry = self.activeFormattingElements[i]
if entry == Marker or entry in self.openElements:
return
# Step 6
while entry != Marker and entry not in self.openElements:
if i == 0:
# This will be reset to 0 below
i = -1
break
i -= 1
# Step 5: let entry be one earlier in the list.
entry = self.activeFormattingElements[i]
while True:
# Step 7
i += 1
# Step 8
entry = self.activeFormattingElements[i]
clone = entry.cloneNode() # Mainly to get a new copy of the attributes
# Step 9
element = self.insertElement({"type": "StartTag",
"name": clone.name,
"namespace": clone.namespace,
"data": clone.attributes})
# Step 10
self.activeFormattingElements[i] = element
# Step 11
if element == self.activeFormattingElements[-1]:
break
def clearActiveFormattingElements(self):
entry = self.activeFormattingElements.pop()
while self.activeFormattingElements and entry != Marker:
entry = self.activeFormattingElements.pop()
def elementInActiveFormattingElements(self, name):
"""Check if an element exists between the end of the active
formatting elements and the last marker. If it does, return it, else
return false"""
for item in self.activeFormattingElements[::-1]:
# Check for Marker first because if it's a Marker it doesn't have a
# name attribute.
if item == Marker:
break
elif item.name == name:
return item
return False
def insertRoot(self, token):
element = self.createElement(token)
self.openElements.append(element)
self.document.appendChild(element)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
doctype = self.doctypeClass(name, publicId, systemId)
self.document.appendChild(doctype)
def insertComment(self, token, parent=None):
if parent is None:
parent = self.openElements[-1]
parent.appendChild(self.commentClass(token["data"]))
def createElement(self, token):
"""Create an element but don't insert it anywhere"""
name = token["name"]
namespace = token.get("namespace", self.defaultNamespace)
element = self.elementClass(name, namespace)
element.attributes = token["data"]
return element
def _getInsertFromTable(self):
return self._insertFromTable
def _setInsertFromTable(self, value):
"""Switch the function used to insert an element from the
normal one to the misnested table one and back again"""
self._insertFromTable = value
if value:
self.insertElement = self.insertElementTable
else:
self.insertElement = self.insertElementNormal
insertFromTable = property(_getInsertFromTable, _setInsertFromTable)
def insertElementNormal(self, token):
name = token["name"]
assert isinstance(name, text_type), "Element %s not unicode" % name
namespace = token.get("namespace", self.defaultNamespace)
element = self.elementClass(name, namespace)
element.attributes = token["data"]
self.openElements[-1].appendChild(element)
self.openElements.append(element)
return element
def insertElementTable(self, token):
"""Create an element and insert it into the tree"""
element = self.createElement(token)
if self.openElements[-1].name not in tableInsertModeElements:
return self.insertElementNormal(token)
else:
# We should be in the InTable mode. This means we want to do
# special magic element rearranging
parent, insertBefore = self.getTableMisnestedNodePosition()
if insertBefore is None:
parent.appendChild(element)
else:
parent.insertBefore(element, insertBefore)
self.openElements.append(element)
return element
def insertText(self, data, parent=None):
"""Insert text data."""
if parent is None:
parent = self.openElements[-1]
if (not self.insertFromTable or (self.insertFromTable and
self.openElements[-1].name
not in tableInsertModeElements)):
parent.insertText(data)
else:
# We should be in the InTable mode. This means we want to do
# special magic element rearranging
parent, insertBefore = self.getTableMisnestedNodePosition()
parent.insertText(data, insertBefore)
def getTableMisnestedNodePosition(self):
"""Get the foster parent element, and sibling to insert before
(or None) when inserting a misnested table node"""
# The foster parent element is the one which comes before the most
# recently opened table element
# XXX - this is really inelegant
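        # Example: when "<table>x" is parsed, the text "x" is foster-parented,
        # i.e. inserted into the table's parent immediately before the <table>.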
lastTable = None
fosterParent = None
insertBefore = None
for elm in self.openElements[::-1]:
if elm.name == "table":
lastTable = elm
break
if lastTable:
# XXX - we should really check that this parent is actually a
# node here
if lastTable.parent:
fosterParent = lastTable.parent
insertBefore = lastTable
else:
fosterParent = self.openElements[
self.openElements.index(lastTable) - 1]
else:
fosterParent = self.openElements[0]
return fosterParent, insertBefore
def generateImpliedEndTags(self, exclude=None):
name = self.openElements[-1].name
# XXX td, th and tr are not actually needed
if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt"))
and name != exclude):
self.openElements.pop()
# XXX This is not entirely what the specification says. We should
# investigate it more closely.
self.generateImpliedEndTags(exclude)
def getDocument(self):
"Return the final tree"
return self.document
def getFragment(self):
"Return the final fragment"
# assert self.innerHTML
fragment = self.fragmentClass()
self.openElements[0].reparentChildren(fragment)
return fragment
def testSerializer(self, node):
"""Serialize the subtree of node in the format required by unit tests
node - the node from which to start serializing"""
raise NotImplementedError
|
thomdixon/pysha2
|
refs/heads/master
|
sha2/sha224.py
|
1
|
#!/usr/bin/python
__author__ = 'Thomas Dixon'
__license__ = 'MIT'
from sha2.sha256 import sha256
def new(m=None):
return sha224(m)
class sha224(sha256):
_h = (0xc1059ed8L, 0x367cd507L, 0x3070dd17L, 0xf70e5939L,
0xffc00b31L, 0x68581511L, 0x64f98fa7L, 0xbefa4fa4L)
_output_size = 7
digest_size = 28
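# Usage sketch (assumes the hashlib-style update()/hexdigest() interface
# inherited from sha2.sha256.sha256; those methods are not defined in this
# file):
#   h = new('abc')
#   h.hexdigest()  # 56 hex chars, i.e. the 28-byte SHA-224 digest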
|
mopsalarm/feedbacky
|
refs/heads/master
|
main.py
|
1
|
import argparse
import base64
import traceback
import zlib
import flask
from flask import request, jsonify
from mailer import Mailer, Message
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("--port", default=5000, type=int, help="Port to bind the http api to")
parser.add_argument("--host", default="smtp.gmail.com", help="Host of the smtp server")
parser.add_argument("--user", required=True, help="Username to use to login into the smtp server")
parser.add_argument("--password", required=True, help="Password for the smtp server")
parser.add_argument("--receiver", required=True, help="Address of the receiver of feedback mails")
return parser.parse_args()
def make_app(args):
app = flask.Flask(__name__)
@app.route("/post", methods=["POST"])
def post():
version = request.form["version"]
username = request.form.get("name", "")
feedback = request.form.get("feedback", "")
if "logcat64" in request.form:
logcat = base64.b64decode(request.form.get("logcat64"))
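            # wbits=32+15 lets zlib auto-detect a zlib or gzip header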
logcat = zlib.decompress(logcat, 32+15).decode("utf8")
else:
logcat = request.form.get("logcat", "")
send_feedback_mail(version, username, feedback, logcat)
return jsonify(success=True)
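    # Example client call (hypothetical host; 5000 is the default port, and
    # the form fields match those read in post() above):
    #   curl -X POST http://localhost:5000/post \
    #        -d version=1.2.3 -d name=alice -d feedback='works great'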
def send_feedback_mail(version, username, feedback, logcat):
# noinspection PyBroadException
try:
msg = Message(From=args.user, To=args.receiver, charset="utf8")
msg.Subject = u"Feedback {} ({})".format(version, username)
msg.Body = u"User: {0} http://pr0gramm.com/user/{0}\nFeedback: {1}\n\nLogcat: {2}\n".format(username, feedback, logcat)
mailer = Mailer(args.host, port=587, use_tls=True, usr=args.user, pwd=args.password)
mailer.send(msg)
except:
traceback.print_exc()
return app
def main():
args = parse_arguments()
app = make_app(args)
app.run(host="0.0.0.0", port=args.port, debug=False)
if __name__ == '__main__':
main()
|
Printrbot/FirmwareUpdatr
|
refs/heads/master
|
dfu/src/pyserial-2.7/examples/wxTerminal.py
|
22
|
#!/usr/bin/env python
# generated by wxGlade 0.3.1 on Fri Oct 03 23:23:45 2003
#from wxPython.wx import *
import wx
import wxSerialConfigDialog
import serial
import threading
#----------------------------------------------------------------------
# Create an own event type, so that GUI updates can be delegated
# this is required as on some platforms only the main thread can
# access the GUI without crashing. wxMutexGuiEnter/wxMutexGuiLeave
# could be used too, but an event is more elegant.
SERIALRX = wx.NewEventType()
# bind to serial data receive events
EVT_SERIALRX = wx.PyEventBinder(SERIALRX, 0)
class SerialRxEvent(wx.PyCommandEvent):
eventType = SERIALRX
def __init__(self, windowID, data):
wx.PyCommandEvent.__init__(self, self.eventType, windowID)
self.data = data
def Clone(self):
        return self.__class__(self.GetId(), self.data)
#----------------------------------------------------------------------
ID_CLEAR = wx.NewId()
ID_SAVEAS = wx.NewId()
ID_SETTINGS = wx.NewId()
ID_TERM = wx.NewId()
ID_EXIT = wx.NewId()
NEWLINE_CR = 0
NEWLINE_LF = 1
NEWLINE_CRLF = 2
class TerminalSetup:
"""Placeholder for various terminal settings. Used to pass the
options to the TerminalSettingsDialog."""
def __init__(self):
self.echo = False
self.unprintable = False
self.newline = NEWLINE_CRLF
class TerminalSettingsDialog(wx.Dialog):
"""Simple dialog with common terminal settings like echo, newline mode."""
def __init__(self, *args, **kwds):
self.settings = kwds['settings']
del kwds['settings']
# begin wxGlade: TerminalSettingsDialog.__init__
kwds["style"] = wx.DEFAULT_DIALOG_STYLE
wx.Dialog.__init__(self, *args, **kwds)
self.checkbox_echo = wx.CheckBox(self, -1, "Local Echo")
self.checkbox_unprintable = wx.CheckBox(self, -1, "Show unprintable characters")
self.radio_box_newline = wx.RadioBox(self, -1, "Newline Handling", choices=["CR only", "LF only", "CR+LF"], majorDimension=0, style=wx.RA_SPECIFY_ROWS)
self.button_ok = wx.Button(self, -1, "OK")
self.button_cancel = wx.Button(self, -1, "Cancel")
self.__set_properties()
self.__do_layout()
# end wxGlade
self.__attach_events()
self.checkbox_echo.SetValue(self.settings.echo)
self.checkbox_unprintable.SetValue(self.settings.unprintable)
self.radio_box_newline.SetSelection(self.settings.newline)
def __set_properties(self):
# begin wxGlade: TerminalSettingsDialog.__set_properties
self.SetTitle("Terminal Settings")
self.radio_box_newline.SetSelection(0)
self.button_ok.SetDefault()
# end wxGlade
def __do_layout(self):
# begin wxGlade: TerminalSettingsDialog.__do_layout
sizer_2 = wx.BoxSizer(wx.VERTICAL)
sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
sizer_4 = wx.StaticBoxSizer(wx.StaticBox(self, -1, "Input/Output"), wx.VERTICAL)
sizer_4.Add(self.checkbox_echo, 0, wx.ALL, 4)
sizer_4.Add(self.checkbox_unprintable, 0, wx.ALL, 4)
sizer_4.Add(self.radio_box_newline, 0, 0, 0)
sizer_2.Add(sizer_4, 0, wx.EXPAND, 0)
sizer_3.Add(self.button_ok, 0, 0, 0)
sizer_3.Add(self.button_cancel, 0, 0, 0)
sizer_2.Add(sizer_3, 0, wx.ALL|wx.ALIGN_RIGHT, 4)
self.SetAutoLayout(1)
self.SetSizer(sizer_2)
sizer_2.Fit(self)
sizer_2.SetSizeHints(self)
self.Layout()
# end wxGlade
def __attach_events(self):
self.Bind(wx.EVT_BUTTON, self.OnOK, id = self.button_ok.GetId())
self.Bind(wx.EVT_BUTTON, self.OnCancel, id = self.button_cancel.GetId())
def OnOK(self, events):
"""Update data wil new values and close dialog."""
self.settings.echo = self.checkbox_echo.GetValue()
self.settings.unprintable = self.checkbox_unprintable.GetValue()
self.settings.newline = self.radio_box_newline.GetSelection()
self.EndModal(wx.ID_OK)
def OnCancel(self, events):
"""Do not update data but close dialog."""
self.EndModal(wx.ID_CANCEL)
# end of class TerminalSettingsDialog
class TerminalFrame(wx.Frame):
"""Simple terminal program for wxPython"""
def __init__(self, *args, **kwds):
self.serial = serial.Serial()
self.serial.timeout = 0.5 #make sure that the alive event can be checked from time to time
self.settings = TerminalSetup() #placeholder for the settings
self.thread = None
self.alive = threading.Event()
# begin wxGlade: TerminalFrame.__init__
kwds["style"] = wx.DEFAULT_FRAME_STYLE
wx.Frame.__init__(self, *args, **kwds)
self.text_ctrl_output = wx.TextCtrl(self, -1, "", style=wx.TE_MULTILINE|wx.TE_READONLY)
# Menu Bar
self.frame_terminal_menubar = wx.MenuBar()
self.SetMenuBar(self.frame_terminal_menubar)
wxglade_tmp_menu = wx.Menu()
wxglade_tmp_menu.Append(ID_CLEAR, "&Clear", "", wx.ITEM_NORMAL)
wxglade_tmp_menu.Append(ID_SAVEAS, "&Save Text As...", "", wx.ITEM_NORMAL)
wxglade_tmp_menu.AppendSeparator()
wxglade_tmp_menu.Append(ID_SETTINGS, "&Port Settings...", "", wx.ITEM_NORMAL)
wxglade_tmp_menu.Append(ID_TERM, "&Terminal Settings...", "", wx.ITEM_NORMAL)
wxglade_tmp_menu.AppendSeparator()
wxglade_tmp_menu.Append(ID_EXIT, "&Exit", "", wx.ITEM_NORMAL)
self.frame_terminal_menubar.Append(wxglade_tmp_menu, "&File")
# Menu Bar end
self.__set_properties()
self.__do_layout()
# end wxGlade
self.__attach_events() #register events
self.OnPortSettings(None) #call setup dialog on startup, opens port
if not self.alive.isSet():
self.Close()
def StartThread(self):
"""Start the receiver thread"""
self.thread = threading.Thread(target=self.ComPortThread)
self.thread.setDaemon(1)
self.alive.set()
self.thread.start()
def StopThread(self):
"""Stop the receiver thread, wait util it's finished."""
if self.thread is not None:
self.alive.clear() #clear alive event for thread
self.thread.join() #wait until thread has finished
self.thread = None
def __set_properties(self):
# begin wxGlade: TerminalFrame.__set_properties
self.SetTitle("Serial Terminal")
self.SetSize((546, 383))
# end wxGlade
def __do_layout(self):
# begin wxGlade: TerminalFrame.__do_layout
sizer_1 = wx.BoxSizer(wx.VERTICAL)
sizer_1.Add(self.text_ctrl_output, 1, wx.EXPAND, 0)
self.SetAutoLayout(1)
self.SetSizer(sizer_1)
self.Layout()
# end wxGlade
def __attach_events(self):
#register events at the controls
self.Bind(wx.EVT_MENU, self.OnClear, id = ID_CLEAR)
self.Bind(wx.EVT_MENU, self.OnSaveAs, id = ID_SAVEAS)
self.Bind(wx.EVT_MENU, self.OnExit, id = ID_EXIT)
self.Bind(wx.EVT_MENU, self.OnPortSettings, id = ID_SETTINGS)
self.Bind(wx.EVT_MENU, self.OnTermSettings, id = ID_TERM)
self.text_ctrl_output.Bind(wx.EVT_CHAR, self.OnKey)
self.Bind(EVT_SERIALRX, self.OnSerialRead)
self.Bind(wx.EVT_CLOSE, self.OnClose)
def OnExit(self, event):
"""Menu point Exit"""
self.Close()
def OnClose(self, event):
"""Called on application shutdown."""
self.StopThread() #stop reader thread
self.serial.close() #cleanup
self.Destroy() #close windows, exit app
def OnSaveAs(self, event):
"""Save contents of output window."""
filename = None
dlg = wx.FileDialog(None, "Save Text As...", ".", "", "Text File|*.txt|All Files|*", wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
filename = dlg.GetPath()
dlg.Destroy()
if filename is not None:
f = file(filename, 'w')
text = self.text_ctrl_output.GetValue()
if type(text) == unicode:
                text = text.encode("latin1") #hm, is that a good assumption?
f.write(text)
f.close()
def OnClear(self, event):
"""Clear contents of output window."""
self.text_ctrl_output.Clear()
def OnPortSettings(self, event=None):
"""Show the portsettings dialog. The reader thread is stopped for the
settings change."""
if event is not None: #will be none when called on startup
self.StopThread()
self.serial.close()
ok = False
while not ok:
dialog_serial_cfg = wxSerialConfigDialog.SerialConfigDialog(None, -1, "",
show=wxSerialConfigDialog.SHOW_BAUDRATE|wxSerialConfigDialog.SHOW_FORMAT|wxSerialConfigDialog.SHOW_FLOW,
serial=self.serial
)
result = dialog_serial_cfg.ShowModal()
dialog_serial_cfg.Destroy()
            #reopen the port whenever called from the menu (even on cancel);
            #on startup, open it only when the dialog was confirmed with OK
if result == wx.ID_OK or event is not None:
try:
self.serial.open()
except serial.SerialException, e:
dlg = wx.MessageDialog(None, str(e), "Serial Port Error", wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
dlg.Destroy()
else:
self.StartThread()
self.SetTitle("Serial Terminal on %s [%s, %s%s%s%s%s]" % (
self.serial.portstr,
self.serial.baudrate,
self.serial.bytesize,
self.serial.parity,
self.serial.stopbits,
self.serial.rtscts and ' RTS/CTS' or '',
self.serial.xonxoff and ' Xon/Xoff' or '',
)
)
ok = True
else:
#on startup, dialog aborted
self.alive.clear()
ok = True
def OnTermSettings(self, event):
"""Menu point Terminal Settings. Show the settings dialog
with the current terminal settings"""
dialog = TerminalSettingsDialog(None, -1, "", settings=self.settings)
result = dialog.ShowModal()
dialog.Destroy()
def OnKey(self, event):
"""Key event handler. if the key is in the ASCII range, write it to the serial port.
Newline handling and local echo is also done here."""
code = event.GetKeyCode()
if code < 256: #is it printable?
if code == 13: #is it a newline? (check for CR which is the RETURN key)
if self.settings.echo: #do echo if needed
self.text_ctrl_output.AppendText('\n')
if self.settings.newline == NEWLINE_CR:
self.serial.write('\r') #send CR
elif self.settings.newline == NEWLINE_LF:
self.serial.write('\n') #send LF
elif self.settings.newline == NEWLINE_CRLF:
self.serial.write('\r\n') #send CR+LF
else:
char = chr(code)
if self.settings.echo: #do echo if needed
self.text_ctrl_output.WriteText(char)
                self.serial.write(char) #send the character
else:
print "Extra Key:", code
def OnSerialRead(self, event):
"""Handle input from the serial port."""
text = event.data
if self.settings.unprintable:
text = ''.join([(c >= ' ') and c or '<%d>' % ord(c) for c in text])
self.text_ctrl_output.AppendText(text)
def ComPortThread(self):
"""Thread that handles the incomming traffic. Does the basic input
transformation (newlines) and generates an SerialRxEvent"""
while self.alive.isSet(): #loop while alive event is true
            text = self.serial.read(1) #read one, with timeout
if text: #check if not timeout
n = self.serial.inWaiting() #look if there is more to read
if n:
text = text + self.serial.read(n) #get it
#newline transformation
if self.settings.newline == NEWLINE_CR:
text = text.replace('\r', '\n')
elif self.settings.newline == NEWLINE_LF:
pass
elif self.settings.newline == NEWLINE_CRLF:
text = text.replace('\r\n', '\n')
event = SerialRxEvent(self.GetId(), text)
self.GetEventHandler().AddPendingEvent(event)
#~ self.OnSerialRead(text) #output text in window
# end of class TerminalFrame
class MyApp(wx.App):
def OnInit(self):
wx.InitAllImageHandlers()
frame_terminal = TerminalFrame(None, -1, "")
self.SetTopWindow(frame_terminal)
frame_terminal.Show(1)
return 1
# end of class MyApp
if __name__ == "__main__":
app = MyApp(0)
app.MainLoop()
|
bastik/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/eporner.py
|
129
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
str_to_int,
)
class EpornerIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?eporner\.com/hd-porn/(?P<id>\d+)/(?P<display_id>[\w-]+)'
_TEST = {
'url': 'http://www.eporner.com/hd-porn/95008/Infamous-Tiffany-Teen-Strip-Tease-Video/',
'md5': '39d486f046212d8e1b911c52ab4691f8',
'info_dict': {
'id': '95008',
'display_id': 'Infamous-Tiffany-Teen-Strip-Tease-Video',
'ext': 'mp4',
'title': 'Infamous Tiffany Teen Strip Tease Video',
'duration': 1838,
'view_count': int,
'age_limit': 18,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
title = self._html_search_regex(
r'<title>(.*?) - EPORNER', webpage, 'title')
redirect_url = 'http://www.eporner.com/config5/%s' % video_id
player_code = self._download_webpage(
redirect_url, display_id, note='Downloading player config')
sources = self._search_regex(
r'(?s)sources\s*:\s*\[\s*({.+?})\s*\]', player_code, 'sources')
formats = []
for video_url, format_id in re.findall(r'file\s*:\s*"([^"]+)",\s*label\s*:\s*"([^"]+)"', sources):
fmt = {
'url': video_url,
'format_id': format_id,
}
m = re.search(r'^(\d+)', format_id)
if m:
fmt['height'] = int(m.group(1))
formats.append(fmt)
self._sort_formats(formats)
duration = parse_duration(self._html_search_meta('duration', webpage))
view_count = str_to_int(self._search_regex(
r'id="cinemaviews">\s*([0-9,]+)\s*<small>views',
webpage, 'view count', fatal=False))
return {
'id': video_id,
'display_id': display_id,
'title': title,
'duration': duration,
'view_count': view_count,
'formats': formats,
'age_limit': 18,
}
|
mKeRix/home-assistant
|
refs/heads/dev
|
homeassistant/util/yaml/objects.py
|
10
|
"""Custom yaml object types."""
class NodeListClass(list):
"""Wrapper class to be able to add attributes on a list."""
class NodeStrClass(str):
"""Wrapper class to be able to add attributes on a string."""
|
betoesquivel/fil2014
|
refs/heads/master
|
build/django/build/lib.linux-x86_64-2.7/django/conf/locale/__init__.py
|
111
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
# About name_local: capitalize it as if your language name was appearing
# inside a sentence in your language.
LANG_INFO = {
'af': {
'bidi': False,
'code': 'af',
'name': 'Afrikaans',
'name_local': 'Afrikaans',
},
'ar': {
'bidi': True,
'code': 'ar',
'name': 'Arabic',
'name_local': 'العربيّة',
},
'az': {
'bidi': True,
'code': 'az',
'name': 'Azerbaijani',
'name_local': 'azərbaycan dili',
},
'be': {
'bidi': False,
'code': 'be',
'name': 'Belarusian',
'name_local': 'беларуская',
},
'bg': {
'bidi': False,
'code': 'bg',
'name': 'Bulgarian',
'name_local': 'български',
},
'bn': {
'bidi': False,
'code': 'bn',
'name': 'Bengali',
'name_local': 'বাংলা',
},
'br': {
'bidi': False,
'code': 'br',
'name': 'Breton',
'name_local': 'brezhoneg',
},
'bs': {
'bidi': False,
'code': 'bs',
'name': 'Bosnian',
'name_local': 'bosanski',
},
'ca': {
'bidi': False,
'code': 'ca',
'name': 'Catalan',
'name_local': 'català',
},
'cs': {
'bidi': False,
'code': 'cs',
'name': 'Czech',
'name_local': 'česky',
},
'cy': {
'bidi': False,
'code': 'cy',
'name': 'Welsh',
'name_local': 'Cymraeg',
},
'da': {
'bidi': False,
'code': 'da',
'name': 'Danish',
'name_local': 'dansk',
},
'de': {
'bidi': False,
'code': 'de',
'name': 'German',
'name_local': 'Deutsch',
},
'el': {
'bidi': False,
'code': 'el',
'name': 'Greek',
'name_local': 'Ελληνικά',
},
'en': {
'bidi': False,
'code': 'en',
'name': 'English',
'name_local': 'English',
},
'en-gb': {
'bidi': False,
'code': 'en-gb',
'name': 'British English',
'name_local': 'British English',
},
'eo': {
'bidi': False,
'code': 'eo',
'name': 'Esperanto',
'name_local': 'Esperanto',
},
'es': {
'bidi': False,
'code': 'es',
'name': 'Spanish',
'name_local': 'español',
},
'es-ar': {
'bidi': False,
'code': 'es-ar',
'name': 'Argentinian Spanish',
'name_local': 'español de Argentina',
},
'es-mx': {
'bidi': False,
'code': 'es-mx',
'name': 'Mexican Spanish',
'name_local': 'español de Mexico',
},
'es-ni': {
'bidi': False,
'code': 'es-ni',
'name': 'Nicaraguan Spanish',
'name_local': 'español de Nicaragua',
},
'es-ve': {
'bidi': False,
'code': 'es-ve',
'name': 'Venezuelan Spanish',
'name_local': 'español de Venezuela',
},
'et': {
'bidi': False,
'code': 'et',
'name': 'Estonian',
'name_local': 'eesti',
},
'eu': {
'bidi': False,
'code': 'eu',
'name': 'Basque',
'name_local': 'Basque',
},
'fa': {
'bidi': True,
'code': 'fa',
'name': 'Persian',
'name_local': 'فارسی',
},
'fi': {
'bidi': False,
'code': 'fi',
'name': 'Finnish',
'name_local': 'suomi',
},
'fr': {
'bidi': False,
'code': 'fr',
'name': 'French',
'name_local': 'français',
},
'fy-nl': {
'bidi': False,
'code': 'fy-nl',
'name': 'Frisian',
'name_local': 'Frisian',
},
'ga': {
'bidi': False,
'code': 'ga',
'name': 'Irish',
'name_local': 'Gaeilge',
},
'gl': {
'bidi': False,
'code': 'gl',
'name': 'Galician',
'name_local': 'galego',
},
'he': {
'bidi': True,
'code': 'he',
'name': 'Hebrew',
'name_local': 'עברית',
},
'hi': {
'bidi': False,
'code': 'hi',
'name': 'Hindi',
'name_local': 'Hindi',
},
'hr': {
'bidi': False,
'code': 'hr',
'name': 'Croatian',
'name_local': 'Hrvatski',
},
'hu': {
'bidi': False,
'code': 'hu',
'name': 'Hungarian',
'name_local': 'Magyar',
},
'ia': {
'bidi': False,
'code': 'ia',
'name': 'Interlingua',
'name_local': 'Interlingua',
},
'id': {
'bidi': False,
'code': 'id',
'name': 'Indonesian',
'name_local': 'Bahasa Indonesia',
},
'is': {
'bidi': False,
'code': 'is',
'name': 'Icelandic',
'name_local': 'Íslenska',
},
'it': {
'bidi': False,
'code': 'it',
'name': 'Italian',
'name_local': 'italiano',
},
'ja': {
'bidi': False,
'code': 'ja',
'name': 'Japanese',
'name_local': '日本語',
},
'ka': {
'bidi': False,
'code': 'ka',
'name': 'Georgian',
'name_local': 'ქართული',
},
'kk': {
'bidi': False,
'code': 'kk',
'name': 'Kazakh',
'name_local': 'Қазақ',
},
'km': {
'bidi': False,
'code': 'km',
'name': 'Khmer',
'name_local': 'Khmer',
},
'kn': {
'bidi': False,
'code': 'kn',
'name': 'Kannada',
'name_local': 'Kannada',
},
'ko': {
'bidi': False,
'code': 'ko',
'name': 'Korean',
'name_local': '한국어',
},
'lb': {
'bidi': False,
'code': 'lb',
'name': 'Luxembourgish',
'name_local': 'Lëtzebuergesch',
},
'lt': {
'bidi': False,
'code': 'lt',
'name': 'Lithuanian',
'name_local': 'Lietuviškai',
},
'lv': {
'bidi': False,
'code': 'lv',
'name': 'Latvian',
'name_local': 'latviešu',
},
'mk': {
'bidi': False,
'code': 'mk',
'name': 'Macedonian',
'name_local': 'Македонски',
},
'ml': {
'bidi': False,
'code': 'ml',
'name': 'Malayalam',
'name_local': 'Malayalam',
},
'mn': {
'bidi': False,
'code': 'mn',
'name': 'Mongolian',
'name_local': 'Mongolian',
},
'my': {
'bidi': False,
'code': 'my',
'name': 'Burmese',
'name_local': 'မြန်မာဘာသာ',
},
'nb': {
'bidi': False,
'code': 'nb',
'name': 'Norwegian Bokmal',
'name_local': 'norsk (bokmål)',
},
'ne': {
'bidi': False,
'code': 'ne',
'name': 'Nepali',
'name_local': 'नेपाली',
},
'nl': {
'bidi': False,
'code': 'nl',
'name': 'Dutch',
'name_local': 'Nederlands',
},
'nn': {
'bidi': False,
'code': 'nn',
'name': 'Norwegian Nynorsk',
'name_local': 'norsk (nynorsk)',
},
'no': {
'bidi': False,
'code': 'no',
'name': 'Norwegian',
'name_local': 'norsk',
},
'os': {
'bidi': False,
'code': 'os',
'name': 'Ossetic',
'name_local': 'Ирон',
},
'pa': {
'bidi': False,
'code': 'pa',
'name': 'Punjabi',
'name_local': 'Punjabi',
},
'pl': {
'bidi': False,
'code': 'pl',
'name': 'Polish',
'name_local': 'polski',
},
'pt': {
'bidi': False,
'code': 'pt',
'name': 'Portuguese',
'name_local': 'Português',
},
'pt-br': {
'bidi': False,
'code': 'pt-br',
'name': 'Brazilian Portuguese',
'name_local': 'Português Brasileiro',
},
'ro': {
'bidi': False,
'code': 'ro',
'name': 'Romanian',
'name_local': 'Română',
},
'ru': {
'bidi': False,
'code': 'ru',
'name': 'Russian',
'name_local': 'Русский',
},
'sk': {
'bidi': False,
'code': 'sk',
'name': 'Slovak',
'name_local': 'slovenský',
},
'sl': {
'bidi': False,
'code': 'sl',
'name': 'Slovenian',
'name_local': 'Slovenščina',
},
'sq': {
'bidi': False,
'code': 'sq',
'name': 'Albanian',
'name_local': 'shqip',
},
'sr': {
'bidi': False,
'code': 'sr',
'name': 'Serbian',
'name_local': 'српски',
},
'sr-latn': {
'bidi': False,
'code': 'sr-latn',
'name': 'Serbian Latin',
'name_local': 'srpski (latinica)',
},
'sv': {
'bidi': False,
'code': 'sv',
'name': 'Swedish',
'name_local': 'svenska',
},
'sw': {
'bidi': False,
'code': 'sw',
'name': 'Swahili',
'name_local': 'Kiswahili',
},
'ta': {
'bidi': False,
'code': 'ta',
'name': 'Tamil',
'name_local': 'தமிழ்',
},
'te': {
'bidi': False,
'code': 'te',
'name': 'Telugu',
'name_local': 'తెలుగు',
},
'th': {
'bidi': False,
'code': 'th',
'name': 'Thai',
'name_local': 'ภาษาไทย',
},
'tr': {
'bidi': False,
'code': 'tr',
'name': 'Turkish',
'name_local': 'Türkçe',
},
'tt': {
'bidi': False,
'code': 'tt',
'name': 'Tatar',
'name_local': 'Татарча',
},
'udm': {
'bidi': False,
'code': 'udm',
'name': 'Udmurt',
'name_local': 'Удмурт',
},
'uk': {
'bidi': False,
'code': 'uk',
'name': 'Ukrainian',
'name_local': 'Українська',
},
'ur': {
'bidi': True,
'code': 'ur',
'name': 'Urdu',
'name_local': 'اردو',
},
'vi': {
'bidi': False,
'code': 'vi',
'name': 'Vietnamese',
'name_local': 'Tiếng Việt',
},
'zh-cn': {
'bidi': False,
'code': 'zh-cn',
'name': 'Simplified Chinese',
'name_local': '简体中文',
},
'zh-tw': {
'bidi': False,
'code': 'zh-tw',
'name': 'Traditional Chinese',
'name_local': '繁體中文',
}
}
|
Dhivyap/ansible
|
refs/heads/devel
|
test/units/modules/remote_management/oneview/test_oneview_network_set.py
|
68
|
# Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from units.compat import unittest, mock
from hpe_test_utils import OneViewBaseTestCase
from oneview_module_loader import NetworkSetModule
FAKE_MSG_ERROR = 'Fake message error'
NETWORK_SET = dict(
name='OneViewSDK Test Network Set',
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc']
)
NETWORK_SET_WITH_NEW_NAME = dict(name='OneViewSDK Test Network Set - Renamed')
PARAMS_FOR_PRESENT = dict(
config='config.json',
state='present',
data=dict(name=NETWORK_SET['name'],
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc'])
)
PARAMS_WITH_CHANGES = dict(
config='config.json',
state='present',
data=dict(name=NETWORK_SET['name'],
newName=NETWORK_SET['name'] + " - Renamed",
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc', 'Name of a Network'])
)
PARAMS_FOR_ABSENT = dict(
config='config.json',
state='absent',
data=dict(name=NETWORK_SET['name'])
)
class NetworkSetModuleSpec(unittest.TestCase,
OneViewBaseTestCase):
"""
OneViewBaseTestCase has common tests for class constructor and main function,
also provides the mocks used in this test case.
"""
def setUp(self):
self.configure_mocks(self, NetworkSetModule)
self.resource = self.mock_ov_client.network_sets
self.ethernet_network_client = self.mock_ov_client.ethernet_networks
def test_should_create_new_network_set(self):
self.resource.get_by.return_value = []
self.resource.create.return_value = NETWORK_SET
self.mock_ansible_module.params = PARAMS_FOR_PRESENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=NetworkSetModule.MSG_CREATED,
ansible_facts=dict(network_set=NETWORK_SET)
)
def test_should_not_update_when_data_is_equals(self):
self.resource.get_by.return_value = [NETWORK_SET]
self.mock_ansible_module.params = PARAMS_FOR_PRESENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
msg=NetworkSetModule.MSG_ALREADY_PRESENT,
ansible_facts=dict(network_set=NETWORK_SET)
)
def test_update_when_data_has_modified_attributes(self):
data_merged = dict(name=NETWORK_SET['name'] + " - Renamed",
networkUris=['/rest/ethernet-networks/aaa-bbb-ccc',
'/rest/ethernet-networks/ddd-eee-fff']
)
self.resource.get_by.side_effect = [NETWORK_SET], []
self.resource.update.return_value = data_merged
self.ethernet_network_client.get_by.return_value = [{'uri': '/rest/ethernet-networks/ddd-eee-fff'}]
self.mock_ansible_module.params = PARAMS_WITH_CHANGES
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=NetworkSetModule.MSG_UPDATED,
ansible_facts=dict(network_set=data_merged)
)
def test_should_raise_exception_when_ethernet_network_not_found(self):
self.resource.get_by.side_effect = [NETWORK_SET], []
self.ethernet_network_client.get_by.return_value = []
self.mock_ansible_module.params = PARAMS_WITH_CHANGES
NetworkSetModule().run()
self.mock_ansible_module.fail_json.assert_called_once_with(
exception=mock.ANY,
msg=NetworkSetModule.MSG_ETHERNET_NETWORK_NOT_FOUND + "Name of a Network"
)
def test_should_remove_network(self):
self.resource.get_by.return_value = [NETWORK_SET]
self.mock_ansible_module.params = PARAMS_FOR_ABSENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=NetworkSetModule.MSG_DELETED
)
def test_should_do_nothing_when_network_set_not_exist(self):
self.resource.get_by.return_value = []
self.mock_ansible_module.params = PARAMS_FOR_ABSENT
NetworkSetModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
msg=NetworkSetModule.MSG_ALREADY_ABSENT
)
def test_update_scopes_when_different(self):
params_to_scope = PARAMS_FOR_PRESENT.copy()
params_to_scope['data']['scopeUris'] = ['test']
self.mock_ansible_module.params = params_to_scope
resource_data = NETWORK_SET.copy()
resource_data['scopeUris'] = ['fake']
resource_data['uri'] = 'rest/network-sets/fake'
self.resource.get_by.return_value = [resource_data]
patch_return = resource_data.copy()
patch_return['scopeUris'] = ['test']
self.resource.patch.return_value = patch_return
NetworkSetModule().run()
self.resource.patch.assert_called_once_with('rest/network-sets/fake',
operation='replace',
path='/scopeUris',
value=['test'])
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
ansible_facts=dict(network_set=patch_return),
msg=NetworkSetModule.MSG_UPDATED
)
def test_should_do_nothing_when_scopes_are_the_same(self):
params_to_scope = PARAMS_FOR_PRESENT.copy()
params_to_scope['data']['scopeUris'] = ['test']
self.mock_ansible_module.params = params_to_scope
resource_data = NETWORK_SET.copy()
resource_data['scopeUris'] = ['test']
self.resource.get_by.return_value = [resource_data]
NetworkSetModule().run()
self.resource.patch.assert_not_called()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(network_set=resource_data),
msg=NetworkSetModule.MSG_ALREADY_PRESENT
)
if __name__ == '__main__':
unittest.main()
|
hupf/passwordchest
|
refs/heads/master
|
src/loxodo/twofish/twofish.py
|
1
|
## twofish.py - pure Python implementation of the Twofish algorithm.
## Bjorn Edstrom <be@bjrn.se> 13 december 2007.
##
## Copyrights
## ==========
##
## This code is a derived from an implementation by Dr Brian Gladman
## (gladman@seven77.demon.co.uk) which is subject to the following license.
## This Python implementation is not subject to any other license.
##
##/* This is an independent implementation of the encryption algorithm: */
##/* */
##/* Twofish by Bruce Schneier and colleagues */
##/* */
##/* which is a candidate algorithm in the Advanced Encryption Standard */
##/* programme of the US National Institute of Standards and Technology. */
##/* */
##/* Copyright in this implementation is held by Dr B R Gladman but I */
##/* hereby give permission for its free direct or derivative use subject */
##/* to acknowledgment of its origin and compliance with any conditions */
##/* that the originators of t he algorithm place on its exploitation. */
##/* */
##/* My thanks to Doug Whiting and Niels Ferguson for comments that led */
##/* to improvements in this implementation. */
##/* */
##/* Dr Brian Gladman (gladman@seven77.demon.co.uk) 14th January 1999 */
##
## The above copyright notice must not be removed.
##
## Information
## ===========
##
## Anyone thinking of using this code should reconsider. It's slow.
## Try python-mcrypt instead. In case a faster library is not installed
## on the target system, this code can be used as a portable fallback.
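##
## A minimal usage sketch (assuming this file is importable as ``twofish``;
## ECB on a single 16-byte block only -- padding and chaining modes are the
## caller's responsibility):
##
##     from twofish import Twofish
##     tf = Twofish('0123456789abcdef')        # 16-byte key
##     ct = tf.encrypt('sixteen byte msg')     # exactly one block
##     assert tf.decrypt(ct) == 'sixteen byte msg'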
# pylint: disable-all
block_size = 16
key_size = 32
class Twofish:
def __init__(self, key=None):
"""Twofish."""
if key:
self.set_key(key)
def set_key(self, key):
"""Init."""
key_len = len(key)
if key_len not in [16, 24, 32]:
# XXX: add padding?
raise KeyError, "key must be 16, 24 or 32 bytes"
if key_len % 4:
# XXX: add padding?
raise KeyError, "key not a multiple of 4"
if key_len > 32:
# XXX: prune?
raise KeyError, "key_len > 32"
self.context = TWI()
key_word32 = [0] * 32
i = 0
while key:
key_word32[i] = struct.unpack("<L", key[0:4])[0]
key = key[4:]
i += 1
set_key(self.context, key_word32, key_len)
def decrypt(self, block):
"""Decrypt blocks."""
if len(block) % 16:
raise ValueError, "block size must be a multiple of 16"
plaintext = ''
while block:
a, b, c, d = struct.unpack("<4L", block[:16])
temp = [a, b, c, d]
decrypt(self.context, temp)
plaintext += struct.pack("<4L", *temp)
block = block[16:]
return plaintext
def encrypt(self, block):
"""Encrypt blocks."""
if len(block) % 16:
raise ValueError, "block size must be a multiple of 16"
ciphertext = ''
while block:
a, b, c, d = struct.unpack("<4L", block[0:16])
temp = [a, b, c, d]
encrypt(self.context, temp)
ciphertext += struct.pack("<4L", *temp)
block = block[16:]
return ciphertext
def get_name(self):
"""Return the name of the cipher."""
return "Twofish"
def get_block_size(self):
"""Get cipher block size in bytes."""
return 16
def get_key_size(self):
"""Get cipher key size in bytes."""
return 32
#
# Private.
#
import struct
import sys
WORD_BIGENDIAN = 0
if sys.byteorder == 'big':
WORD_BIGENDIAN = 1
def rotr32(x, n):
return (x >> n) | ((x << (32 - n)) & 0xFFFFFFFF)
def rotl32(x, n):
return ((x << n) & 0xFFFFFFFF) | (x >> (32 - n))
def byteswap32(x):
return ((x & 0xff) << 24) | (((x >> 8) & 0xff) << 16) | \
(((x >> 16) & 0xff) << 8) | ((x >> 24) & 0xff)
class TWI:
def __init__(self):
self.k_len = 0 # word32
self.l_key = [0]*40 # word32
self.s_key = [0]*4 # word32
self.qt_gen = 0 # word32
self.q_tab = [[0]*256, [0]*256] # byte
self.mt_gen = 0 # word32
self.m_tab = [[0]*256, [0]*256, [0]*256, [0]*256] # word32
self.mk_tab = [[0]*256, [0]*256, [0]*256, [0]*256] # word32
def byte(x, n):
return (x >> (8 * n)) & 0xff
tab_5b = [0, 90, 180, 238]
tab_ef = [0, 238, 180, 90]
ror4 = [0, 8, 1, 9, 2, 10, 3, 11, 4, 12, 5, 13, 6, 14, 7, 15]
ashx = [0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, 5, 14, 7]
qt0 = [[8, 1, 7, 13, 6, 15, 3, 2, 0, 11, 5, 9, 14, 12, 10, 4],
[2, 8, 11, 13, 15, 7, 6, 14, 3, 1, 9, 4, 0, 10, 12, 5]]
qt1 = [[14, 12, 11, 8, 1, 2, 3, 5, 15, 4, 10, 6, 7, 0, 9, 13],
[1, 14, 2, 11, 4, 12, 3, 7, 6, 13, 10, 5, 15, 9, 0, 8]]
qt2 = [[11, 10, 5, 14, 6, 13, 9, 0, 12, 8, 15, 3, 2, 4, 7, 1],
[4, 12, 7, 5, 1, 6, 9, 10, 0, 14, 13, 8, 2, 11, 3, 15]]
qt3 = [[13, 7, 15, 4, 1, 2, 6, 14, 9, 11, 3, 0, 8, 5, 12, 10],
[11, 9, 5, 1, 12, 3, 13, 14, 6, 4, 7, 15, 2, 0, 8, 10]]
def qp(n, x): # word32, byte
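# One of the two fixed 8-bit permutations q0/q1 (selected by n), built from
# the 4-bit nibble tables above as described in the Twofish specification.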
n %= 0x100000000
x %= 0x100
a0 = x >> 4;
b0 = x & 15;
a1 = a0 ^ b0;
b1 = ror4[b0] ^ ashx[a0];
a2 = qt0[n][a1];
b2 = qt1[n][b1];
a3 = a2 ^ b2;
b3 = ror4[b2] ^ ashx[a2];
a4 = qt2[n][a3];
b4 = qt3[n][b3];
return (b4 << 4) | a4;
def gen_qtab(pkey):
for i in xrange(256):
pkey.q_tab[0][i] = qp(0, i)
pkey.q_tab[1][i] = qp(1, i)
def gen_mtab(pkey):
for i in xrange(256):
f01 = pkey.q_tab[1][i];
f5b = ((f01) ^ ((f01) >> 2) ^ tab_5b[(f01) & 3]);
fef = ((f01) ^ ((f01) >> 1) ^ ((f01) >> 2) ^ tab_ef[(f01) & 3]);
pkey.m_tab[0][i] = f01 + (f5b << 8) + (fef << 16) + (fef << 24);
pkey.m_tab[2][i] = f5b + (fef << 8) + (f01 << 16) + (fef << 24);
f01 = pkey.q_tab[0][i];
f5b = ((f01) ^ ((f01) >> 2) ^ tab_5b[(f01) & 3]);
fef = ((f01) ^ ((f01) >> 1) ^ ((f01) >> 2) ^ tab_ef[(f01) & 3]);
pkey.m_tab[1][i] = fef + (fef << 8) + (f5b << 16) + (f01 << 24);
pkey.m_tab[3][i] = f5b + (f01 << 8) + (fef << 16) + (f5b << 24);
def gen_mk_tab(pkey, key):
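# Precompute the key-dependent g-function as four 256-entry lookup tables
# (one per input byte lane) so each round avoids recomputing h() from scratch.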
if pkey.k_len == 2:
for i in xrange(256):
by = i % 0x100
pkey.mk_tab[0][i] = pkey.m_tab[0][pkey.q_tab[0][pkey.q_tab[0][by] ^ byte(key[1],0)] ^ byte(key[0],0)];
pkey.mk_tab[1][i] = pkey.m_tab[1][pkey.q_tab[0][pkey.q_tab[1][by] ^ byte(key[1],1)] ^ byte(key[0],1)];
pkey.mk_tab[2][i] = pkey.m_tab[2][pkey.q_tab[1][pkey.q_tab[0][by] ^ byte(key[1],2)] ^ byte(key[0],2)];
pkey.mk_tab[3][i] = pkey.m_tab[3][pkey.q_tab[1][pkey.q_tab[1][by] ^ byte(key[1],3)] ^ byte(key[0],3)];
if pkey.k_len == 3:
for i in xrange(256):
by = i % 0x100
pkey.mk_tab[0][i] = pkey.m_tab[0][pkey.q_tab[0][pkey.q_tab[0][pkey.q_tab[1][by] ^ byte(key[2], 0)] ^ byte(key[1], 0)] ^ byte(key[0], 0)];
pkey.mk_tab[1][i] = pkey.m_tab[1][pkey.q_tab[0][pkey.q_tab[1][pkey.q_tab[1][by] ^ byte(key[2], 1)] ^ byte(key[1], 1)] ^ byte(key[0], 1)];
pkey.mk_tab[2][i] = pkey.m_tab[2][pkey.q_tab[1][pkey.q_tab[0][pkey.q_tab[0][by] ^ byte(key[2], 2)] ^ byte(key[1], 2)] ^ byte(key[0], 2)];
pkey.mk_tab[3][i] = pkey.m_tab[3][pkey.q_tab[1][pkey.q_tab[1][pkey.q_tab[0][by] ^ byte(key[2], 3)] ^ byte(key[1], 3)] ^ byte(key[0], 3)];
if pkey.k_len == 4:
for i in xrange(256):
by = i % 0x100
pkey.mk_tab[0][i] = pkey.m_tab[0][pkey.q_tab[0][pkey.q_tab[0][pkey.q_tab[1][pkey.q_tab[1][by] ^ byte(key[3], 0)] ^ byte(key[2], 0)] ^ byte(key[1], 0)] ^ byte(key[0], 0)];
pkey.mk_tab[1][i] = pkey.m_tab[1][pkey.q_tab[0][pkey.q_tab[1][pkey.q_tab[1][pkey.q_tab[0][by] ^ byte(key[3], 1)] ^ byte(key[2], 1)] ^ byte(key[1], 1)] ^ byte(key[0], 1)];
pkey.mk_tab[2][i] = pkey.m_tab[2][pkey.q_tab[1][pkey.q_tab[0][pkey.q_tab[0][pkey.q_tab[0][by] ^ byte(key[3], 2)] ^ byte(key[2], 2)] ^ byte(key[1], 2)] ^ byte(key[0], 2)];
pkey.mk_tab[3][i] = pkey.m_tab[3][pkey.q_tab[1][pkey.q_tab[1][pkey.q_tab[0][pkey.q_tab[1][by] ^ byte(key[3], 3)] ^ byte(key[2], 3)] ^ byte(key[1], 3)] ^ byte(key[0], 3)];
def h_fun(pkey, x, key):
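# Twofish h function: key-dependent q-box substitution layers followed by
# the MDS matrix multiply (via the precomputed m_tab lookups).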
b0 = byte(x, 0);
b1 = byte(x, 1);
b2 = byte(x, 2);
b3 = byte(x, 3);
if pkey.k_len >= 4:
b0 = pkey.q_tab[1][b0] ^ byte(key[3], 0);
b1 = pkey.q_tab[0][b1] ^ byte(key[3], 1);
b2 = pkey.q_tab[0][b2] ^ byte(key[3], 2);
b3 = pkey.q_tab[1][b3] ^ byte(key[3], 3);
if pkey.k_len >= 3:
b0 = pkey.q_tab[1][b0] ^ byte(key[2], 0);
b1 = pkey.q_tab[1][b1] ^ byte(key[2], 1);
b2 = pkey.q_tab[0][b2] ^ byte(key[2], 2);
b3 = pkey.q_tab[0][b3] ^ byte(key[2], 3);
if pkey.k_len >= 2:
b0 = pkey.q_tab[0][pkey.q_tab[0][b0] ^ byte(key[1], 0)] ^ byte(key[0], 0);
b1 = pkey.q_tab[0][pkey.q_tab[1][b1] ^ byte(key[1], 1)] ^ byte(key[0], 1);
b2 = pkey.q_tab[1][pkey.q_tab[0][b2] ^ byte(key[1], 2)] ^ byte(key[0], 2);
b3 = pkey.q_tab[1][pkey.q_tab[1][b3] ^ byte(key[1], 3)] ^ byte(key[0], 3);
return pkey.m_tab[0][b0] ^ pkey.m_tab[1][b1] ^ pkey.m_tab[2][b2] ^ pkey.m_tab[3][b3];
def mds_rem(p0, p1):
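# Reduce the 64-bit value (p1:p0) modulo the Reed-Solomon field polynomial
# w(x) = x^8 + x^6 + x^3 + x^2 + 1 (0x14d) used by the Twofish key schedule.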
i, t, u = 0, 0, 0
for i in xrange(8):
t = p1 >> 24
p1 = ((p1 << 8) & 0xffffffff) | (p0 >> 24)
p0 = (p0 << 8) & 0xffffffff
u = (t << 1) & 0xffffffff
if t & 0x80:
u ^= 0x0000014d
p1 ^= t ^ ((u << 16) & 0xffffffff)
u ^= (t >> 1)
if t & 0x01:
u ^= 0x0000014d >> 1
p1 ^= ((u << 24) & 0xffffffff) | ((u << 8) & 0xffffffff)
return p1
def set_key(pkey, in_key, key_len):
pkey.qt_gen = 0
if not pkey.qt_gen:
gen_qtab(pkey)
pkey.qt_gen = 1
pkey.mt_gen = 0
if not pkey.mt_gen:
gen_mtab(pkey)
pkey.mt_gen = 1
pkey.k_len = (key_len * 8) / 64
a = 0
b = 0
me_key = [0,0,0,0]
mo_key = [0,0,0,0]
for i in xrange(pkey.k_len):
if WORD_BIGENDIAN:
a = byteswap32(in_key[i + i])
me_key[i] = a
b = byteswap32(in_key[i + i + 1])
else:
a = in_key[i + i]
me_key[i] = a
b = in_key[i + i + 1]
mo_key[i] = b
pkey.s_key[pkey.k_len - i - 1] = mds_rem(a, b);
for i in xrange(0, 40, 2):
a = (0x01010101 * i) % 0x100000000;
b = (a + 0x01010101) % 0x100000000;
a = h_fun(pkey, a, me_key);
b = rotl32(h_fun(pkey, b, mo_key), 8);
pkey.l_key[i] = (a + b) % 0x100000000;
pkey.l_key[i + 1] = rotl32((a + 2 * b) % 0x100000000, 9);
gen_mk_tab(pkey, pkey.s_key)
def encrypt(pkey, in_blk):
blk = [0, 0, 0, 0]
if WORD_BIGENDIAN:
blk[0] = byteswap32(in_blk[0]) ^ pkey.l_key[0];
blk[1] = byteswap32(in_blk[1]) ^ pkey.l_key[1];
blk[2] = byteswap32(in_blk[2]) ^ pkey.l_key[2];
blk[3] = byteswap32(in_blk[3]) ^ pkey.l_key[3];
else:
blk[0] = in_blk[0] ^ pkey.l_key[0];
blk[1] = in_blk[1] ^ pkey.l_key[1];
blk[2] = in_blk[2] ^ pkey.l_key[2];
blk[3] = in_blk[3] ^ pkey.l_key[3];
for i in xrange(8):
t1 = ( pkey.mk_tab[0][byte(blk[1],3)] ^ pkey.mk_tab[1][byte(blk[1],0)] ^ pkey.mk_tab[2][byte(blk[1],1)] ^ pkey.mk_tab[3][byte(blk[1],2)] );
t0 = ( pkey.mk_tab[0][byte(blk[0],0)] ^ pkey.mk_tab[1][byte(blk[0],1)] ^ pkey.mk_tab[2][byte(blk[0],2)] ^ pkey.mk_tab[3][byte(blk[0],3)] );
blk[2] = rotr32(blk[2] ^ ((t0 + t1 + pkey.l_key[4 * (i) + 8]) % 0x100000000), 1);
blk[3] = rotl32(blk[3], 1) ^ ((t0 + 2 * t1 + pkey.l_key[4 * (i) + 9]) % 0x100000000);
t1 = ( pkey.mk_tab[0][byte(blk[3],3)] ^ pkey.mk_tab[1][byte(blk[3],0)] ^ pkey.mk_tab[2][byte(blk[3],1)] ^ pkey.mk_tab[3][byte(blk[3],2)] );
t0 = ( pkey.mk_tab[0][byte(blk[2],0)] ^ pkey.mk_tab[1][byte(blk[2],1)] ^ pkey.mk_tab[2][byte(blk[2],2)] ^ pkey.mk_tab[3][byte(blk[2],3)] );
blk[0] = rotr32(blk[0] ^ ((t0 + t1 + pkey.l_key[4 * (i) + 10]) % 0x100000000), 1);
blk[1] = rotl32(blk[1], 1) ^ ((t0 + 2 * t1 + pkey.l_key[4 * (i) + 11]) % 0x100000000);
if WORD_BIGENDIAN:
in_blk[0] = byteswap32(blk[2] ^ pkey.l_key[4]);
in_blk[1] = byteswap32(blk[3] ^ pkey.l_key[5]);
in_blk[2] = byteswap32(blk[0] ^ pkey.l_key[6]);
in_blk[3] = byteswap32(blk[1] ^ pkey.l_key[7]);
else:
in_blk[0] = blk[2] ^ pkey.l_key[4];
in_blk[1] = blk[3] ^ pkey.l_key[5];
in_blk[2] = blk[0] ^ pkey.l_key[6];
in_blk[3] = blk[1] ^ pkey.l_key[7];
return
def decrypt(pkey, in_blk):
blk = [0, 0, 0, 0]
if WORD_BIGENDIAN:
blk[0] = byteswap32(in_blk[0]) ^ pkey.l_key[4];
blk[1] = byteswap32(in_blk[1]) ^ pkey.l_key[5];
blk[2] = byteswap32(in_blk[2]) ^ pkey.l_key[6];
blk[3] = byteswap32(in_blk[3]) ^ pkey.l_key[7];
else:
blk[0] = in_blk[0] ^ pkey.l_key[4];
blk[1] = in_blk[1] ^ pkey.l_key[5];
blk[2] = in_blk[2] ^ pkey.l_key[6];
blk[3] = in_blk[3] ^ pkey.l_key[7];
for i in xrange(7, -1, -1):
t1 = ( pkey.mk_tab[0][byte(blk[1],3)] ^ pkey.mk_tab[1][byte(blk[1],0)] ^ pkey.mk_tab[2][byte(blk[1],1)] ^ pkey.mk_tab[3][byte(blk[1],2)] )
t0 = ( pkey.mk_tab[0][byte(blk[0],0)] ^ pkey.mk_tab[1][byte(blk[0],1)] ^ pkey.mk_tab[2][byte(blk[0],2)] ^ pkey.mk_tab[3][byte(blk[0],3)] )
blk[2] = rotl32(blk[2], 1) ^ ((t0 + t1 + pkey.l_key[4 * (i) + 10]) % 0x100000000)
blk[3] = rotr32(blk[3] ^ ((t0 + 2 * t1 + pkey.l_key[4 * (i) + 11]) % 0x100000000), 1)
t1 = ( pkey.mk_tab[0][byte(blk[3],3)] ^ pkey.mk_tab[1][byte(blk[3],0)] ^ pkey.mk_tab[2][byte(blk[3],1)] ^ pkey.mk_tab[3][byte(blk[3],2)] )
t0 = ( pkey.mk_tab[0][byte(blk[2],0)] ^ pkey.mk_tab[1][byte(blk[2],1)] ^ pkey.mk_tab[2][byte(blk[2],2)] ^ pkey.mk_tab[3][byte(blk[2],3)] )
blk[0] = rotl32(blk[0], 1) ^ ((t0 + t1 + pkey.l_key[4 * (i) + 8]) % 0x100000000)
blk[1] = rotr32(blk[1] ^ ((t0 + 2 * t1 + pkey.l_key[4 * (i) + 9]) % 0x100000000), 1)
if WORD_BIGENDIAN:
in_blk[0] = byteswap32(blk[2] ^ pkey.l_key[0]);
in_blk[1] = byteswap32(blk[3] ^ pkey.l_key[1]);
in_blk[2] = byteswap32(blk[0] ^ pkey.l_key[2]);
in_blk[3] = byteswap32(blk[1] ^ pkey.l_key[3]);
else:
in_blk[0] = blk[2] ^ pkey.l_key[0];
in_blk[1] = blk[3] ^ pkey.l_key[1];
in_blk[2] = blk[0] ^ pkey.l_key[2];
in_blk[3] = blk[1] ^ pkey.l_key[3];
return
__testkey = '\xD4\x3B\xB7\x55\x6E\xA3\x2E\x46\xF2\xA2\x82\xB7\xD4\x5B\x4E\x0D\x57\xFF\x73\x9D\x4D\xC9\x2C\x1B\xD7\xFC\x01\x70\x0C\xC8\x21\x6F'
__testdat = '\x90\xAF\xE9\x1B\xB2\x88\x54\x4F\x2C\x32\xDC\x23\x9B\x26\x35\xE6'
assert 'l\xb4V\x1c@\xbf\n\x97\x05\x93\x1c\xb6\xd4\x08\xe7\xfa' == Twofish(__testkey).encrypt(__testdat)
assert __testdat == Twofish(__testkey).decrypt('l\xb4V\x1c@\xbf\n\x97\x05\x93\x1c\xb6\xd4\x08\xe7\xfa')
|
GustavoHennig/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/lxd/lxd_container.py
|
27
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Hiroaki Nakamura <hnakamur@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: lxd_container
short_description: Manage LXD Containers
version_added: "2.2"
description:
- Management of LXD containers
author: "Hiroaki Nakamura (@hnakamur)"
options:
name:
description:
- Name of a container.
required: true
architecture:
description:
- The architecture for the container (e.g. "x86_64" or "i686").
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1)
required: false
config:
description:
- 'The config for the container (e.g. {"limits.cpu": "2"}).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1)'
- If the container already exists and its "config" value in metadata
obtained from
GET /1.0/containers/<name>
U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#10containersname)
are different, they this module tries to apply the configurations.
- The key starts with 'volatile.' are ignored for this comparison.
- Not all config values are supported to apply the existing container.
Maybe you need to delete and recreate a container.
required: false
devices:
description:
- 'The devices for the container
(e.g. { "rootfs": { "path": "/dev/kvm", "type": "unix-char" } }).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1)'
required: false
ephemeral:
description:
- Whether or not the container is ephemeral (e.g. true or false).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1)
required: false
source:
description:
- 'The source for the container
(e.g. { "type": "image",
"mode": "pull",
"server": "https://images.linuxcontainers.org",
"protocol": "lxd",
"alias": "ubuntu/xenial/amd64" }).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1)'
required: false
state:
choices:
- started
- stopped
- restarted
- absent
- frozen
description:
- Define the state of a container.
required: false
default: started
timeout:
description:
- A timeout for changing the state of the container.
- This is also used as a timeout for waiting until IPv4 addresses
are set on all network interfaces in the container after
starting or restarting.
required: false
default: 30
wait_for_ipv4_addresses:
description:
- If this is true, the C(lxd_container) waits until IPv4 addresses
are set on all network interfaces in the container after
starting or restarting.
required: false
default: false
force_stop:
description:
- If this is true, the C(lxd_container) forcibly stops the container
when stopping or restarting it.
required: false
default: false
url:
description:
- The unix domain socket path or the https URL for the LXD server.
required: false
default: unix:/var/lib/lxd/unix.socket
key_file:
description:
- The client certificate key file path.
required: false
default: '"{}/.config/lxc/client.key" .format(os.environ["HOME"])'
cert_file:
description:
- The client certificate file path.
required: false
default: '"{}/.config/lxc/client.crt" .format(os.environ["HOME"])'
trust_password:
description:
- The client trusted password.
- You need to set this password on the LXD server before
running this module using the following command.
lxc config set core.trust_password <some random password>
See U(https://www.stgraber.org/2016/04/18/lxd-api-direct-interaction/)
- If trust_password is set, this module sends a request for
authentication before sending any other requests.
required: false
notes:
- Containers must have a unique name. If you attempt to create a container
with a name that already exists in the user's namespace, the module will
simply return as "unchanged".
- There are two ways to run commands in containers: using the command
module or using the ansible lxd connection plugin bundled in Ansible >=
2.1; the latter requires python to be installed in the container, which can
be done with the command module.
- You can copy a file from the host to the container
with the Ansible M(copy) and M(template) module and the `lxd` connection plugin.
See the example below.
- You can copy a file in the created container to the localhost
with `command=lxc file pull container_name/dir/filename filename`.
See the first example below.
'''
EXAMPLES = '''
# An example for creating an Ubuntu container and installing python
- hosts: localhost
connection: local
tasks:
- name: Create a started container
lxd_container:
name: mycontainer
state: started
source:
type: image
mode: pull
server: https://images.linuxcontainers.org
protocol: lxd
alias: ubuntu/xenial/amd64
profiles: ["default"]
wait_for_ipv4_addresses: true
timeout: 600
- name: check python is installed in container
delegate_to: mycontainer
raw: dpkg -s python
register: python_install_check
failed_when: python_install_check.rc not in [0, 1]
changed_when: false
- name: install python in container
delegate_to: mycontainer
raw: apt-get install -y python
when: python_install_check.rc == 1
# An example for deleting a container
- hosts: localhost
connection: local
tasks:
- name: Delete a container
lxd_container:
name: mycontainer
state: absent
# An example for restarting a container
- hosts: localhost
connection: local
tasks:
- name: Restart a container
lxd_container:
name: mycontainer
state: restarted
# An example for restarting a container using https to connect to the LXD server
- hosts: localhost
connection: local
tasks:
- name: Restart a container
lxd_container:
url: https://127.0.0.1:8443
# These cert_file and key_file values are equal to the default values.
#cert_file: "{{ lookup('env', 'HOME') }}/.config/lxc/client.crt"
#key_file: "{{ lookup('env', 'HOME') }}/.config/lxc/client.key"
trust_password: mypassword
name: mycontainer
state: restarted
# Note your container must be in the inventory for the below example.
#
# [containers]
# mycontainer ansible_connection=lxd
#
- hosts:
- mycontainer
tasks:
- name: copy /etc/hosts in the created container to localhost with name "mycontainer-hosts"
fetch:
src: /etc/hosts
dest: /tmp/mycontainer-hosts
flat: true
'''
RETURN='''
addresses:
description: Mapping from the network device name to a list of IPv4 addresses in the container
returned: when state is started or restarted
type: object
sample: {"eth0": ["10.155.92.191"]}
old_state:
description: The old state of the container
returned: when state is started or restarted
type: string
sample: "stopped"
logs:
description: The logs of requests and responses.
returned: when ansible-playbook is invoked with -vvvv.
type: list
sample: "(too long to be placed here)"
actions:
description: List of actions performed for the container.
returned: success
type: list
sample: '["create", "start"]'
'''
import datetime
import os
import time
from ansible.module_utils.lxd import LXDClient, LXDClientException
# LXD_ANSIBLE_STATES is a map of states that contain values of methods used
# when a particular state is evoked.
LXD_ANSIBLE_STATES = {
'started': '_started',
'stopped': '_stopped',
'restarted': '_restarted',
'absent': '_destroyed',
'frozen': '_frozen'
}
# ANSIBLE_LXD_STATES is a map of states of lxd containers to the Ansible
# lxc_container module state parameter value.
ANSIBLE_LXD_STATES = {
'Running': 'started',
'Stopped': 'stopped',
'Frozen': 'frozen',
}
# CONFIG_PARAMS is a list of config attribute names.
CONFIG_PARAMS = [
'architecture', 'config', 'devices', 'ephemeral', 'profiles', 'source'
]
try:
callable(all)
except NameError:
# For python <2.5
# This definition is copied from https://docs.python.org/2/library/functions.html#all
def all(iterable):
for element in iterable:
if not element:
return False
return True
class LXDContainerManagement(object):
def __init__(self, module):
"""Management of LXC containers via Ansible.
:param module: Processed Ansible Module.
:type module: ``object``
"""
self.module = module
self.name = self.module.params['name']
self._build_config()
self.state = self.module.params['state']
self.timeout = self.module.params['timeout']
self.wait_for_ipv4_addresses = self.module.params['wait_for_ipv4_addresses']
self.force_stop = self.module.params['force_stop']
self.addresses = None
self.url = self.module.params['url']
self.key_file = self.module.params.get('key_file', None)
self.cert_file = self.module.params.get('cert_file', None)
self.debug = self.module._verbosity >= 4
try:
self.client = LXDClient(
self.url, key_file=self.key_file, cert_file=self.cert_file,
debug=self.debug
)
except LXDClientException as e:
self.module.fail_json(msg=e.msg)
self.trust_password = self.module.params.get('trust_password', None)
self.actions = []
def _build_config(self):
self.config = {}
for attr in CONFIG_PARAMS:
param_val = self.module.params.get(attr, None)
if param_val is not None:
self.config[attr] = param_val
def _get_container_json(self):
return self.client.do(
'GET', '/1.0/containers/{0}'.format(self.name),
ok_error_codes=[404]
)
def _get_container_state_json(self):
return self.client.do(
'GET', '/1.0/containers/{0}/state'.format(self.name),
ok_error_codes=[404]
)
@staticmethod
def _container_json_to_module_state(resp_json):
if resp_json['type'] == 'error':
return 'absent'
return ANSIBLE_LXD_STATES[resp_json['metadata']['status']]
def _change_state(self, action, force_stop=False):
body_json={'action': action, 'timeout': self.timeout}
if force_stop:
body_json['force'] = True
return self.client.do('PUT', '/1.0/containers/{0}/state'.format(self.name), body_json=body_json)
def _create_container(self):
config = self.config.copy()
config['name'] = self.name
self.client.do('POST', '/1.0/containers', config)
self.actions.append('create')
def _start_container(self):
self._change_state('start')
self.actions.append('start')
def _stop_container(self):
self._change_state('stop', self.force_stop)
self.actions.append('stop')
def _restart_container(self):
self._change_state('restart', self.force_stop)
self.actions.append('restart')
def _delete_container(self):
self.client.do('DELETE', '/1.0/containers/{0}'.format(self.name))
self.actions.append('delete')
def _freeze_container(self):
self._change_state('freeze')
self.actions.append('freeze')
def _unfreeze_container(self):
self._change_state('unfreeze')
self.actions.append('unfreeze')
def _container_ipv4_addresses(self, ignore_devices=['lo']):
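# Collect the IPv4 ('inet') addresses per network device, skipping the
# devices listed in ignore_devices (the loopback interface by default).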
resp_json = self._get_container_state_json()
network = resp_json['metadata']['network'] or {}
network = dict((k, v) for k, v in network.items() if k not in ignore_devices) or {}
addresses = dict((k, [a['address'] for a in v['addresses'] if a['family'] == 'inet']) for k, v in network.items()) or {}
return addresses
@staticmethod
def _has_all_ipv4_addresses(addresses):
return len(addresses) > 0 and all([len(v) > 0 for v in addresses.values()])
def _get_addresses(self):
try:
due = datetime.datetime.now() + datetime.timedelta(seconds=self.timeout)
while datetime.datetime.now() < due:
time.sleep(1)
addresses = self._container_ipv4_addresses()
if self._has_all_ipv4_addresses(addresses):
self.addresses = addresses
return
except LXDClientException as e:
e.msg = 'timeout for getting IPv4 addresses'
raise
def _started(self):
if self.old_state == 'absent':
self._create_container()
self._start_container()
else:
if self.old_state == 'frozen':
self._unfreeze_container()
elif self.old_state == 'stopped':
self._start_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
if self.wait_for_ipv4_addresses:
self._get_addresses()
def _stopped(self):
if self.old_state == 'absent':
self._create_container()
else:
if self.old_state == 'stopped':
if self._needs_to_apply_container_configs():
self._start_container()
self._apply_container_configs()
self._stop_container()
else:
if self.old_state == 'frozen':
self._unfreeze_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._stop_container()
def _restarted(self):
if self.old_state == 'absent':
self._create_container()
self._start_container()
else:
if self.old_state == 'frozen':
self._unfreeze_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._restart_container()
if self.wait_for_ipv4_addresses:
self._get_addresses()
def _destroyed(self):
if self.old_state != 'absent':
if self.old_state == 'frozen':
self._unfreeze_container()
if self.old_state != 'stopped':
self._stop_container()
self._delete_container()
def _frozen(self):
if self.old_state == 'absent':
self._create_container()
self._start_container()
self._freeze_container()
else:
if self.old_state == 'stopped':
self._start_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._freeze_container()
def _needs_to_change_container_config(self, key):
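# Compare the desired value for `key` against the container's current
# metadata; for 'config', keys starting with 'volatile.' (managed by LXD
# itself) are excluded from the comparison.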
if key not in self.config:
return False
if key == 'config':
old_configs = dict((k, v) for k, v in self.old_container_json['metadata'][key].items() if not k.startswith('volatile.'))
else:
old_configs = self.old_container_json['metadata'][key]
return self.config[key] != old_configs
def _needs_to_apply_container_configs(self):
return (
self._needs_to_change_container_config('architecture') or
self._needs_to_change_container_config('config') or
self._needs_to_change_container_config('ephemeral') or
self._needs_to_change_container_config('devices') or
self._needs_to_change_container_config('profiles')
)
def _apply_container_configs(self):
old_metadata = self.old_container_json['metadata']
body_json = {
'architecture': old_metadata['architecture'],
'config': old_metadata['config'],
'devices': old_metadata['devices'],
'profiles': old_metadata['profiles']
}
if self._needs_to_change_container_config('architecture'):
body_json['architecture'] = self.config['architecture']
if self._needs_to_change_container_config('config'):
for k, v in self.config['config'].items():
body_json['config'][k] = v
if self._needs_to_change_container_config('ephemeral'):
body_json['ephemeral'] = self.config['ephemeral']
if self._needs_to_change_container_config('devices'):
body_json['devices'] = self.config['devices']
if self._needs_to_change_container_config('profiles'):
body_json['profiles'] = self.config['profiles']
self.client.do('PUT', '/1.0/containers/{0}'.format(self.name), body_json=body_json)
self.actions.append('apply_container_configs')
def run(self):
"""Run the main method."""
try:
if self.trust_password is not None:
self.client.authenticate(self.trust_password)
self.old_container_json = self._get_container_json()
self.old_state = self._container_json_to_module_state(self.old_container_json)
action = getattr(self, LXD_ANSIBLE_STATES[self.state])
action()
state_changed = len(self.actions) > 0
result_json = {
'log_verbosity': self.module._verbosity,
'changed': state_changed,
'old_state': self.old_state,
'actions': self.actions
}
if self.client.debug:
result_json['logs'] = self.client.logs
if self.addresses is not None:
result_json['addresses'] = self.addresses
self.module.exit_json(**result_json)
except LXDClientException as e:
state_changed = len(self.actions) > 0
fail_params = {
'msg': e.msg,
'changed': state_changed,
'actions': self.actions
}
if self.client.debug:
fail_params['logs'] = e.kwargs['logs']
self.module.fail_json(**fail_params)
def main():
"""Ansible Main module."""
module = AnsibleModule(
argument_spec=dict(
name=dict(
type='str',
required=True
),
architecture=dict(
type='str',
),
config=dict(
type='dict',
),
description=dict(
type='str',
),
devices=dict(
type='dict',
),
ephemeral=dict(
type='bool',
),
profiles=dict(
type='list',
),
source=dict(
type='dict',
),
state=dict(
choices=LXD_ANSIBLE_STATES.keys(),
default='started'
),
timeout=dict(
type='int',
default=30
),
wait_for_ipv4_addresses=dict(
type='bool',
default=False
),
force_stop=dict(
type='bool',
default=False
),
url=dict(
type='str',
default='unix:/var/lib/lxd/unix.socket'
),
key_file=dict(
type='str',
default='{}/.config/lxc/client.key'.format(os.environ['HOME'])
),
cert_file=dict(
type='str',
default='{}/.config/lxc/client.crt'.format(os.environ['HOME'])
),
trust_password=dict(type='str', no_log=True)
),
supports_check_mode=False,
)
lxd_manage = LXDContainerManagement(module=module)
lxd_manage.run()
# import module bits
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
benlk/harvey-senior-homes
|
refs/heads/master
|
fabfile/utils.py
|
3
|
#!/usr/bin/env python
# _*_ coding:utf-8 _*_
import app_config
import boto
import logging
from boto.s3.connection import OrdinaryCallingFormat
from fabric.api import local, task
logging.basicConfig(format=app_config.LOG_FORMAT)
logger = logging.getLogger(__name__)
logger.setLevel(app_config.LOG_LEVEL)
"""
Utilities used by multiple commands.
"""
from fabric.api import prompt
def confirm(message):
"""
Verify a user's intentions.
"""
answer = prompt(message, default="Not at all")
if answer.lower() not in ('y', 'yes', 'buzz off', 'screw you'):
exit()
def get_bucket(bucket_name):
"""
Establishes a connection and gets the S3 bucket.
"""
if '.' in bucket_name:
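# Bucket names that contain dots break SSL wildcard-certificate matching
# with virtual-hosted-style URLs, so fall back to path-style addressing.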
s3 = boto.connect_s3(calling_format=OrdinaryCallingFormat())
else:
s3 = boto.connect_s3()
return s3.get_bucket(bucket_name)
@task
def install_font(force='true'):
"""
Install font
"""
print 'Installing font'
if force != 'true':
try:
with open('www/css/icon/npr-app-template.css'), open('www/css/font/npr-app-template.svg'):
logger.info('Font installed, skipping.')
return
except IOError:
pass
local('node_modules/fontello-cli/bin/fontello-cli install --config fontello/config.json --css www/css/icon --font www/css/font/')
@task
def open_font():
"""
Open font in Fontello GUI in your browser
"""
local('node_modules/fontello-cli/bin/fontello-cli open --config fontello/config.json')
|
danakj/chromium
|
refs/heads/master
|
tools/grit/grit/node/structure.py
|
7
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''The <structure> element.
'''
import os
import platform
import re
from grit import exception
from grit import util
from grit.node import base
from grit.node import variant
import grit.gather.admin_template
import grit.gather.chrome_html
import grit.gather.chrome_scaled_image
import grit.gather.igoogle_strings
import grit.gather.muppet_strings
import grit.gather.policy_json
import grit.gather.rc
import grit.gather.tr_html
import grit.gather.txt
import grit.format.rc
import grit.format.rc_header
# Type of the gatherer to use for each type attribute
_GATHERERS = {
'accelerators' : grit.gather.rc.Accelerators,
'admin_template' : grit.gather.admin_template.AdmGatherer,
'chrome_html' : grit.gather.chrome_html.ChromeHtml,
'chrome_scaled_image' : grit.gather.chrome_scaled_image.ChromeScaledImage,
'dialog' : grit.gather.rc.Dialog,
'igoogle' : grit.gather.igoogle_strings.IgoogleStrings,
'menu' : grit.gather.rc.Menu,
'muppet' : grit.gather.muppet_strings.MuppetStrings,
'rcdata' : grit.gather.rc.RCData,
'tr_html' : grit.gather.tr_html.TrHtml,
'txt' : grit.gather.txt.TxtFile,
'version' : grit.gather.rc.Version,
'policy_template_metafile' : grit.gather.policy_json.PolicyJson,
}
# TODO(joi) Print a warning if the 'variant_of_revision' attribute indicates
# that a skeleton variant is older than the original file.
class StructureNode(base.Node):
'''A <structure> element.'''
# Regular expression for a local variable definition. Each definition
# is of the form NAME=VALUE, where NAME cannot contain '=' or ',' and
# VALUE must escape all commas: ',' -> ',,'. Each variable definition
# should be separated by a comma with no extra whitespace.
# Example: THING1=foo,THING2=bar
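# A value of 'THING1=foo,THING2=with,,comma' would (hypothetically) parse
# to {'THING1': 'foo', 'THING2': 'with,comma'} once ',,' is unfolded.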
variable_pattern = re.compile(r'([^,=\s]+)=((?:,,|[^,])*)')
def __init__(self):
super(StructureNode, self).__init__()
# Keep track of the last filename we flattened to, so we can
# avoid doing it more than once.
self._last_flat_filename = None
# See _Substitute; this substituter is used for local variables and
# the root substituter is used for global variables.
self.substituter = None
def _IsValidChild(self, child):
return isinstance(child, variant.SkeletonNode)
def _ParseVariables(self, variables):
'''Parse a variable string into a dictionary.'''
matches = StructureNode.variable_pattern.findall(variables)
return dict((name, value.replace(',,', ',')) for name, value in matches)
def EndParsing(self):
super(StructureNode, self).EndParsing()
# Now that we have attributes and children, instantiate the gatherers.
gathertype = _GATHERERS[self.attrs['type']]
self.gatherer = gathertype(self.attrs['file'],
self.attrs['name'],
self.attrs['encoding'])
self.gatherer.SetGrdNode(self)
self.gatherer.SetUberClique(self.UberClique())
if hasattr(self.GetRoot(), 'defines'):
self.gatherer.SetDefines(self.GetRoot().defines)
self.gatherer.SetAttributes(self.attrs)
if self.ExpandVariables():
self.gatherer.SetFilenameExpansionFunction(self._Substitute)
# Parse local variables and instantiate the substituter.
if self.attrs['variables']:
variables = self.attrs['variables']
self.substituter = util.Substituter()
self.substituter.AddSubstitutions(self._ParseVariables(variables))
self.skeletons = {} # Maps expressions to skeleton gatherers
for child in self.children:
assert isinstance(child, variant.SkeletonNode)
skel = gathertype(child.attrs['file'],
self.attrs['name'],
child.GetEncodingToUse(),
is_skeleton=True)
skel.SetGrdNode(self) # TODO(benrg): Or child? Only used for ToRealPath
skel.SetUberClique(self.UberClique())
if hasattr(self.GetRoot(), 'defines'):
skel.SetDefines(self.GetRoot().defines)
if self.ExpandVariables():
skel.SetFilenameExpansionFunction(self._Substitute)
self.skeletons[child.attrs['expr']] = skel
def MandatoryAttributes(self):
return ['type', 'name', 'file']
def DefaultAttributes(self):
return { 'encoding' : 'cp1252',
'exclude_from_rc' : 'false',
'line_end' : 'unix',
'output_encoding' : 'utf-8',
'generateid': 'true',
'expand_variables' : 'false',
'output_filename' : '',
'fold_whitespace': 'false',
# Run an arbitrary command after translation is complete
# so that it doesn't interfere with what's in translation
# console.
'run_command' : '',
# Leave empty to run on all platforms, comma-separated
# for one or more specific platforms. Values must match
# output of platform.system().
'run_command_on_platforms' : '',
'allowexternalscript': 'false',
# preprocess takes the same code path as flattenhtml, but it
# disables any processing/inlining outside of <if> and <include>.
'preprocess': 'false',
'flattenhtml': 'false',
'fallback_to_low_resolution': 'default',
# TODO(joi) this is a hack - should output all generated files
# as SCons dependencies; however, for now there is a bug I can't
# find where GRIT doesn't build the matching fileset, therefore
# this hack so that only the files you really need are marked as
# dependencies.
'sconsdep' : 'false',
'variables': '',
}
def IsExcludedFromRc(self):
return self.attrs['exclude_from_rc'] == 'true'
def Process(self, output_dir):
"""Writes the processed data to output_dir. In the case of a chrome_html
structure this will add references to other scale factors. If flattening
this will also write file references to be base64 encoded data URLs. The
name of the new file is returned."""
filename = self.ToRealPath(self.GetInputPath())
flat_filename = os.path.join(output_dir,
self.attrs['name'] + '_' + os.path.basename(filename))
if self._last_flat_filename == flat_filename:
return
with open(flat_filename, 'wb') as outfile:
if self.ExpandVariables():
text = self.gatherer.GetText()
file_contents = self._Substitute(text).encode('utf-8')
else:
file_contents = self.gatherer.GetData('', 'utf-8')
outfile.write(file_contents)
self._last_flat_filename = flat_filename
return os.path.basename(flat_filename)
def GetLineEnd(self):
'''Returns the end-of-line character or characters for files output because
of this node ('\r\n', '\n', or '\r' depending on the 'line_end' attribute).
'''
if self.attrs['line_end'] == 'unix':
return '\n'
elif self.attrs['line_end'] == 'windows':
return '\r\n'
elif self.attrs['line_end'] == 'mac':
return '\r'
else:
raise exception.UnexpectedAttribute(
"Attribute 'line_end' must be one of 'unix' (default), 'windows' or 'mac'")
def GetCliques(self):
return self.gatherer.GetCliques()
def GetDataPackPair(self, lang, encoding):
"""Returns a (id, string|None) pair that represents the resource id and raw
bytes of the data (or None if no resource is generated). This is used to
generate the data pack data file.
"""
from grit.format import rc_header
id_map = rc_header.GetIds(self.GetRoot())
id = id_map[self.GetTextualIds()[0]]
if self.ExpandVariables():
text = self.gatherer.GetText()
return id, util.Encode(self._Substitute(text), encoding)
return id, self.gatherer.GetData(lang, encoding)
def GetHtmlResourceFilenames(self):
"""Returns a set of all filenames inlined by this node."""
return self.gatherer.GetHtmlResourceFilenames()
def GetInputPath(self):
return self.gatherer.GetInputPath()
def GetTextualIds(self):
if not hasattr(self, 'gatherer'):
# This case is needed because this method is called by
# GritNode.ValidateUniqueIds before RunGatherers has been called.
# TODO(benrg): Fix this?
return [self.attrs['name']]
return self.gatherer.GetTextualIds()
def RunPreSubstitutionGatherer(self, debug=False):
if debug:
print 'Running gatherer %s for file %s' % (
str(type(self.gatherer)), self.GetInputPath())
# Note: Parse() is idempotent, therefore this method is also.
self.gatherer.Parse()
for skel in self.skeletons.values():
skel.Parse()
def GetSkeletonGatherer(self):
'''Returns the gatherer for the alternate skeleton that should be used,
based on the expressions for selecting skeletons, or None if the skeleton
from the English version of the structure should be used.
'''
for expr in self.skeletons:
if self.EvaluateCondition(expr):
return self.skeletons[expr]
return None
def HasFileForLanguage(self):
return self.attrs['type'] in ['tr_html', 'admin_template', 'txt',
'muppet', 'igoogle', 'chrome_scaled_image',
'chrome_html']
def ExpandVariables(self):
'''Variable expansion on structures is controlled by an XML attribute.
However, old files assume that expansion is always on for Rc files.
Returns:
A boolean.
'''
attrs = self.GetRoot().attrs
if 'grit_version' in attrs and attrs['grit_version'] > 1:
return self.attrs['expand_variables'] == 'true'
else:
return (self.attrs['expand_variables'] == 'true' or
self.attrs['file'].lower().endswith('.rc'))
def _Substitute(self, text):
'''Perform local and global variable substitution.'''
if self.substituter:
text = self.substituter.Substitute(text)
return self.GetRoot().GetSubstituter().Substitute(text)
def RunCommandOnCurrentPlatform(self):
if self.attrs['run_command_on_platforms'] == '':
return True
else:
target_platforms = self.attrs['run_command_on_platforms'].split(',')
return platform.system() in target_platforms
def FileForLanguage(self, lang, output_dir, create_file=True,
return_if_not_generated=True):
'''Returns the filename of the file associated with this structure,
for the specified language.
Args:
lang: 'fr'
output_dir: 'c:\temp'
create_file: True
'''
assert self.HasFileForLanguage()
# If the source language is requested, and no extra changes are requested,
# use the existing file.
if ((not lang or lang == self.GetRoot().GetSourceLanguage()) and
self.attrs['expand_variables'] != 'true' and
(not self.attrs['run_command'] or
not self.RunCommandOnCurrentPlatform())):
if return_if_not_generated:
input_path = self.GetInputPath()
if input_path is None:
return None
return self.ToRealPath(input_path)
else:
return None
if self.attrs['output_filename'] != '':
filename = self.attrs['output_filename']
else:
filename = os.path.basename(self.attrs['file'])
assert len(filename)
filename = '%s_%s' % (lang, filename)
filename = os.path.join(output_dir, filename)
# Only create the output if it was requested by the call.
if create_file:
text = self.gatherer.Translate(
lang,
pseudo_if_not_available=self.PseudoIsAllowed(),
fallback_to_english=self.ShouldFallbackToEnglish(),
skeleton_gatherer=self.GetSkeletonGatherer())
file_contents = util.FixLineEnd(text, self.GetLineEnd())
if self.ExpandVariables():
# Note that we reapply substitution a second time here.
# This is because a) we need to look inside placeholders
# b) the substitution values are language-dependent
file_contents = self._Substitute(file_contents)
with open(filename, 'wb') as file_object:
output_stream = util.WrapOutputStream(file_object,
self.attrs['output_encoding'])
output_stream.write(file_contents)
if self.attrs['run_command'] and self.RunCommandOnCurrentPlatform():
# Run arbitrary commands after translation is complete so that it
# doesn't interfere with what's in translation console.
command = self.attrs['run_command'] % {'filename': filename}
result = os.system(command)
assert result == 0, '"%s" failed.' % command
return filename
def IsResourceMapSource(self):
return True
def GeneratesResourceMapEntry(self, output_all_resource_defines,
is_active_descendant):
if output_all_resource_defines:
return True
return is_active_descendant
@staticmethod
def Construct(parent, name, type, file, encoding='cp1252'):
'''Creates a new node which is a child of 'parent', with attributes set
by parameters of the same name.
'''
node = StructureNode()
node.StartParsing('structure', parent)
node.HandleAttribute('name', name)
node.HandleAttribute('type', type)
node.HandleAttribute('file', file)
node.HandleAttribute('encoding', encoding)
node.EndParsing()
return node
def SubstituteMessages(self, substituter):
'''Propagates substitution to gatherer.
Args:
substituter: a grit.util.Substituter object.
'''
assert hasattr(self, 'gatherer')
if self.ExpandVariables():
self.gatherer.SubstituteMessages(substituter)
|
semonte/intellij-community
|
refs/heads/master
|
python/testData/testRunner/env/doc/test1.py
|
84
|
def factorial(n):
"""Return the factorial of n, an exact integer >= 0.
If the result is small enough to fit in an int, return an int.
Else return a long.
>>> [factorial(n) for n in range(6)]
[1, 1, 2, 6, 24, 120]
"""
import math
if not n >= 0:
raise ValueError("n must be >= 0")
if math.floor(n) != n:
raise ValueError("n must be exact integer")
if n+1 == n: # catch a value like 1e300
raise OverflowError("n too large")
result = 1
factor = 2
while factor <= n:
result *= factor
factor += 1
return result
class FirstGoodTest:
"""
>>> [factorial(n) for n in range(6)]
[1, 1, 2, 6, 24, 120]
"""
def test_passes(self):
pass
class SecondGoodTest:
def test_passes(self):
"""
>>> [factorial(n) for n in range(6)]
[1, 1, 2, 6, 24, 120]
"""
pass
|
cp0153/programarcade
|
refs/heads/master
|
134.py
|
1
|
import pygame
class Ball():
# class attributes
# Ball position
x = 0
y = 0
# Ball's vector
change_x = 0
change_y = 0
# Ball size
size = 10
# ball color
color = [255, 255, 255]
# ---- class methods ----
def move(self):
self.x += self.change_x
self.y += self.change_y
def draw(self, screen):
pygame.draw.circle(screen, self.color, [self.x, self.y], self.size )
theBall = Ball()
theBall.x = 100
theBall.y = 100
theBall.change_x = 2
theBall.change_y = 1
theBall.color = [255,0,0]
|
hyperized/ansible
|
refs/heads/devel
|
test/integration/targets/sns_topic/files/sns_topic_lambda/sns_topic_lambda.py
|
77
|
from __future__ import print_function
def handler(event, context):
print(event)
return True
|
kaichogami/scikit-learn
|
refs/heads/master
|
examples/model_selection/plot_underfitting_overfitting.py
|
53
|
"""
============================
Underfitting vs. Overfitting
============================
This example demonstrates the problems of underfitting and overfitting and
how we can use linear regression with polynomial features to approximate
nonlinear functions. The plot shows the function that we want to approximate,
which is a part of the cosine function. In addition, the samples from the
real function and the approximations of different models are displayed. The
models have polynomial features of different degrees. We can see that a
linear function (polynomial with degree 1) is not sufficient to fit the
training samples. This is called **underfitting**. A polynomial of degree 4
approximates the true function almost perfectly. However, for higher degrees
the model will **overfit** the training data, i.e. it learns the noise of the
training data.
We evaluate **overfitting** / **underfitting** quantitatively by using
cross-validation. We calculate the mean squared error (MSE) on the validation
set; the higher it is, the less likely the model is to generalize correctly
from the training data.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score
np.random.seed(0)
n_samples = 30
degrees = [1, 4, 15]
true_fun = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fun(X) + np.random.randn(n_samples) * 0.1
plt.figure(figsize=(14, 5))
for i in range(len(degrees)):
ax = plt.subplot(1, len(degrees), i + 1)
plt.setp(ax, xticks=(), yticks=())
polynomial_features = PolynomialFeatures(degree=degrees[i],
include_bias=False)
linear_regression = LinearRegression()
pipeline = Pipeline([("polynomial_features", polynomial_features),
("linear_regression", linear_regression)])
pipeline.fit(X[:, np.newaxis], y)
# Evaluate the models using cross-validation
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring="mean_squared_error", cv=10)
X_test = np.linspace(0, 1, 100)
plt.plot(X_test, pipeline.predict(X_test[:, np.newaxis]), label="Model")
plt.plot(X_test, true_fun(X_test), label="True function")
plt.scatter(X, y, label="Samples")
plt.xlabel("x")
plt.ylabel("y")
plt.xlim((0, 1))
plt.ylim((-2, 2))
plt.legend(loc="best")
plt.title("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
degrees[i], -scores.mean(), scores.std()))
plt.show()
|
xiangshouding/nv
|
refs/heads/master
|
node/node_modules/pygmentize-bundled/vendor/pygments/pygments/formatters/other.py
|
49
|
# -*- coding: utf-8 -*-
"""
pygments.formatters.other
~~~~~~~~~~~~~~~~~~~~~~~~~
Other formatters: NullFormatter, RawTokenFormatter.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
from pygments.util import OptionError, get_choice_opt
from pygments.token import Token
from pygments.console import colorize
__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
class NullFormatter(Formatter):
"""
Output the text unchanged without any formatting.
"""
name = 'Text only'
aliases = ['text', 'null']
filenames = ['*.txt']
def format(self, tokensource, outfile):
enc = self.encoding
for ttype, value in tokensource:
if enc:
outfile.write(value.encode(enc))
else:
outfile.write(value)
class RawTokenFormatter(Formatter):
r"""
Format tokens as a raw representation for storing token streams.
The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
be converted to a token stream with the `RawTokenLexer`, described in the
:doc:`lexer list <lexers>`.
Only two options are accepted:
`compress`
If set to ``'gz'`` or ``'bz2'``, compress the output with the given
compression algorithm after encoding (default: ``''``).
`error_color`
If set to a color name, highlight error tokens using that color. If
set but with no value, defaults to ``'red'``.
.. versionadded:: 0.11
"""
name = 'Raw tokens'
aliases = ['raw', 'tokens']
filenames = ['*.raw']
unicodeoutput = False
def __init__(self, **options):
Formatter.__init__(self, **options)
if self.encoding:
raise OptionError('the raw formatter does not support the '
'encoding option')
self.encoding = 'ascii' # let pygments.format() do the right thing
self.compress = get_choice_opt(options, 'compress',
['', 'none', 'gz', 'bz2'], '')
self.error_color = options.get('error_color', None)
if self.error_color is True:
self.error_color = 'red'
if self.error_color is not None:
try:
colorize(self.error_color, '')
except KeyError:
raise ValueError("Invalid color %r specified" %
self.error_color)
def format(self, tokensource, outfile):
try:
outfile.write(b'')
except TypeError:
raise TypeError('The raw tokens formatter needs a binary '
'output file')
if self.compress == 'gz':
import gzip
outfile = gzip.GzipFile('', 'wb', 9, outfile)
def write(text):
outfile.write(text.encode())
flush = outfile.flush
elif self.compress == 'bz2':
import bz2
compressor = bz2.BZ2Compressor(9)
def write(text):
outfile.write(compressor.compress(text.encode()))
def flush():
outfile.write(compressor.flush())
outfile.flush()
else:
def write(text):
outfile.write(text.encode())
flush = outfile.flush
if self.error_color:
for ttype, value in tokensource:
line = "%s\t%r\n" % (ttype, value)
if ttype is Token.Error:
write(colorize(self.error_color, line))
else:
write(line)
else:
for ttype, value in tokensource:
write("%s\t%r\n" % (ttype, value))
flush()
TESTCASE_BEFORE = u'''\
def testNeedsName(self):
fragment = %r
tokens = [
'''
TESTCASE_AFTER = u'''\
]
self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
'''
class TestcaseFormatter(Formatter):
"""
Format tokens as appropriate for a new testcase.
.. versionadded:: 2.0
"""
name = 'Testcase'
aliases = ['testcase']
def __init__(self, **options):
Formatter.__init__(self, **options)
#if self.encoding != 'utf-8':
# print >>sys.stderr, "NOTICE: Forcing encoding to utf-8, as all Pygments source is"
if self.encoding is not None and self.encoding != 'utf-8':
raise ValueError("Only None and utf-8 are allowed encodings.")
def format(self, tokensource, outfile):
indentation = ' ' * 12
rawbuf = []
outbuf = []
for ttype, value in tokensource:
rawbuf.append(value)
outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
before = TESTCASE_BEFORE % (u''.join(rawbuf),)
during = u''.join(outbuf)
after = TESTCASE_AFTER
if self.encoding is None:
outfile.write(before + during + after)
else:
outfile.write(before.encode('utf-8'))
outfile.write(during.encode('utf-8'))
outfile.write(after.encode('utf-8'))
outfile.flush()
|
tgecho/pipedream
|
refs/heads/master
|
pipedream/tests/exceptions_test.py
|
1
|
import pytest
class FooBar(Exception):
pass
def test_exceptions_without_handler(dispatcher):
@dispatcher.add
def a():
1/0
with pytest.raises(ZeroDivisionError):
assert dispatcher.call('a')
def test_handled_exceptions(dispatcher):
@dispatcher.error_handler
def handle(error):
return 'handled'
@dispatcher.add()
def a():
1/0
assert dispatcher.call('a') == 'handled'
def test_unhandled_exceptions(dispatcher):
@dispatcher.error_handler
def handler(error):
return error
@dispatcher.add()
def a():
1/0
with pytest.raises(ZeroDivisionError):
assert dispatcher.call('a')
def test_converted_exceptions(dispatcher):
@dispatcher.error_handler
def handler(error):
raise FooBar()
@dispatcher.add()
def a():
1/0
with pytest.raises(FooBar):
assert dispatcher.call('a')
|
almeidapaulopt/frappe
|
refs/heads/develop
|
frappe/printing/doctype/letter_head/__init__.py
|
12133432
| |
kholidfu/django
|
refs/heads/master
|
tests/template_tests/syntax_tests/__init__.py
|
12133432
| |
craigderington/studentloan5
|
refs/heads/master
|
studentloan5/Lib/site-packages/django/contrib/sitemaps/management/__init__.py
|
12133432
| |
HugoKuo/keystone-essex3
|
refs/heads/master
|
keystone/middleware/auth_basic.py
|
2
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
BASIC AUTH MIDDLEWARE - STUB
This WSGI component should perform multiple jobs:
* validate incoming basic claims
* perform all basic auth interactions with clients
* collect and forward identity information from the authentication process
such as user name, groups, etc...
This is an Auth component as per: http://wiki.openstack.org/openstack-authn
"""
import eventlet
from eventlet import wsgi
import os
import logging
from paste.deploy import loadapp
import urlparse
from webob import Request, Response
from webob.exc import HTTPUnauthorized
from keystone.common.bufferedhttp import http_connect_raw as http_connect
PROTOCOL_NAME = "Basic Authentication"
logger = logging.getLogger(__name__) # pylint: disable=C0103
def _decorate_request_headers(header, value, proxy_headers, env):
proxy_headers[header] = value
env["HTTP_%s" % header] = value
class AuthProtocol(object):
"""Auth Middleware that handles authenticating client calls"""
def __init__(self, app, conf):
logger.info("Starting the %s component", PROTOCOL_NAME)
self.conf = conf
self.app = app
#if app is set, then we are in a WSGI pipeline and requests get passed
# on to app. If it is not set, this component should forward requests
# to the remote OpenStack service on its own.
# where to find the OpenStack service (if not in local WSGI chain)
# these settings are only used if this component is acting as a proxy
# and the OpenStack service is running remotely
self.service_protocol = conf.get('service_protocol', 'https')
self.service_host = conf.get('service_host')
self.service_port = int(conf.get('service_port'))
self.service_url = '%s://%s:%s' % (self.service_protocol,
self.service_host,
self.service_port)
# used to verify this component with the OpenStack service or PAPIAuth
self.service_pass = conf.get('service_pass')
# delay_auth_decision means we still allow unauthenticated requests
# through and we let the downstream service make the final decision
self.delay_auth_decision = int(conf.get('delay_auth_decision', 0))
def __call__(self, env, start_response):
def custom_start_response(status, headers):
if self.delay_auth_decision:
headers.append(('WWW-Authenticate',
"Basic realm='Use guest/guest'"))
return start_response(status, headers)
#Prep headers to proxy request to remote service
proxy_headers = env.copy()
user = ''
#Look for authentication
if 'HTTP_AUTHORIZATION' not in env:
#No credentials were provided
if self.delay_auth_decision:
_decorate_request_headers("X_IDENTITY_STATUS", "Invalid",
proxy_headers, env)
else:
# If the user isn't authenticated, we reject the request and
# return 401 indicating we need Basic Auth credentials.
ret = HTTPUnauthorized("Authentication required",
[('WWW-Authenticate',
'Basic realm="Use guest/guest"')])
return ret(env, start_response)
else:
# Claims were provided - validate them
import base64
auth_header = env['HTTP_AUTHORIZATION']
_auth_type, encoded_creds = auth_header.split(None, 1)
user, password = base64.b64decode(encoded_creds).split(':', 1)
if not self.validateCreds(user, password):
#Claims were rejected
if not self.delay_auth_decision:
# Reject request (or ask for valid claims)
ret = HTTPUnauthorized("Authentication required",
[('WWW-Authenticate',
'Basic realm="Use guest/guest"')])
return ret(env, start_response)
else:
# Claims rejected but decision delayed - forward marked Invalid
_decorate_request_headers("X_IDENTITY_STATUS", "Invalid",
proxy_headers, env)
# TODO(Ziad): add additional details we may need,
# like tenant and group info
_decorate_request_headers('X_AUTHORIZATION', "Proxy %s" % user,
proxy_headers, env)
_decorate_request_headers("X_IDENTITY_STATUS", "Confirmed",
proxy_headers, env)
_decorate_request_headers('X_TENANT', 'blank',
proxy_headers, env)
#Auth processed, headers added now decide how to pass on the call
if self.app:
# Pass to downstream WSGI component
env['HTTP_AUTHORIZATION'] = "Basic %s" % self.service_pass
return self.app(env, custom_start_response)
proxy_headers['AUTHORIZATION'] = "Basic %s" % self.service_pass
# We are forwarding to a remote service (no downstream WSGI app)
req = Request(proxy_headers)
parsed = urlparse.urlparse(req.url)
conn = http_connect(self.service_host, self.service_port, \
req.method, parsed.path, \
proxy_headers, \
ssl=(self.service_protocol == 'https'))
resp = conn.getresponse()
data = resp.read()
#TODO(ziad): use a more sophisticated proxy
# we are rewriting the headers now
return Response(status=resp.status, body=data)(env, start_response)
def validateCreds(self, username, password):
#stub for password validation.
# import ConfigParser
# import hashlib
#usersConfig = ConfigParser.ConfigParser()
#usersConfig.readfp(open('/etc/openstack/users.ini'))
#password = hashlib.sha1(password).hexdigest()
#for un, pwd in usersConfig.items('users'):
#TODO(Ziad): add intelligent credential validation (instead of hard
# coded)
if username == 'guest' and password == 'guest':
return True
return False
def filter_factory(global_conf, ** local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return AuthProtocol(app, conf)
return auth_filter
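# Hedged paste.deploy wiring for the filter above (section name and
# option values are illustrative, not from this repo's config):
#   [filter:auth_basic]
#   paste.filter_factory = keystone.middleware.auth_basic:filter_factory
#   service_protocol = http
#   service_host = 127.0.0.1
#   service_port = 5000
#   delay_auth_decision = 0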
def app_factory(global_conf, ** local_conf):
conf = global_conf.copy()
conf.update(local_conf)
return AuthProtocol(None, conf)
if __name__ == "__main__":
app = loadapp("config:" + \
os.path.join(os.path.abspath(os.path.dirname(__file__)),
os.pardir,
os.pardir,
"examples/paste/auth_basic.ini"),
global_conf={"log_name": "auth_basic.log"})
wsgi.server(eventlet.listen(('', 8090)), app)
|
saurabh6790/ON-RISAPP
|
refs/heads/master
|
patches/march_2013/p01_c_form.py
|
30
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import webnotes
def execute():
for cform in webnotes.conn.sql("""select name from `tabC-Form` where docstatus=2"""):
webnotes.conn.sql("""update `tabSales Invoice` set c_form_no=null
where c_form_no=%s""", cform[0])
|
kustodian/ansible
|
refs/heads/devel
|
lib/ansible/plugins/action/edgeos_config.py
|
10
|
#
# Copyright 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
class ActionModule(ActionNetworkModule):
def run(self, tmp=None, task_vars=None):
del tmp # tmp no longer has any effect
self._config_module = True
if self._play_context.connection.split('.')[-1] != 'network_cli':
return {'failed': True, 'msg': 'Connection type %s is not valid for this module. Must use fully qualified'
' name of network_cli connection type.' % self._play_context.connection}
return super(ActionModule, self).run(task_vars=task_vars)
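# Hedged playbook snippet satisfying the connection check above (the host
# group and config line are illustrative):
#   - hosts: edgeos
#     connection: ansible.netcommon.network_cli
#     tasks:
#       - edgeos_config:
#           lines:
#             - set system host-name er-01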
|
ebigelow/LOTlib
|
refs/heads/master
|
LOTlib/Examples/GrammarInferenceDemo/Model/Grammar.py
|
3
|
__author__ = 'eric'
from LOTlib.Grammar import Grammar
# ------------------------------------------------------------------------------------------------------------
# This grammar has 20 rules |Expressions| * |Constants|
simple_grammar = Grammar()
simple_grammar.add_rule('START', '', ['SET'], 1.)
# Mapping expressions over sets of numbers
simple_grammar.add_rule('SET', 'mapset_', ['FUNC', 'RANGE'], 1.)
simple_grammar.add_rule('RANGE', 'range_set_', ['1', '100'], 1.)
simple_grammar.add_rule('FUNC', 'lambda', ['EXPR'], 1., bv_type='X', bv_p=1.)
# Expressions
simple_grammar.add_rule('EXPR', 'times_', ['X', '1'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '2'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '3'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '4'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '5'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '6'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '7'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '8'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '9'], 1.)
simple_grammar.add_rule('EXPR', 'times_', ['X', '10'], 1.)
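# Hedged sampling sketch (Grammar.generate is assumed from the LOTlib API;
# the printed tree is whatever hypothesis the grammar samples):
#   t = simple_grammar.generate()
#   print t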
|
getzze/python-ivi
|
refs/heads/master
|
ivi/agilent/agilentMSOX4104A.py
|
7
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent4000A import *
class agilentMSOX4104A(agilent4000A):
"Agilent InfiniiVision MSOX4104A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'MSO-X 4104A')
super(agilentMSOX4104A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 16
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 1e9
self._init_channels()
|
FrontSide/Sizun
|
refs/heads/master
|
sizun/controllers/externalexecutor.py
|
1
|
"""
Sizun - Software Quality Inspection
MIT License
(C) 2015 David Rieger
"""
from subprocess import Popen, PIPE
from flask import current_app as app
class ExternalExecutor():
def exe(commandlist, resultdelimiter=None, instream=None):
"""
Executes an external executable with the
command and parameters given in commandlist.
Returns a list of all lines from stdout
"""
app.logger.debug("command is :: {}".format(" ".join(commandlist)))
try:
_proc = Popen(commandlist, stdout=PIPE, stderr=PIPE, stdin=instream)
except FileNotFoundError:
raise ExternalDependencyError("Executable \"{}\" not found. Is it installed?".format(commandlist[0]))
if _proc.returncode not in (0, None):
raise ExternalExecutionError("Failed to execute \"{}\"".format(" ".join(commandlist)),
returncode=_proc.returncode,
stderr=_proc.stderr.read().decode("utf-8"))
_boutput = _proc.stdout
if resultdelimiter:
return [l.decode("utf-8").split(resultdelimiter) for l in _boutput.readlines()]
else:
return [l.decode("utf-8").rstrip("\n").strip() for l in _boutput.readlines()]
|
hasecbinusr/pysal
|
refs/heads/master
|
pysal/core/IOHandlers/tests/test_mat.py
|
20
|
import unittest
import pysal
from pysal.core.IOHandlers.mat import MatIO
import tempfile
import os
import warnings
class test_MatIO(unittest.TestCase):
def setUp(self):
self.test_file = test_file = pysal.examples.get_path('spat-sym-us.mat')
self.obj = MatIO(test_file, 'r')
def test_close(self):
f = self.obj
f.close()
self.failUnlessRaises(ValueError, f.read)
def test_read(self):
w = self.obj.read()
self.assertEqual(46, w.n)
self.assertEqual(4.0869565217391308, w.mean_neighbors)
self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values())
def test_seek(self):
self.test_read()
self.failUnlessRaises(StopIteration, self.obj.read)
self.obj.seek(0)
self.test_read()
def test_write(self):
w = self.obj.read()
f = tempfile.NamedTemporaryFile(
suffix='.mat', dir=pysal.examples.get_path(''))
fname = f.name
f.close()
o = pysal.open(fname, 'w')
with warnings.catch_warnings(record=True) as warn:
warnings.simplefilter("always")
o.write(w)
if len(warn) > 0:
assert issubclass(warn[0].category, FutureWarning)
o.close()
wnew = pysal.open(fname, 'r').read()
self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
os.remove(fname)
if __name__ == '__main__':
unittest.main()
|
mrquim/repository.mrquim
|
refs/heads/master
|
script.module.liveresolver/lib/liveresolver/modules/f4mproxy/utils/python_rc4.py
|
207
|
# Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""Pure-Python RC4 implementation."""
from .rc4 import RC4
from .cryptomath import *
def new(key):
return Python_RC4(key)
class Python_RC4(RC4):
def __init__(self, keyBytes):
RC4.__init__(self, keyBytes, "python")
S = [i for i in range(256)]
j = 0
for i in range(256):
j = (j + S[i] + keyBytes[i % len(keyBytes)]) % 256
S[i], S[j] = S[j], S[i]
self.S = S
self.i = 0
self.j = 0
def encrypt(self, plaintextBytes):
ciphertextBytes = plaintextBytes[:]
S = self.S
i = self.i
j = self.j
for x in range(len(ciphertextBytes)):
i = (i + 1) % 256
j = (j + S[i]) % 256
S[i], S[j] = S[j], S[i]
t = (S[i] + S[j]) % 256
ciphertextBytes[x] ^= S[t]
self.i = i
self.j = j
return ciphertextBytes
def decrypt(self, ciphertext):
return self.encrypt(ciphertext)
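# Hedged usage sketch (the cipher operates on mutable int sequences such
# as bytearray; key and plaintext values are illustrative, and the RC4
# base class may require a key of at least 16 bytes):
#   cipher = new(bytearray(b'sixteen byte key'))
#   ct = cipher.encrypt(bytearray(b'attack at dawn'))
#   pt = new(bytearray(b'sixteen byte key')).decrypt(ct)  # fresh keystream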
|
MerlijnWajer/pyroTorrent
|
refs/heads/master
|
lib/filerequester.py
|
1
|
"""
.. _torrentrequester-class:
TorrentRequester
================
The TorrentRequester is a class created to quickly and efficiently query all the
torrents in a view. It only uses one XMLRPC request. All the methods you can
perform on TorrentRequester are identical to the methods on
:ref:`torrent-class`. (Although set* methods have not been implemented)
Example usage:
.. code-block:: python
t = TorrentRequester('hostname')
t.get_name().get_hash() # Chaining commands is possible
t.get_upload_throttle() # As well as calling another method on it.
print t.all()
"""
# Also change return type? not list of list but perhaps a dict or class?
# Properly implement flush?
import xmlrpc.client
from model import torrentfile
from lib.baserequester import BaseRequester, \
InvalidTorrentCommandException
from config import rtorrent_config
# XXX: Create baseclass for rtorrent-multicall's. BaseRequester
class TorrentFileRequester(BaseRequester):
"""
"""
def __init__(self, target, *first_args):
BaseRequester.__init__(self, target, first_args)
self.first_args = first_args
def dofetch(self, *rpc_commands):
return self.s.f.multicall(*(self.first_args + ('',) + rpc_commands))
def _convert_command(self, command):
"""
Convert command based on torrent._rpc_methods to rtorrent command.
"""
if command in torrentfile._rpc_methods:
return torrentfile._rpc_methods[command][0]
else:
raise InvalidTorrentCommandException("%s is not a valid command" %
command)
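# Hedged usage sketch mirroring the TorrentRequester example in the module
# docstring (the host, infohash and getter names are illustrative):
#   t = TorrentFileRequester('hostname', infohash)
#   t.get_path().get_size_bytes()
#   print t.all()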
|
lubart2517/lubart_repo
|
refs/heads/master
|
lubart_repo/contrib/sites/migrations/__init__.py
|
147
|
"""
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.org/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
# -*- coding: utf-8 -*-
|
chrisjsewell/ipypublish
|
refs/heads/develop
|
ipypublish/sphinx/tests/test_notebook.py
|
1
|
# -*- coding: utf-8 -*-
"""
test_sphinx
~~~~~~~~~~~
General Sphinx test and check output.
"""
import pytest
import sphinx
from ipypublish.sphinx.tests import get_test_source_dir
from ipypublish.tests.utils import HTML2JSONParser
@pytest.mark.sphinx(buildername="html", srcdir=get_test_source_dir("notebook"))
def test_basic(app, status, warning, get_sphinx_app_output, data_regression):
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, buildername="html")
parser = HTML2JSONParser()
parser.feed(output)
if sphinx.version_info >= (2,):
data_regression.check(parser.parsed, basename="test_basic_v2")
else:
data_regression.check(parser.parsed, basename="test_basic_v1")
@pytest.mark.sphinx(
buildername="html", srcdir=get_test_source_dir("notebook_cell_decor")
)
def test_cell_decoration(app, status, warning, get_sphinx_app_output, data_regression):
""" test a notebook with prompts and toggle buttons"""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, buildername="html")
parser = HTML2JSONParser()
parser.feed(output)
if sphinx.version_info >= (2,):
data_regression.check(parser.parsed, basename="test_cell_decoration_v2")
else:
data_regression.check(parser.parsed, basename="test_cell_decoration_v1")
@pytest.mark.sphinx(
buildername="html", srcdir=get_test_source_dir("notebook_ipywidget")
)
def test_ipywidget(app, status, warning, get_sphinx_app_output, data_regression):
""" test which contains an ipywidgets and the widget state has been saved."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, buildername="html")
parser = HTML2JSONParser()
parser.feed(output)
data_regression.check(parser.parsed, basename="test_ipywidget")
|
oldhill/miniramp
|
refs/heads/master
|
utils/soundcloud_utils.py
|
1
|
import json
import logging
import urllib2
def username_to_user_id(artistUsername):
""" SoundCloud username to id converter
"""
artistUrl = 'http://soundcloud.com/%s&client_id=YOUR_CLIENT_ID' % artistUsername
# This 'resolve' prefix URL is necessary to get info by username rather than ID number.
# doc: http://developers.soundcloud.com/docs/api/reference#resolve
resolvePrefix = 'http://api.soundcloud.com/resolve.json?url='
artistString = urllib2.urlopen(resolvePrefix + artistUrl).read()
artistObject = json.loads(artistString)
return artistObject['id']
def get_followings(artistId):
""" Get all users followed by user identified by @artistId
"""
apiUrl = 'http://api.soundcloud.com/users/%s' % artistId
followingsUrl = apiUrl + '/followings.json?client_id=YOUR_CLIENT_ID'
followingsString = urllib2.urlopen(followingsUrl).read()
followingsObj = json.loads(followingsString)
return followingsObj
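# Hedged usage sketch (requires substituting a real client_id above;
# 'some-artist' is an illustrative username):
#   artist_id = username_to_user_id('some-artist')
#   followings = get_followings(artist_id)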
|
Greennut/ostproject
|
refs/heads/master
|
django/contrib/auth/management/commands/changepassword.py
|
97
|
import getpass
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from django.db import DEFAULT_DB_ALIAS
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Specifies the database to use. Default is "default".'),
)
help = "Change a user's password for django.contrib.auth."
requires_model_validation = False
def _get_pass(self, prompt="Password: "):
p = getpass.getpass(prompt=prompt)
if not p:
raise CommandError("aborted")
return p
def handle(self, *args, **options):
if len(args) > 1:
raise CommandError("need exactly one or zero arguments for username")
if args:
username, = args
else:
username = getpass.getuser()
try:
u = User.objects.using(options.get('database')).get(username=username)
except User.DoesNotExist:
raise CommandError("user '%s' does not exist" % username)
self.stdout.write("Changing password for user '%s'\n" % u.username)
MAX_TRIES = 3
count = 0
p1, p2 = 1, 2 # To make them initially mismatch.
while p1 != p2 and count < MAX_TRIES:
p1 = self._get_pass()
p2 = self._get_pass("Password (again): ")
if p1 != p2:
self.stdout.write("Passwords do not match. Please try again.\n")
count = count + 1
if count == MAX_TRIES:
raise CommandError("Aborting password change for user '%s' after %s attempts" % (username, count))
u.set_password(p1)
u.save()
return "Password changed successfully for user '%s'" % u.username
|
tinloaf/home-assistant
|
refs/heads/dev
|
homeassistant/components/binary_sensor/verisure.py
|
5
|
"""
Interfaces with Verisure sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.verisure/
"""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.components.verisure import CONF_DOOR_WINDOW
from homeassistant.components.verisure import HUB as hub
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure binary sensors."""
sensors = []
hub.update_overview()
if int(hub.config.get(CONF_DOOR_WINDOW, 1)):
sensors.extend([
VerisureDoorWindowSensor(device_label)
for device_label in hub.get(
"$.doorWindow.doorWindowDevice[*].deviceLabel")])
add_entities(sensors)
class VerisureDoorWindowSensor(BinarySensorDevice):
"""Representation of a Verisure door window sensor."""
def __init__(self, device_label):
"""Initialize the Verisure door window sensor."""
self._device_label = device_label
@property
def name(self):
"""Return the name of the binary sensor."""
return hub.get_first(
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')].area",
self._device_label)
@property
def is_on(self):
"""Return the state of the sensor."""
return hub.get_first(
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')].state",
self._device_label) == "OPEN"
@property
def available(self):
"""Return True if entity is available."""
return hub.get_first(
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')]",
self._device_label) is not None
# pylint: disable=no-self-use
def update(self):
"""Update the state of the sensor."""
hub.update_overview()
|
valdecdev/odoo
|
refs/heads/master
|
addons/base_import_module/models/ir_module.py
|
10
|
import logging
import os
import sys
import zipfile
from os.path import join as opj
import openerp
from openerp.osv import osv
from openerp.tools import convert_file
from openerp.tools.translate import _
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
MAX_FILE_SIZE = 100 * 1024 * 1024  # 100 MB, expressed in bytes
class view(osv.osv):
_inherit = "ir.module.module"
def import_module(self, cr, uid, module, path, force=False, context=None):
known_mods = self.browse(cr, uid, self.search(cr, uid, []))
known_mods_names = dict([(m.name, m) for m in known_mods])
installed_mods = [m.name for m in known_mods if m.state == 'installed']
terp = openerp.modules.load_information_from_description_file(module, mod_path=path)
values = self.get_values_from_terp(terp)
unmet_dependencies = set(terp['depends']).difference(installed_mods)
if unmet_dependencies:
msg = _("Unmet module dependencies: %s")
raise UserError(msg % ', '.join(unmet_dependencies))
mod = known_mods_names.get(module)
if mod:
self.write(cr, uid, mod.id, dict(state='installed', **values))
mode = 'update' if not force else 'init'
else:
assert terp.get('installable', True), "Module not installable"
self.create(cr, uid, dict(name=module, state='installed', **values))
mode = 'init'
for kind in ['data', 'init_xml', 'update_xml']:
for filename in terp[kind]:
_logger.info("module %s: loading %s", module, filename)
noupdate = False
if filename.endswith('.csv') and kind in ('init', 'init_xml'):
noupdate = True
pathname = opj(path, filename)
idref = {}
convert_file(cr, module, filename, idref, mode=mode, noupdate=noupdate, kind=kind, pathname=pathname)
path_static = opj(path, 'static')
ir_attach = self.pool['ir.attachment']
if os.path.isdir(path_static):
for root, dirs, files in os.walk(path_static):
for static_file in files:
full_path = opj(root, static_file)
with open(full_path, 'r') as fp:
data = fp.read().encode('base64')
url_path = '/%s%s' % (module, full_path.split(path)[1].replace(os.path.sep, '/'))
url_path = url_path.decode(sys.getfilesystemencoding())
filename = os.path.split(url_path)[1]
values = dict(
name=filename,
datas_fname=filename,
url=url_path,
res_model='ir.ui.view',
type='binary',
datas=data,
)
att_id = ir_attach.search(cr, uid, [('url', '=', url_path), ('type', '=', 'binary'), ('res_model', '=', 'ir.ui.view')], context=context)
if att_id:
ir_attach.write(cr, uid, att_id, values, context=context)
else:
ir_attach.create(cr, uid, values, context=context)
return True
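# Hedged invocation sketch for the method above (cr/uid and the module
# name/path are illustrative):
#   self.pool['ir.module.module'].import_module(cr, uid, 'my_module',
#                                               '/tmp/my_module')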
def import_zipfile(self, cr, uid, module_file, force=False, context=None):
if not module_file:
raise Exception("No file sent.")
if not zipfile.is_zipfile(module_file):
raise UserError(_('File is not a zip file!'))
success = []
errors = dict()
module_names = []
with zipfile.ZipFile(module_file, "r") as z:
for zf in z.filelist:
if zf.file_size > MAX_FILE_SIZE:
msg = _("File '%s' exceed maximum allowed file size")
raise UserError(msg % zf.filename)
with openerp.tools.osutil.tempdir() as module_dir:
z.extractall(module_dir)
dirs = [d for d in os.listdir(module_dir) if os.path.isdir(opj(module_dir, d))]
for mod_name in dirs:
module_names.append(mod_name)
try:
# assert mod_name.startswith('theme_')
path = opj(module_dir, mod_name)
self.import_module(cr, uid, mod_name, path, force=force, context=context)
success.append(mod_name)
except Exception, e:
errors[mod_name] = str(e)
r = ["Successfully imported module '%s'" % mod for mod in success]
for mod, error in errors.items():
r.append("Error while importing module '%s': %r" % (mod, error))
return '\n'.join(r), module_names
|
robovm/robovm-studio
|
refs/heads/master
|
python/lib/Lib/distutils/command/upload.py
|
87
|
"""distutils.command.upload
Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
from distutils.errors import *
from distutils.core import Command
from distutils.spawn import spawn
from distutils import log
from hashlib import md5
import os
import socket
import platform
import ConfigParser
import httplib
import base64
import urlparse
import cStringIO as StringIO
class upload(Command):
description = "upload binary package to PyPI"
DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
user_options = [
('repository=', 'r',
"url of repository [default: %s]" % DEFAULT_REPOSITORY),
('show-response', None,
'display full response text from server'),
('sign', 's',
'sign files to upload using gpg'),
('identity=', 'i', 'GPG identity used to sign files'),
]
boolean_options = ['show-response', 'sign']
def initialize_options(self):
self.username = ''
self.password = ''
self.repository = ''
self.show_response = 0
self.sign = False
self.identity = None
def finalize_options(self):
if self.identity and not self.sign:
raise DistutilsOptionError(
"Must use --sign for --identity to have meaning"
)
if os.environ.has_key('HOME'):
rc = os.path.join(os.environ['HOME'], '.pypirc')
if os.path.exists(rc):
self.announce('Using PyPI login from %s' % rc)
config = ConfigParser.ConfigParser({
'username':'',
'password':'',
'repository':''})
config.read(rc)
if not self.repository:
self.repository = config.get('server-login', 'repository')
if not self.username:
self.username = config.get('server-login', 'username')
if not self.password:
self.password = config.get('server-login', 'password')
if not self.repository:
self.repository = self.DEFAULT_REPOSITORY
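# Illustrative ~/.pypirc consumed above (values are placeholders):
#   [server-login]
#   username: yourname
#   password: yourpassword
#   repository: http://pypi.python.org/pypi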
def run(self):
if not self.distribution.dist_files:
raise DistutilsOptionError("No dist file created in earlier command")
for command, pyversion, filename in self.distribution.dist_files:
self.upload_file(command, pyversion, filename)
def upload_file(self, command, pyversion, filename):
# Sign if requested
if self.sign:
gpg_args = ["gpg", "--detach-sign", "-a", filename]
if self.identity:
gpg_args[2:2] = ["--local-user", self.identity]
spawn(gpg_args,
dry_run=self.dry_run)
# Fill in the data - send all the meta-data in case we need to
# register a new release
content = open(filename,'rb').read()
meta = self.distribution.metadata
data = {
# action
':action': 'file_upload',
'protcol_version': '1',  # (sic) legacy PyPI expects this misspelled key
# identify release
'name': meta.get_name(),
'version': meta.get_version(),
# file content
'content': (os.path.basename(filename),content),
'filetype': command,
'pyversion': pyversion,
'md5_digest': md5(content).hexdigest(),
# additional meta-data
'metadata_version' : '1.0',
'summary': meta.get_description(),
'home_page': meta.get_url(),
'author': meta.get_contact(),
'author_email': meta.get_contact_email(),
'license': meta.get_licence(),
'description': meta.get_long_description(),
'keywords': meta.get_keywords(),
'platform': meta.get_platforms(),
'classifiers': meta.get_classifiers(),
'download_url': meta.get_download_url(),
# PEP 314
'provides': meta.get_provides(),
'requires': meta.get_requires(),
'obsoletes': meta.get_obsoletes(),
}
comment = ''
if command == 'bdist_rpm':
dist, version, id = platform.dist()
if dist:
comment = 'built for %s %s' % (dist, version)
elif command == 'bdist_dumb':
comment = 'built for %s' % platform.platform(terse=1)
data['comment'] = comment
if self.sign:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
open(filename+".asc").read())
# set up the authentication
auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = StringIO.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if type(value) != type([]):
value = [value]
for value in value:
if type(value) is tuple:
fn = ';filename="%s"' % value[0]
value = value[1]
else:
fn = ""
value = str(value)
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"'%key)
body.write(fn)
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
body.write('\n') # write an extra newline (lurve Macs)
body.write(end_boundary)
body.write("\n")
body = body.getvalue()
self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
# build the Request
# We can't use urllib2 since we need to send the Basic
# auth right with the first request
schema, netloc, url, params, query, fragments = \
urlparse.urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
http = httplib.HTTPConnection(netloc)
elif schema == 'https':
http = httplib.HTTPSConnection(netloc)
else:
raise AssertionError, "unsupported schema "+schema
data = ''
loglevel = log.INFO
try:
http.connect()
http.putrequest("POST", url)
http.putheader('Content-type',
'multipart/form-data; boundary=%s'%boundary)
http.putheader('Content-length', str(len(body)))
http.putheader('Authorization', auth)
http.endheaders()
http.send(body)
except socket.error, e:
self.announce(str(e), log.ERROR)
return
r = http.getresponse()
if r.status == 200:
self.announce('Server response (%s): %s' % (r.status, r.reason),
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print '-'*75, r.read(), '-'*75
|
lmcro/letsencrypt
|
refs/heads/master
|
letsencrypt-auto-source/pieces/pipstrap.py
|
25
|
#!/usr/bin/env python
"""A small script that can act as a trust root for installing pip 8
Embed this in your project, and your VCS checkout is all you have to trust. In
a post-peep era, this lets you claw your way to a hash-checking version of pip,
with which you can install the rest of your dependencies safely. All it assumes
is Python 2.6 or better and *some* version of pip already installed. If
anything goes wrong, it will exit with a non-zero status code.
"""
# This is here so embedded copies are MIT-compliant:
# Copyright (c) 2016 Erik Rose
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
from __future__ import print_function
from hashlib import sha256
from os.path import join
from pipes import quote
from shutil import rmtree
try:
from subprocess import check_output
except ImportError:
from subprocess import CalledProcessError, PIPE, Popen
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd)
return output
from sys import exit, version_info
from tempfile import mkdtemp
try:
from urllib2 import build_opener, HTTPHandler, HTTPSHandler
except ImportError:
from urllib.request import build_opener, HTTPHandler, HTTPSHandler
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse # 3.4
__version__ = 1, 1, 1
# wheel has a conditional dependency on argparse:
maybe_argparse = (
[('https://pypi.python.org/packages/source/a/argparse/'
'argparse-1.4.0.tar.gz',
'62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4')]
if version_info < (2, 7, 0) else [])
PACKAGES = maybe_argparse + [
# Pip has no dependencies, as it vendors everything:
('https://pypi.python.org/packages/source/p/pip/pip-8.0.3.tar.gz',
'30f98b66f3fe1069c529a491597d34a1c224a68640c82caf2ade5f88aa1405e8'),
# This version of setuptools has only optional dependencies:
('https://pypi.python.org/packages/source/s/setuptools/'
'setuptools-20.2.2.tar.gz',
'24fcfc15364a9fe09a220f37d2dcedc849795e3de3e4b393ee988e66a9cbd85a'),
('https://pypi.python.org/packages/source/w/wheel/wheel-0.29.0.tar.gz',
'1ebb8ad7e26b448e9caa4773d2357849bf80ff9e313964bcaf79cbf0201a1648')
]
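# To pin another package, append its sdist URL plus sha256 digest to
# PACKAGES (illustrative placeholder entry, not a real URL or hash):
#   ('https://pypi.python.org/packages/source/e/example/example-1.0.tar.gz',
#    '<sha256 hex digest of the tarball>'),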
class HashError(Exception):
def __str__(self):
url, path, actual, expected = self.args
return ('{url} did not match the expected hash {expected}. Instead, '
'it was {actual}. The file (left at {path}) may have been '
'tampered with.'.format(**locals()))
def hashed_download(url, temp, digest):
"""Download ``url`` to ``temp``, make sure it has the SHA-256 ``digest``,
and return its path."""
# Based on pip 1.4.1's URLOpener but with cert verification removed. Python
# >=2.7.9 verifies HTTPS certs itself, and, in any case, the cert
# authenticity has only privacy (not arbitrary code execution)
# implications, since we're checking hashes.
def opener():
opener = build_opener(HTTPSHandler())
# Strip out HTTPHandler to prevent MITM spoof:
for handler in opener.handlers:
if isinstance(handler, HTTPHandler):
opener.handlers.remove(handler)
return opener
def read_chunks(response, chunk_size):
while True:
chunk = response.read(chunk_size)
if not chunk:
break
yield chunk
response = opener().open(url)
path = join(temp, urlparse(url).path.split('/')[-1])
actual_hash = sha256()
with open(path, 'wb') as file:
for chunk in read_chunks(response, 4096):
file.write(chunk)
actual_hash.update(chunk)
actual_digest = actual_hash.hexdigest()
if actual_digest != digest:
raise HashError(url, path, actual_digest, digest)
return path
def main():
temp = mkdtemp(prefix='pipstrap-')
try:
downloads = [hashed_download(url, temp, digest)
for url, digest in PACKAGES]
check_output('pip install --no-index --no-deps -U ' +
' '.join(quote(d) for d in downloads),
shell=True)
except HashError as exc:
print(exc)
except Exception:
rmtree(temp)
raise
else:
rmtree(temp)
return 0
return 1
if __name__ == '__main__':
exit(main())
|
caot/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyUnboundLocalVariableInspection/InstanceFieldAndGlobal.py
|
83
|
x = 1
class C:
def __init__(self):
self.x = x #pass
|
vikas1885/test1
|
refs/heads/master
|
common/djangoapps/util/config_parse.py
|
197
|
"""
Helper functions for configuration parsing
"""
import collections
def convert_tokens(tokens):
"""
This function is called on the token
dictionary that is imported from a yaml file.
It returns a new dictionary where
all strings containing 'None' are converted
to a literal None due to a bug in Ansible
"""
if tokens == 'None':
return None
elif isinstance(tokens, basestring) or (not isinstance(tokens, collections.Iterable)):
return tokens
elif isinstance(tokens, dict):
return {
convert_tokens(k): convert_tokens(v)
for k, v in tokens.items()
}
else:
return [convert_tokens(v) for v in tokens]
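# Example (derivable from the code above):
#   convert_tokens({'a': 'None', 'b': ['None', 'x']})
#   => {'a': None, 'b': [None, 'x']}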
|