| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, ⌀ = null) |
|---|---|---|---|---|
lamperi/aoc
|
refs/heads/master
|
2017/20/solve.py
|
1
|
example = """p=< 3,0,0>, v=< 2,0,0>, a=<-1,0,0>
p=< 4,0,0>, v=< 0,0,0>, a=<-2,0,0>
"""
data = open("input.txt").read()
def sum_vec(v1, v2):
return [i1+i2 for i1, i2 in zip(v1, v2)]
def func(data):
particles = []
for line in data.splitlines():
parts = line.split(">")[:3]
p = [int(s.strip()) for s in parts[0].split("<")[1].split(",")]
v = [int(s.strip()) for s in parts[1].split("<")[1].split(",")]
a = [int(s.strip()) for s in parts[2].split("<")[1].split(",")]
partices.append({"p": p, "v": v, "a": a, "d": sum(abs(k) for k in p)})
for j in range(3000):
min_p = None
min_d = None
for i, particle in enumerate(particles):
particle["v"] = sum_vec(particle["v"], particle["a"])
particle["p"] = sum_vec(particle["p"], particle["v"])
particle["d"] = sum(abs(k) for k in particle["p"])
if min_d is None or particle["d"] < min_d:
min_d = particle["d"]
min_p = i
return {"Closest to origin": min_p}
print("Test", func(example))
print("Part 1",func(data))
def func(data):
particles = []
for line in data.splitlines():
parts = line.split(">")[:3]
p = [int(s.strip()) for s in parts[0].split("<")[1].split(",")]
v = [int(s.strip()) for s in parts[1].split("<")[1].split(",")]
a = [int(s.strip()) for s in parts[2].split("<")[1].split(",")]
partices.append({"p": p, "v": v, "a": a, "d": sum(abs(k) for k in p), "alive": True})
for j in range(3000):
places = {}
for i, particle in enumerate(particles):
if not particle["alive"]:
continue
particle["v"] = sum_vec(particle["v"], particle["a"])
particle["p"] = sum_vec(particle["p"], particle["v"])
particle["d"] = sum(abs(k) for k in particle["p"])
pos = tuple(particle["p"])
if pos not in places:
places[pos] = []
places[pos].append(i)
alive = 0
for items in places.values():
if len(items) > 1:
for item in items:
partices[item]["alive"] = False
else:
alive += 1
return {"Still alive": alive}
print("Part 2",func(data))
|
jpautom/scikit-learn
|
refs/heads/master
|
sklearn/ensemble/tests/test_partial_dependence.py
|
365
|
"""
Testing for the partial dependence module.
"""
import numpy as np
from numpy.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import if_matplotlib
from sklearn.ensemble.partial_dependence import partial_dependence
from sklearn.ensemble.partial_dependence import plot_partial_dependence
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import GradientBoostingRegressor
from sklearn import datasets
# toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [-1, 1, 1]
# also load the boston dataset
boston = datasets.load_boston()
# also load the iris dataset
iris = datasets.load_iris()
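# Quick reference (summarizing the calls exercised below):
# partial_dependence(est, target_variables, grid=None, X=None,
# grid_resolution=100) returns (pdp, axes); pdp has shape
# (n_outputs, n_grid_points) and axes lists the grid values used per target
# feature (axes is None when a custom grid is supplied).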
def test_partial_dependence_classifier():
# Test partial dependence for classifier
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(X, y)
pdp, axes = partial_dependence(clf, [0], X=X, grid_resolution=5)
# only 4 grid points instead of 5 because only 4 unique X[:,0] vals
assert pdp.shape == (1, 4)
assert axes[0].shape[0] == 4
# now with our own grid
X_ = np.asarray(X)
grid = np.unique(X_[:, 0])
pdp_2, axes = partial_dependence(clf, [0], grid=grid)
assert axes is None
assert_array_equal(pdp, pdp_2)
def test_partial_dependence_multiclass():
# Test partial dependence for multi-class classifier
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(iris.data, iris.target)
grid_resolution = 25
n_classes = clf.n_classes_
pdp, axes = partial_dependence(
clf, [0], X=iris.data, grid_resolution=grid_resolution)
assert pdp.shape == (n_classes, grid_resolution)
assert len(axes) == 1
assert axes[0].shape[0] == grid_resolution
def test_partial_dependence_regressor():
# Test partial dependence for regressor
clf = GradientBoostingRegressor(n_estimators=10, random_state=1)
clf.fit(boston.data, boston.target)
grid_resolution = 25
pdp, axes = partial_dependence(
clf, [0], X=boston.data, grid_resolution=grid_resolution)
assert pdp.shape == (1, grid_resolution)
assert axes[0].shape[0] == grid_resolution
def test_partial_dependence_input():
# Test input validation of partial dependence.
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(X, y)
assert_raises(ValueError, partial_dependence,
clf, [0], grid=None, X=None)
assert_raises(ValueError, partial_dependence,
clf, [0], grid=[0, 1], X=X)
# first argument must be an instance of BaseGradientBoosting
assert_raises(ValueError, partial_dependence,
{}, [0], X=X)
# Gradient boosting estimator must be fit
assert_raises(ValueError, partial_dependence,
GradientBoostingClassifier(), [0], X=X)
assert_raises(ValueError, partial_dependence, clf, [-1], X=X)
assert_raises(ValueError, partial_dependence, clf, [100], X=X)
# wrong ndim for grid
grid = np.random.rand(10, 2, 1)
assert_raises(ValueError, partial_dependence, clf, [0], grid=grid)
@if_matplotlib
def test_plot_partial_dependence():
# Test partial dependence plot function.
clf = GradientBoostingRegressor(n_estimators=10, random_state=1)
clf.fit(boston.data, boston.target)
grid_resolution = 25
fig, axs = plot_partial_dependence(clf, boston.data, [0, 1, (0, 1)],
grid_resolution=grid_resolution,
feature_names=boston.feature_names)
assert len(axs) == 3
assert all(ax.has_data() for ax in axs)
# check with str features and array feature names
fig, axs = plot_partial_dependence(clf, boston.data, ['CRIM', 'ZN',
('CRIM', 'ZN')],
grid_resolution=grid_resolution,
feature_names=boston.feature_names)
assert len(axs) == 3
assert all(ax.has_data() for ax in axs)
# check with list feature_names
feature_names = boston.feature_names.tolist()
fig, axs = plot_partial_dependence(clf, boston.data, ['CRIM', 'ZN',
('CRIM', 'ZN')],
grid_resolution=grid_resolution,
feature_names=feature_names)
assert len(axs) == 3
assert all(ax.has_data() for ax in axs)
@if_matplotlib
def test_plot_partial_dependence_input():
# Test partial dependence plot function input checks.
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
# not fitted yet
assert_raises(ValueError, plot_partial_dependence,
clf, X, [0])
clf.fit(X, y)
assert_raises(ValueError, plot_partial_dependence,
clf, np.array(X)[:, :0], [0])
# first argument must be an instance of BaseGradientBoosting
assert_raises(ValueError, plot_partial_dependence,
{}, X, [0])
# must be larger than -1
assert_raises(ValueError, plot_partial_dependence,
clf, X, [-1])
# too large feature value
assert_raises(ValueError, plot_partial_dependence,
clf, X, [100])
# str feature but no feature_names
assert_raises(ValueError, plot_partial_dependence,
clf, X, ['foobar'])
# not valid features value
assert_raises(ValueError, plot_partial_dependence,
clf, X, [{'foo': 'bar'}])
@if_matplotlib
def test_plot_partial_dependence_multiclass():
# Test partial dependence plot function on multi-class input.
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(iris.data, iris.target)
grid_resolution = 25
fig, axs = plot_partial_dependence(clf, iris.data, [0, 1],
label=0,
grid_resolution=grid_resolution)
assert len(axs) == 2
assert all(ax.has_data() for ax in axs)
# now with symbol labels
target = iris.target_names[iris.target]
clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
clf.fit(iris.data, target)
grid_resolution = 25
fig, axs = plot_partial_dependence(clf, iris.data, [0, 1],
label='setosa',
grid_resolution=grid_resolution)
assert len(axs) == 2
assert all(ax.has_data() for ax in axs)
# label not in gbrt.classes_
assert_raises(ValueError, plot_partial_dependence,
clf, iris.data, [0, 1], label='foobar',
grid_resolution=grid_resolution)
# label not provided
assert_raises(ValueError, plot_partial_dependence,
clf, iris.data, [0, 1],
grid_resolution=grid_resolution)
|
otherness-space/myProject002
|
refs/heads/master
|
my_project_002/lib/python2.7/site-packages/django/utils/dateformat.py
|
116
|
"""
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print(df.format('jS F Y H:i'))
7th October 2003 11:39
>>>
"""
from __future__ import unicode_literals
import re
import time
import calendar
import datetime
from django.utils.dates import MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR
from django.utils.tzinfo import LocalTimezone
from django.utils.translation import ugettext as _
from django.utils.encoding import force_text
from django.utils import six
from django.utils.timezone import is_aware, is_naive
re_formatchars = re.compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])')
re_escaped = re.compile(r'\\(.)')
class Formatter(object):
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(force_text(formatstr))):
if i % 2:
pieces.append(force_text(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r'\1', piece))
return ''.join(pieces)
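# Illustration: format characters are substituted, while backslash-escaped
# characters pass through literally, e.g.
#     DateFormat(datetime.datetime(2003, 10, 7)).format(r'\o\n jS')  # 'on 7th'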
class TimeFormat(Formatter):
def __init__(self, t):
self.data = t
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _('p.m.')
return _('a.m.')
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _('PM')
return _('AM')
def B(self):
"Swatch Internet time"
raise NotImplementedError
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
if self.data.minute == 0:
return self.g()
return '%s:%s' % (self.g(), self.i())
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return '%02d' % self.g()
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return '%02d' % self.G()
def i(self):
"Minutes; i.e. '00' to '59'"
return '%02d' % self.data.minute
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _('midnight')
if self.data.minute == 0 and self.data.hour == 12:
return _('noon')
return '%s %s' % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return '%02d' % self.data.second
def u(self):
"Microseconds; i.e. '000000' to '999999'"
return '%06d' % self.data.microsecond
class DateFormat(TimeFormat):
year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
def __init__(self, dt):
# Accepts either a datetime or date object.
self.data = dt
self.timezone = None
if isinstance(dt, datetime.datetime):
if is_naive(dt):
self.timezone = LocalTimezone(dt)
else:
self.timezone = dt.tzinfo
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return '%02d' % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def e(self):
"Timezone name if available"
try:
if hasattr(self.data, 'tzinfo') and self.data.tzinfo:
# Have to use tzinfo.tzname and not datetime.tzname
# because datetime.tzname does not expect Unicode
return self.data.tzinfo.tzname(self.data) or ""
except NotImplementedError:
pass
return ""
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return '1'
else:
return '0'
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self):
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return '%02d' % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def o(self):
"ISO 8601 year number matching the ISO week number (W)"
return self.data.isocalendar()[0]
def O(self):
"Difference to Greenwich time in hours; e.g. '+0200', '-0430'"
seconds = self.Z()
sign = '-' if seconds < 0 else '+'
seconds = abs(seconds)
return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60)
def r(self):
"RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
return self.format('D, j M Y H:i:s O')
def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return 'th'
last = self.data.day % 10
if last == 1:
return 'st'
if last == 2:
return 'nd'
if last == 3:
return 'rd'
return 'th'
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
def T(self):
"Time zone of this machine; e.g. 'EST' or 'MDT'"
name = self.timezone and self.timezone.tzname(self.data) or None
if name is None:
name = self.format('O')
return six.text_type(name)
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
if isinstance(self.data, datetime.datetime) and is_aware(self.data):
return int(calendar.timegm(self.data.utctimetuple()))
else:
return int(time.mktime(self.data.timetuple()))
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number
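# Sanity check (sketch): this agrees with Python's own ISO calendar, e.g.
#     datetime.date(2008, 12, 29).isocalendar()[1] == 1
#     datetime.date(2005, 1, 1).isocalendar()[1] == 53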
def y(self):
"Year, 2 digits; e.g. '99'"
return six.text_type(self.data.year)[2:]
def Y(self):
"Year, 4 digits; e.g. '1999'"
return self.data.year
def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
"""
if not self.timezone:
return 0
offset = self.timezone.utcoffset(self.data)
# `offset` is a datetime.timedelta. For negative values (to the west of
# UTC) only days can be negative (days=-1) and seconds are always
# positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0)
# Positive offsets have days=0
return offset.days * 86400 + offset.seconds
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
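# A minimal sketch of the convenience wrappers, mirroring the module docstring:
#     >>> import datetime
#     >>> time_format(datetime.time(11, 39), 'H:i')
#     '11:39'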
|
SergeySatskiy/cdm-pythonparser
|
refs/heads/master
|
legacy/src/unittest/nested_classes.py
|
2
|
class A( a, b ):
class B():
def __init__( self ):
pass
def __str__( self, k = "ask" ):
def nested():
class C:
def f():
pass
pass
pass
def z( self ):
pass
def g():
class D:
def __init__( self, y ):
pass
pass
|
pgleeson/TempRepo3
|
refs/heads/master
|
lib/jython/Lib/xmlrpclib.py
|
85
|
#
# XML-RPC CLIENT LIBRARY
# $Id: xmlrpclib.py 41594 2005-12-04 19:11:17Z andrew.kuchling $
#
# an XML-RPC client interface for Python.
#
# the marshalling and response parser code can also be used to
# implement XML-RPC servers.
#
# Notes:
# this version is designed to work with Python 2.1 or newer.
#
# History:
# 1999-01-14 fl Created
# 1999-01-15 fl Changed dateTime to use localtime
# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service
# 1999-01-19 fl Fixed array data element (from Skip Montanaro)
# 1999-01-21 fl Fixed dateTime constructor, etc.
# 1999-02-02 fl Added fault handling, handle empty sequences, etc.
# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro)
# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8)
# 2000-11-28 fl Changed boolean to check the truth value of its argument
# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches
# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1)
# 2001-03-28 fl Make sure response tuple is a singleton
# 2001-03-29 fl Don't require empty params element (from Nicholas Riley)
# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2)
# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod)
# 2001-09-03 fl Allow Transport subclass to override getparser
# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup)
# 2001-10-01 fl Remove containers from memo cache when done with them
# 2001-10-01 fl Use faster escape method (80% dumps speedup)
# 2001-10-02 fl More dumps microtuning
# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum)
# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow
# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems)
# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix)
# 2002-03-17 fl Avoid buffered read when possible (from James Rucker)
# 2002-04-07 fl Added pythondoc comments
# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers
# 2002-05-15 fl Added error constants (from Andrew Kuchling)
# 2002-06-27 fl Merged with Python CVS version
# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby)
# 2003-01-22 sm Add support for the bool type
# 2003-02-27 gvr Remove apply calls
# 2003-04-24 sm Use cStringIO if available
# 2003-04-25 ak Add support for nil
# 2003-06-15 gn Add support for time.struct_time
# 2003-07-12 gp Correct marshalling of Faults
# 2003-10-31 mvl Add multicall support
# 2004-08-20 mvl Bump minimum supported Python version to 2.1
#
# Copyright (c) 1999-2002 by Secret Labs AB.
# Copyright (c) 1999-2002 by Fredrik Lundh.
#
# info@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The XML-RPC client interface is
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
#
# things to look into some day:
# TODO: sort out True/False/boolean issues for Python 2.3
"""
An XML-RPC client interface for Python.
The marshalling and response parser code can also be used to
implement XML-RPC servers.
Exported exceptions:
Error Base class for client errors
ProtocolError Indicates an HTTP protocol error
ResponseError Indicates a broken response package
Fault Indicates an XML-RPC fault package
Exported classes:
ServerProxy Represents a logical connection to an XML-RPC server
MultiCall Executor of boxcared xmlrpc requests
Boolean boolean wrapper to generate a "boolean" XML-RPC value
DateTime dateTime wrapper for an ISO 8601 string or time tuple or
localtime integer value to generate a "dateTime.iso8601"
XML-RPC value
Binary binary data wrapper
SlowParser Slow but safe standard parser (based on xmllib)
Marshaller Generate an XML-RPC params chunk from a Python data structure
Unmarshaller Unmarshal an XML-RPC response from incoming XML event message
Transport Handles an HTTP transaction to an XML-RPC server
SafeTransport Handles an HTTPS transaction to an XML-RPC server
Exported constants:
True
False
Exported functions:
boolean Convert any Python value to an XML-RPC boolean
getparser Create instance of the fastest available parser & attach
to an unmarshalling object
dumps Convert an argument tuple or a Fault instance to an XML-RPC
request (or response, if the methodresponse option is used).
loads Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
"""
import re, string, time, operator
from types import *
# --------------------------------------------------------------------
# Internal stuff
try:
unicode
except NameError:
unicode = None # unicode support not available
try:
import datetime
except ImportError:
datetime = None
try:
_bool_is_builtin = False.__class__.__name__ == "bool"
except NameError:
_bool_is_builtin = 0
def _decode(data, encoding, is8bit=re.compile("[\x80-\xff]").search):
# decode non-ascii string (if possible)
if unicode and encoding and is8bit(data):
data = unicode(data, encoding)
return data
def escape(s, replace=string.replace):
s = replace(s, "&", "&")
s = replace(s, "<", "<")
return replace(s, ">", ">",)
if unicode:
def _stringify(string):
# convert to 7-bit ascii if possible
try:
return string.encode("ascii")
except UnicodeError:
return string
else:
def _stringify(string):
return string
__version__ = "1.0.1"
# xmlrpc integer limits
MAXINT = 2L**31-1
MININT = -2L**31
# --------------------------------------------------------------------
# Error constants (from Dan Libby's specification at
# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php)
# Ranges of errors
PARSE_ERROR = -32700
SERVER_ERROR = -32600
APPLICATION_ERROR = -32500
SYSTEM_ERROR = -32400
TRANSPORT_ERROR = -32300
# Specific errors
NOT_WELLFORMED_ERROR = -32700
UNSUPPORTED_ENCODING = -32701
INVALID_ENCODING_CHAR = -32702
INVALID_XMLRPC = -32600
METHOD_NOT_FOUND = -32601
INVALID_METHOD_PARAMS = -32602
INTERNAL_ERROR = -32603
# --------------------------------------------------------------------
# Exceptions
##
# Base class for all kinds of client-side errors.
class Error(Exception):
"""Base class for client errors."""
def __str__(self):
return repr(self)
##
# Indicates an HTTP-level protocol error. This is raised by the HTTP
# transport layer, if the server returns an error code other than 200
# (OK).
#
# @param url The target URL.
# @param errcode The HTTP error code.
# @param errmsg The HTTP error message.
# @param headers The HTTP header dictionary.
class ProtocolError(Error):
"""Indicates an HTTP protocol error."""
def __init__(self, url, errcode, errmsg, headers):
Error.__init__(self)
self.url = url
self.errcode = errcode
self.errmsg = errmsg
self.headers = headers
def __repr__(self):
return (
"<ProtocolError for %s: %s %s>" %
(self.url, self.errcode, self.errmsg)
)
##
# Indicates a broken XML-RPC response package. This exception is
# raised by the unmarshalling layer, if the XML-RPC response is
# malformed.
class ResponseError(Error):
"""Indicates a broken response package."""
pass
##
# Indicates an XML-RPC fault response package. This exception is
# raised by the unmarshalling layer, if the XML-RPC response contains
# a fault string. This exception can also used as a class, to
# generate a fault XML-RPC message.
#
# @param faultCode The XML-RPC fault code.
# @param faultString The XML-RPC fault string.
class Fault(Error):
"""Indicates an XML-RPC fault package."""
def __init__(self, faultCode, faultString, **extra):
Error.__init__(self)
self.faultCode = faultCode
self.faultString = faultString
def __repr__(self):
return (
"<Fault %s: %s>" %
(self.faultCode, repr(self.faultString))
)
# --------------------------------------------------------------------
# Special values
##
# Wrapper for XML-RPC boolean values. Use the xmlrpclib.True and
# xmlrpclib.False constants, or the xmlrpclib.boolean() function, to
# generate boolean XML-RPC values.
#
# @param value A boolean value. Any true value is interpreted as True,
# all other values are interpreted as False.
if _bool_is_builtin:
boolean = Boolean = bool
# to avoid breaking code which references xmlrpclib.{True,False}
True, False = True, False
else:
class Boolean:
"""Boolean-value wrapper.
Use True or False to generate a "boolean" XML-RPC value.
"""
def __init__(self, value = 0):
self.value = operator.truth(value)
def encode(self, out):
out.write("<value><boolean>%d</boolean></value>\n" % self.value)
def __cmp__(self, other):
if isinstance(other, Boolean):
other = other.value
return cmp(self.value, other)
def __repr__(self):
if self.value:
return "<Boolean True at %x>" % id(self)
else:
return "<Boolean False at %x>" % id(self)
def __int__(self):
return self.value
def __nonzero__(self):
return self.value
True, False = Boolean(1), Boolean(0)
##
# Map true or false value to XML-RPC boolean values.
#
# @def boolean(value)
# @param value A boolean value. Any true value is mapped to True,
# all other values are mapped to False.
# @return xmlrpclib.True or xmlrpclib.False.
# @see Boolean
# @see True
# @see False
def boolean(value, _truefalse=(False, True)):
"""Convert any Python value to XML-RPC 'boolean'."""
return _truefalse[operator.truth(value)]
##
# Wrapper for XML-RPC DateTime values. This converts a time value to
# the format used by XML-RPC.
# <p>
# The value can be given as a string in the format
# "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by
# time.localtime()), or an integer value (as returned by time.time()).
# The wrapper uses time.localtime() to convert an integer to a time
# tuple.
#
# @param value The time, given as an ISO 8601 string, a time
# tuple, or a integer time value.
class DateTime:
"""DateTime wrapper for an ISO 8601 string or time tuple or
localtime integer value to generate 'dateTime.iso8601' XML-RPC
value.
"""
def __init__(self, value=0):
if not isinstance(value, StringType):
if datetime and isinstance(value, datetime.datetime):
self.value = value.strftime("%Y%m%dT%H:%M:%S")
return
if datetime and isinstance(value, datetime.date):
self.value = value.strftime("%Y%m%dT%H:%M:%S")
return
if datetime and isinstance(value, datetime.time):
today = datetime.datetime.now().strftime("%Y%m%d")
self.value = value.strftime(today+"T%H:%M:%S")
return
if not isinstance(value, (TupleType, time.struct_time)):
if value == 0:
value = time.time()
value = time.localtime(value)
value = time.strftime("%Y%m%dT%H:%M:%S", value)
self.value = value
def __cmp__(self, other):
if isinstance(other, DateTime):
other = other.value
return cmp(self.value, other)
##
# Get date/time value.
#
# @return Date/time value, as an ISO 8601 string.
def __str__(self):
return self.value
def __repr__(self):
return "<DateTime %s at %x>" % (repr(self.value), id(self))
def decode(self, data):
data = str(data)
self.value = string.strip(data)
def encode(self, out):
out.write("<value><dateTime.iso8601>")
out.write(self.value)
out.write("</dateTime.iso8601></value>\n")
def _datetime(data):
# decode xml element contents into a DateTime structure.
value = DateTime()
value.decode(data)
return value
def _datetime_type(data):
t = time.strptime(data, "%Y%m%dT%H:%M:%S")
return datetime.datetime(*tuple(t)[:6])
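# Illustration: every accepted input type normalizes to the same wire format,
# e.g.
#     >>> DateTime(time.strptime("20030712T10:30:00", "%Y%m%dT%H:%M:%S")).value
#     '20030712T10:30:00'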
##
# Wrapper for binary data. This can be used to transport any kind
# of binary data over XML-RPC, using BASE64 encoding.
#
# @param data An 8-bit string containing arbitrary data.
import base64
try:
import cStringIO as StringIO
except ImportError:
import StringIO
class Binary:
"""Wrapper for binary data."""
def __init__(self, data=None):
self.data = data
##
# Get buffer contents.
#
# @return Buffer contents, as an 8-bit string.
def __str__(self):
return self.data or ""
def __cmp__(self, other):
if isinstance(other, Binary):
other = other.data
return cmp(self.data, other)
def decode(self, data):
self.data = base64.decodestring(data)
def encode(self, out):
out.write("<value><base64>\n")
base64.encode(StringIO.StringIO(self.data), out)
out.write("</base64></value>\n")
def _binary(data):
# decode xml element contents into a Binary structure
value = Binary()
value.decode(data)
return value
WRAPPERS = (DateTime, Binary)
if not _bool_is_builtin:
WRAPPERS = WRAPPERS + (Boolean,)
# --------------------------------------------------------------------
# XML parsers
try:
# optional xmlrpclib accelerator
import _xmlrpclib
FastParser = _xmlrpclib.Parser
FastUnmarshaller = _xmlrpclib.Unmarshaller
except (AttributeError, ImportError):
FastParser = FastUnmarshaller = None
try:
import _xmlrpclib
FastMarshaller = _xmlrpclib.Marshaller
except (AttributeError, ImportError):
FastMarshaller = None
#
# the SGMLOP parser is about 15x faster than Python's builtin
# XML parser. SGMLOP sources can be downloaded from:
#
# http://www.pythonware.com/products/xml/sgmlop.htm
#
try:
import sgmlop
if not hasattr(sgmlop, "XMLParser"):
raise ImportError
except ImportError:
SgmlopParser = None # sgmlop accelerator not available
else:
class SgmlopParser:
def __init__(self, target):
# setup callbacks
self.finish_starttag = target.start
self.finish_endtag = target.end
self.handle_data = target.data
self.handle_xml = target.xml
# activate parser
self.parser = sgmlop.XMLParser()
self.parser.register(self)
self.feed = self.parser.feed
self.entity = {
"amp": "&", "gt": ">", "lt": "<",
"apos": "'", "quot": '"'
}
def close(self):
try:
self.parser.close()
finally:
self.parser = self.feed = None # nuke circular reference
def handle_proc(self, tag, attr):
m = re.search("encoding\s*=\s*['\"]([^\"']+)[\"']", attr)
if m:
self.handle_xml(m.group(1), 1)
def handle_entityref(self, entity):
# <string> entity
try:
self.handle_data(self.entity[entity])
except KeyError:
self.handle_data("&%s;" % entity)
try:
from xml.parsers import expat
if not hasattr(expat, "ParserCreate"):
raise ImportError
except ImportError:
ExpatParser = None # expat not available
else:
class ExpatParser:
# fast expat parser for Python 2.0 and later. this is about
# 50% slower than sgmlop, on roundtrip testing
def __init__(self, target):
self._parser = parser = expat.ParserCreate(None, None)
self._target = target
parser.StartElementHandler = target.start
parser.EndElementHandler = target.end
parser.CharacterDataHandler = target.data
encoding = None
if not parser.returns_unicode:
encoding = "utf-8"
target.xml(encoding, None)
def feed(self, data):
self._parser.Parse(data, 0)
def close(self):
self._parser.Parse("", 1) # end of data
del self._target, self._parser # get rid of circular references
class SlowParser:
"""Default XML parser (based on xmllib.XMLParser)."""
# this is about 10 times slower than sgmlop, on roundtrip
# testing.
def __init__(self, target):
import xmllib # lazy subclassing (!)
if xmllib.XMLParser not in SlowParser.__bases__:
SlowParser.__bases__ = (xmllib.XMLParser,)
self.handle_xml = target.xml
self.unknown_starttag = target.start
self.handle_data = target.data
self.handle_cdata = target.data
self.unknown_endtag = target.end
try:
xmllib.XMLParser.__init__(self, accept_utf8=1)
except TypeError:
xmllib.XMLParser.__init__(self) # pre-2.0
# --------------------------------------------------------------------
# XML-RPC marshalling and unmarshalling code
##
# XML-RPC marshaller.
#
# @param encoding Default encoding for 8-bit strings. The default
# value is None (interpreted as UTF-8).
# @see dumps
class Marshaller:
"""Generate an XML-RPC params chunk from a Python data structure.
Create a Marshaller instance for each set of parameters, and use
the "dumps" method to convert your data (represented as a tuple)
to an XML-RPC params chunk. To write a fault response, pass a
Fault instance instead. You may prefer to use the "dumps" module
function for this purpose.
"""
# by the way, if you don't understand what's going on in here,
# that's perfectly ok.
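# A minimal usage sketch (one Marshaller per parameter set):
#     m = Marshaller(encoding="utf-8")
#     chunk = m.dumps((42, "hi"))   # "<params>...<int>42</int>...</params>\n"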
def __init__(self, encoding=None, allow_none=0):
self.memo = {}
self.data = None
self.encoding = encoding
self.allow_none = allow_none
dispatch = {}
def dumps(self, values):
out = []
write = out.append
dump = self.__dump
if isinstance(values, Fault):
# fault instance
write("<fault>\n")
dump({'faultCode': values.faultCode,
'faultString': values.faultString},
write)
write("</fault>\n")
else:
# parameter block
# FIXME: the xml-rpc specification allows us to leave out
# the entire <params> block if there are no parameters.
# however, changing this may break older code (including
# old versions of xmlrpclib.py), so this is better left as
# is for now. See @XMLRPC3 for more information. /F
write("<params>\n")
for v in values:
write("<param>\n")
dump(v, write)
write("</param>\n")
write("</params>\n")
result = string.join(out, "")
return result
def __dump(self, value, write):
try:
f = self.dispatch[type(value)]
except KeyError:
raise TypeError, "cannot marshal %s objects" % type(value)
else:
f(self, value, write)
def dump_nil (self, value, write):
if not self.allow_none:
raise TypeError, "cannot marshal None unless allow_none is enabled"
write("<value><nil/></value>")
dispatch[NoneType] = dump_nil
def dump_int(self, value, write):
# in case ints are > 32 bits
if value > MAXINT or value < MININT:
raise OverflowError, "int exceeds XML-RPC limits"
write("<value><int>")
write(str(value))
write("</int></value>\n")
dispatch[IntType] = dump_int
if _bool_is_builtin:
def dump_bool(self, value, write):
write("<value><boolean>")
write(value and "1" or "0")
write("</boolean></value>\n")
dispatch[bool] = dump_bool
def dump_long(self, value, write):
if value > MAXINT or value < MININT:
raise OverflowError, "long int exceeds XML-RPC limits"
write("<value><int>")
write(str(int(value)))
write("</int></value>\n")
dispatch[LongType] = dump_long
def dump_double(self, value, write):
write("<value><double>")
write(repr(value))
write("</double></value>\n")
dispatch[FloatType] = dump_double
def dump_string(self, value, write, escape=escape):
write("<value><string>")
write(escape(value))
write("</string></value>\n")
dispatch[StringType] = dump_string
if unicode:
def dump_unicode(self, value, write, escape=escape):
value = value.encode(self.encoding)
write("<value><string>")
write(escape(value))
write("</string></value>\n")
dispatch[UnicodeType] = dump_unicode
def dump_array(self, value, write):
i = id(value)
if self.memo.has_key(i):
raise TypeError, "cannot marshal recursive sequences"
self.memo[i] = None
dump = self.__dump
write("<value><array><data>\n")
for v in value:
dump(v, write)
write("</data></array></value>\n")
del self.memo[i]
dispatch[TupleType] = dump_array
dispatch[ListType] = dump_array
def dump_struct(self, value, write, escape=escape):
i = id(value)
if self.memo.has_key(i):
raise TypeError, "cannot marshal recursive dictionaries"
self.memo[i] = None
dump = self.__dump
write("<value><struct>\n")
for k, v in value.items():
write("<member>\n")
if type(k) is not StringType:
if unicode and type(k) is UnicodeType:
k = k.encode(self.encoding)
else:
raise TypeError, "dictionary key must be string"
write("<name>%s</name>\n" % escape(k))
dump(v, write)
write("</member>\n")
write("</struct></value>\n")
del self.memo[i]
dispatch[DictType] = dump_struct
if datetime:
def dump_datetime(self, value, write):
write("<value><dateTime.iso8601>")
write(value.strftime("%Y%m%dT%H:%M:%S"))
write("</dateTime.iso8601></value>\n")
dispatch[datetime.datetime] = dump_datetime
def dump_date(self, value, write):
write("<value><dateTime.iso8601>")
write(value.strftime("%Y%m%dT00:00:00"))
write("</dateTime.iso8601></value>\n")
dispatch[datetime.date] = dump_date
def dump_time(self, value, write):
write("<value><dateTime.iso8601>")
write(datetime.datetime.now().date().strftime("%Y%m%dT"))
write(value.strftime("%H:%M:%S"))
write("</dateTime.iso8601></value>\n")
dispatch[datetime.time] = dump_time
def dump_instance(self, value, write):
# check for special wrappers
if value.__class__ in WRAPPERS:
self.write = write
value.encode(self)
del self.write
else:
# store instance attributes as a struct (really?)
self.dump_struct(value.__dict__, write)
dispatch[InstanceType] = dump_instance
##
# XML-RPC unmarshaller.
#
# @see loads
class Unmarshaller:
"""Unmarshal an XML-RPC response, based on incoming XML event
messages (start, data, end). Call close() to get the resulting
data structure.
Note that this reader is fairly tolerant, and gladly accepts bogus
XML-RPC data without complaining (but not bogus XML).
"""
# and again, if you don't understand what's going on in here,
# that's perfectly ok.
def __init__(self, use_datetime=0):
self._type = None
self._stack = []
self._marks = []
self._data = []
self._methodname = None
self._encoding = "utf-8"
self.append = self._stack.append
self._use_datetime = use_datetime
if use_datetime and not datetime:
raise ValueError, "the datetime module is not available"
def close(self):
# return response tuple and target method
if self._type is None or self._marks:
raise ResponseError()
if self._type == "fault":
raise Fault(**self._stack[0])
return tuple(self._stack)
def getmethodname(self):
return self._methodname
#
# event handlers
def xml(self, encoding, standalone):
self._encoding = encoding
# FIXME: assert standalone == 1 ???
def start(self, tag, attrs):
# prepare to handle this element
if tag == "array" or tag == "struct":
self._marks.append(len(self._stack))
self._data = []
self._value = (tag == "value")
def data(self, text):
self._data.append(text)
def end(self, tag, join=string.join):
# call the appropriate end tag handler
try:
f = self.dispatch[tag]
except KeyError:
pass # unknown tag ?
else:
return f(self, join(self._data, ""))
#
# accelerator support
def end_dispatch(self, tag, data):
# dispatch data
try:
f = self.dispatch[tag]
except KeyError:
pass # unknown tag ?
else:
return f(self, data)
#
# element decoders
dispatch = {}
def end_nil (self, data):
self.append(None)
self._value = 0
dispatch["nil"] = end_nil
def end_boolean(self, data):
if data == "0":
self.append(False)
elif data == "1":
self.append(True)
else:
raise TypeError, "bad boolean value"
self._value = 0
dispatch["boolean"] = end_boolean
def end_int(self, data):
self.append(int(data))
self._value = 0
dispatch["i4"] = end_int
dispatch["int"] = end_int
def end_double(self, data):
self.append(float(data))
self._value = 0
dispatch["double"] = end_double
def end_string(self, data):
if self._encoding:
data = _decode(data, self._encoding)
self.append(_stringify(data))
self._value = 0
dispatch["string"] = end_string
dispatch["name"] = end_string # struct keys are always strings
def end_array(self, data):
mark = self._marks.pop()
# map arrays to Python lists
self._stack[mark:] = [self._stack[mark:]]
self._value = 0
dispatch["array"] = end_array
def end_struct(self, data):
mark = self._marks.pop()
# map structs to Python dictionaries
dict = {}
items = self._stack[mark:]
for i in range(0, len(items), 2):
dict[_stringify(items[i])] = items[i+1]
self._stack[mark:] = [dict]
self._value = 0
dispatch["struct"] = end_struct
def end_base64(self, data):
value = Binary()
value.decode(data)
self.append(value)
self._value = 0
dispatch["base64"] = end_base64
def end_dateTime(self, data):
value = DateTime()
value.decode(data)
if self._use_datetime:
value = _datetime_type(data)
self.append(value)
dispatch["dateTime.iso8601"] = end_dateTime
def end_value(self, data):
# if we stumble upon a value element with no internal
# elements, treat it as a string element
if self._value:
self.end_string(data)
dispatch["value"] = end_value
def end_params(self, data):
self._type = "params"
dispatch["params"] = end_params
def end_fault(self, data):
self._type = "fault"
dispatch["fault"] = end_fault
def end_methodName(self, data):
if self._encoding:
data = _decode(data, self._encoding)
self._methodname = data
self._type = "methodName" # no params
dispatch["methodName"] = end_methodName
## Multicall support
#
class _MultiCallMethod:
# some lesser magic to store calls made to a MultiCall object
# for batch execution
def __init__(self, call_list, name):
self.__call_list = call_list
self.__name = name
def __getattr__(self, name):
return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
def __call__(self, *args):
self.__call_list.append((self.__name, args))
class MultiCallIterator:
"""Iterates over the results of a multicall. Exceptions are
thrown in response to xmlrpc faults."""
def __init__(self, results):
self.results = results
def __getitem__(self, i):
item = self.results[i]
if type(item) == type({}):
raise Fault(item['faultCode'], item['faultString'])
elif type(item) == type([]):
return item[0]
else:
raise ValueError,\
"unexpected type in multicall result"
class MultiCall:
"""server -> a object used to boxcar method calls
server should be a ServerProxy object.
Methods can be added to the MultiCall using normal
method call syntax e.g.:
multicall = MultiCall(server_proxy)
multicall.add(2,3)
multicall.get_address("Guido")
To execute the multicall, call the MultiCall object e.g.:
add_result, address = multicall()
"""
def __init__(self, server):
self.__server = server
self.__call_list = []
def __repr__(self):
return "<MultiCall at %x>" % id(self)
__str__ = __repr__
def __getattr__(self, name):
return _MultiCallMethod(self.__call_list, name)
def __call__(self):
marshalled_list = []
for name, args in self.__call_list:
marshalled_list.append({'methodName' : name, 'params' : args})
return MultiCallIterator(self.__server.system.multicall(marshalled_list))
# --------------------------------------------------------------------
# convenience functions
##
# Create a parser object, and connect it to an unmarshalling instance.
# This function picks the fastest available XML parser.
#
# return A (parser, unmarshaller) tuple.
def getparser(use_datetime=0):
"""getparser() -> parser, unmarshaller
Create an instance of the fastest available parser, and attach it
to an unmarshalling object. Return both objects.
"""
if use_datetime and not datetime:
raise ValueError, "the datetime module is not available"
if FastParser and FastUnmarshaller:
if use_datetime:
mkdatetime = _datetime_type
else:
mkdatetime = _datetime
target = FastUnmarshaller(True, False, _binary, mkdatetime, Fault)
parser = FastParser(target)
else:
target = Unmarshaller(use_datetime=use_datetime)
if FastParser:
parser = FastParser(target)
elif SgmlopParser:
parser = SgmlopParser(target)
elif ExpatParser:
parser = ExpatParser(target)
else:
parser = SlowParser(target)
return parser, target
##
# Convert a Python tuple or a Fault instance to an XML-RPC packet.
#
# @def dumps(params, **options)
# @param params A tuple or Fault instance.
# @keyparam methodname If given, create a methodCall request for
# this method name.
# @keyparam methodresponse If given, create a methodResponse packet.
# If used with a tuple, the tuple must be a singleton (that is,
# it must contain exactly one element).
# @keyparam encoding The packet encoding.
# @return A string containing marshalled data.
def dumps(params, methodname=None, methodresponse=None, encoding=None,
allow_none=0):
"""data [,options] -> marshalled data
Convert an argument tuple or a Fault instance to an XML-RPC
request (or response, if the methodresponse option is used).
In addition to the data object, the following options can be given
as keyword arguments:
methodname: the method name for a methodCall packet
methodresponse: true to create a methodResponse packet.
If this option is used with a tuple, the tuple must be
a singleton (i.e. it can contain only one element).
encoding: the packet encoding (default is UTF-8)
All 8-bit strings in the data structure are assumed to use the
packet encoding. Unicode strings are automatically converted,
where necessary.
"""
assert isinstance(params, TupleType) or isinstance(params, Fault),\
"argument must be tuple or Fault instance"
if isinstance(params, Fault):
methodresponse = 1
elif methodresponse and isinstance(params, TupleType):
assert len(params) == 1, "response tuple must be a singleton"
if not encoding:
encoding = "utf-8"
if FastMarshaller:
m = FastMarshaller(encoding)
else:
m = Marshaller(encoding, allow_none)
data = m.dumps(params)
if encoding != "utf-8":
xmlheader = "<?xml version='1.0' encoding='%s'?>\n" % str(encoding)
else:
xmlheader = "<?xml version='1.0'?>\n" # utf-8 is default
# standard XML-RPC wrappings
if methodname:
# a method call
if not isinstance(methodname, StringType):
methodname = methodname.encode(encoding)
data = (
xmlheader,
"<methodCall>\n"
"<methodName>", methodname, "</methodName>\n",
data,
"</methodCall>\n"
)
elif methodresponse:
# a method response, or a fault structure
data = (
xmlheader,
"<methodResponse>\n",
data,
"</methodResponse>\n"
)
else:
return data # return as is
return string.join(data, "")
##
# Convert an XML-RPC packet to a Python object. If the XML-RPC packet
# represents a fault condition, this function raises a Fault exception.
#
# @param data An XML-RPC packet, given as an 8-bit string.
# @return A tuple containing the unpacked data, and the method name
# (None if not present).
# @see Fault
def loads(data, use_datetime=0):
"""data -> unmarshalled data, method name
Convert an XML-RPC packet to unmarshalled data plus a method
name (None if not present).
If the XML-RPC packet represents a fault condition, this function
raises a Fault exception.
"""
p, u = getparser(use_datetime=use_datetime)
p.feed(data)
p.close()
return u.close(), u.getmethodname()
# --------------------------------------------------------------------
# request dispatcher
class _Method:
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
##
# Standard transport class for XML-RPC over HTTP.
# <p>
# You can create custom transports by subclassing this method, and
# overriding selected methods.
class Transport:
"""Handles an HTTP transaction to an XML-RPC server."""
# client identifier (may be overridden)
user_agent = "xmlrpclib.py/%s (by www.pythonware.com)" % __version__
def __init__(self, use_datetime=0):
self._use_datetime = use_datetime
##
# Send a complete request, and parse the response.
#
# @param host Target host.
# @param handler Target RPC handler.
# @param request_body XML-RPC request body.
# @param verbose Debugging flag.
# @return Parsed response.
def request(self, host, handler, request_body, verbose=0):
# issue XML-RPC request
h = self.make_connection(host)
if verbose:
h.set_debuglevel(1)
self.send_request(h, handler, request_body)
self.send_host(h, host)
self.send_user_agent(h)
self.send_content(h, request_body)
errcode, errmsg, headers = h.getreply()
if errcode != 200:
raise ProtocolError(
host + handler,
errcode, errmsg,
headers
)
self.verbose = verbose
try:
sock = h._conn.sock
except AttributeError:
sock = None
return self._parse_response(h.getfile(), sock)
##
# Create parser.
#
# @return A 2-tuple containing a parser and an unmarshaller.
def getparser(self):
# get parser and unmarshaller
return getparser(use_datetime=self._use_datetime)
##
# Get authorization info from host parameter
# Host may be a string, or a (host, x509-dict) tuple; if a string,
# it is checked for a "user:pw@host" format, and a "Basic
# Authentication" header is added if appropriate.
#
# @param host Host descriptor (URL or (URL, x509 info) tuple).
# @return A 3-tuple containing (actual host, extra headers,
# x509 info). The header and x509 fields may be None.
def get_host_info(self, host):
x509 = {}
if isinstance(host, TupleType):
host, x509 = host
import urllib
auth, host = urllib.splituser(host)
if auth:
import base64
auth = base64.encodestring(urllib.unquote(auth))
auth = string.join(string.split(auth), "") # get rid of whitespace
extra_headers = [
("Authorization", "Basic " + auth)
]
else:
extra_headers = None
return host, extra_headers, x509
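# Example: get_host_info("user:pw@host.example") returns
# ("host.example", [("Authorization", "Basic dXNlcjpwdw==")], {})
# where the token is base64("user:pw").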
##
# Connect to server.
#
# @param host Target host.
# @return A connection handle.
def make_connection(self, host):
# create an HTTP connection object from a host descriptor
import httplib
host, extra_headers, x509 = self.get_host_info(host)
return httplib.HTTP(host)
##
# Send request header.
#
# @param connection Connection handle.
# @param handler Target RPC handler.
# @param request_body XML-RPC body.
def send_request(self, connection, handler, request_body):
connection.putrequest("POST", handler)
##
# Send host name.
#
# @param connection Connection handle.
# @param host Host name.
def send_host(self, connection, host):
host, extra_headers, x509 = self.get_host_info(host)
connection.putheader("Host", host)
if extra_headers:
if isinstance(extra_headers, DictType):
extra_headers = extra_headers.items()
for key, value in extra_headers:
connection.putheader(key, value)
##
# Send user-agent identifier.
#
# @param connection Connection handle.
def send_user_agent(self, connection):
connection.putheader("User-Agent", self.user_agent)
##
# Send request body.
#
# @param connection Connection handle.
# @param request_body XML-RPC request body.
def send_content(self, connection, request_body):
connection.putheader("Content-Type", "text/xml")
connection.putheader("Content-Length", str(len(request_body)))
connection.endheaders()
if request_body:
connection.send(request_body)
##
# Parse response.
#
# @param file Stream.
# @return Response tuple and target method.
def parse_response(self, file):
# compatibility interface
return self._parse_response(file, None)
##
# Parse response (alternate interface). This is similar to the
# parse_response method, but also provides direct access to the
# underlying socket object (where available).
#
# @param file Stream.
# @param sock Socket handle (or None, if the socket object
# could not be accessed).
# @return Response tuple and target method.
def _parse_response(self, file, sock):
# read response from input file/socket, and parse it
p, u = self.getparser()
while 1:
if sock:
response = sock.recv(1024)
else:
response = file.read(1024)
if not response:
break
if self.verbose:
print "body:", repr(response)
p.feed(response)
file.close()
p.close()
return u.close()
##
# Standard transport class for XML-RPC over HTTPS.
class SafeTransport(Transport):
"""Handles an HTTPS transaction to an XML-RPC server."""
# FIXME: mostly untested
def make_connection(self, host):
# create an HTTPS connection object from a host descriptor
# host may be a string, or a (host, x509-dict) tuple
import httplib
host, extra_headers, x509 = self.get_host_info(host)
try:
HTTPS = httplib.HTTPS
except AttributeError:
raise NotImplementedError(
"your version of httplib doesn't support HTTPS"
)
else:
return HTTPS(host, None, **(x509 or {}))
##
# Standard server proxy. This class establishes a virtual connection
# to an XML-RPC server.
# <p>
# This class is available as ServerProxy and Server. New code should
# use ServerProxy, to avoid confusion.
#
# @def ServerProxy(uri, **options)
# @param uri The connection point on the server.
# @keyparam transport A transport factory, compatible with the
# standard transport class.
# @keyparam encoding The default encoding used for 8-bit strings
# (default is UTF-8).
# @keyparam verbose Use a true value to enable debugging output.
# (printed to standard output).
# @see Transport
class ServerProxy:
"""uri [,options] -> a logical connection to an XML-RPC server
uri is the connection point on the server, given as
scheme://host/target.
The standard implementation always supports the "http" scheme. If
SSL socket support is available (Python 2.0), it also supports
"https".
If the target part and the slash preceding it are both omitted,
"/RPC2" is assumed.
The following options can be given as keyword arguments:
transport: a transport factory
encoding: the request encoding (default is UTF-8)
All 8-bit strings passed to the server proxy are assumed to use
the given encoding.
"""
def __init__(self, uri, transport=None, encoding=None, verbose=0,
allow_none=0, use_datetime=0):
# establish a "logical" server connection
# get the url
import urllib
type, uri = urllib.splittype(uri)
if type not in ("http", "https"):
raise IOError, "unsupported XML-RPC protocol"
self.__host, self.__handler = urllib.splithost(uri)
if not self.__handler:
self.__handler = "/RPC2"
if transport is None:
if type == "https":
transport = SafeTransport(use_datetime=use_datetime)
else:
transport = Transport(use_datetime=use_datetime)
self.__transport = transport
self.__encoding = encoding
self.__verbose = verbose
self.__allow_none = allow_none
def __request(self, methodname, params):
# call a method on the remote server
request = dumps(params, methodname, encoding=self.__encoding,
allow_none=self.__allow_none)
response = self.__transport.request(
self.__host,
self.__handler,
request,
verbose=self.__verbose
)
if len(response) == 1:
response = response[0]
return response
def __repr__(self):
return (
"<ServerProxy for %s%s>" %
(self.__host, self.__handler)
)
__str__ = __repr__
def __getattr__(self, name):
# magic method dispatcher
return _Method(self.__request, name)
# note: to call a remote object with a non-standard name, use
# result = getattr(server, "strange-python-name")(args)
# compatibility
Server = ServerProxy
# --------------------------------------------------------------------
# test code
if __name__ == "__main__":
# simple test program (from the XML-RPC specification)
# server = ServerProxy("http://localhost:8000") # local server
server = ServerProxy("http://time.xmlrpc.com/RPC2")
print server
try:
print server.currentTime.getCurrentTime()
except Error, v:
print "ERROR", v
multi = MultiCall(server)
multi.currentTime.getCurrentTime()
multi.currentTime.getCurrentTime()
try:
for response in multi():
print response
except Error, v:
print "ERROR", v
|
gabrielfalcao/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/regressiontests/__init__.py
|
12133432
| |
quru/wagtail
|
refs/heads/master
|
wagtail/wagtailcore/tests/__init__.py
|
12133432
| |
jounex/hue
|
refs/heads/master
|
desktop/core/ext-py/django-axes-1.4.0/examples/example/__init__.py
|
12133432
| |
tchernomax/ansible
|
refs/heads/devel
|
lib/ansible/modules/system/nosh.py
|
20
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Thomas Caravia <taca@kadisius.eu>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nosh
author:
- "Thomas Caravia"
version_added: "2.5"
short_description: Manage services with nosh
description:
- Control running and enabled state for system-wide or user services.
- BSD and Linux systems are supported.
options:
name:
required: true
description:
- Name of the service to manage.
state:
required: false
choices: [ started, stopped, reset, restarted, reloaded ]
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary.
C(restarted) will always bounce the service.
C(reloaded) will send a SIGHUP or start the service.
C(reset) will start or stop the service according to whether it is
enabled or not.
enabled:
required: false
type: bool
description:
- Enable or disable the service, independently of C(*.preset) file
preference or running state. Mutually exclusive with I(preset). Will take
effect prior to I(state=reset).
preset:
required: false
type: bool
description:
- Enable or disable the service according to local preferences in *.preset files.
Mutually exclusive with I(enabled). Only has an effect if set to true. Will take
effect prior to I(state=reset).
user:
required: false
default: 'no'
type: bool
description:
- Run system-control talking to the calling user's service manager, rather than
the system-wide service manager.
requirements:
- A system with an active nosh service manager, see Notes for further information.
notes:
- Information on the nosh utilities suite may be found at U(https://jdebp.eu/Softwares/nosh/).
'''
EXAMPLES = '''
- name: start dnscache if not running
nosh: name=dnscache state=started
- name: stop mpd, if running
nosh: name=mpd state=stopped
- name: restart unbound or start it if not already running
nosh:
name: unbound
state: restarted
- name: reload fail2ban or start it if not already running
nosh:
name: fail2ban
state: reloaded
- name: disable nsd
nosh: name=nsd enabled=no
- name: for package installers, set nginx running state according to local enable settings, preset and reset
nosh: name=nginx preset=True state=reset
- name: reboot the host if nosh is the system manager, would need a "wait_for*" task at least, not recommended as-is
nosh: name=reboot state=started
- name: using conditionals with the module facts
tasks:
- name: obtain information on tinydns service
nosh: name=tinydns
register: result
- name: fail if service not loaded
fail: msg="The {{ result.name }} service is not loaded"
when: not result.status
- name: fail if service is running
fail: msg="The {{ result.name }} service is running"
when: result.status and result.status['DaemontoolsEncoreState'] == "running"
'''
RETURN = '''
name:
description: name used to find the service
returned: success
type: string
sample: "sshd"
service_path:
description: resolved path for the service
returned: success
type: string
sample: "/var/sv/sshd"
enabled:
description: whether the service is enabled at system bootstrap
returned: success
type: boolean
sample: True
preset:
description: whether the enabled status reflects the one set in the relevant C(*.preset) file
returned: success
type: boolean
sample: False
state:
description: service process run state, C(None) if the service is not loaded and will not be started
returned: if state option is used
type: string
sample: "reloaded"
status:
description: a dictionary with the key=value pairs returned by `system-control show-json` or C(None) if the service is not loaded
returned: success
type: complex
contains: {
"After": [
"/etc/service-bundles/targets/basic",
"../sshdgenkeys",
"log"
],
"Before": [
"/etc/service-bundles/targets/shutdown"
],
"Conflicts": [],
"DaemontoolsEncoreState": "running",
"DaemontoolsState": "up",
"Enabled": true,
"LogService": "../cyclog@sshd",
"MainPID": 661,
"Paused": false,
"ReadyAfterRun": false,
"RemainAfterExit": false,
"Required-By": [],
"RestartExitStatusCode": 0,
"RestartExitStatusNumber": 0,
"RestartTimestamp": 4611686019935648081,
"RestartUTCTimestamp": 1508260140,
"RunExitStatusCode": 0,
"RunExitStatusNumber": 0,
"RunTimestamp": 4611686019935648081,
"RunUTCTimestamp": 1508260140,
"StartExitStatusCode": 1,
"StartExitStatusNumber": 0,
"StartTimestamp": 4611686019935648081,
"StartUTCTimestamp": 1508260140,
"StopExitStatusCode": 0,
"StopExitStatusNumber": 0,
"StopTimestamp": 4611686019935648081,
"StopUTCTimestamp": 1508260140,
"Stopped-By": [
"/etc/service-bundles/targets/shutdown"
],
"Timestamp": 4611686019935648081,
"UTCTimestamp": 1508260140,
"Want": "nothing",
"Wanted-By": [
"/etc/service-bundles/targets/server",
"/etc/service-bundles/targets/sockets"
],
"Wants": [
"/etc/service-bundles/targets/basic",
"../sshdgenkeys"
]
}
user:
description: whether the user-level service manager is called
returned: success
type: boolean
sample: False
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.service import fail_if_missing
from ansible.module_utils._text import to_native
def run_sys_ctl(module, args):
sys_ctl = [module.get_bin_path('system-control', required=True)]
if module.params['user']:
sys_ctl = sys_ctl + ['--user']
return module.run_command(sys_ctl + args)
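# e.g. with the 'user' option set, run_sys_ctl(module, ['start', '/var/sv/sshd'])
# executes: system-control --user start /var/sv/sshd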
def get_service_path(module, service):
(rc, out, err) = run_sys_ctl(module, ['find', service])
# fail if service not found
if rc != 0:
fail_if_missing(module, False, service, msg='host')
else:
return to_native(out).strip()
def service_is_enabled(module, service_path):
(rc, out, err) = run_sys_ctl(module, ['is-enabled', service_path])
return rc == 0
def service_is_preset_enabled(module, service_path):
(rc, out, err) = run_sys_ctl(module, ['preset', '--dry-run', service_path])
return to_native(out).strip().startswith("enable")
def service_is_loaded(module, service_path):
(rc, out, err) = run_sys_ctl(module, ['is-loaded', service_path])
return rc == 0
def get_service_status(module, service_path):
(rc, out, err) = run_sys_ctl(module, ['show-json', service_path])
    # will fail if the service is not loaded
if err is not None and err:
module.fail_json(msg=err)
else:
json_out = json.loads(to_native(out).strip())
status = json_out[service_path] # descend past service path header
return status
def service_is_running(service_status):
return service_status['DaemontoolsEncoreState'] in set(['starting', 'started', 'running'])
def handle_enabled(module, result, service_path):
"""Enable or disable a service as needed.
- 'preset' will set the enabled state according to available preset file settings.
- 'enabled' will set the enabled state explicitly, independently of preset settings.
These options are set to "mutually exclusive" but the explicit 'enabled' option will
have priority if the check is bypassed.
"""
# computed prior in control flow
preset = result['preset']
enabled = result['enabled']
# preset, effect only if option set to true (no reverse preset)
if module.params['preset']:
action = 'preset'
# run preset if needed
if preset != module.params['preset']:
result['changed'] = True
if not module.check_mode:
(rc, out, err) = run_sys_ctl(module, [action, service_path])
if rc != 0:
module.fail_json(msg="Unable to %s service %s: %s" % (action, service_path, out + err))
result['preset'] = not preset
result['enabled'] = not enabled
# enabled/disabled state
if module.params['enabled'] is not None:
if module.params['enabled']:
action = 'enable'
else:
action = 'disable'
# change enable/disable if needed
if enabled != module.params['enabled']:
result['changed'] = True
if not module.check_mode:
(rc, out, err) = run_sys_ctl(module, [action, service_path])
if rc != 0:
module.fail_json(msg="Unable to %s service %s: %s" % (action, service_path, out + err))
result['enabled'] = not enabled
result['preset'] = not preset
def handle_state(module, result, service_path):
"""Set service running state as needed.
Takes into account the fact that a service may not be loaded (no supervise directory) in
which case it is 'stopped' as far as the service manager is concerned. No status information
can be obtained and the service can only be 'started'.
"""
# default to desired state, no action
result['state'] = module.params['state']
state = module.params['state']
action = None
# computed prior in control flow, possibly modified by handle_enabled()
enabled = result['enabled']
# service not loaded -> not started by manager, no status information
if not service_is_loaded(module, service_path):
if state in ['started', 'restarted', 'reloaded']:
action = 'start'
result['state'] = 'started'
elif state == 'reset':
if enabled:
action = 'start'
result['state'] = 'started'
else:
result['state'] = None
else:
result['state'] = None
# service is loaded
else:
# get status information
result['status'] = get_service_status(module, service_path)
running = service_is_running(result['status'])
if state == 'started':
if not running:
action = 'start'
elif state == 'stopped':
if running:
action = 'stop'
# reset = start/stop according to enabled status
elif state == 'reset':
if enabled is not running:
if running:
action = 'stop'
result['state'] = 'stopped'
else:
action = 'start'
result['state'] = 'started'
# start if not running, 'service' module constraint
elif state == 'restarted':
if not running:
action = 'start'
result['state'] = 'started'
else:
action = 'condrestart'
# start if not running, 'service' module constraint
elif state == 'reloaded':
if not running:
action = 'start'
result['state'] = 'started'
else:
action = 'hangup'
# change state as needed
if action:
result['changed'] = True
if not module.check_mode:
(rc, out, err) = run_sys_ctl(module, [action, service_path])
if rc != 0:
module.fail_json(msg="Unable to %s service %s: %s" % (action, service_path, err))
# ===========================================
# Main control flow
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(choices=['started', 'stopped', 'reset', 'restarted', 'reloaded'], type='str'),
enabled=dict(type='bool'),
preset=dict(type='bool'),
user=dict(type='bool', default=False),
),
supports_check_mode=True,
mutually_exclusive=[['enabled', 'preset']],
)
service = module.params['name']
rc = 0
out = err = ''
result = {
'name': service,
'changed': False,
'status': None,
}
# check service can be found (or fail) and get path
service_path = get_service_path(module, service)
# get preliminary service facts
result['service_path'] = service_path
result['user'] = module.params['user']
result['enabled'] = service_is_enabled(module, service_path)
result['preset'] = result['enabled'] is service_is_preset_enabled(module, service_path)
# set enabled state, service need not be loaded
if module.params['enabled'] is not None or module.params['preset']:
handle_enabled(module, result, service_path)
# set service running state
if module.params['state'] is not None:
handle_state(module, result, service_path)
# get final service status if possible
if service_is_loaded(module, service_path):
result['status'] = get_service_status(module, service_path)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
hassoon3/odoo
|
refs/heads/8.0
|
addons/stock_dropshipping/__init__.py
|
223
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import stock_dropshipping
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nespinoza/secret-santa
|
refs/heads/master
|
run.py
|
1
|
# -*- coding: utf-8 -*-
from mail_utils import email_meta_loader
from mail_utils import assign_partner
from mail_utils import load_emails
from mail_utils import send_email
if __name__ == "__main__":
email = email_meta_loader()
emails = load_emails(email['list'])
assign_partner(emails)
for user in emails:
print('[INFO] Sending email to {}'.format(user['email']))
send_email(email, user)
|
virgree/odoo
|
refs/heads/8.0
|
addons/l10n_gr/__init__.py
|
438
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#import sandwich_wizard
#import order_create
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
andreesg/bda.plone.orders
|
refs/heads/master
|
src/bda/plone/orders/browser/views.py
|
1
|
# -*- coding: utf-8 -*-
from AccessControl import Unauthorized
from Products.CMFPlone.interfaces import IPloneSiteRoot
from Products.Five import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from Products.statusmessages.interfaces import IStatusMessage
from bda.plone.cart import ascur
from bda.plone.cart import get_object_by_uid
from bda.plone.checkout import message_factory as _co
from bda.plone.checkout.vocabularies import get_pycountry_name
from bda.plone.orders import interfaces as ifaces
from bda.plone.orders import message_factory as _
from bda.plone.orders import permissions
from bda.plone.orders import vocabularies as vocabs
from bda.plone.orders.browser.dropdown import BaseDropdown
from bda.plone.orders.common import BookingData
from bda.plone.orders.common import DT_FORMAT
from bda.plone.orders.common import OrderData
from bda.plone.orders.common import booking_update_comment
from bda.plone.orders.common import get_orders_soup
from bda.plone.orders.common import get_vendor_by_uid
from bda.plone.orders.common import get_vendor_uids_for
from bda.plone.orders.common import get_vendors_for
from bda.plone.orders.interfaces import IBuyable
from bda.plone.orders.transitions import do_transition_for
from bda.plone.orders.transitions import transitions_of_main_state
from bda.plone.orders.transitions import transitions_of_salaried_state
from plone.memoize import view
from repoze.catalog.query import Any
from repoze.catalog.query import Contains
from repoze.catalog.query import Eq
from souper.soup import LazyRecord
from souper.soup import get_soup
from yafowil.base import factory
from yafowil.controller import Controller
from yafowil.utils import Tag
from zExceptions import BadRequest
from zExceptions import Redirect
from zope.i18n import translate
from zope.i18nmessageid import Message
from zope.security import checkPermission
import json
import pkg_resources
import plone.api
import urllib
import uuid
IS_P4 = pkg_resources.require("Products.CMFPlone")[0].version[0] == '4'
class OrdersContentView(BrowserView):
def disable_border(self):
if IS_P4:
self.request.set('disable_border', True)
def disable_left_column(self):
self.request.set('disable_plone.leftcolumn', True)
def disable_right_column(self):
self.request.set('disable_plone.rightcolumn', True)
class Translate(object):
def __init__(self, request):
self.request = request
def __call__(self, msg):
if not isinstance(msg, Message):
return msg
return translate(msg, context=self.request)
class OrderDropdown(BaseDropdown):
@property
def order_data(self):
vendor_uid = self.request.form.get('vendor', '')
if vendor_uid:
vendor_uids = [vendor_uid]
else:
vendor_uids = get_vendor_uids_for()
return OrderData(
self.context,
order=self.record,
vendor_uids=vendor_uids
)
class OrderStateDropdown(OrderDropdown):
name = 'state'
css = 'dropdown change_order_state_dropdown'
action = 'orderstatetransition'
vocab = vocabs.state_vocab()
transitions = vocabs.state_transitions_vocab()
@property
def value(self):
return self.order_data.state
@property
def items(self):
transitions = transitions_of_main_state(self.value)
return self.create_items(transitions)
class OrderSalariedDropdown(OrderDropdown):
name = 'salaried'
css = 'dropdown change_order_salaried_dropdown'
action = 'ordersalariedtransition'
vocab = vocabs.salaried_vocab()
transitions = vocabs.salaried_transitions_vocab()
@property
def value(self):
return self.order_data.salaried or ifaces.SALARIED_NO
@property
def items(self):
transitions = transitions_of_salaried_state(self.value)
return self.create_items(transitions)
class Transition(BrowserView):
dropdown = None
@property
def vendor_uids(self):
vendor_uid = self.request.form.get('vendor', '')
if vendor_uid:
vendor_uids = [vendor_uid]
vendor = get_vendor_by_uid(self.context, vendor_uid)
user = plone.api.user.get_current()
if not user.checkPermission(permissions.ModifyOrders, vendor):
raise Unauthorized
else:
vendor_uids = get_vendor_uids_for()
if not vendor_uids:
raise Unauthorized
return vendor_uids
def __call__(self):
uid = self.request['uid']
transition = self.request['transition']
vendor_uids = self.vendor_uids
record = self.do_transition(uid, transition, vendor_uids)
return self.dropdown(self.context, self.request, record).render()
class OrderTransition(Transition):
def do_transition(self, uid, transition, vendor_uids):
order_data = OrderData(
self.context,
uid=uid,
vendor_uids=vendor_uids
)
do_transition_for(
order_data,
transition=transition,
context=self.context,
request=self.request
)
return order_data.order
class OrderStateTransition(OrderTransition):
dropdown = OrderStateDropdown
class OrderSalariedTransition(OrderTransition):
dropdown = OrderSalariedDropdown
class TableData(BrowserView):
soup_name = None
search_text_index = None
@property
def columns(self):
"""Return list of dicts with column definitions:
[{
'id': 'colid',
'label': 'Col Label',
'head': callback,
'renderer': callback,
}]
"""
raise NotImplementedError(u"Abstract DataTable does not implement "
u"``columns``.")
def query(self, soup):
"""Return 2-tuple with result length and lazy record iterator.
"""
raise NotImplementedError(u"Abstract DataTable does not implement "
u"``query``.")
def sort(self):
columns = self.columns
sortparams = dict()
sortcols_idx = int(self.request.form.get('iSortCol_0'))
sortparams['index'] = columns[sortcols_idx]['id']
sortparams['reverse'] = self.request.form.get('sSortDir_0') == 'desc'
return sortparams
def all(self, soup):
data = soup.storage.data
sort = self.sort()
sort_index = soup.catalog[sort['index']]
iids = sort_index.sort(data.keys(), reverse=sort['reverse'])
def lazyrecords():
for iid in iids:
yield LazyRecord(iid, soup)
return soup.storage.length.value, lazyrecords()
def slice(self, fullresult):
start = int(self.request.form['iDisplayStart'])
length = int(self.request.form['iDisplayLength'])
count = 0
for lr in fullresult:
if count >= start and count < (start + length):
yield lr
if count >= (start + length):
break
count += 1
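        # e.g. iDisplayStart=10 and iDisplayLength=10 yield the records at
        # positions 10..19 of the sorted result, matching DataTables paging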
def column_def(self, colname):
for column in self.columns:
if column['id'] == colname:
return column
def __call__(self):
soup = get_soup(self.soup_name, self.context)
aaData = list()
length, lazydata = self.query(soup)
columns = self.columns
        colnames = [col['id'] for col in columns]  # avoid shadowing the ``_`` message factory
        # TODO: add a JSON response header
def record2list(record):
result = list()
for colname in colnames:
coldef = self.column_def(colname)
renderer = coldef.get('renderer')
if renderer:
value = renderer(colname, record)
else:
value = record.attrs.get(colname, '')
result.append(value)
return result
for lazyrecord in self.slice(lazydata):
aaData.append(record2list(lazyrecord()))
data = {
"sEcho": int(self.request.form['sEcho']),
"iTotalRecords": soup.storage.length.value,
"iTotalDisplayRecords": length,
"aaData": aaData,
}
self.request.response.setHeader("Content-type", "application/json")
return json.dumps(data)
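    # request/response contract (DataTables 1.9 naming, as used above): the
    # request carries sEcho, iDisplayStart, iDisplayLength, iSortCol_0,
    # sSortDir_0 and sSearch; the JSON answer echoes sEcho and returns
    # iTotalRecords, iTotalDisplayRecords and the rendered rows in aaData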
class OrdersViewBase(OrdersContentView):
table_view_name = '@@orderstable'
def orders_table(self):
return self.context.restrictedTraverse(self.table_view_name)()
class OrdersView(OrdersViewBase):
def __call__(self):
# check if authenticated user is vendor
if not get_vendors_for():
raise Unauthorized
return super(OrdersView, self).__call__()
class MyOrdersView(OrdersViewBase):
table_view_name = '@@myorderstable'
class OrdersTableBase(BrowserView):
table_template = ViewPageTemplateFile('table.pt')
table_id = 'bdaploneorders'
data_view_name = '@@ordersdata'
def rendered_table(self):
return self.table_template(self)
def render_filter(self):
return None
def render_order_actions_head(self):
return None
def render_order_actions(self, colname, record):
return None
def render_salaried(self, colname, record):
salaried = OrderData(self.context, order=record).salaried\
or ifaces.SALARIED_NO
return translate(vocabs.salaried_vocab()[salaried],
context=self.request)
def render_state(self, colname, record):
state = OrderData(self.context, order=record).state
if not state:
return '-/-'
return translate(vocabs.state_vocab()[state], context=self.request)
def render_dt(self, colname, record):
value = record.attrs.get(colname, '')
if value:
value = value.strftime(DT_FORMAT)
return value
@property
def ajaxurl(self):
return u'{0}/{1}'.format(
self.context.absolute_url(),
self.data_view_name
)
@property
def columns(self):
return [{
'id': 'actions',
'label': _('actions', default=u'Actions'),
'head': self.render_order_actions_head,
'renderer': self.render_order_actions,
}, {
'id': 'created',
'label': _('date', default=u'Date'),
'renderer': self.render_dt,
}, {
'id': 'personal_data.lastname',
'label': _('lastname', default=u'Last Name'),
}, {
'id': 'personal_data.firstname',
'label': _('firstname', default=u'First Name'),
}, {
'id': 'personal_data.email',
'label': _('email', default=u'Email'),
}, {
'id': 'billing_address.city',
'label': _('city', default=u'City'),
}, {
'id': 'salaried',
'label': _('salaried', default=u'Salaried'),
'renderer': self.render_salaried,
}, {
'id': 'state',
'label': _('state', default=u'State'),
'renderer': self.render_state,
}]
def vendors_form_vocab():
vendors = vocabs.vendors_vocab_for()
return [('', _('all', default='All'))] + vendors
def customers_form_vocab():
customers = vocabs.customers_vocab_for()
return [('', _('all', default='All'))] + customers
def states_form_vocab():
states = vocabs.state_vocab()
return [('', _('all', default='All'))] + states.items()
def salaried_form_vocab():
salaried = vocabs.salaried_vocab()
return [('', _('all', default='All'))] + salaried.items()
class OrdersTable(OrdersTableBase):
def render_filter(self):
# vendor areas of current user
vendors = vendors_form_vocab()
vendor_selector = None
# vendor selection, include if more than one vendor
if len(vendors) > 2:
vendor_selector = factory(
'div:label:select',
name='vendor',
value=self.request.form.get('vendor', ''),
props={
'vocabulary': vendors,
'label': _('filter_for_vendors',
default=u'Filter for vendors'),
}
)
# customers of current user
customers = customers_form_vocab()
customer_selector = None
# customers selection, include if more than one customer
if len(customers) > 2:
customer_selector = factory(
'div:label:select',
name='customer',
value=self.request.form.get('customer', ''),
props={
'vocabulary': customers,
'label': _('filter_for_customers',
default=u'Filter for customers'),
}
)
states = states_form_vocab()
state_selector = factory(
'div:label:select',
name='state',
value=self.request.form.get('state', ''),
props={
'vocabulary': states,
'label': _('filter_for_state',
default=u'Filter for states'),
}
)
salaried = salaried_form_vocab()
salaried_selector = factory(
'div:label:select',
name='salaried',
value=self.request.form.get('salaried', ''),
props={
'vocabulary': salaried,
'label': _('filter_for_salaried',
default=u'Filter for salaried state'),
}
)
# concatenate filters
filter_widgets = ''
if vendor_selector:
filter_widgets += vendor_selector(request=self.request)
if customer_selector:
filter_widgets += customer_selector(request=self.request)
filter_widgets += state_selector(request=self.request)
filter_widgets += salaried_selector(request=self.request)
return filter_widgets
def render_order_actions_head(self):
tag = Tag(Translate(self.request))
select_all_orders_attrs = {
'name': 'select_all_orders',
'type': 'checkbox',
'class_': 'select_all_orders',
'title': _('select_all_orders',
default=u'Select all visible orders'),
}
select_all_orders = tag('input', **select_all_orders_attrs)
notify_customers_target = self.context.absolute_url()
notify_customers_attributes = {
'ajax:target': notify_customers_target,
'class_': 'notify_customers',
'href': '',
'title': _('notify_customers',
default=u'Notify customers of selected orders'),
}
notify_customers = tag('a', ' ', **notify_customers_attributes)
return select_all_orders + notify_customers
def render_order_actions(self, colname, record):
tag = Tag(Translate(self.request))
vendor_uid = self.request.form.get('vendor', '')
if vendor_uid:
view_order_target = '%s?uid=%s&vendor=%s' % (
self.context.absolute_url(),
str(record.attrs['uid']),
vendor_uid)
else:
view_order_target = '%s?uid=%s' % (
self.context.absolute_url(),
str(record.attrs['uid']))
view_order_attrs = {
'ajax:bind': 'click',
'ajax:target': view_order_target,
'ajax:overlay': 'order',
'class_': 'contenttype-document',
'href': '',
'title': _('view_order', default=u'View Order'),
}
view_order = tag('a', ' ', **view_order_attrs)
select_order_attrs = {
'name': 'select_order',
'type': 'checkbox',
'value': record.attrs['uid'],
'class_': 'select_order',
}
select_order = tag('input', **select_order_attrs)
return select_order + view_order
def check_modify_order(self, order):
vendor_uid = self.request.form.get('vendor', '')
if vendor_uid:
vendor_uids = [vendor_uid]
vendor = get_vendor_by_uid(self.context, vendor_uid)
user = plone.api.user.get_current()
if not user.checkPermission(permissions.ModifyOrders, vendor):
return False
else:
vendor_uids = get_vendor_uids_for()
if not vendor_uids:
return False
return True
def render_salaried(self, colname, record):
if not self.check_modify_order(record):
salaried = OrderData(self.context, order=record).salaried
return translate(vocabs.salaried_vocab()[salaried],
context=self.request)
return OrderSalariedDropdown(
self.context,
self.request,
record
).render()
def render_state(self, colname, record):
if not self.check_modify_order(record):
state = OrderData(self.context, order=record).state
return translate(vocabs.state_vocab()[state],
context=self.request)
return OrderStateDropdown(
self.context,
self.request,
record
).render()
@property
def ajaxurl(self):
params = [
('vendor', self.request.form.get('vendor')),
('customer', self.request.form.get('customer')),
('state', self.request.form.get('state')),
('salaried', self.request.form.get('salaried')),
]
query = urllib.urlencode(dict([it for it in params if it[1]]))
query = query and u'?{0}'.format(query) or ''
return u'{0:s}/{1:s}{2:s}'.format(
self.context.absolute_url(),
self.data_view_name,
query
)
def __call__(self):
# check if authenticated user is vendor
if not get_vendors_for():
raise Unauthorized
# disable diazo theming if ajax call
if '_' in self.request.form:
self.request.response.setHeader('X-Theme-Disabled', 'True')
return super(OrdersTable, self).__call__()
class MyOrdersTable(OrdersTableBase):
data_view_name = '@@myordersdata'
def render_order_actions(self, colname, record):
tag = Tag(Translate(self.request))
view_order_target = '%s?uid=%s' % (
self.context.absolute_url(), str(record.attrs['uid']))
view_order_attrs = {
'ajax:bind': 'click',
'ajax:target': view_order_target,
'ajax:overlay': 'myorder',
'class_': 'contenttype-document',
'href': '',
'title': _('view_order', default=u'View Order'),
}
view_order = tag('a', ' ', **view_order_attrs)
return view_order
def __call__(self):
# disable diazo theming if ajax call
if '_' in self.request.form:
self.request.response.setHeader('X-Theme-Disabled', 'True')
return super(MyOrdersTable, self).__call__()
class OrdersData(OrdersTable, TableData):
soup_name = 'bda_plone_orders_orders'
search_text_index = 'text'
def _get_buyables_in_context(self):
catalog = plone.api.portal.get_tool("portal_catalog")
path = '/'.join(self.context.getPhysicalPath())
brains = catalog(path=path, object_provides=IBuyable.__identifier__)
for brain in brains:
yield brain.UID
def query(self, soup):
# fetch user vendor uids
vendor_uids = get_vendor_uids_for()
# filter by given vendor uid or user vendor uids
vendor_uid = self.request.form.get('vendor')
if vendor_uid:
vendor_uid = uuid.UUID(vendor_uid)
# raise if given vendor uid not in user vendor uids
if vendor_uid not in vendor_uids:
raise Unauthorized
query = Any('vendor_uids', [vendor_uid])
else:
query = Any('vendor_uids', vendor_uids)
# filter by customer if given
customer = self.request.form.get('customer')
if customer:
query = query & Eq('creator', customer)
# Filter by state if given
state = self.request.form.get('state')
if state:
query = query & Eq('state', state)
# Filter by salaried if given
salaried = self.request.form.get('salaried')
if salaried:
query = query & Eq('salaried', salaried)
# filter by search term if given
term = self.request.form['sSearch'].decode('utf-8')
if term:
# append * for proper fulltext search
term += '*'
query = query & Contains(self.search_text_index, term)
# get buyable uids for given context, get all buyables on site root
# use explicit IPloneSiteRoot to make it play nice with lineage
if not IPloneSiteRoot.providedBy(self.context):
buyable_uids = self._get_buyables_in_context()
query = query & Any('buyable_uids', buyable_uids)
# query orders and return result
sort = self.sort()
res = soup.lazy(query,
sort_index=sort['index'],
reverse=sort['reverse'],
with_size=True)
length = res.next()
return length, res
class MyOrdersData(MyOrdersTable, TableData):
soup_name = 'bda_plone_orders_orders'
search_text_index = 'text'
def query(self, soup):
query = Eq('creator', plone.api.user.get_current().getId())
# filter by search term if given
term = self.request.form['sSearch'].decode('utf-8')
if term:
# append * for proper fulltext search
term += '*'
query = query & Contains(self.search_text_index, term)
# query orders and return result
sort = self.sort()
res = soup.lazy(query,
sort_index=sort['index'],
reverse=sort['reverse'],
with_size=True)
length = res.next()
return length, res
class OrderViewBase(BrowserView):
@property
@view.memoize
def order_data(self):
return OrderData(self.context, uid=self.uid)
@property
def uid(self):
return self.request.form.get('uid', None)
@property
def order(self):
if not self.uid:
err = _(
'statusmessage_err_no_order_uid_given',
default='Cannot show order information because no order uid was given.' # noqa
)
IStatusMessage(self.request).addStatusMessage(err, 'error')
raise Redirect(self.context.absolute_url())
return dict(self.order_data.order.attrs)
@property
def net(self):
return ascur(self.order_data.net)
@property
def vat(self):
return ascur(self.order_data.vat)
@property
def discount_net(self):
return ascur(self.order_data.discount_net)
@property
def discount_vat(self):
return ascur(self.order_data.discount_vat)
@property
def shipping_title(self):
# XXX: node.ext.zodb or souper bug with double linked list. figure out
order = self.order_data.order.attrs
# order = self.order
title = translate(order['shipping_label'], context=self.request)
if order['shipping_description']:
title += ' (%s)' % translate(order['shipping_description'],
context=self.request)
return title
@property
def shipping_net(self):
return ascur(self.order_data.shipping_net)
@property
def shipping_vat(self):
return ascur(self.order_data.shipping_vat)
@property
def shipping(self):
# B/C
return ascur(self.order_data.shipping)
@property
def total(self):
return ascur(self.order_data.total)
@property
def currency(self):
currency = None
for booking in self.order_data.bookings:
if currency is None:
currency = booking.attrs.get('currency')
if currency != booking.attrs.get('currency'):
return None
return currency
@property
def listing(self):
# XXX: discount
ret = list()
for booking in self.order_data.bookings:
obj = get_object_by_uid(self.context, booking.attrs['buyable_uid'])
state = vocabs.state_vocab()[booking.attrs.get('state')]
salaried = vocabs.salaried_vocab()[booking.attrs.get('salaried')]
ret.append({
'uid': booking.attrs['uid'],
'title': booking.attrs['title'],
'url': obj.absolute_url(),
'count': booking.attrs['buyable_count'],
'net': ascur(booking.attrs.get('net', 0.0)),
'discount_net': ascur(float(booking.attrs['discount_net'])),
'vat': booking.attrs.get('vat', 0.0),
'comment': booking.attrs['buyable_comment'],
'quantity_unit': booking.attrs.get('quantity_unit'),
'currency': booking.attrs.get('currency'),
'state': state,
'salaried': salaried,
})
return ret
@property
def can_modify_order(self):
return checkPermission('bda.plone.orders.ModifyOrders', self.context)
@property
def can_cancel_booking(self):
return (
self.can_modify_order and
self.order_data.state != ifaces.STATE_CANCELLED
)
@property
def gender(self):
gender = self.order['personal_data.gender']
if gender == 'male':
return _co('male', 'Male')
if gender == 'female':
return _co('female', 'Female')
return gender
@property
def payment(self):
# XXX: node.ext.zodb or souper bug with double linked list. figure out
order = self.order_data.order.attrs
# order = self.order
title = translate(order['payment_label'], context=self.request)
return title
@property
def salaried(self):
salaried = self.order_data.salaried or ifaces.SALARIED_NO
return vocabs.salaried_vocab()[salaried]
@property
def tid(self):
tid = [it for it in self.order_data.tid if it != 'none']
if not tid:
return _('none', default=u'None')
return ', '.join(tid)
@property
def state(self):
state = self.order_data.state or ifaces.STATE_NEW
return vocabs.state_vocab()[state]
@property
def created(self):
value = self.order.get('created', _('unknown', default=u'Unknown'))
if value:
value = value.strftime(DT_FORMAT)
return value
def exported(self, item):
return item['exported'] \
and _('yes', default=u'Yes') or _('no', default=u'No')
def country(self, country_id):
        # return the raw value if the id is not available, i.e. when no dropdown is in use
try:
return get_pycountry_name(country_id)
except:
return country_id
class OrderView(OrderViewBase):
def __call__(self):
vendor_uid = self.request.form.get('vendor', '')
if vendor_uid:
self.vendor_uids = [vendor_uid]
vendor = get_vendor_by_uid(self.context, vendor_uid)
user = plone.api.user.get_current()
if not user.checkPermission(permissions.ModifyOrders, vendor):
raise Unauthorized
else:
self.vendor_uids = get_vendor_uids_for()
if not self.vendor_uids:
raise Unauthorized
return super(OrderView, self).__call__()
@property
@view.memoize
def order_data(self):
return OrderData(
self.context,
uid=self.uid,
vendor_uids=self.vendor_uids)
@property
def ordernumber(self):
return self.order_data.order.attrs['ordernumber']
class MyOrderView(OrderViewBase):
def __call__(self):
# check if order was created by authenticated user
user = plone.api.user.get_current()
if user.getId() != self.order['creator']:
raise Unauthorized
return super(MyOrderView, self).__call__()
@property
def ordernumber(self):
return self.order_data.order.attrs['ordernumber']
class DirectOrderView(OrderViewBase):
"""Direct Order view.
Expect ordernumber and email to grant access to the order details.
"""
order_auth_template = ViewPageTemplateFile('order_show.pt')
order_template = ViewPageTemplateFile('order.pt')
uid = None
ordernumber = ''
email = ''
def _form_handler(self, widget, data):
self.ordernumber = data['ordernumber'].extracted
self.email = data['email'].extracted
def render_auth_form(self):
# Render the authentication form for anonymous users.
req = self.request
action = req.getURL()
ordernumber = self.ordernumber or req.form.get('ordernumber', '')
email = self.email or req.form.get('email', '')
form = factory(
'form',
name='order_auth_form',
props={'action': action})
form['ordernumber'] = factory(
'div:label:error:text',
value=ordernumber,
props={
'label': _('anon_auth_label_ordernumber',
default=u'Ordernumber'),
'div.class': 'ordernumber',
'required': True,
})
form['email'] = factory(
'div:label:error:text',
value=email,
props={
'label': _('anon_auth_label_email', default=u'Email'),
'div.class': 'email',
'required': True,
})
form['submit'] = factory(
'div:label:submit',
props={
'label': _('anon_auth_label_submit', default=u'Submit'),
'div.class': 'submit',
'handler': self._form_handler,
'action': 'submit',
})
controller = Controller(form, req)
return controller.rendered
def render_order_template(self):
return self.order_template(self)
def __call__(self):
req = self.request
ordernumber = req.form.get('order_auth_form.ordernumber', None)
email = req.form.get('order_auth_form.email', None)
order = None
errs = []
if ordernumber and email:
orders_soup = get_orders_soup(self.context)
            order = orders_soup.query(Eq('ordernumber', ordernumber))
            try:
                order = order.next()  # generator should hold exactly one item
                assert(order.attrs['personal_data.email'] == email)
            except (StopIteration, AssertionError):
                # Don't raise Unauthorized, as that would allow drawing
                # conclusions about which ordernumbers exist
                order = None
if not email:
err = _('anon_auth_err_email',
                    default=u'Please provide the email address you used for '
u'submitting the order.')
errs.append(err)
if not ordernumber:
err = _('anon_auth_err_ordernumber',
default=u'Please provide the ordernumber')
errs.append(err)
if email and ordernumber and not order:
err = _('anon_auth_err_order',
default=u'No order could be found for the given '
u'credentials')
errs.append(err)
if not ordernumber and not email:
# first call of this form
errs = []
for err in errs:
IStatusMessage(self.request).addStatusMessage(err, 'error')
self.uid = order.attrs['uid'] if order else None
return self.order_auth_template(self)
class OrderDone(BrowserView):
# XXX: provide different headings and texts for states reservation and
# mixed
reservation_states = (ifaces.STATE_RESERVED, ifaces.STATE_MIXED)
@property
def order_data(self):
return OrderData(self.context, uid=self.request.get('uid'))
@property
def heading(self):
try:
if self.order_data.state in self.reservation_states:
return _('reservation_done', default=u'Reservation Done')
return _('order_done', default=u'Order Done')
except ValueError:
return _('unknown_order', default=u'Unknown Order')
@property
def id(self):
try:
return self.order_data.order.attrs['ordernumber']
except ValueError:
return _('unknown', default=u'Unknown')
@property
def text(self):
try:
if self.order_data.state in self.reservation_states:
return _('reservation_text',
default=u'Thanks for your Reservation.')
return _('order_text', default=u'Thanks for your Order.')
except ValueError:
return _('unknown_order_text',
default=u'Sorry, this order does not exist.')
class BookingCancel(BrowserView):
def __call__(self):
booking_uid = self.request.form.get('uid')
if not booking_uid:
raise BadRequest('value not given')
try:
booking_data = BookingData(self.context, uid=uuid.UUID(booking_uid)) # noqa
if booking_data.booking is None:
raise ValueError('invalid value (no booking found)')
do_transition_for(
booking_data,
transition=ifaces.STATE_TRANSITION_CANCEL,
context=self.context,
request=self.request
)
except ValueError:
raise BadRequest('something is wrong with the value')
plone.api.portal.show_message(
message=_(u"Booking cancelled."),
request=self.request,
type='info'
)
self.request.response.redirect(
self.context.absolute_url() + '/@@orders'
)
class BookingUpdateComment(BrowserView):
def __call__(self):
booking_uid = self.request.form.get('uid')
if not booking_uid:
raise BadRequest('value not given')
booking_comment = self.request.form.get('comment')
try:
booking_update_comment(
self,
uuid.UUID(booking_uid),
booking_comment
)
except ValueError:
raise BadRequest('something is wrong with the value')
|
alanjw/GreenOpenERP-Win-X86
|
refs/heads/7.0
|
python/Lib/site-packages/gdata/tlslite/utils/compat.py
|
361
|
"""Miscellaneous functions to mask Python version differences."""
import sys
import os
if sys.version_info < (2,2):
raise AssertionError("Python 2.2 or later required")
if sys.version_info < (2,3):
def enumerate(collection):
return zip(range(len(collection)), collection)
class Set:
def __init__(self, seq=None):
self.values = {}
if seq:
for e in seq:
self.values[e] = None
def add(self, e):
self.values[e] = None
def discard(self, e):
if e in self.values.keys():
del(self.values[e])
def union(self, s):
ret = Set()
for e in self.values.keys():
ret.values[e] = None
for e in s.values.keys():
ret.values[e] = None
return ret
def issubset(self, other):
for e in self.values.keys():
if e not in other.values.keys():
return False
return True
        def __nonzero__(self):
return len(self.values.keys())
def __contains__(self, e):
return e in self.values.keys()
def __iter__(self):
            return iter(self.values.keys())
if os.name != "java":
import array
def createByteArraySequence(seq):
return array.array('B', seq)
def createByteArrayZeros(howMany):
return array.array('B', [0] * howMany)
def concatArrays(a1, a2):
return a1+a2
def bytesToString(bytes):
return bytes.tostring()
def stringToBytes(s):
bytes = createByteArrayZeros(0)
bytes.fromstring(s)
return bytes
import math
def numBits(n):
if n==0:
return 0
s = "%x" % n
return ((len(s)-1)*4) + \
{'0':0, '1':1, '2':2, '3':2,
'4':3, '5':3, '6':3, '7':3,
'8':4, '9':4, 'a':4, 'b':4,
'c':4, 'd':4, 'e':4, 'f':4,
}[s[0]]
        # equivalent alternative (unreachable): int(math.floor(math.log(n, 2)) + 1)
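        # e.g. numBits(0x1f) == 5 and numBits(0xff) == 8: four bits for each
        # full hex digit plus the table lookup for the leading digit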
BaseException = Exception
import sys
import traceback
def formatExceptionTrace(e):
newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
return newStr
else:
#Jython 2.1 is missing lots of python 2.3 stuff,
#which we have to emulate here:
#NOTE: JYTHON SUPPORT NO LONGER WORKS, DUE TO USE OF GENERATORS.
    #THIS CODE IS LEFT IN SO THAT ONCE JYTHON UPDATES TO 2.2, IT HAS A
#CHANCE OF WORKING AGAIN.
import java
import jarray
def createByteArraySequence(seq):
if isinstance(seq, type("")): #If it's a string, convert
seq = [ord(c) for c in seq]
        return jarray.array(seq, 'h')  # use short instead of bytes, because bytes are signed
def createByteArrayZeros(howMany):
        return jarray.zeros(howMany, 'h')  # use short instead of bytes, because bytes are signed
def concatArrays(a1, a2):
l = list(a1)+list(a2)
return createByteArraySequence(l)
#WAY TOO SLOW - MUST BE REPLACED------------
def bytesToString(bytes):
return "".join([chr(b) for b in bytes])
def stringToBytes(s):
bytes = createByteArrayZeros(len(s))
for count, c in enumerate(s):
bytes[count] = ord(c)
return bytes
#WAY TOO SLOW - MUST BE REPLACED------------
def numBits(n):
if n==0:
return 0
        n = 1L * n  # convert to long, if it isn't already
return n.__tojava__(java.math.BigInteger).bitLength()
#Adjust the string to an array of bytes
def stringToJavaByteArray(s):
bytes = jarray.zeros(len(s), 'b')
for count, c in enumerate(s):
x = ord(c)
if x >= 128: x -= 256
bytes[count] = x
return bytes
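        # e.g. a character with ord(c) == 0xE9 maps to -23, since Java bytes
        # are signed two's-complement values in [-128, 127]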
BaseException = java.lang.Exception
import sys
import traceback
def formatExceptionTrace(e):
newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
return newStr
|
teramagazine/coin
|
refs/heads/master
|
qa/rpc-tests/invalidtxrequest.py
|
10
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import ComparisonTestFramework
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
'''
In this test we connect to one node over p2p, and test tx requests.
'''
# Use the ComparisonTestFramework with 1 node: only use --testbinary.
class InvalidTxRequestTest(ComparisonTestFramework):
''' Can either run this test as 1 node with expected answers, or two and compare them.
Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        super().__init__()
        self.num_nodes = 1
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
self.tip = None
self.block_time = None
NetworkThread().start() # Start up network handling in another thread
test.run()
def get_tests(self):
if self.tip is None:
self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
self.block_time = int(time.time())+1
'''
Create a new block with an anyone-can-spend coinbase
'''
height = 1
block = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block.solve()
# Save the coinbase for later
self.block1 = block
self.tip = block.sha256
height += 1
yield TestInstance([[block, True]])
'''
Now we need that block to mature so we can spend the coinbase.
'''
test = TestInstance(sync_every_block=False)
for i in range(100):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
self.tip = block.sha256
self.block_time += 1
test.blocks_and_transactions.append([block, True])
height += 1
yield test
# b'\x64' is OP_NOTIF
# Transaction will be rejected with code 16 (REJECT_INVALID)
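        # OP_NOTIF needs a stack element to read; on an empty stack the script
        # fails evaluation, which surfaces as the mandatory-flag reject below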
tx1 = create_transaction(self.block1.vtx[0], 0, b'\x64', 50 * COIN)
yield TestInstance([[tx1, RejectResult(16, b'mandatory-script-verify-flag-failed')]])
# TODO: test further transactions...
if __name__ == '__main__':
InvalidTxRequestTest().main()
|
openstack/keystone
|
refs/heads/master
|
keystone/common/sql/contract_repo/versions/078_placeholder.py
|
30
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This is a placeholder for Ussuri backports. Do not use this number for new
# Victoria work. New Victoria work starts after all the placeholders.
def upgrade(migrate_engine):
pass
|
csutherl/sos
|
refs/heads/master
|
sos/plugins/quagga.py
|
12
|
# Copyright (C) 2007 Ranjith Rajaram <rrajaram@redhat.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin
class Quagga(Plugin, RedHatPlugin):
"""Quagga routing service
"""
plugin_name = 'quagga'
profiles = ('network',)
files = ('/etc/quagga/zebra.conf',)
packages = ('quagga',)
def setup(self):
self.add_copy_spec("/etc/quagga/")
# vim: set et ts=4 sw=4 :
|
c3nav/c3nav
|
refs/heads/master
|
src/c3nav/routing/utils/draw.py
|
1
|
from django.conf import settings
def _ellipse_bbox(x, y, height):
x *= settings.RENDER_SCALE
y *= settings.RENDER_SCALE
y = height-y
return ((x - 2, y - 2), (x + 2, y + 2))
def _line_coords(from_point, to_point, height):
return (from_point.x * settings.RENDER_SCALE, height - (from_point.y * settings.RENDER_SCALE),
to_point.x * settings.RENDER_SCALE, height - (to_point.y * settings.RENDER_SCALE))
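# A minimal usage sketch, not part of the original module: these helpers are
# presumably consumed by a PIL-based renderer, roughly as follows (``Image``,
# ``ImageDraw`` and the point objects are assumptions for illustration):
#
#     from PIL import Image, ImageDraw
#     img = Image.new('RGB', (width, height))
#     draw = ImageDraw.Draw(img)
#     draw.ellipse(_ellipse_bbox(point.x, point.y, height), fill='red')
#     draw.line(_line_coords(point_a, point_b, height), fill='blue')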
|
HyperBaton/ansible
|
refs/heads/devel
|
test/units/modules/storage/netapp/test_na_ontap_svm.py
|
37
|
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit test template for ONTAP Ansible module '''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch, Mock
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_svm \
import NetAppOntapSVM as svm_module # module under test
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
''' mock server connection to ONTAP host '''
def __init__(self, kind=None, data=None):
''' save arguments '''
self.type = kind
self.params = data
self.xml_in = None
self.xml_out = None
def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
''' mock invoke_successfully returning xml data '''
self.xml_in = xml
if self.type == 'vserver':
xml = self.build_vserver_info(self.params)
self.xml_out = xml
return xml
@staticmethod
def build_vserver_info(vserver):
        ''' build xml data for vserver-info '''
xml = netapp_utils.zapi.NaElement('xml')
data = {'num-records': 1, 'attributes-list': {'vserver-info': {
'vserver-name': vserver['name'],
'ipspace': vserver['ipspace'],
'root-volume': vserver['root_volume'],
'root-volume-aggregate': vserver['root_volume_aggregate'],
'language': vserver['language'],
'comment': vserver['comment'],
'snapshot-policy': vserver['snapshot_policy'],
'vserver-subtype': vserver['subtype'],
'allowed-protocols': [{'protocol': 'nfs'}, {'protocol': 'cifs'}],
'aggr-list': [{'aggr-name': 'aggr_1'}, {'aggr-name': 'aggr_2'}],
}}}
xml.translate_struct(data)
return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
self.server = MockONTAPConnection()
self.mock_vserver = {
'name': 'test_svm',
'root_volume': 'ansible_vol',
'root_volume_aggregate': 'ansible_aggr',
'aggr_list': 'aggr_1,aggr_2',
'ipspace': 'ansible_ipspace',
'subtype': 'default',
'language': 'c.utf_8',
'snapshot_policy': 'old_snapshot_policy',
'comment': 'this is a comment'
}
def mock_args(self):
return {
'name': self.mock_vserver['name'],
'root_volume': self.mock_vserver['root_volume'],
'root_volume_aggregate': self.mock_vserver['root_volume_aggregate'],
'aggr_list': self.mock_vserver['aggr_list'],
'ipspace': self.mock_vserver['ipspace'],
'comment': self.mock_vserver['comment'],
'subtype': 'default',
'hostname': 'test',
'username': 'test_user',
'password': 'test_pass!'
}
def get_vserver_mock_object(self, kind=None, data=None):
"""
Helper method to return an na_ontap_volume object
:param kind: passes this param to MockONTAPConnection()
:param data: passes this param to MockONTAPConnection()
:return: na_ontap_volume object
"""
vserver_obj = svm_module()
vserver_obj.asup_log_for_cserver = Mock(return_value=None)
vserver_obj.cluster = Mock()
vserver_obj.cluster.invoke_successfully = Mock()
if kind is None:
vserver_obj.server = MockONTAPConnection()
else:
if data is None:
vserver_obj.server = MockONTAPConnection(kind='vserver', data=self.mock_vserver)
else:
vserver_obj.server = MockONTAPConnection(kind='vserver', data=data)
return vserver_obj
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
svm_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_get_nonexistent_vserver(self):
        ''' test that get_vserver() returns None if the vserver does not exist '''
data = self.mock_args()
set_module_args(data)
result = self.get_vserver_mock_object().get_vserver()
assert result is None
def test_create_error_missing_name(self):
''' Test if create throws an error if name is not specified'''
data = self.mock_args()
del data['name']
set_module_args(data)
with pytest.raises(AnsibleFailJson) as exc:
self.get_vserver_mock_object('vserver').create_vserver()
msg = 'missing required arguments: name'
assert exc.value.args[0]['msg'] == msg
@patch('ansible.modules.storage.netapp.na_ontap_svm.NetAppOntapSVM.create_vserver')
def test_successful_create(self, create_vserver):
'''Test successful create'''
data = self.mock_args()
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object().apply()
assert exc.value.args[0]['changed']
create_vserver.assert_called_with()
@patch('ansible.modules.storage.netapp.na_ontap_svm.NetAppOntapSVM.create_vserver')
def test_create_idempotency(self, create_vserver):
        '''Test create idempotency'''
data = self.mock_args()
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object('vserver').apply()
assert not exc.value.args[0]['changed']
create_vserver.assert_not_called()
def test_successful_delete(self):
'''Test successful delete'''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object('vserver').apply()
assert exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_svm.NetAppOntapSVM.delete_vserver')
def test_delete_idempotency(self, delete_vserver):
'''Test delete idempotency'''
data = self.mock_args()
data['state'] = 'absent'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object().apply()
assert not exc.value.args[0]['changed']
delete_vserver.assert_not_called()
@patch('ansible.modules.storage.netapp.na_ontap_svm.NetAppOntapSVM.get_vserver')
def test_successful_rename(self, get_vserver):
'''Test successful rename'''
data = self.mock_args()
data['from_name'] = 'test_svm'
data['name'] = 'test_new_svm'
set_module_args(data)
current = {
'name': 'test_svm',
'root_volume': 'ansible_vol',
'root_volume_aggregate': 'ansible_aggr',
'ipspace': 'ansible_ipspace',
'subtype': 'default',
'language': 'c.utf_8'
}
get_vserver.side_effect = [
None,
current
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object().apply()
assert exc.value.args[0]['changed']
def test_successful_modify_language(self):
'''Test successful modify language'''
data = self.mock_args()
data['language'] = 'c'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object('vserver').apply()
assert exc.value.args[0]['changed']
def test_successful_modify_snapshot_policy(self):
        '''Test successful modify snapshot policy'''
data = self.mock_args()
data['snapshot_policy'] = 'new_snapshot_policy'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object('vserver').apply()
assert exc.value.args[0]['changed']
def test_successful_modify_allowed_protocols(self):
'''Test successful modify allowed protocols'''
data = self.mock_args()
data['allowed_protocols'] = 'protocol_1,protocol_2'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object('vserver').apply()
assert exc.value.args[0]['changed']
def test_successful_modify_aggr_list(self):
'''Test successful modify aggr-list'''
data = self.mock_args()
data['aggr_list'] = 'aggr_3,aggr_4'
set_module_args(data)
with pytest.raises(AnsibleExitJson) as exc:
self.get_vserver_mock_object('vserver').apply()
assert exc.value.args[0]['changed']
|
jmartinm/InvenioAuthorLists
|
refs/heads/master
|
modules/websubmit/lib/functions/Mail_Submitter.py
|
5
|
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
##
## Name: Mail_Submitter.py
## Description: function Mail_Submitter
## This function sends a confirmation email to the submitter
## of the document
## Author: T.Baron
##
## PARAMETERS: authorfile: name of the file containing the author
## titleFile: name of the file containing the title
## emailFile: name of the file containing the email
## status: one of "ADDED" (the document has been integrated
## into the database) or "APPROVAL" (an email has
## been sent to a referee - simple approval)
## edsrn: name of the file containing the reference
## newrnin: name of the file containing the 2nd reference
## (if any)
## OUTPUT: HTML
##
import os
import re
from invenio.config import CFG_SITE_NAME, \
CFG_SITE_URL, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_RECORD
from invenio.websubmit_config import CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN
from invenio.mailutils import send_email
from invenio.websubmit_functions.Shared_Functions import get_nice_bibsched_related_message
def Mail_Submitter(parameters, curdir, form, user_info=None):
"""
    This function sends an email to the submitter to confirm that the
    document they have just submitted has been correctly received.
Parameters:
* authorfile: Name of the file containing the authors of the
document
* titleFile: Name of the file containing the title of the
document
* emailFile: Name of the file containing the email of the
submitter of the document
* status: Depending on the value of this parameter, the function
adds an additional text to the email. This parameter
can be one of: ADDED: The file has been integrated in
the database. APPROVAL: The file has been sent for
approval to a referee. or can stay empty.
* edsrn: Name of the file containing the reference of the
document
* newrnin: Name of the file containing the 2nd reference of the
document (if any)
"""
FROMADDR = '%s Submission Engine <%s>' % (CFG_SITE_NAME,CFG_SITE_SUPPORT_EMAIL)
# retrieve report number
edsrn = parameters['edsrn']
newrnin = parameters['newrnin']
fp = open("%s/%s" % (curdir,edsrn),"r")
rn = fp.read()
fp.close()
rn = re.sub("[\n\r]+","",rn)
if newrnin != "" and os.path.exists("%s/%s" % (curdir,newrnin)):
fp = open("%s/%s" % (curdir,newrnin),"r")
additional_rn = fp.read()
fp.close()
additional_rn = re.sub("[\n\r]+","",additional_rn)
fullrn = "%s and %s" % (additional_rn,rn)
else:
fullrn = rn
fullrn = fullrn.replace("\n"," ")
# The title is read from the file specified by 'titlefile'
try:
fp = open("%s/%s" % (curdir,parameters['titleFile']),"r")
m_title = fp.read().replace("\n"," ")
fp.close()
    except Exception:
m_title = "-"
# The name of the author is read from the file specified by 'authorfile'
try:
fp = open("%s/%s" % (curdir,parameters['authorfile']),"r")
m_author = fp.read().replace("\n"," ")
fp.close()
    except Exception:
m_author = "-"
    # The submitter's email address is read from the file specified by 'emailFile'
    try:
        fp = open("%s/%s" % (curdir,parameters['emailFile']),"r")
        m_recipient = fp.read().replace("\n"," ")
        fp.close()
    except Exception:
        m_recipient = ""
# create email body
email_txt = "The document %s\nTitle: %s\nAuthor(s): %s\n\nhas been correctly received\n\n" % (fullrn,m_title,m_author)
    # The user is informed either that the document has been added to the database or that it has been sent for approval
if parameters['status'] == "APPROVAL":
email_txt = email_txt + "An email has been sent to the referee. You will be warned by email as soon as the referee takes his/her decision regarding your document.\n\n"
elif parameters['status'] == "ADDED":
email_txt = email_txt + "It will be soon added to our Document Server.\n\nOnce inserted, you will be able to check the bibliographic information and the quality of the electronic documents at this URL:\n<%s/%s/%s>\nIf you detect an error please let us know by sending an email to %s. \n\n" % (CFG_SITE_URL,CFG_SITE_RECORD,sysno,CFG_SITE_SUPPORT_EMAIL)
email_txt += get_nice_bibsched_related_message(curdir)
email_txt = email_txt + "Thank you for using %s Submission Interface.\n" % CFG_SITE_NAME
# send the mail
send_email(FROMADDR, m_recipient.strip(), "%s: Document Received" % fullrn, email_txt, copy_to_admin=CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN)
return ""
|
ychen820/microblog
|
refs/heads/master
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/contrib/gis/db/backends/oracle/compiler.py
|
148
|
from django.contrib.gis.db.models.sql.compiler import GeoSQLCompiler as BaseGeoSQLCompiler
from django.db.backends.oracle import compiler
SQLCompiler = compiler.SQLCompiler
class GeoSQLCompiler(BaseGeoSQLCompiler, SQLCompiler):
pass
class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler):
pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler):
pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler):
pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler):
pass
class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler):
pass
|
muntasirsyed/intellij-community
|
refs/heads/master
|
python/testData/completion/mro.after.py
|
83
|
class C(object):
pass
C.__mro__
|
LonglyCode/flask
|
refs/heads/master
|
docs/flaskdocext.py
|
192
|
import re
import inspect
# The (?m) flag must lead the pattern; a trailing global flag is rejected by
# modern versions of the re module.
_internal_mark_re = re.compile(r'(?m)^\s*:internal:\s*$')
def skip_member(app, what, name, obj, skip, options):
docstring = inspect.getdoc(obj)
if skip:
return True
return _internal_mark_re.search(docstring or '') is not None
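# Illustrative sketch (not part of the original extension): skip_member hides
# any documented object whose docstring carries the marker on a line of its
# own; the helper name below is hypothetical.
#
#     def _frobnicate():
#         """Do internal things.
#
#         :internal:
#         """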
def setup(app):
app.connect('autodoc-skip-member', skip_member)
|
tsufiev/horizon
|
refs/heads/master
|
openstack_dashboard/test/integration_tests/pages/pageobject.py
|
4
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test.integration_tests import basewebobject
class PageObject(basewebobject.BaseWebObject):
"""Base class for page objects."""
def __init__(self, driver, conf):
"""Constructor."""
super(PageObject, self).__init__(driver, conf)
self.login_url = self.conf.dashboard.login_url
self._page_title = None
@property
def page_title(self):
return self.driver.title
def is_the_current_page(self):
if self._page_title not in self.page_title:
raise AssertionError(
"Expected to find %s in page title, instead found: %s"
% (self._page_title, self.page_title))
return True
def get_url_current_page(self):
return self.driver.current_url
def close_window(self):
return self.driver.close()
def switch_window(self, window_name=None, window_index=None):
"""Switches focus between the webdriver windows.
Args:
- window_name: The name of the window to switch to.
- window_index: The index of the window handle to switch to.
If the method is called without arguments it switches to the
last window in the driver window_handles list.
In case only one window exists nothing effectively happens.
Usage:
page.switch_window('_new')
page.switch_window(2)
page.switch_window()
"""
if window_name is not None and window_index is not None:
raise ValueError("switch_window receives the window's name or "
"the window's index, not both.")
if window_name is not None:
self.driver.switch_to.window(window_name)
elif window_index is not None:
self.driver.switch_to.window(
self.driver.window_handles[window_index])
else:
self.driver.switch_to.window(self.driver.window_handles[-1])
def go_to_previous_page(self):
self.driver.back()
def go_to_next_page(self):
self.driver.forward()
def refresh_page(self):
self.driver.refresh()
def go_to_login_page(self):
self.driver.get(self.login_url)
self.is_the_current_page()
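# Illustrative sketch (not part of the module): a concrete page is expected to
# set _page_title so that is_the_current_page() can verify navigation; the
# subclass below is hypothetical.
#
#     class LoginPage(PageObject):
#         def __init__(self, driver, conf):
#             super(LoginPage, self).__init__(driver, conf)
#             self._page_title = "Login"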
|
jkburges/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/user_unittest.py
|
124
|
# Copyright (C) 2010 Research in Motion Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Research in Motion Ltd. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.system.user import User
class UserTest(unittest.TestCase):
example_user_response = "example user response"
def test_prompt_repeat(self):
self.repeatsRemaining = 2
def mock_raw_input(message):
self.repeatsRemaining -= 1
if not self.repeatsRemaining:
return UserTest.example_user_response
return None
self.assertEqual(User.prompt("input", repeat=self.repeatsRemaining, raw_input=mock_raw_input), UserTest.example_user_response)
def test_prompt_when_exceeded_repeats(self):
self.repeatsRemaining = 2
def mock_raw_input(message):
self.repeatsRemaining -= 1
return None
self.assertEqual(User.prompt("input", repeat=self.repeatsRemaining, raw_input=mock_raw_input), None)
def test_prompt_with_multiple_lists(self):
def run_prompt_test(inputs, expected_result, can_choose_multiple=False):
def mock_raw_input(message):
return inputs.pop(0)
output_capture = OutputCapture()
actual_result = output_capture.assert_outputs(
self,
User.prompt_with_multiple_lists,
args=["title", ["subtitle1", "subtitle2"], [["foo", "bar"], ["foobar", "barbaz", "foobaz"]]],
kwargs={"can_choose_multiple": can_choose_multiple, "raw_input": mock_raw_input},
expected_stdout="title\n\nsubtitle1\n 1. foo\n 2. bar\n\nsubtitle2\n 3. foobar\n 4. barbaz\n 5. foobaz\n")
self.assertEqual(actual_result, expected_result)
self.assertEqual(len(inputs), 0)
run_prompt_test(["1"], "foo")
run_prompt_test(["badinput", "2"], "bar")
run_prompt_test(["3"], "foobar")
run_prompt_test(["4"], "barbaz")
run_prompt_test(["5"], "foobaz")
run_prompt_test(["1,2"], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test(["1-3"], ["foo", "bar", "foobar"], can_choose_multiple=True)
run_prompt_test(["1-2,3"], ["foo", "bar", "foobar"], can_choose_multiple=True)
run_prompt_test(["2-1,3"], ["foobar"], can_choose_multiple=True)
run_prompt_test([" 1, 2 "], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test(["all"], ["foo", "bar", 'foobar', 'barbaz', 'foobaz'], can_choose_multiple=True)
run_prompt_test([""], ["foo", "bar", 'foobar', 'barbaz', 'foobaz'], can_choose_multiple=True)
run_prompt_test([" "], ["foo", "bar", 'foobar', 'barbaz', 'foobaz'], can_choose_multiple=True)
run_prompt_test(["badinput", "all"], ["foo", "bar", 'foobar', 'barbaz', 'foobaz'], can_choose_multiple=True)
def test_prompt_with_list(self):
def run_prompt_test(inputs, expected_result, can_choose_multiple=False):
def mock_raw_input(message):
return inputs.pop(0)
output_capture = OutputCapture()
actual_result = output_capture.assert_outputs(
self,
User.prompt_with_list,
args=["title", ["foo", "bar"]],
kwargs={"can_choose_multiple": can_choose_multiple, "raw_input": mock_raw_input},
expected_stdout="title\n 1. foo\n 2. bar\n")
self.assertEqual(actual_result, expected_result)
self.assertEqual(len(inputs), 0)
run_prompt_test(["1"], "foo")
run_prompt_test(["badinput", "2"], "bar")
run_prompt_test(["1,2"], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test([" 1, 2 "], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test(["all"], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test([""], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test([" "], ["foo", "bar"], can_choose_multiple=True)
run_prompt_test(["badinput", "all"], ["foo", "bar"], can_choose_multiple=True)
def test_confirm(self):
test_cases = (
(("Continue? [Y/n]: ", True), (User.DEFAULT_YES, 'y')),
(("Continue? [Y/n]: ", False), (User.DEFAULT_YES, 'n')),
(("Continue? [Y/n]: ", True), (User.DEFAULT_YES, '')),
(("Continue? [Y/n]: ", False), (User.DEFAULT_YES, 'q')),
(("Continue? [y/N]: ", True), (User.DEFAULT_NO, 'y')),
(("Continue? [y/N]: ", False), (User.DEFAULT_NO, 'n')),
(("Continue? [y/N]: ", False), (User.DEFAULT_NO, '')),
(("Continue? [y/N]: ", False), (User.DEFAULT_NO, 'q')),
)
for test_case in test_cases:
expected, inputs = test_case
def mock_raw_input(message):
self.assertEqual(expected[0], message)
return inputs[1]
result = User().confirm(default=inputs[0],
raw_input=mock_raw_input)
self.assertEqual(expected[1], result)
def test_warn_if_application_is_xcode(self):
output = OutputCapture()
user = User()
output.assert_outputs(self, user._warn_if_application_is_xcode, ["TextMate"])
output.assert_outputs(self, user._warn_if_application_is_xcode, ["/Applications/TextMate.app"])
output.assert_outputs(self, user._warn_if_application_is_xcode, ["XCode"]) # case sensitive matching
xcode_warning = "Instead of using Xcode.app, consider using EDITOR=\"xed --wait\".\n"
output.assert_outputs(self, user._warn_if_application_is_xcode, ["Xcode"], expected_stdout=xcode_warning)
output.assert_outputs(self, user._warn_if_application_is_xcode, ["/Developer/Applications/Xcode.app"], expected_stdout=xcode_warning)
|
chugunovyar/factoryForBuild
|
refs/heads/master
|
env/lib/python2.7/site-packages/scipy/sparse/linalg/tests/test_interface.py
|
38
|
"""Test functions for the sparse.linalg.interface module
"""
from __future__ import division, print_function, absolute_import
from functools import partial
from itertools import product
import operator
import nose
from numpy.testing import TestCase, assert_, assert_equal, \
assert_raises
import numpy as np
import scipy.sparse as sparse
from scipy.sparse.linalg import interface
# Only test matmul operator (A @ B) when available (Python 3.5+)
TEST_MATMUL = hasattr(operator, 'matmul')
class TestLinearOperator(TestCase):
def setUp(self):
self.A = np.array([[1,2,3],
[4,5,6]])
self.B = np.array([[1,2],
[3,4],
[5,6]])
self.C = np.array([[1,2],
[3,4]])
def test_matvec(self):
def get_matvecs(A):
return [{
'shape': A.shape,
'matvec': lambda x: np.dot(A, x).reshape(A.shape[0]),
'rmatvec': lambda x: np.dot(A.T.conj(),
x).reshape(A.shape[1])
},
{
'shape': A.shape,
'matvec': lambda x: np.dot(A, x),
'rmatvec': lambda x: np.dot(A.T.conj(), x),
'matmat': lambda x: np.dot(A, x)
}]
for matvecs in get_matvecs(self.A):
A = interface.LinearOperator(**matvecs)
assert_(A.args == ())
assert_equal(A.matvec(np.array([1,2,3])), [14,32])
assert_equal(A.matvec(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A * np.array([1,2,3]), [14,32])
assert_equal(A * np.array([[1],[2],[3]]), [[14],[32]])
assert_equal(A.dot(np.array([1,2,3])), [14,32])
assert_equal(A.dot(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A.matvec(np.matrix([[1],[2],[3]])), [[14],[32]])
assert_equal(A * np.matrix([[1],[2],[3]]), [[14],[32]])
assert_equal(A.dot(np.matrix([[1],[2],[3]])), [[14],[32]])
assert_equal((2*A)*[1,1,1], [12,30])
assert_equal((2*A).rmatvec([1,1]), [10, 14, 18])
assert_equal((2*A).H.matvec([1,1]), [10, 14, 18])
assert_equal((2*A)*[[1],[1],[1]], [[12],[30]])
assert_equal((2*A).matmat([[1],[1],[1]]), [[12],[30]])
assert_equal((A*2)*[1,1,1], [12,30])
assert_equal((A*2)*[[1],[1],[1]], [[12],[30]])
assert_equal((2j*A)*[1,1,1], [12j,30j])
assert_equal((A+A)*[1,1,1], [12, 30])
assert_equal((A+A).rmatvec([1,1]), [10, 14, 18])
assert_equal((A+A).H.matvec([1,1]), [10, 14, 18])
assert_equal((A+A)*[[1],[1],[1]], [[12], [30]])
assert_equal((A+A).matmat([[1],[1],[1]]), [[12], [30]])
assert_equal((-A)*[1,1,1], [-6,-15])
assert_equal((-A)*[[1],[1],[1]], [[-6],[-15]])
assert_equal((A-A)*[1,1,1], [0,0])
assert_equal((A-A)*[[1],[1],[1]], [[0],[0]])
z = A+A
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] is A)
z = 2*A
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] == 2)
assert_(isinstance(A.matvec([1, 2, 3]), np.ndarray))
assert_(isinstance(A.matvec(np.array([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A * np.array([1,2,3]), np.ndarray))
assert_(isinstance(A * np.array([[1],[2],[3]]), np.ndarray))
assert_(isinstance(A.dot(np.array([1,2,3])), np.ndarray))
assert_(isinstance(A.dot(np.array([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A.matvec(np.matrix([[1],[2],[3]])), np.ndarray))
assert_(isinstance(A * np.matrix([[1],[2],[3]]), np.ndarray))
assert_(isinstance(A.dot(np.matrix([[1],[2],[3]])), np.ndarray))
assert_(isinstance(2*A, interface._ScaledLinearOperator))
assert_(isinstance(2j*A, interface._ScaledLinearOperator))
assert_(isinstance(A+A, interface._SumLinearOperator))
assert_(isinstance(-A, interface._ScaledLinearOperator))
assert_(isinstance(A-A, interface._SumLinearOperator))
assert_((2j*A).dtype == np.complex_)
assert_raises(ValueError, A.matvec, np.array([1,2]))
assert_raises(ValueError, A.matvec, np.array([1,2,3,4]))
assert_raises(ValueError, A.matvec, np.array([[1],[2]]))
assert_raises(ValueError, A.matvec, np.array([[1],[2],[3],[4]]))
assert_raises(ValueError, lambda: A*A)
assert_raises(ValueError, lambda: A**2)
for matvecsA, matvecsB in product(get_matvecs(self.A),
get_matvecs(self.B)):
A = interface.LinearOperator(**matvecsA)
B = interface.LinearOperator(**matvecsB)
assert_equal((A*B)*[1,1], [50,113])
assert_equal((A*B)*[[1],[1]], [[50],[113]])
assert_equal((A*B).matmat([[1],[1]]), [[50],[113]])
assert_equal((A*B).rmatvec([1,1]), [71,92])
assert_equal((A*B).H.matvec([1,1]), [71,92])
assert_(isinstance(A*B, interface._ProductLinearOperator))
assert_raises(ValueError, lambda: A+B)
assert_raises(ValueError, lambda: A**2)
z = A*B
assert_(len(z.args) == 2 and z.args[0] is A and z.args[1] is B)
for matvecsC in get_matvecs(self.C):
C = interface.LinearOperator(**matvecsC)
assert_equal((C**2)*[1,1], [17,37])
assert_equal((C**2).rmatvec([1,1]), [22,32])
assert_equal((C**2).H.matvec([1,1]), [22,32])
assert_equal((C**2).matmat([[1],[1]]), [[17],[37]])
assert_(isinstance(C**2, interface._PowerLinearOperator))
def test_matmul(self):
if not TEST_MATMUL:
raise nose.SkipTest("matmul is only tested in Python 3.5+")
D = {'shape': self.A.shape,
'matvec': lambda x: np.dot(self.A, x).reshape(self.A.shape[0]),
'rmatvec': lambda x: np.dot(self.A.T.conj(),
x).reshape(self.A.shape[1]),
'matmat': lambda x: np.dot(self.A, x)}
A = interface.LinearOperator(**D)
B = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
b = B[0]
assert_equal(operator.matmul(A, b), A * b)
assert_equal(operator.matmul(A, B), A * B)
assert_raises(ValueError, operator.matmul, A, 2)
assert_raises(ValueError, operator.matmul, 2, A)
class TestAsLinearOperator(TestCase):
def setUp(self):
self.cases = []
def make_cases(dtype):
self.cases.append(np.matrix([[1,2,3],[4,5,6]], dtype=dtype))
self.cases.append(np.array([[1,2,3],[4,5,6]], dtype=dtype))
self.cases.append(sparse.csr_matrix([[1,2,3],[4,5,6]], dtype=dtype))
# Test default implementations of _adjoint and _rmatvec, which
# refer to each other.
def mv(x, dtype):
y = np.array([1 * x[0] + 2 * x[1] + 3 * x[2],
4 * x[0] + 5 * x[1] + 6 * x[2]], dtype=dtype)
if len(x.shape) == 2:
y = y.reshape(-1, 1)
return y
def rmv(x, dtype):
return np.array([1 * x[0] + 4 * x[1],
2 * x[0] + 5 * x[1],
3 * x[0] + 6 * x[1]], dtype=dtype)
class BaseMatlike(interface.LinearOperator):
def __init__(self, dtype):
self.dtype = np.dtype(dtype)
self.shape = (2,3)
def _matvec(self, x):
return mv(x, self.dtype)
class HasRmatvec(BaseMatlike):
def _rmatvec(self,x):
return rmv(x, self.dtype)
class HasAdjoint(BaseMatlike):
def _adjoint(self):
shape = self.shape[1], self.shape[0]
matvec = partial(rmv, dtype=self.dtype)
rmatvec = partial(mv, dtype=self.dtype)
return interface.LinearOperator(matvec=matvec,
rmatvec=rmatvec,
dtype=self.dtype,
shape=shape)
self.cases.append(HasRmatvec(dtype))
self.cases.append(HasAdjoint(dtype))
make_cases('int32')
make_cases('float32')
make_cases('float64')
def test_basic(self):
for M in self.cases:
A = interface.aslinearoperator(M)
M,N = A.shape
assert_equal(A.matvec(np.array([1,2,3])), [14,32])
assert_equal(A.matvec(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(A * np.array([1,2,3]), [14,32])
assert_equal(A * np.array([[1],[2],[3]]), [[14],[32]])
assert_equal(A.rmatvec(np.array([1,2])), [9,12,15])
assert_equal(A.rmatvec(np.array([[1],[2]])), [[9],[12],[15]])
assert_equal(A.H.matvec(np.array([1,2])), [9,12,15])
assert_equal(A.H.matvec(np.array([[1],[2]])), [[9],[12],[15]])
assert_equal(
A.matmat(np.array([[1,4],[2,5],[3,6]])),
[[14,32],[32,77]])
assert_equal(A * np.array([[1,4],[2,5],[3,6]]), [[14,32],[32,77]])
if hasattr(M,'dtype'):
assert_equal(A.dtype, M.dtype)
def test_dot(self):
for M in self.cases:
A = interface.aslinearoperator(M)
M,N = A.shape
assert_equal(A.dot(np.array([1,2,3])), [14,32])
assert_equal(A.dot(np.array([[1],[2],[3]])), [[14],[32]])
assert_equal(
A.dot(np.array([[1,4],[2,5],[3,6]])),
[[14,32],[32,77]])
def test_repr():
A = interface.LinearOperator(shape=(1, 1), matvec=lambda x: 1)
repr_A = repr(A)
assert_('unspecified dtype' not in repr_A, repr_A)
def test_identity():
ident = interface.IdentityOperator((3, 3))
assert_equal(ident * [1, 2, 3], [1, 2, 3])
assert_equal(ident.dot(np.arange(9).reshape(3, 3)).ravel(), np.arange(9))
assert_raises(ValueError, ident.matvec, [1, 2, 3, 4])
def test_attributes():
A = interface.aslinearoperator(np.arange(16).reshape(4, 4))
def always_four_ones(x):
x = np.asarray(x)
assert_(x.shape == (3,) or x.shape == (3, 1))
return np.ones(4)
B = interface.LinearOperator(shape=(4, 3), matvec=always_four_ones)
for op in [A, B, A * B, A.H, A + A, B + B, A ** 4]:
assert_(hasattr(op, "dtype"))
assert_(hasattr(op, "shape"))
assert_(hasattr(op, "_matvec"))
def matvec(x):
""" Needed for test_pickle as local functions are not pickleable """
return np.zeros(3)
def test_pickle():
import pickle
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
A = interface.LinearOperator((3, 3), matvec)
s = pickle.dumps(A, protocol=protocol)
B = pickle.loads(s)
for k in A.__dict__:
assert_equal(getattr(A, k), getattr(B, k))
def test_inheritance():
class Empty(interface.LinearOperator):
pass
assert_raises(TypeError, Empty)
class Identity(interface.LinearOperator):
def __init__(self, n):
super(Identity, self).__init__(dtype=None, shape=(n, n))
def _matvec(self, x):
return x
id3 = Identity(3)
assert_equal(id3.matvec([1, 2, 3]), [1, 2, 3])
assert_raises(NotImplementedError, id3.rmatvec, [4, 5, 6])
class MatmatOnly(interface.LinearOperator):
def __init__(self, A):
super(MatmatOnly, self).__init__(A.dtype, A.shape)
self.A = A
def _matmat(self, x):
return self.A.dot(x)
mm = MatmatOnly(np.random.randn(5, 3))
assert_equal(mm.matvec(np.random.randn(3)).shape, (5,))
def test_dtypes_of_operator_sum():
# gh-6078
mat_complex = np.random.rand(2,2) + 1j * np.random.rand(2,2)
mat_real = np.random.rand(2,2)
complex_operator = interface.aslinearoperator(mat_complex)
real_operator = interface.aslinearoperator(mat_real)
sum_complex = complex_operator + complex_operator
sum_real = real_operator + real_operator
assert_equal(sum_real.dtype, np.float64)
assert_equal(sum_complex.dtype, np.complex128)
|
DataONEorg/d1_python
|
refs/heads/master
|
utilities/src/d1_util/download_all_objects.py
|
1
|
#!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Download all Science Objects in a DataONE environment.
This is an example on how to use the DataONE Client and Common libraries for Python. It
shows how to:
- Retrieve a list of all DataONE Member Nodes
- Retrieve a list of all objects of specific FormatID on each of those Member Nodes
- Retrieve and examine the System Metadata for each of the listed objects
- Based on information in the System Metadata, determine if the corresponding object
should be downloaded
- Download the corresponding object
Notes:
- This approach retrieves object lists directly from each Member Node and is mainly
suitable in special situations where a 3rd party wishes to examine the overall state
of objects in DataONE, for instance, for creating statistics or data quality reports.
- This approach uses the listObjects() Member Node API method, which has limited
filtering facilities. The example shows how to use this filtering to list objects
that are of a specific type (FormatID) and that are native to the Member Node (i.e.,
not replicas). If a more specific set of objects is desired, it is better to use
DataONE's query interface, which offers much richer filtering facilities.
- It is not possible to filter out non-public objects with listObjects(). Instead, this
script attempts to download the object's System Metadata and checks for NotAuthorized
exceptions.
- If a completely unfiltered object list is required, simply remove the formatId and
replicaStatus parameters in the listObjects() call below.
- The Member Node object list is retrieved in small sections, called pages. The objects
on each page are processed before retrieving the next page.
- The listObjects() Member Node API method may not be efficiently implemented by all
Member Nodes as it is intended primarily for use by Coordinating Nodes.
- The listObjects() method may miss objects that are created while the method is in
use.
"""
import argparse
import logging
import os
import shutil
import sys
import urllib.parse
import d1_common.const
import d1_common.env
import d1_common.types.exceptions
import d1_client.cnclient
import d1_client.mnclient_2_0
# Config
# In addition to the default production environment, DataONE maintains several
# separate environments for use when developing and testing DataONE components.
# There are no connections between the environments. For instance, certificates,
# DataONE identities and science objects are exclusive to the environment in
# which they were created. This setting controls to which environment the CN
# client connects.
# Round-robin CN endpoints
DATAONE_ROOT = d1_common.const.URL_DATAONE_ROOT # (recommended, production)
# DATAONE_ROOT = 'https://cn-dev.test.dataone.org/cn'
# DATAONE_ROOT = 'https://cn-stage.test.dataone.org/cn'
# DATAONE_ROOT = 'https://cn-sandbox.dataone.org/cn'
# DATAONE_ROOT = 'https://cn-stage.dataone.org/cn/'
# DATAONE_ROOT = 'https://cn-stage.test.dataone.org/cn'
# Only retrieve objects of this type. A complete list of valid formatIds can be
# found at https://cn.dataone.org/cn/v1/formats
LIST_OBJECTS_FORMAT_ID = "eml://ecoinformatics.org/eml-2.1.1"
# The number of objects to list each time listObjects() is called.
LIST_OBJECTS_PAGE_SIZE = 100
# The location in which to store downloaded objects.
DOWNLOAD_FOLDER = "./d1_objects"
# Don't download objects larger than this size.
MAX_FILE_SIZE_TO_DOWNLOAD = 1024 ** 2
def main():
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("--debug", action="store_true", help="Debug level logging")
parser.add_argument(
"--env",
type=str,
default="prod",
help="Environment, one of {}".format(", ".join(d1_common.env.D1_ENV_DICT)),
)
parser.add_argument(
"--cert-pub",
dest="cert_pem_path",
action="store",
help="Path to PEM formatted public key of certificate",
)
parser.add_argument(
"--cert-key",
dest="cert_key_path",
action="store",
help="Path to PEM formatted private key of certificate",
)
parser.add_argument(
"--timeout",
action="store",
default=d1_common.const.DEFAULT_HTTP_TIMEOUT,
help="Amount of time to wait for calls to complete (seconds)",
)
    args = parser.parse_args()
    logging.basicConfig()
    # Setting the root logger to level "DEBUG" makes the script very
    # verbose, so only do so when --debug is given.
    logging.getLogger("").setLevel(logging.DEBUG if args.debug else logging.INFO)
    # Create the download folder if it does not already exist.
    os.makedirs(DOWNLOAD_FOLDER, exist_ok=True)
node_list = get_node_list_from_coordinating_node()
for node in node_list.node:
if is_member_node(node):
member_node_object_downloader = MemberNodeObjectDownloader(node)
member_node_object_downloader.download_objects_from_member_node()
def get_node_list_from_coordinating_node():
cn_client = d1_client.cnclient.CoordinatingNodeClient(base_url=DATAONE_ROOT)
try:
return cn_client.listNodes()
except d1_common.types.exceptions.DataONEException:
logging.exception("listNodes() failed with exception:")
raise
def is_member_node(node):
return node.type == "mn"
# ==============================================================================
class MemberNodeObjectDownloader(object):
def __init__(self, node):
self._node = node
self._mn_client = d1_client.mnclient_2_0.MemberNodeClient_2_0(node.baseURL)
def download_objects_from_member_node(self):
logging.info("Retrieving objects for Member Node: {}".format(self._node.name))
current_start = 0
while True:
try:
object_list = self._mn_client.listObjects(
start=current_start,
count=LIST_OBJECTS_PAGE_SIZE,
formatId=LIST_OBJECTS_FORMAT_ID,
replicaStatus=False,
)
except d1_common.types.exceptions.DataONEException:
logging.exception("listObjects() failed with exception:")
raise
else:
                current_start += object_list.count
                logging.debug(
                    "Retrieved page: {}/{}".format(
                        (current_start - 1) // LIST_OBJECTS_PAGE_SIZE + 1,
                        -(-object_list.total // LIST_OBJECTS_PAGE_SIZE),
                    )
                )
            for d1_object in object_list.objectInfo:
                self._download_d1_object_if_public(d1_object)
            # Break only after the final page's objects have been processed;
            # breaking before the download loop would skip the last page.
            if current_start >= object_list.total:
                break
def _download_d1_object_if_public(self, d1_object):
pid = d1_object.identifier.value()
sys_meta = self._get_d1_object_system_metadata(pid)
if sys_meta is not None:
# The System Metadata object can be examined to determine system level
# details about the corresponding object.
if sys_meta.size < MAX_FILE_SIZE_TO_DOWNLOAD:
self.download_d1_object(pid)
def _get_d1_object_system_metadata(self, pid):
try:
return self._mn_client.getSystemMetadata(pid)
except d1_common.types.exceptions.NotAuthorized:
logging.info("Ignoring non-public object: {}".format(pid))
except d1_common.types.exceptions.DataONEException:
logging.exception("getSystemMetadata() failed with exception:")
raise
def download_d1_object(self, pid):
try:
object_stream = self._mn_client.get(pid)
except d1_common.types.exceptions.DataONEException:
logging.exception("get() failed with exception:")
raise
else:
# The PID (DataONE Persistent Identifier) can contain characters that are
# not valid for use as filenames (most commonly, slashes). A simple way to
# make a PID safe for use as a filename is to "percent-encode" it.
pid_filename = urllib.parse.quote(pid, safe="")
with open(os.path.join(DOWNLOAD_FOLDER, pid_filename), "wb") as f:
shutil.copyfileobj(object_stream, f)
if __name__ == "__main__":
sys.exit(main())
|
bespike/litecoin
|
refs/heads/0.18
|
test/functional/rpc_users.py
|
4
|
#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiple RPC users."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
get_datadir_path,
str_to_b64str,
)
import os
import http.client
import urllib.parse
import subprocess
from random import SystemRandom
import string
import configparser
import sys
class HTTPBasicsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
def setup_chain(self):
super().setup_chain()
        #Append rpcauth to litecoin.conf before initialization
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
rpcuser = "rpcuser=rpcuser💻"
rpcpassword = "rpcpassword=rpcpassword🔑"
self.user = ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(10))
config = configparser.ConfigParser()
config.read_file(open(self.options.configfile))
gen_rpcauth = config['environment']['RPCAUTH']
p = subprocess.Popen([sys.executable, gen_rpcauth, self.user], stdout=subprocess.PIPE, universal_newlines=True)
lines = p.stdout.read().splitlines()
rpcauth3 = lines[1]
self.password = lines[3]
with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "litecoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
f.write(rpcauth3+"\n")
with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "litecoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcuser+"\n")
f.write(rpcpassword+"\n")
def run_test(self):
##################################################
# Check correctness of the rpcauth config option #
##################################################
url = urllib.parse.urlparse(self.nodes[0].url)
#Old authpair
authpair = url.username + ':' + url.password
#New authpair generated via share/rpcauth tool
password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
#Second authpair with different username
password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
authpairnew = "rt:"+password
self.log.info('Correct...')
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Use new authpair to confirm both work
self.log.info('Correct...')
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Wrong login name with rt's password
self.log.info('Wrong...')
authpairnew = "rtwrong:"+password
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Wrong password for rt
self.log.info('Wrong...')
authpairnew = "rt:"+password+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Correct for rt2
self.log.info('Correct...')
authpairnew = "rt2:"+password2
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Wrong password for rt2
self.log.info('Wrong...')
authpairnew = "rt2:"+password2+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Correct for randomly generated user
self.log.info('Correct...')
authpairnew = self.user+":"+self.password
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
#Wrong password for randomly generated user
self.log.info('Wrong...')
authpairnew = self.user+":"+self.password+"Wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
###############################################################
# Check correctness of the rpcuser/rpcpassword config options #
###############################################################
url = urllib.parse.urlparse(self.nodes[1].url)
# rpcuser and rpcpassword authpair
self.log.info('Correct...')
rpcuserauthpair = "rpcuser💻:rpcpassword🔑"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
        #Wrong login name with rpcuser's password
        self.log.info('Wrong...')
rpcuserauthpair = "rpcuserwrong:rpcpassword"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
#Wrong password for rpcuser
self.log.info('Wrong...')
rpcuserauthpair = "rpcuser:rpcpasswordwrong"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status, 401)
conn.close()
if __name__ == '__main__':
    HTTPBasicsTest().main()
|
denovator/myfriki
|
refs/heads/master
|
lib/werkzeug/werkzeug/security.py
|
146
|
# -*- coding: utf-8 -*-
"""
werkzeug.security
~~~~~~~~~~~~~~~~~
Security related helpers such as secure password hashing tools.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import hmac
import hashlib
import posixpath
import codecs
from struct import Struct
from random import SystemRandom
from operator import xor
from itertools import starmap
from werkzeug._compat import range_type, PY2, text_type, izip, to_bytes, \
string_types, to_native
SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
DEFAULT_PBKDF2_ITERATIONS = 1000
_pack_int = Struct('>I').pack
_builtin_safe_str_cmp = getattr(hmac, 'compare_digest', None)
_sys_rng = SystemRandom()
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
if sep not in (None, '/'))
def _find_hashlib_algorithms():
algos = getattr(hashlib, 'algorithms', None)
if algos is None:
algos = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
rv = {}
for algo in algos:
func = getattr(hashlib, algo, None)
if func is not None:
rv[algo] = func
return rv
_hash_funcs = _find_hashlib_algorithms()
def pbkdf2_hex(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS,
keylen=None, hashfunc=None):
"""Like :func:`pbkdf2_bin` but returns a hex encoded string.
.. versionadded:: 0.9
:param data: the data to derive.
:param salt: the salt for the derivation.
:param iterations: the number of iterations.
:param keylen: the length of the resulting key. If not provided
the digest size will be used.
:param hashfunc: the hash function to use. This can either be the
string name of a known hash function or a function
from the hashlib module. Defaults to sha1.
"""
rv = pbkdf2_bin(data, salt, iterations, keylen, hashfunc)
return to_native(codecs.encode(rv, 'hex_codec'))
def pbkdf2_bin(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS,
keylen=None, hashfunc=None):
"""Returns a binary digest for the PBKDF2 hash algorithm of `data`
with the given `salt`. It iterates `iterations` time and produces a
key of `keylen` bytes. By default SHA-1 is used as hash function,
a different hashlib `hashfunc` can be provided.
.. versionadded:: 0.9
:param data: the data to derive.
:param salt: the salt for the derivation.
:param iterations: the number of iterations.
:param keylen: the length of the resulting key. If not provided
the digest size will be used.
:param hashfunc: the hash function to use. This can either be the
string name of a known hash function or a function
from the hashlib module. Defaults to sha1.
"""
if isinstance(hashfunc, string_types):
hashfunc = _hash_funcs[hashfunc]
elif not hashfunc:
hashfunc = hashlib.sha1
salt = to_bytes(salt)
mac = hmac.HMAC(to_bytes(data), None, hashfunc)
if not keylen:
keylen = mac.digest_size
def _pseudorandom(x, mac=mac):
h = mac.copy()
h.update(x)
return bytearray(h.digest())
buf = bytearray()
for block in range_type(1, -(-keylen // mac.digest_size) + 1):
rv = u = _pseudorandom(salt + _pack_int(block))
for i in range_type(iterations - 1):
u = _pseudorandom(bytes(u))
rv = bytearray(starmap(xor, izip(rv, u)))
buf.extend(rv)
return bytes(buf[:keylen])
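# Illustrative sketch (not part of the module): pbkdf2_hex() reproduces the
# RFC 6070 PBKDF2-HMAC-SHA1 test vector (1 iteration, 20-byte key).
#
#     pbkdf2_hex('password', 'salt', iterations=1, keylen=20)
#     # -> '0c60c80f961f0e71f3a9b524af6012062fe037a6'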
def safe_str_cmp(a, b):
"""This function compares strings in somewhat constant time. This
requires that the length of at least one string is known in advance.
Returns `True` if the two strings are equal or `False` if they are not.
.. versionadded:: 0.7
"""
if isinstance(a, text_type):
a = a.encode('utf-8')
if isinstance(b, text_type):
b = b.encode('utf-8')
if _builtin_safe_str_cmp is not None:
return _builtin_safe_str_cmp(a, b)
if len(a) != len(b):
return False
rv = 0
if PY2:
for x, y in izip(a, b):
rv |= ord(x) ^ ord(y)
else:
for x, y in izip(a, b):
rv |= x ^ y
return rv == 0
def gen_salt(length):
"""Generate a random string of SALT_CHARS with specified ``length``."""
if length <= 0:
raise ValueError('requested salt of length <= 0')
return ''.join(_sys_rng.choice(SALT_CHARS) for _ in range_type(length))
def _hash_internal(method, salt, password):
"""Internal password hash helper. Supports plaintext without salt,
unsalted and salted passwords. In case salted passwords are used
hmac is used.
"""
if method == 'plain':
return password, method
if isinstance(password, text_type):
password = password.encode('utf-8')
if method.startswith('pbkdf2:'):
args = method[7:].split(':')
if len(args) not in (1, 2):
raise ValueError('Invalid number of arguments for PBKDF2')
method = args.pop(0)
iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS
is_pbkdf2 = True
actual_method = 'pbkdf2:%s:%d' % (method, iterations)
else:
is_pbkdf2 = False
actual_method = method
hash_func = _hash_funcs.get(method)
if hash_func is None:
raise TypeError('invalid method %r' % method)
if is_pbkdf2:
if not salt:
raise ValueError('Salt is required for PBKDF2')
rv = pbkdf2_hex(password, salt, iterations,
hashfunc=hash_func)
elif salt:
if isinstance(salt, text_type):
salt = salt.encode('utf-8')
rv = hmac.HMAC(salt, password, hash_func).hexdigest()
else:
h = hash_func()
h.update(password)
rv = h.hexdigest()
return rv, actual_method
def generate_password_hash(password, method='pbkdf2:sha1', salt_length=8):
"""Hash a password with the given method and salt with with a string of
the given length. The format of the string returned includes the method
that was used so that :func:`check_password_hash` can check the hash.
The format for the hashed string looks like this::
method$salt$hash
This method can **not** generate unsalted passwords but it is possible
to set the method to plain to enforce plaintext passwords. If a salt
is used, hmac is used internally to salt the password.
If PBKDF2 is wanted it can be enabled by setting the method to
``pbkdf2:method:iterations`` where iterations is optional::
pbkdf2:sha1:2000$salt$hash
pbkdf2:sha1$salt$hash
:param password: the password to hash
:param method: the hash method to use (one that hashlib supports), can
                   optionally be in the format ``pbkdf2:<method>[:iterations]``
to enable PBKDF2.
:param salt_length: the length of the salt in letters
"""
salt = method != 'plain' and gen_salt(salt_length) or ''
h, actual_method = _hash_internal(method, salt, password)
return '%s$%s$%s' % (actual_method, salt, h)
def check_password_hash(pwhash, password):
"""check a password against a given salted and hashed password value.
In order to support unsalted legacy passwords this method supports
plain text passwords, md5 and sha1 hashes (both salted and unsalted).
Returns `True` if the password matched, `False` otherwise.
:param pwhash: a hashed string like returned by
:func:`generate_password_hash`
:param password: the plaintext password to compare against the hash
"""
if pwhash.count('$') < 2:
return False
method, salt, hashval = pwhash.split('$', 2)
return safe_str_cmp(_hash_internal(method, salt, password)[0], hashval)
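# Illustrative usage sketch (not part of the module): the returned string
# embeds the method and salt, so verification needs only the stored hash.
#
#     pwhash = generate_password_hash('secret', method='pbkdf2:sha1:2000')
#     pwhash.startswith('pbkdf2:sha1:2000$')      # -> True
#     check_password_hash(pwhash, 'secret')       # -> True
#     check_password_hash(pwhash, 'not-secret')   # -> False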
def safe_join(directory, filename):
"""Safely join `directory` and `filename`. If this cannot be done,
this function returns ``None``.
:param directory: the base directory.
:param filename: the untrusted filename relative to that directory.
"""
filename = posixpath.normpath(filename)
for sep in _os_alt_seps:
if sep in filename:
return None
if os.path.isabs(filename) or filename.startswith('../'):
return None
return os.path.join(directory, filename)
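# Illustrative sketch (not part of the module): safe_join() rejects filenames
# that would escape the base directory.
#
#     safe_join('/var/www', 'static/app.js')    # -> '/var/www/static/app.js'
#     safe_join('/var/www', '../etc/passwd')    # -> None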
|
Kongsea/tensorflow
|
refs/heads/master
|
tensorflow/contrib/learn/python/learn/utils/gc.py
|
45
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""System for specifying garbage collection (GC) of path based data.
This framework allows for GC of data specified by path names, for example files
on disk. gc.Path objects each represent a single item stored at a path and may
be a base directory,
/tmp/exports/0/...
/tmp/exports/1/...
...
or a fully qualified file,
/tmp/train-1.ckpt
/tmp/train-2.ckpt
...
A gc filter function takes and returns a list of gc.Path items. Filter
functions are responsible for selecting Path items for preservation or deletion.
Note that functions should always return a sorted list.
For example,
base_dir = "/tmp"
# Create the directories.
for e in xrange(10):
os.mkdir("%s/%d" % (base_dir, e), 0o755)
# Create a simple parser that pulls the export_version from the directory.
path_regex = "^" + re.escape(base_dir) + "/(\\d+)$"
def parser(path):
match = re.match(path_regex, path.path)
if not match:
return None
return path._replace(export_version=int(match.group(1)))
  all_paths = gc.get_paths("/tmp", parser)  # contains all ten Paths
  every_fifth = gc.mod_export_version(5)
  print(every_fifth(all_paths))  # shows ["/tmp/0", "/tmp/5"]
largest_three = gc.largest_export_versions(3)
print(largest_three(all_paths)) # shows ["/tmp/7", "/tmp/8", "/tmp/9"]
both = gc.union(every_fifth, largest_three)
print(both(all_paths)) # shows ["/tmp/0", "/tmp/5",
# "/tmp/7", "/tmp/8", "/tmp/9"]
# Delete everything not in 'both'.
to_delete = gc.negation(both)
for p in to_delete(all_paths):
gfile.DeleteRecursively(p.path) # deletes: "/tmp/1", "/tmp/2",
# "/tmp/3", "/tmp/4", "/tmp/6",
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import heapq
import math
import os
from tensorflow.python.platform import gfile
from tensorflow.python.util import compat
Path = collections.namedtuple('Path', 'path export_version')
def largest_export_versions(n):
"""Creates a filter that keeps the largest n export versions.
Args:
n: number of versions to keep.
Returns:
A filter function that keeps the n largest paths.
"""
def keep(paths):
heap = []
for idx, path in enumerate(paths):
if path.export_version is not None:
heapq.heappush(heap, (path.export_version, idx))
keepers = [paths[i] for _, i in heapq.nlargest(n, heap)]
return sorted(keepers)
return keep
def one_of_every_n_export_versions(n):
"""Creates a filter that keeps one of every n export versions.
Args:
n: interval size.
Returns:
A filter function that keeps exactly one path from each interval
[0, n], (n, 2n], (2n, 3n], etc... If more than one path exists in an
interval the largest is kept.
"""
def keep(paths):
"""A filter function that keeps exactly one out of every n paths."""
keeper_map = {} # map from interval to largest path seen in that interval
for p in paths:
if p.export_version is None:
# Skip missing export_versions.
continue
      # Find the interval (with a special case to map export_version = 0 to
      # interval 0).
interval = math.floor(
(p.export_version - 1) / n) if p.export_version else 0
existing = keeper_map.get(interval, None)
if (not existing) or (existing.export_version < p.export_version):
keeper_map[interval] = p
return sorted(keeper_map.values())
return keep
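# Illustrative sketch (not part of the module): with n=2 and export versions
# [0, 1, 2, 3, 4], the intervals are [0, 2] and (2, 4], so the filter keeps
# the paths for versions 2 and 4 (the largest in each interval).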
def mod_export_version(n):
"""Creates a filter that keeps every export that is a multiple of n.
Args:
n: step size.
Returns:
A filter function that keeps paths where export_version % n == 0.
"""
def keep(paths):
keepers = []
for p in paths:
if p.export_version % n == 0:
keepers.append(p)
return sorted(keepers)
return keep
def union(lf, rf):
"""Creates a filter that keeps the union of two filters.
Args:
lf: first filter
rf: second filter
Returns:
    A filter function that keeps the union of the paths kept by both filters.
"""
def keep(paths):
l = set(lf(paths))
r = set(rf(paths))
return sorted(list(l|r))
return keep
def negation(f):
"""Negate a filter.
Args:
f: filter function to invert
Returns:
A filter function that returns the negation of f.
"""
def keep(paths):
l = set(paths)
r = set(f(paths))
return sorted(list(l-r))
return keep
def get_paths(base_dir, parser):
"""Gets a list of Paths in a given directory.
Args:
base_dir: directory.
parser: a function which gets the raw Path and can augment it with
information such as the export_version, or ignore the path by returning
None. An example parser may extract the export version from a path
      such as "/tmp/exports/100" and another may extract from a full file
name such as "/tmp/checkpoint-99.out".
Returns:
A list of Paths contained in the base directory with the parsing function
applied.
By default the following fields are populated,
- Path.path
The parsing function is responsible for populating,
- Path.export_version
"""
raw_paths = gfile.ListDirectory(base_dir)
paths = []
for r in raw_paths:
p = parser(Path(os.path.join(compat.as_str_any(base_dir),
compat.as_str_any(r)),
None))
if p:
paths.append(p)
return sorted(paths)
|
rahul67/hue
|
refs/heads/master
|
apps/oozie/src/oozie/migrations/0003_auto__add_sqoop.py
|
40
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Sqoop'
db.create_table('oozie_sqoop', (
('files', self.gf('django.db.models.fields.CharField')(default='[]', max_length=512)),
('job_xml', self.gf('django.db.models.fields.CharField')(default='', max_length=512, blank=True)),
('job_properties', self.gf('django.db.models.fields.TextField')(default='[{"name":"oozie.use.system.libpath","value":"true"},{"name":"oozie.hive.defaults","value":"${hive.default.xml}"}]')),
('params', self.gf('django.db.models.fields.TextField')(default='[]')),
('archives', self.gf('django.db.models.fields.CharField')(default='[]', max_length=512)),
('node_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['oozie.Node'], unique=True, primary_key=True)),
('prepares', self.gf('django.db.models.fields.TextField')(default='[]')),
('script_path', self.gf('django.db.models.fields.CharField')(default='', max_length=256, blank=True)),
))
db.send_create_signal('oozie', ['Sqoop'])
def backwards(self, orm):
# Deleting model 'Sqoop'
db.delete_table('oozie_sqoop')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'oozie.coordinator': {
'Meta': {'object_name': 'Coordinator', '_ormbases': ['oozie.Job']},
'concurrency': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 9, 22, 16, 54, 47, 194166)'}),
'execution': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'frequency_number': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'frequency_unit': ('django.db.models.fields.CharField', [], {'default': "'days'", 'max_length': '20'}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 9, 19, 16, 54, 47, 194133)'}),
'throttle': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timeout': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "'America/Los_Angeles'", 'max_length': '24'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']", 'null': 'True'})
},
'oozie.datainput': {
'Meta': {'object_name': 'DataInput'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'dataset': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Dataset']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'oozie.dataoutput': {
'Meta': {'object_name': 'DataOutput'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'dataset': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Dataset']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'oozie.dataset': {
'Meta': {'object_name': 'Dataset'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'done_flag': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'blank': 'True'}),
'frequency_number': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'frequency_unit': ('django.db.models.fields.CharField', [], {'default': "'days'", 'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 9, 19, 16, 54, 47, 194766)'}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "'America/Los_Angeles'", 'max_length': '24'}),
'uri': ('django.db.models.fields.CharField', [], {'default': "'/data/${YEAR}${MONTH}${DAY}'", 'max_length': '1024'})
},
'oozie.end': {
'Meta': {'object_name': 'End'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.fork': {
'Meta': {'object_name': 'Fork'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.history': {
'Meta': {'object_name': 'History'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Job']"}),
'oozie_job_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'properties': ('django.db.models.fields.TextField', [], {}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'submitter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'oozie.hive': {
'Meta': {'object_name': 'Hive'},
'archives': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'files': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"},{"name":"oozie.hive.defaults","value":"${hive.default.xml}"}]\''}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'oozie.java': {
'Meta': {'object_name': 'Java'},
'archives': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'args': ('django.db.models.fields.CharField', [], {'max_length': '4096', 'blank': 'True'}),
'files': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'jar_path': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'java_opts': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'main_class': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.job': {
'Meta': {'object_name': 'Job'},
'deployment_dir': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_shared': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'parameters': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'schema_version': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'oozie.join': {
'Meta': {'object_name': 'Join'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.kill': {
'Meta': {'object_name': 'Kill'},
'message': ('django.db.models.fields.CharField', [], {'default': "'Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]'", 'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.link': {
'Meta': {'object_name': 'Link'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_node'", 'to': "orm['oozie.Node']"}),
'comment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_node'", 'to': "orm['oozie.Node']"})
},
'oozie.mapreduce': {
'Meta': {'object_name': 'Mapreduce'},
'archives': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'files': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'jar_path': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True'}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.node': {
'Meta': {'object_name': 'Node'},
'children': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'parents'", 'symmetrical': 'False', 'through': "orm['oozie.Link']", 'to': "orm['oozie.Node']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'node_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']"})
},
'oozie.pig': {
'Meta': {'object_name': 'Pig'},
'archives': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'files': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"}]\''}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'oozie.sqoop': {
'Meta': {'object_name': 'Sqoop'},
'archives': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'files': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"},{"name":"oozie.hive.defaults","value":"${hive.default.xml}"}]\''}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '256', 'blank': 'True'})
},
'oozie.start': {
'Meta': {'object_name': 'Start'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True'})
},
'oozie.streaming': {
'Meta': {'object_name': 'Streaming'},
'archives': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'files': ('django.db.models.fields.CharField', [], {'default': "'[]'", 'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"}]\''}),
'mapper': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'reducer': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'oozie.workflow': {
'Meta': {'object_name': 'Workflow', '_ormbases': ['oozie.Job']},
'end': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'end_workflow'", 'null': 'True', 'to': "orm['oozie.End']"}),
'is_single': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'start': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'start_workflow'", 'null': 'True', 'to': "orm['oozie.Start']"})
}
}
complete_apps = ['oozie']
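# With South installed, this migration would typically be applied with the
# standard management command (shown here as an assumption about the
# deployment, not part of the migration itself):
#
#   python manage.py migrate oozie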
|
charmander/livestreamer
|
refs/heads/develop
|
src/livestreamer/plugins/mips.py
|
34
|
import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.plugin.api.utils import parse_query
from livestreamer.stream import RTMPStream
BALANCER_URL = "http://www.mips.tv:1935/loadbalancer"
PLAYER_URL = "http://mips.tv/embedplayer/{0}/1/500/400"
SWF_URL = "http://mips.tv/content/scripts/eplayer.swf"
_url_re = re.compile("http(s)?://(\w+.)?mips.tv/(?P<channel>[^/&?]+)")
_flashvars_re = re.compile("'FlashVars', '([^']+)'")
_rtmp_re = re.compile("redirect=(.+)")
_schema = validate.Schema(
validate.transform(_flashvars_re.search),
validate.any(
None,
validate.all(
validate.get(1),
validate.transform(parse_query),
{
"id": validate.transform(int),
validate.optional("s"): validate.text
}
)
)
)
_rtmp_schema = validate.Schema(
validate.transform(_rtmp_re.search),
validate.get(1),
)
class Mips(Plugin):
@classmethod
    def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
match = _url_re.match(self.url)
channel = match.group("channel")
headers = {"Referer": self.url}
url = PLAYER_URL.format(channel)
res = http.get(url, headers=headers, schema=_schema)
if not res or "s" not in res:
return
streams = {}
server = http.get(BALANCER_URL, headers=headers, schema=_rtmp_schema)
playpath = "{0}?{1}".format(res["s"], res["id"])
streams["live"] = RTMPStream(self.session, {
"rtmp": "rtmp://{0}/live/{1}".format(server, playpath),
"pageUrl": self.url,
"swfVfy": SWF_URL,
"conn": "S:OK",
"live": True
})
return streams
__plugin__ = Mips
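# Minimal usage sketch via livestreamer's session API (the channel name
# below is hypothetical):
#
# from livestreamer import Livestreamer
# session = Livestreamer()
# streams = session.streams("http://mips.tv/somechannel")
# if "live" in streams:
#     fd = streams["live"].open()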
|
t3dev/odoo
|
refs/heads/master
|
addons/payment/controllers/portal.py
|
1
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import http, _
from odoo.http import request
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT
from datetime import datetime, timedelta
_logger = logging.getLogger(__name__)
class PaymentProcessing(http.Controller):
@staticmethod
def remove_payment_transaction(transactions):
tx_ids_list = request.session.get("__payment_tx_ids__", [])
if transactions:
for tx in transactions:
if tx.id in tx_ids_list:
tx_ids_list.remove(tx.id)
else:
return False
request.session["__payment_tx_ids__"] = tx_ids_list
return True
@staticmethod
def add_payment_transaction(transactions):
if not transactions:
return False
tx_ids_list = set(request.session.get("__payment_tx_ids__", [])) | set(transactions.ids)
request.session["__payment_tx_ids__"] = tx_ids_list
return True
@staticmethod
def get_payment_transaction_ids():
        # return the ids and not the recordset, since we might need to
        # sudo the browse to access all the records
        # I prefer to let the controller choose when to access payment.transaction using sudo
return request.session.get("__payment_tx_ids__", [])
@http.route(['/payment/process'], type="http", auth="public", website=True)
def payment_status_page(self, **kwargs):
        # When the customer is redirected to this website page,
        # we retrieve the payment transaction list from his session
tx_ids_list = self.get_payment_transaction_ids()
payment_transaction_ids = request.env['payment.transaction'].sudo().browse(tx_ids_list).exists()
render_ctx = {
'payment_tx_ids': payment_transaction_ids.ids,
}
return request.render("payment.payment_process_page", render_ctx)
@http.route(['/payment/process/poll'], type="json", auth="public")
def payment_status_poll(self):
# retrieve the transactions
tx_ids_list = self.get_payment_transaction_ids()
payment_transaction_ids = request.env['payment.transaction'].sudo().search([
('id', 'in', list(tx_ids_list)),
('date', '>=', (datetime.now() - timedelta(days=1)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
])
if not payment_transaction_ids:
return {
'success': False,
'error': 'no_tx_found',
}
processed_tx = payment_transaction_ids.filtered('is_processed')
self.remove_payment_transaction(processed_tx)
        # create the returned dictionary
result = {
'success': True,
'transactions': [],
}
        # populate the returned dictionary with the transactions data
for tx in payment_transaction_ids:
message_to_display = tx.acquirer_id[tx.state + '_msg'] if tx.state in ['done', 'pending', 'cancel', 'error'] else None
result['transactions'].append({
'reference': tx.reference,
'state': tx.state,
'return_url': tx.return_url,
'is_processed': tx.is_processed,
'state_message': tx.state_message,
'message_to_display': message_to_display,
'amount': tx.amount,
'currency': tx.currency_id.name,
'acquirer_provider': tx.acquirer_id.provider,
})
tx_to_process = payment_transaction_ids.filtered(lambda x: x.state == 'done' and x.is_processed is False)
try:
tx_to_process._post_process_after_done()
except Exception as e:
request.env.cr.rollback()
result['success'] = False
result['error'] = str(e)
_logger.error("Error while processing transaction(s) %s, exception \"%s\"", tx_to_process.ids, str(e))
return result
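    # Shape of the poll response assembled above (a sketch; the field values
    # are illustrative, not taken from a real transaction):
    #
    # {'success': True,
    #  'transactions': [{'reference': 'SO001-1', 'state': 'done',
    #                    'return_url': '/website_payment/confirm?tx_id=42',
    #                    'is_processed': False, 'state_message': '',
    #                    'message_to_display': 'Your payment has been processed.',
    #                    'amount': 100.0, 'currency': 'EUR',
    #                    'acquirer_provider': 'transfer'}]}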
class WebsitePayment(http.Controller):
@http.route(['/my/payment_method'], type='http', auth="user", website=True)
def payment_method(self, **kwargs):
acquirers = list(request.env['payment.acquirer'].search([
('website_published', '=', True), ('registration_view_template_id', '!=', False),
('payment_flow', '=', 's2s'), ('company_id', '=', request.env.user.company_id.id)
]))
partner = request.env.user.partner_id
payment_tokens = partner.payment_token_ids
payment_tokens |= partner.commercial_partner_id.sudo().payment_token_ids
return_url = request.params.get('redirect', '/my/payment_method')
values = {
'pms': payment_tokens,
'acquirers': acquirers,
'error_message': [kwargs['error']] if kwargs.get('error') else False,
'return_url': return_url,
'bootstrap_formatting': True,
'partner_id': partner.id
}
return request.render("payment.pay_methods", values)
@http.route(['/website_payment/pay'], type='http', auth='public', website=True)
def pay(self, reference='', order_id=None, amount=False, currency_id=None, acquirer_id=None, **kw):
env = request.env
user = env.user.sudo()
# Default values
values = {
'amount': 0.0,
'currency': user.company_id.currency_id,
}
# Check sale order
if order_id:
try:
order_id = int(order_id)
order = env['sale.order'].browse(order_id)
values.update({
'currency': order.currency_id,
'amount': order.amount_total,
'order_id': order_id
})
except:
order_id = None
# Check currency
if currency_id:
try:
currency_id = int(currency_id)
values['currency'] = env['res.currency'].browse(currency_id)
except:
pass
# Check amount
if amount:
try:
amount = float(amount)
values['amount'] = amount
except:
pass
# Check reference
reference_values = order_id and {'sale_order_ids': [(4, order_id)]} or {}
values['reference'] = env['payment.transaction']._compute_reference(values=reference_values, prefix=reference)
# Check acquirer
acquirers = None
if acquirer_id:
acquirers = env['payment.acquirer'].browse(int(acquirer_id))
if not acquirers:
acquirers = env['payment.acquirer'].search([('website_published', '=', True), ('company_id', '=', user.company_id.id)])
# Check partner
partner_id = user.partner_id.id if not user._is_public() else False
values.update({
'partner_id': partner_id,
'bootstrap_formatting': True,
'error_msg': kw.get('error_msg')
})
values['acquirers'] = [acq for acq in acquirers if acq.payment_flow in ['form', 's2s']]
values['pms'] = request.env['payment.token'].search([('acquirer_id', 'in', acquirers.filtered(lambda x: x.payment_flow == 's2s').ids)])
return request.render('payment.pay', values)
@http.route(['/website_payment/transaction/<string:reference>/<string:amount>/<string:currency_id>',
'/website_payment/transaction/v2/<string:amount>/<string:currency_id>/<path:reference>',], type='json', auth='public')
def transaction(self, acquirer_id, reference, amount, currency_id, **kwargs):
partner_id = request.env.user.partner_id.id if not request.env.user._is_public() else False
acquirer = request.env['payment.acquirer'].browse(acquirer_id)
order_id = kwargs.get('order_id')
reference_values = order_id and {'sale_order_ids': [(4, order_id)]} or {}
reference = request.env['payment.transaction']._compute_reference(values=reference_values, prefix=reference)
values = {
'acquirer_id': int(acquirer_id),
'reference': reference,
'amount': float(amount),
'currency_id': int(currency_id),
'partner_id': partner_id,
'type': 'form_save' if acquirer.save_token != 'none' and partner_id else 'form',
}
if order_id:
values['sale_order_ids'] = [(6, 0, [order_id])]
reference_values = order_id and {'sale_order_ids': [(4, order_id)]} or {}
reference_values.update(acquirer_id=int(acquirer_id))
values['reference'] = request.env['payment.transaction']._compute_reference(values=reference_values, prefix=reference)
tx = request.env['payment.transaction'].sudo().with_context(lang=None).create(values)
tx.return_url = '/website_payment/confirm?tx_id=%d' % tx.id
PaymentProcessing.add_payment_transaction(tx)
render_values = {
'partner_id': partner_id,
}
return acquirer.sudo().render(tx.reference, float(amount), int(currency_id), values=render_values)
@http.route(['/website_payment/token/<string:reference>/<string:amount>/<string:currency_id>',
'/website_payment/token/v2/<string:amount>/<string:currency_id>/<path:reference>'], type='http', auth='public', website=True)
def payment_token(self, pm_id, reference, amount, currency_id, return_url=None, **kwargs):
token = request.env['payment.token'].browse(int(pm_id))
order_id = kwargs.get('order_id')
if not token:
return request.redirect('/website_payment/pay?error_msg=%s' % _('Cannot setup the payment.'))
partner_id = request.env.user.partner_id.id if not request.env.user._is_public() else False
values = {
'acquirer_id': token.acquirer_id.id,
'reference': reference,
'amount': float(amount),
'currency_id': int(currency_id),
'partner_id': partner_id,
'payment_token_id': pm_id,
'type': 'form_save' if token.acquirer_id.save_token != 'none' and partner_id else 'form',
'return_url': return_url,
}
if order_id:
values['sale_order_ids'] = [(6, 0, [order_id])]
tx = request.env['payment.transaction'].sudo().with_context(lang=None).create(values)
PaymentProcessing.add_payment_transaction(tx)
try:
res = tx.s2s_do_transaction()
if tx.state == 'done':
tx.return_url = return_url or '/website_payment/confirm?tx_id=%d' % tx.id
valid_state = 'authorized' if tx.acquirer_id.capture_manually else 'done'
if not res or tx.state != valid_state:
tx.return_url = '/website_payment/pay?error_msg=%s' % _('Payment transaction failed.')
return request.redirect('/payment/process')
except Exception as e:
return request.redirect('/payment/process')
@http.route(['/website_payment/confirm'], type='http', auth='public', website=True)
def confirm(self, **kw):
tx_id = int(kw.get('tx_id', 0))
if tx_id:
tx = request.env['payment.transaction'].browse(tx_id)
if tx.state == 'done':
status = 'success'
message = tx.acquirer_id.done_msg
elif tx.state == 'pending':
status = 'warning'
message = tx.acquirer_id.pending_msg
else:
status = 'danger'
message = tx.acquirer_id.error_msg
PaymentProcessing.remove_payment_transaction(tx)
return request.render('payment.confirm', {'tx': tx, 'status': status, 'message': message})
else:
return request.redirect('/my/home')
|
Midnighter/Everyday-Utilities
|
refs/heads/master
|
meb/utils/threaded.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
=================
Threading Classes
=================
:Author:
Moritz Emanuel Beber
:Date:
2011-02-26
:Copyright:
Copyright(c) 2011 Jacobs University of Bremen. All rights reserved.
:File:
threaded.py
Notes
-----
Most classes (unless they inherit from old-style classes) are new-style classes.
Attributes and methods not intended to be accessed directly by the user are
preceded by a single underscore '_', but they can be used if the user knows
what he is doing. Those preceded by two underscores '__' should under no
circumstances be touched.
"""
import os
import threading
import logging
import paramiko
import socket
import math
from Queue import Queue
from .errors import NetworkError
class ThreadPoolWorker(threading.Thread):
"""
Worker thread that operates on items from its queue.
"""
def __init__(self, queue, exception_queue=None):
"""
"""
threading.Thread.__init__(self)
self._queue = queue
self._exception_queue = exception_queue
self.daemon = True
def run(self):
"""
"""
while True:
(perform, args, kw_args) = self._queue.get()
try:
perform(*args, **kw_args)
except StandardError as err:
if self._exception_queue:
self._exception_queue.put((err, perform, args, kw_args))
finally:
self._queue.task_done()
class ThreadPool(object):
"""
"""
def __init__(self, num_threads, retry=False):
"""
"""
object.__init__(self)
self.queue = Queue()
if retry:
self.exception_queue = Queue()
else:
self.exception_queue = None
for i in xrange(num_threads):
w = ThreadPoolWorker(self.queue, self.exception_queue)
w.start()
def put(self, perform, *args, **kw_args):
"""
"""
self.queue.put((perform, args, kw_args))
def join(self):
"""
"""
self.queue.join()
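    # Minimal usage sketch for the pool above (the worker function and its
    # arguments are hypothetical):
    #
    # pool = ThreadPool(num_threads=4)
    # for url in urls:
    #     pool.put(download, url, timeout=30)
    # pool.join()  # block until every queued item has been processed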
class RemoteSetup(object):
"""
docstring for RemoteSetup
"""
def __init__(self, host, options, *args, **kwargs):
"""
docstring
"""
object.__init__(self)
self._host = str(host)
self.name = "%s@%s" % (self.__class__.__name__, self._host)
self.logger = logging.getLogger(self.name)
self._child_name = "%s.SSHClient" % self.name
self._child_logger = logging.getLogger(self._child_name)
self._child_logger.propagate = 0
self._options = options
self._client = None
self._n_cpus = None
self._cpu_usage = None
self._io_lock = threading.Lock()
def __del__(self):
"""
docstring
"""
if self._client:
self._client.close()
def close(self):
if self._client:
self._client.close()
def make_ssh_connection(self):
"""
docstring
"""
# create the communication instance
self.logger.debug("Creating SSHClient instance")
self._client = paramiko.SSHClient()
# set logging for it
self.logger.debug("Setting log channel")
self._client.set_log_channel(self._child_name)
self.logger.debug("Setting missing host key policies")
if self._options.auto_add:
self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
else:
self._client.set_missing_host_key_policy(paramiko.WarningPolicy())
self.logger.debug("Loading known host keys")
self._io_lock.acquire()
try:
self._client.load_host_keys(
os.path.expanduser("~/.ssh/known_hosts"))
except IOError as err:
self.logger.exception(str(err))
# how to proceed when loading of host keys fails?
# right now making the connection probably still fails so all is well
finally:
self._io_lock.release()
self.logger.debug("Making connection")
try:
self._client.connect(hostname=self._host, port=self._options.ssh_port,
username=self._options.username, password=self._options.password)
except paramiko.BadHostKeyException:
raise NetworkError("Bad Host Key")
except paramiko.AuthenticationException:
raise NetworkError("Authentication Error")
except paramiko.SSHException:
raise NetworkError("Connection Error")
except socket.error:
raise NetworkError("Socket Error")
else:
self.logger.info("Connection established and authenticated")
self._io_lock.acquire()
self._client.save_host_keys(os.path.expanduser("~/.ssh/known_hosts"))
self._io_lock.release()
def one_time_cmd(self, cmd):
"""
"""
try:
(stdin_fh, stdout_fh, stderr_fh) = self._client.exec_command(cmd,\
self._options.buf_size)
except paramiko.SSHException:
raise NetworkError("Failed to execute remote command")
stderr = stderr_fh.read()
stdout = stdout_fh.read()
if stderr and not stdout:
raise NetworkError("Remote command failed with: %s", stderr)
else:
return stdout
def _detect_ncpus(self):
"""
docstring
"""
# get number of cpus on linux
cmd = "grep -c 'model name' '/proc/cpuinfo'"
stdout = self.one_time_cmd(cmd)
if stdout:
self.logger.debug(stdout)
stdout = stdout.split("\n")
for line in stdout:
try:
self._n_cpus = int(line)
except ValueError:
continue
else:
return
# no CPUs detected, i.e., cmd caused an error
# will use pty on MacOS as well for consistency
cmd = "sysctl -n hw.ncpu"
stdout = self.one_time_cmd(cmd)
if stdout:
self.logger.debug(stdout)
stdout = stdout.split("\n")
for line in stdout:
try:
self._n_cpus = int(line)
except ValueError:
continue
else:
return
# return the default value
self.logger.warning("Could not detect number of CPUs,"\
" assuming default '1'")
self._n_cpus = 1
def _detect_cpu_usage(self, num_probes=10.0):
"""
docstring
"""
        # vmstat is available on linux, unix, and macosx; sample the load
        # once per second, num_probes times
cmd = "vmstat 1 %d" % num_probes
stdout = self.one_time_cmd(cmd)
if stdout:
self.logger.debug(stdout)
stdout = stdout.split("\n")
total = 0.
for line in stdout:
if not line:
continue
tmp = line.split()
# only want to parse lines that start with numbers
try:
float(tmp[0])
except ValueError:
continue
                # column 12 of vmstat's output is the user CPU percentage;
                # skip any line where it cannot be parsed as a number
try:
total += float(tmp[12])
except ValueError:
continue
self._cpu_usage = math.ceil(total / num_probes)
return
# default usage
self.logger.warning("Could not detect CPU usage, assuming 0 %%")
self._cpu_usage = 0.
def remote_shell_cmd(self, cmd, timeout=20.):
"""
"""
try:
channel = self._client.invoke_shell()
except paramiko.SSHException:
raise NetworkError("Failed to invoke remote shell")
if channel.gettimeout():
self.logger.debug("Channel timeout: %f", channel.gettimeout())
else:
channel.settimeout(timeout)
try:
channel.sendall(cmd)
except socket.timeout:
channel.close()
raise NetworkError("Connection timed out")
stdout = ""
expect = "%s@%s:~>\r\n" % (self._options.username, self._host)
while True:
try:
stdout += channel.recv(self._options.buf_size)
if stdout.endswith(expect):
break
except socket.timeout:
break
channel.close()
return stdout
def _setup_job(self, lower, upper, shell_file="batch_jobs.sh"):
"""
docstring
"""
cmd = "screen -dmS batch_simulation %s %d %d\n"\
% (shell_file, lower, upper)
# we only have to check for immediate errors of running this command
# not sure how to do that atm
stdout = self.remote_shell_cmd(cmd)
if stdout:
self.logger.debug(stdout)
def usage(self):
"""
docstring
"""
self.logger.debug("Establishing SSH connection...")
try:
self.make_ssh_connection()
except NetworkError as err:
self.logger.debug(str(err))
return 0
self.logger.debug("Detecting number of CPUs...")
self._detect_ncpus()
self.logger.debug("There are %d CPUs online", self._n_cpus)
self.logger.debug("Detecting CPU usage...")
self._detect_cpu_usage()
self.logger.debug("Usage is: %f", self._cpu_usage)
# compare work load with number of cpus present
self._cpu_usage = round(self._n_cpus * self._cpu_usage / 100.0, 0)
self._n_cpus = self._n_cpus - int(self._cpu_usage)
self.logger.debug("Number of CPUs to use: %d", self._n_cpus)
self.logger.debug("Closing client")
self._client.close()
return self._n_cpus
def run(self, lower, upper, shell_file="batch_jobs.sh"):
"""
docstring
"""
self.logger.debug("Establishing SSH connection...")
try:
self.make_ssh_connection()
except NetworkError as err:
self.logger.debug(str(err))
return None
# start simulations
self._setup_job(lower, upper, shell_file)
self.logger.info("Remote job started")
self._client.close()
def _detect_processes(self, *args):
"""
docstring
"""
pids = list()
for comm in args:
cmd = "ps -u %s -o pid,comm | grep %s | grep -v grep" %\
(self._options.username, comm)
stdout = self.one_time_cmd(cmd)
if stdout:
self.logger.debug(stdout)
stdout = stdout.split("\n")
for line in stdout:
# cheap trick not to parse ordinary text, like %CPU header
try:
pids.append(int(line.split()[0]))
except ValueError:
continue
except IndexError:
break
return pids
def kill(self, *args):
"""
docstring
"""
self.logger.debug("Establishing SSH connection...")
try:
self.make_ssh_connection()
except NetworkError as err:
self.logger.debug(str(err))
return 0
self.logger.debug("Killing process(es)...")
pids = self._detect_processes(*args)
self.logger.debug(pids)
killed = 0
for pid in pids:
cmd = "kill %d" % pid
try:
stdout = self.one_time_cmd(cmd)
except NetworkError as err:
self.logger.debug(str(err))
self.logger.debug(stdout)
else:
killed += 1
self.logger.debug("Closing client")
self._client.close()
return killed
|
srikantbmandal/ansible
|
refs/heads/devel
|
lib/ansible/plugins/action/ce.py
|
7
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import copy
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.utils.path import unfrackpath
from ansible.plugins import connection_loader
from ansible.module_utils.six import iteritems
from ansible.module_utils.ce import ce_argument_spec
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils._text import to_bytes
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
provider = self.load_provider()
transport = provider['transport'] or 'cli'
display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)
if transport == 'cli':
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'ce'
pc.remote_addr = provider['host'] or self._play_context.remote_addr
            pc.port = int(provider['port'] or self._play_context.port or 22)
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.timeout = provider['timeout'] or self._play_context.timeout
            # dict.update() returns None, so update first and then store the
            # merged provider in the task args
            provider.update(
                host=pc.remote_addr,
                port=pc.port,
                username=pc.remote_user,
                password=pc.password,
                ssh_keyfile=pc.private_key_file
            )
            self._task.args['provider'] = provider
display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
socket_path = self._get_socket_path(pc)
display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
if not os.path.exists(socket_path):
# start the connection if it isn't started
rc, out, err = connection.exec_command('open_shell()')
display.vvvv('open_shell() returned %s %s %s' % (rc, out, err))
if rc != 0:
return {'failed': True,
'msg': 'unable to open shell. Please see: ' +
'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell',
'rc': rc}
else:
                # make sure we are in the right cli context, which should be
                # enable mode and not config mode
rc, out, err = connection.exec_command('prompt()')
while str(out).strip().endswith(']'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
connection.exec_command('return')
rc, out, err = connection.exec_command('prompt()')
task_vars['ansible_socket'] = socket_path
# make sure a transport value is set in args
self._task.args['transport'] = transport
result = super(ActionModule, self).run(tmp, task_vars)
return result
def _get_socket_path(self, play_context):
ssh = connection_loader.get('ssh', class_only=True)
cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
path = unfrackpath("$HOME/.ansible/pc")
return cp % dict(directory=path)
def load_provider(self):
provider = self._task.args.get('provider', {})
for key, value in iteritems(ce_argument_spec):
if key != 'provider' and key not in provider:
if key in self._task.args:
provider[key] = self._task.args[key]
elif 'fallback' in value:
provider[key] = self._fallback(value['fallback'])
elif key not in provider:
provider[key] = None
return provider
def _fallback(self, fallback):
strategy = fallback[0]
args = []
kwargs = {}
for item in fallback[1:]:
if isinstance(item, dict):
kwargs = item
else:
args = item
try:
return strategy(*args, **kwargs)
except AnsibleFallbackNotFound:
pass
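# Sketch of the fallback spec shape _fallback expects, based on the tuple
# handling above (the argument-spec entry and environment variable name are
# assumptions):
#
# from ansible.module_utils.basic import env_fallback
# 'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME']))
# # _fallback calls env_fallback('ANSIBLE_NET_USERNAME'); if the variable is
# # unset, AnsibleFallbackNotFound is swallowed and the key is left as None.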
|
pankajanand18/python-tests
|
refs/heads/master
|
coinchange.py
|
1
|
coins = [1, 2, 5, 10]

def getDenomination(coins, total):
    # Bottom-up DP for the minimum-coin change problem:
    # min_deno[amt] = 1 + min(min_deno[amt - c] for c in coins if c <= amt)
    INFINITY = 30000  # sentinel larger than any reachable coin count
    min_deno = [0] * (total + 1)
    for amt in xrange(1, total + 1):
        best = INFINITY
        for coin in coins:
            if amt >= coin:
                candidate = min_deno[amt - coin] + 1
                if candidate < best:
                    best = candidate
        min_deno[amt] = best
    print min_deno
getDenomination(coins,23)
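# Worked example: with coins [1, 2, 5, 10] and a total of 23, the optimum is
# 4 coins (10 + 10 + 2 + 1), so the printed min_deno list ends with 4.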
|
Bysmyyr/chromium-crosswalk
|
refs/heads/master
|
tools/telemetry/telemetry/web_perf/metrics/webrtc_rendering_stats.py
|
2
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.util import statistics
DISPLAY_HERTZ = 60.0
VSYNC_DURATION = 1e6 / DISPLAY_HERTZ
# When to consider a frame frozen (in VSYNC units): meaning 1 initial
# frame + 5 repeats of that frame.
FROZEN_THRESHOLD = 6
# Severity factor.
SEVERITY = 3
IDEAL_RENDER_INSTANT = 'Ideal Render Instant'
ACTUAL_RENDER_BEGIN = 'Actual Render Begin'
ACTUAL_RENDER_END = 'Actual Render End'
SERIAL = 'Serial'
class TimeStats(object):
"""Stats container for webrtc rendering metrics."""
def __init__(self, drift_time=None, mean_drift_time=None,
std_dev_drift_time=None, percent_badly_out_of_sync=None,
percent_out_of_sync=None, smoothness_score=None, freezing_score=None,
rendering_length_error=None, fps=None, frame_distribution=None):
self.drift_time = drift_time
self.mean_drift_time = mean_drift_time
self.std_dev_drift_time = std_dev_drift_time
self.percent_badly_out_of_sync = percent_badly_out_of_sync
self.percent_out_of_sync = percent_out_of_sync
self.smoothness_score = smoothness_score
self.freezing_score = freezing_score
self.rendering_length_error = rendering_length_error
self.fps = fps
self.frame_distribution = frame_distribution
self.invalid_data = False
class WebMediaPlayerMsRenderingStats(object):
"""Analyzes events of WebMediaPlayerMs type."""
def __init__(self, events):
"""Save relevant events according to their stream."""
self.stream_to_events = self._MapEventsToStream(events)
def _IsEventValid(self, event):
"""Check that the needed arguments are present in event.
Args:
event: event to check.
Returns:
      True if event is valid, false otherwise."""
if not event.args:
return False
mandatory = [ACTUAL_RENDER_BEGIN, ACTUAL_RENDER_END,
IDEAL_RENDER_INSTANT, SERIAL]
for parameter in mandatory:
if not parameter in event.args:
return False
return True
def _MapEventsToStream(self, events):
"""Build a dictionary of events indexed by stream.
    The events of interest have a 'Serial' argument which represents the
    stream ID. The 'Serial' argument identifies the local or remote nature
    of the stream with the least significant bit being 0 or 1, as well as
    the hash value of the video track's URL, so stream ::= hash(0|1). The
    method then groups the events of the same stream under that stream id.
    Practically speaking, remote streams have an odd stream id and local
    streams have an even stream id.
Args:
events: Telemetry WebMediaPlayerMs events.
Returns:
A dict of stream IDs mapped to events on that stream.
"""
stream_to_events = {}
for event in events:
if not self._IsEventValid(event):
# This is not a render event, skip it.
continue
stream = event.args[SERIAL]
events_for_stream = stream_to_events.setdefault(stream, [])
events_for_stream.append(event)
return stream_to_events
def _GetCadence(self, relevant_events):
"""Calculate the apparent cadence of the rendering.
In this paragraph I will be using regex notation. What is intended by the
word cadence is a sort of extended instantaneous 'Cadence' (thus not
necessarily periodic). Just as an example, a normal 'Cadence' could be
    something like [2 3] which means possibly an observed frame persistence
    progression of [{2 3}+] for an ideal 20FPS video source. So what we are
    calculating here is the list of frame persistence values, kind of a
    'Proto-Cadence', but cadence is shorter so we abuse the word.
Args:
relevant_events: list of Telemetry events.
Returns:
      a list of frame persistence values.
"""
cadence = []
frame_persistence = 0
old_ideal_render = 0
for event in relevant_events:
if not self._IsEventValid(event):
# This event is not a render event so skip it.
continue
if event.args[IDEAL_RENDER_INSTANT] == old_ideal_render:
frame_persistence += 1
else:
cadence.append(frame_persistence)
frame_persistence = 1
old_ideal_render = event.args[IDEAL_RENDER_INSTANT]
cadence.append(frame_persistence)
cadence.pop(0)
return cadence
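  # Worked example (hypothetical event stream): for 'Ideal Render Instant'
  # values [0, 0, 33, 33, 33, 66] the loop collects [2, 3] plus the trailing
  # run, giving [2, 3, 1]; pop(0) then drops the first run, whose true start
  # was never observed, so the returned cadence is [3, 1].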
def _GetSourceToOutputDistribution(self, cadence):
"""Create distribution for the cadence frame display values.
If the overall display distribution is A1:A2:..:An, this will tell us how
many times a frame stays displayed during Ak*VSYNC_DURATION, also known as
'source to output' distribution. Or in other terms:
a distribution B::= let C be the cadence, B[k]=p with k in Unique(C)
and p=Card(k in C).
Args:
      cadence: list of frame persistence values.
Returns:
a dictionary containing the distribution
"""
frame_distribution = {}
for ticks in cadence:
ticks_so_far = frame_distribution.setdefault(ticks, 0)
frame_distribution[ticks] = ticks_so_far + 1
return frame_distribution
def _GetFpsFromCadence(self, frame_distribution):
"""Calculate the apparent FPS from frame distribution.
Knowing the display frequency and the frame distribution, it is possible to
calculate the video apparent frame rate as played by WebMediaPlayerMs
module.
Args:
frame_distribution: the source to output distribution.
Returns:
the video apparent frame rate.
"""
number_frames = sum(frame_distribution.values())
number_vsyncs = sum([ticks * frame_distribution[ticks]
for ticks in frame_distribution])
mean_ratio = float(number_vsyncs) / number_frames
return DISPLAY_HERTZ / mean_ratio
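  # Worked example: a frame_distribution of {1: 40, 2: 10} means 50 frames
  # covered 40*1 + 10*2 = 60 vsyncs, so mean_ratio = 60/50 = 1.2 and the
  # apparent frame rate is 60.0 / 1.2 = 50 FPS.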
def _GetFrozenFramesReports(self, frame_distribution):
"""Find evidence of frozen frames in distribution.
For simplicity we count as freezing the frames that appear at least five
times in a row counted from 'Ideal Render Instant' perspective. So let's
say for 1 source frame, we rendered 6 frames, then we consider 5 of these
rendered frames as frozen. But we mitigate this by saying anything under
5 frozen frames will not be counted as frozen.
Args:
frame_distribution: the source to output distribution.
Returns:
a list of dicts whose keys are ('frozen_frames', 'occurrences').
"""
frozen_frames = []
frozen_frame_vsyncs = [ticks for ticks in frame_distribution if ticks >=
FROZEN_THRESHOLD]
for frozen_frames_vsync in frozen_frame_vsyncs:
logging.debug('%s frames not updated after %s vsyncs',
frame_distribution[frozen_frames_vsync], frozen_frames_vsync)
frozen_frames.append(
{'frozen_frames': frozen_frames_vsync - 1,
'occurrences': frame_distribution[frozen_frames_vsync]})
return frozen_frames
def _FrozenPenaltyWeight(self, number_frozen_frames):
"""Returns the weighted penalty for a number of frozen frames.
    As mentioned earlier, we count as frozen anything displayed for 6 or
    more vsync durations with the same 'Ideal Render Instant', which means
    five frozen frames.
Args:
number_frozen_frames: number of frozen frames.
Returns:
the penalty weight (int) for that number of frozen frames.
"""
penalty = {
0: 0,
1: 0,
2: 0,
3: 0,
4: 0,
5: 1,
6: 5,
7: 15,
8: 25
}
weight = penalty.get(number_frozen_frames, 8 * (number_frozen_frames - 4))
return weight
def _IsRemoteStream(self, stream):
"""Check if stream is remote."""
return stream % 2
  def _GetDriftTimeStats(self, relevant_events, cadence):
"""Get the drift time statistics.
    This method calculates the drift time stats, that is to say:
    drift_time ::= list(actual render begin - ideal render instant),
    along with the rendering length error.
Args:
relevant_events: events to get drift times stats from.
cadence: list of frame persistence values.
Returns:
a tuple of (drift_time, rendering_length_error).
"""
drift_time = []
old_ideal_render = 0
discrepancy = []
index = 0
for event in relevant_events:
current_ideal_render = event.args[IDEAL_RENDER_INSTANT]
if current_ideal_render == old_ideal_render:
# Skip to next event because we're looking for a source frame.
continue
actual_render_begin = event.args[ACTUAL_RENDER_BEGIN]
drift_time.append(actual_render_begin - current_ideal_render)
discrepancy.append(abs(current_ideal_render - old_ideal_render
- VSYNC_DURATION * cadence[index]))
old_ideal_render = current_ideal_render
index += 1
discrepancy.pop(0)
last_ideal_render = relevant_events[-1].args[IDEAL_RENDER_INSTANT]
first_ideal_render = relevant_events[0].args[IDEAL_RENDER_INSTANT]
rendering_length_error = 100.0 * (sum([x for x in discrepancy]) /
(last_ideal_render - first_ideal_render))
return drift_time, rendering_length_error
def _GetSmoothnessStats(self, norm_drift_time):
"""Get the smoothness stats from the normalized drift time.
This method will calculate the smoothness score, along with the percentage
of frames badly out of sync and the percentage of frames out of sync. To be
considered badly out of sync, a frame has to have missed rendering by at
least 2*VSYNC_DURATION. To be considered out of sync, a frame has to have
missed rendering by at least one VSYNC_DURATION.
The smoothness score is a measure of how out of sync the frames are.
Args:
norm_drift_time: normalized drift time.
Returns:
a tuple of (percent_badly_oos, percent_out_of_sync, smoothness_score)
"""
# How many times is a frame later/earlier than T=2*VSYNC_DURATION. Time is
# in microseconds.
frames_severely_out_of_sync = len(
[x for x in norm_drift_time if abs(x) > 2 * VSYNC_DURATION])
percent_badly_oos = (
100.0 * frames_severely_out_of_sync / len(norm_drift_time))
# How many times is a frame later/earlier than VSYNC_DURATION.
frames_out_of_sync = len(
[x for x in norm_drift_time if abs(x) > VSYNC_DURATION])
percent_out_of_sync = (
100.0 * frames_out_of_sync / len(norm_drift_time))
frames_oos_only_once = frames_out_of_sync - frames_severely_out_of_sync
# Calculate smoothness metric. From the formula, we can see that smoothness
# score can be negative.
smoothness_score = 100.0 - 100.0 * (frames_oos_only_once +
SEVERITY * frames_severely_out_of_sync) / len(norm_drift_time)
# Minimum smoothness_score value allowed is zero.
if smoothness_score < 0:
smoothness_score = 0
return (percent_badly_oos, percent_out_of_sync, smoothness_score)
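  # Worked example: out of 100 frames, if 6 are out of sync and 2 of those
  # badly so, then frames_oos_only_once = 4 and the smoothness score is
  # 100 - 100 * (4 + SEVERITY * 2) / 100 = 90, with SEVERITY == 3.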
def _GetFreezingScore(self, frame_distribution):
"""Get the freezing score."""
# The freezing score is based on the source to output distribution.
number_vsyncs = sum([n * frame_distribution[n]
for n in frame_distribution])
frozen_frames = self._GetFrozenFramesReports(frame_distribution)
# Calculate freezing metric.
    # Freezing metric can be negative if things are really bad. In that case
    # we clamp it to zero as the minimum value.
freezing_score = 100.0
for frozen_report in frozen_frames:
weight = self._FrozenPenaltyWeight(frozen_report['frozen_frames'])
freezing_score -= (
100.0 * frozen_report['occurrences'] / number_vsyncs * weight)
if freezing_score < 0:
freezing_score = 0
return freezing_score
def GetTimeStats(self):
"""Calculate time stamp stats for all remote stream events."""
stats = {}
for stream, relevant_events in self.stream_to_events.iteritems():
if len(relevant_events) == 1:
logging.debug('Found a stream=%s with just one event', stream)
continue
if not self._IsRemoteStream(stream):
logging.info('Skipping processing of local stream: %s', stream)
continue
cadence = self._GetCadence(relevant_events)
if not cadence:
stats = TimeStats()
stats.invalid_data = True
return stats
frame_distribution = self._GetSourceToOutputDistribution(cadence)
fps = self._GetFpsFromCadence(frame_distribution)
      drift_time_stats = self._GetDriftTimeStats(relevant_events, cadence)
(drift_time, rendering_length_error) = drift_time_stats
# Drift time normalization.
mean_drift_time = statistics.ArithmeticMean(drift_time)
norm_drift_time = [abs(x - mean_drift_time) for x in drift_time]
smoothness_stats = self._GetSmoothnessStats(norm_drift_time)
(percent_badly_oos, percent_out_of_sync,
smoothness_score) = smoothness_stats
freezing_score = self._GetFreezingScore(frame_distribution)
stats = TimeStats(drift_time=drift_time,
percent_badly_out_of_sync=percent_badly_oos,
percent_out_of_sync=percent_out_of_sync,
smoothness_score=smoothness_score, freezing_score=freezing_score,
rendering_length_error=rendering_length_error, fps=fps,
frame_distribution=frame_distribution)
return stats
|
t11e/django
|
refs/heads/master
|
django/db/backends/sqlite3/base.py
|
1
|
"""
SQLite3 backend for django.
Python 2.3 and 2.4 require pysqlite2 (http://pysqlite.org/).
Python 2.5 and later can use a pysqlite2 module or the sqlite3 module in the
standard library.
"""
import re
import sys
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.sqlite3.client import DatabaseClient
from django.db.backends.sqlite3.creation import DatabaseCreation
from django.db.backends.sqlite3.introspection import DatabaseIntrospection
from django.utils.safestring import SafeString
try:
try:
from pysqlite2 import dbapi2 as Database
except ImportError, e1:
from sqlite3 import dbapi2 as Database
except ImportError, exc:
import sys
from django.core.exceptions import ImproperlyConfigured
if sys.version_info < (2, 5, 0):
module = 'pysqlite2 module'
exc = e1
else:
module = 'either pysqlite2 or sqlite3 modules (tried in that order)'
raise ImproperlyConfigured("Error loading %s: %s" % (module, exc))
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
Database.register_converter("bool", lambda s: str(s) == '1')
Database.register_converter("time", util.typecast_time)
Database.register_converter("date", util.typecast_date)
Database.register_converter("datetime", util.typecast_timestamp)
Database.register_converter("timestamp", util.typecast_timestamp)
Database.register_converter("TIMESTAMP", util.typecast_timestamp)
Database.register_converter("decimal", util.typecast_decimal)
Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal)
if Database.version_info >= (2,4,1):
# Starting in 2.4.1, the str type is not accepted anymore, therefore,
# we convert all str objects to Unicode
    # As registering an adapter for a primitive type causes a small
# slow-down, this adapter is only registered for sqlite3 versions
# needing it.
Database.register_adapter(str, lambda s:s.decode('utf-8'))
Database.register_adapter(SafeString, lambda s:s.decode('utf-8'))
class DatabaseFeatures(BaseDatabaseFeatures):
# SQLite cannot handle us only partially reading from a cursor's result set
# and then writing the same rows to the database in another cursor. This
# setting ensures we always read result sets fully into memory all in one
# go.
can_use_chunked_reads = False
class DatabaseOperations(BaseDatabaseOperations):
def date_extract_sql(self, lookup_type, field_name):
# sqlite doesn't support extract, so we fake it with the user-defined
# function django_extract that's registered in connect(). Note that
# single quotes are used because this is a string (and could otherwise
# cause a collision with a field name).
return "django_extract('%s', %s)" % (lookup_type.lower(), field_name)
def date_trunc_sql(self, lookup_type, field_name):
# sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined
# function django_date_trunc that's registered in connect(). Note that
# single quotes are used because this is a string (and could otherwise
# cause a collision with a field name).
return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
def drop_foreignkey_sql(self):
return ""
def pk_default_value(self):
return 'NULL'
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def no_limit_value(self):
return -1
def sql_flush(self, style, tables, sequences):
# NB: The generated SQL below is specific to SQLite
# Note: The DELETE FROM... SQL generated below works for SQLite databases
# because constraints don't exist
sql = ['%s %s %s;' % \
(style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))
) for table in tables]
# Note: No requirement for reset of auto-incremented indices (cf. other
# sql_flush() implementations). Just return SQL at this point
return sql
def year_lookup_bounds(self, value):
first = '%s-01-01'
second = '%s-12-31 23:59:59.999999'
return [first % value, second % value]
def convert_values(self, value, field):
"""SQLite returns floats when it should be returning decimals,
and gets dates and datetimes wrong.
For consistency with other backends, coerce when required.
"""
internal_type = field.get_internal_type()
if internal_type == 'DecimalField':
return util.typecast_decimal(field.format_number(value))
elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField':
return int(value)
elif internal_type == 'DateField':
return util.typecast_date(value)
elif internal_type == 'DateTimeField':
return util.typecast_timestamp(value)
elif internal_type == 'TimeField':
return util.typecast_time(value)
# No field, or the field isn't known to be a decimal or integer
return value
class DatabaseWrapper(BaseDatabaseWrapper):
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See http://www.sqlite.org/lang_expr.html for an explanation.
operators = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",
'contains': "LIKE %s ESCAPE '\\'",
'icontains': "LIKE %s ESCAPE '\\'",
'regex': 'REGEXP %s',
'iregex': "REGEXP '(?i)' || %s",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE %s ESCAPE '\\'",
'endswith': "LIKE %s ESCAPE '\\'",
'istartswith': "LIKE %s ESCAPE '\\'",
'iendswith': "LIKE %s ESCAPE '\\'",
}
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures()
self.ops = DatabaseOperations()
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def _cursor(self):
if self.connection is None:
settings_dict = self.settings_dict
if not settings_dict['NAME']:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Please fill out the database NAME in the settings module before using the database.")
kwargs = {
'database': settings_dict['NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['OPTIONS'])
self.connection = Database.connect(**kwargs)
# Register extract, date_trunc, and regexp functions.
self.connection.create_function("django_extract", 2, _sqlite_extract)
self.connection.create_function("django_date_trunc", 2, _sqlite_date_trunc)
self.connection.create_function("regexp", 2, _sqlite_regexp)
connection_created.send(sender=self.__class__)
return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if self.settings_dict['NAME'] != ":memory:":
BaseDatabaseWrapper.close(self)
FORMAT_QMARK_REGEX = re.compile(r'(?![^%])%s')
class SQLiteCursorWrapper(Database.Cursor):
"""
Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
This fixes it -- but note that if you want to use a literal "%s" in a query,
you'll need to use "%%s".
"""
def execute(self, query, params=()):
query = self.convert_query(query)
try:
return Database.Cursor.execute(self, query, params)
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
except Database.DatabaseError, e:
raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
def executemany(self, query, param_list):
query = self.convert_query(query)
try:
return Database.Cursor.executemany(self, query, param_list)
except Database.IntegrityError, e:
raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
except Database.DatabaseError, e:
raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
def convert_query(self, query):
return FORMAT_QMARK_REGEX.sub('?', query).replace('%%','%')
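# A minimal sketch (not part of the original module) of what convert_query
# produces, assuming the regex above: 'SELECT %s WHERE a = %s' becomes
# 'SELECT ? WHERE a = ?', while "LIKE '%%s'" is not treated as a placeholder
# and comes out as "LIKE '%s'" after the final .replace('%%', '%').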
def _sqlite_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
else:
return getattr(dt, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
try:
dt = util.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'year':
return "%i-01-01 00:00:00" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
def _sqlite_regexp(re_pattern, re_string):
import re
try:
return bool(re.search(re_pattern, re_string))
except:
return False
|
caglar10ur/lxc
|
refs/heads/master
|
config/yum/lxc-patch.py
|
64
|
# Yum plugin to re-patch container rootfs after a yum update is done
#
# Copyright (C) 2012 Oracle
#
# Authors:
# Dwight Engen <dwight.engen@oracle.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
#
import os
from fnmatch import fnmatch
from yum.plugins import TYPE_INTERACTIVE
from yum.plugins import PluginYumExit
requires_api_version = '2.0'
plugin_type = (TYPE_INTERACTIVE,)
def posttrans_hook(conduit):
pkgs = []
patch_required = False
# If we aren't root, we can't have updated anything
if os.geteuid():
return
# See what packages have files that were patched
confpkgs = conduit.confString('main', 'packages')
if not confpkgs:
return
tmp = confpkgs.split(",")
for confpkg in tmp:
pkgs.append(confpkg.strip())
conduit.info(2, "lxc-patch: checking if updated pkgs need patching...")
ts = conduit.getTsInfo()
for tsmem in ts.getMembers():
for pkg in pkgs:
            if fnmatch(tsmem.po.name, pkg):
patch_required = True
if patch_required:
conduit.info(2, "lxc-patch: patching container...")
os.spawnlp(os.P_WAIT, "lxc-patch", "lxc-patch", "--patch", "/")
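# A minimal sketch (not part of the plugin) of the matching rule assumed
# above: entries on the "packages" config line are shell-style globs, so
# fnmatch(name, pattern) decides whether an updated package triggers
# re-patching, e.g. fnmatch("kernel-headers", "kernel*") is True, while
# the reversed argument order would not match.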
|
vlinhd11/vlinhd11-android-scripting
|
refs/heads/master
|
python/src/Lib/test/test_slice.py
|
56
|
# tests for slice objects; in particular the indices method.
import unittest
from test import test_support
from cPickle import loads, dumps
import sys
class SliceTest(unittest.TestCase):
def test_constructor(self):
self.assertRaises(TypeError, slice)
self.assertRaises(TypeError, slice, 1, 2, 3, 4)
def test_repr(self):
self.assertEqual(repr(slice(1, 2, 3)), "slice(1, 2, 3)")
def test_hash(self):
# Verify clearing of SF bug #800796
self.assertRaises(TypeError, hash, slice(5))
self.assertRaises(TypeError, slice(5).__hash__)
def test_cmp(self):
s1 = slice(1, 2, 3)
s2 = slice(1, 2, 3)
s3 = slice(1, 2, 4)
self.assertEqual(s1, s2)
self.assertNotEqual(s1, s3)
class Exc(Exception):
pass
class BadCmp(object):
def __eq__(self, other):
raise Exc
__hash__ = None # Silence Py3k warning
s1 = slice(BadCmp())
s2 = slice(BadCmp())
self.assertRaises(Exc, cmp, s1, s2)
self.assertEqual(s1, s1)
s1 = slice(1, BadCmp())
s2 = slice(1, BadCmp())
self.assertEqual(s1, s1)
self.assertRaises(Exc, cmp, s1, s2)
s1 = slice(1, 2, BadCmp())
s2 = slice(1, 2, BadCmp())
self.assertEqual(s1, s1)
self.assertRaises(Exc, cmp, s1, s2)
def test_members(self):
s = slice(1)
self.assertEqual(s.start, None)
self.assertEqual(s.stop, 1)
self.assertEqual(s.step, None)
s = slice(1, 2)
self.assertEqual(s.start, 1)
self.assertEqual(s.stop, 2)
self.assertEqual(s.step, None)
s = slice(1, 2, 3)
self.assertEqual(s.start, 1)
self.assertEqual(s.stop, 2)
self.assertEqual(s.step, 3)
class AnyClass:
pass
obj = AnyClass()
s = slice(obj)
self.assert_(s.stop is obj)
def test_indices(self):
self.assertEqual(slice(None ).indices(10), (0, 10, 1))
self.assertEqual(slice(None, None, 2).indices(10), (0, 10, 2))
self.assertEqual(slice(1, None, 2).indices(10), (1, 10, 2))
self.assertEqual(slice(None, None, -1).indices(10), (9, -1, -1))
self.assertEqual(slice(None, None, -2).indices(10), (9, -1, -2))
self.assertEqual(slice(3, None, -2).indices(10), (3, -1, -2))
# issue 3004 tests
self.assertEqual(slice(None, -9).indices(10), (0, 1, 1))
self.assertEqual(slice(None, -10).indices(10), (0, 0, 1))
self.assertEqual(slice(None, -11).indices(10), (0, 0, 1))
self.assertEqual(slice(None, -10, -1).indices(10), (9, 0, -1))
self.assertEqual(slice(None, -11, -1).indices(10), (9, -1, -1))
self.assertEqual(slice(None, -12, -1).indices(10), (9, -1, -1))
self.assertEqual(slice(None, 9).indices(10), (0, 9, 1))
self.assertEqual(slice(None, 10).indices(10), (0, 10, 1))
self.assertEqual(slice(None, 11).indices(10), (0, 10, 1))
self.assertEqual(slice(None, 8, -1).indices(10), (9, 8, -1))
self.assertEqual(slice(None, 9, -1).indices(10), (9, 9, -1))
self.assertEqual(slice(None, 10, -1).indices(10), (9, 9, -1))
self.assertEqual(
slice(-100, 100 ).indices(10),
slice(None).indices(10)
)
self.assertEqual(
slice(100, -100, -1).indices(10),
slice(None, None, -1).indices(10)
)
self.assertEqual(slice(-100L, 100L, 2L).indices(10), (0, 10, 2))
self.assertEqual(range(10)[::sys.maxint - 1], [0])
self.assertRaises(OverflowError, slice(None).indices, 1L<<100)
def test_setslice_without_getslice(self):
tmp = []
class X(object):
def __setslice__(self, i, j, k):
tmp.append((i, j, k))
x = X()
x[1:2] = 42
self.assertEquals(tmp, [(1, 2, 42)])
def test_pickle(self):
s = slice(10, 20, 3)
for protocol in (0,1,2):
t = loads(dumps(s, protocol))
self.assertEqual(s, t)
self.assertEqual(s.indices(15), t.indices(15))
self.assertNotEqual(id(s), id(t))
def test_main():
test_support.run_unittest(SliceTest)
if __name__ == "__main__":
test_main()
|
stbka/ansible
|
refs/heads/devel
|
lib/ansible/parsing/yaml/objects.py
|
169
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import text_type
class AnsibleBaseYAMLObject(object):
'''
the base class used to sub-class python built-in objects
so that we can add attributes to them during yaml parsing
'''
_data_source = None
_line_number = 0
_column_number = 0
def _get_ansible_position(self):
return (self._data_source, self._line_number, self._column_number)
def _set_ansible_position(self, obj):
try:
(src, line, col) = obj
except (TypeError, ValueError):
raise AssertionError(
'ansible_pos can only be set with a tuple/list '
'of three values: source, line number, column number'
)
self._data_source = src
self._line_number = line
self._column_number = col
ansible_pos = property(_get_ansible_position, _set_ansible_position)
class AnsibleMapping(AnsibleBaseYAMLObject, dict):
''' sub class for dictionaries '''
pass
class AnsibleUnicode(AnsibleBaseYAMLObject, text_type):
''' sub class for unicode objects '''
pass
class AnsibleSequence(AnsibleBaseYAMLObject, list):
''' sub class for lists '''
pass
|
saurabh6790/med_new_app
|
refs/heads/develop
|
patches/march_2013/p11_update_attach_files.py
|
30
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import webnotes
def execute():
for f in webnotes.conn.sql("""select parent, fieldname
from tabDocField where options="attach_files:" """, as_dict=1):
if webnotes.conn.get_value("DocType", f.parent, "issingle"):
fname = webnotes.conn.get_value(f.parent, None, f.fieldname)
if fname:
if not (fname.startswith("http") or fname.startswith("files")):
webnotes.conn.set_value(f.parent, None, f.fieldname, "files/" + fname)
else:
webnotes.conn.sql("""update `tab%(parent)s`
set %(fieldname)s =
if(substr(%(fieldname)s,1,4)='http' or substr(%(fieldname)s,1,5)='files',
%(fieldname)s,
concat('files/', %(fieldname)s))""" % f)
|
chirpradio/chirpradio-machine
|
refs/heads/master
|
chirp/library/schema.py
|
1
|
"""Schema for our music library database.
Our data model is extremely simple:
* Our database contains audio file objects.
* Each audio file is uniquely identified by a fingerprint.
* Each audio file has many ID3 tags.
* ID3 tags are partitioned into sets by a timestamp.
"""
from chirp.common import mp3_header
from chirp.library import audio_file
create_audio_files_table = """
CREATE TABLE audio_files (
volume INTEGER, /* volume number */
import_timestamp INTEGER, /* seconds since the epoch */
fingerprint TEXT UNIQUE, /* SHA1 hash of the MPEG frames */
album_id INTEGER, /* Unique identifier for source album. */
sampling_rate_hz INTEGER, /* Audio sampling rate, measured in Hz */
bit_rate_kbps INTEGER, /* File bit rate, measued in kbps */
channels INTEGER, /* MPEG channel identifier */
frame_count INTEGER, /* total number of MPEG frames */
frame_size INTEGER, /* total size of all MPEG frames, in bytes */
duration_ms INTEGER /* song duration, measured in milliseconds */
)
"""
# Audio files are uniquely identified by their fingerprints.
create_audio_files_index = """
CREATE UNIQUE INDEX audio_files_index_fingerprint
ON audio_files ( fingerprint )
"""
create_id3_tags_table = """
CREATE TABLE id3_tags (
fingerprint TEXT, /* Fingerprint of the file this tag is part of */
timestamp INTEGER, /* Timestamp of this ID3 tag */
frame_id TEXT, /* The value of this_mutagen_id3_tag.FrameID */
value TEXT, /* For text tags, unicode(this_mutagen_id3_tag) */
mutagen_repr TEXT /* Very Python- and Mutagen-specific */
)
"""
create_id3_tags_index = """
CREATE INDEX id3_tags_index_fingerprint
ON id3_tags ( fingerprint, timestamp DESC )
"""
def audio_file_to_tuple(au_file):
"""Turn an AudioFile object into an insertable tuple."""
return (au_file.volume,
au_file.import_timestamp,
au_file.fingerprint,
au_file.album_id,
au_file.mp3_header.sampling_rate_hz,
au_file.mp3_header.bit_rate_kbps,
au_file.mp3_header.channels,
au_file.frame_count,
au_file.frame_size,
au_file.duration_ms)
def tuple_to_audio_file(au_file_tuple):
"""Convert a tuple into a new AudioFile object.
This is the inverse of audio_file_to_tuple.
"""
au_file = audio_file.AudioFile()
(au_file.volume,
au_file.import_timestamp,
au_file.fingerprint,
raw_album_id,
sampling_rate_hz,
bit_rate_kbps,
channels,
au_file.frame_count,
au_file.frame_size,
au_file.duration_ms) = au_file_tuple
au_file.album_id = int(raw_album_id)
au_file.mp3_header = mp3_header.MP3Header(
sampling_rate_hz=sampling_rate_hz,
bit_rate_kbps=bit_rate_kbps,
channels=channels)
return au_file
def id3_tag_to_tuple(fingerprint, timestamp, tag):
"""Turn a Mutagen ID3 tag object into an insertable tuple."""
value = u""
if hasattr(tag, "text"):
value = unicode(tag)
return (fingerprint, timestamp, tag.FrameID, value, repr(tag))
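# A minimal round-trip sketch (not part of the original module) showing how
# the schema and the tuple helpers fit together; ``au_file`` is assumed to
# be a fully populated AudioFile.
def _example_roundtrip(au_file):
    import sqlite3
    conn = sqlite3.connect(":memory:")
    conn.execute(create_audio_files_table)
    # The tuple lines up column-for-column with the audio_files table above.
    conn.execute("INSERT INTO audio_files VALUES (?,?,?,?,?,?,?,?,?,?)",
                 audio_file_to_tuple(au_file))
    row = conn.execute("SELECT * FROM audio_files").fetchone()
    return tuple_to_audio_file(row)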
|
guozhangwang/kafka
|
refs/heads/trunk
|
tests/kafkatest/sanity_checks/__init__.py
|
93
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
JohnDenker/brython
|
refs/heads/master
|
www/src/Lib/test/test_xmlrpc.py
|
27
|
import base64
import datetime
import sys
import time
import unittest
from unittest import mock
import xmlrpc.client as xmlrpclib
import xmlrpc.server
import http.client
import socket
import os
import re
import io
import contextlib
from test import support
try:
import threading
except ImportError:
threading = None
alist = [{'astring': 'foo@bar.baz.spam',
'afloat': 7283.43,
'anint': 2**20,
'ashortlong': 2,
'anotherlist': ['.zyx.41'],
'abase64': xmlrpclib.Binary(b"my dog has fleas"),
'b64bytes': b"my dog has fleas",
'b64bytearray': bytearray(b"my dog has fleas"),
'boolean': False,
'unicode': '\u4000\u6000\u8000',
'ukey\u4000': 'regular value',
'datetime1': xmlrpclib.DateTime('20050210T11:41:23'),
'datetime2': xmlrpclib.DateTime(
(2005, 2, 10, 11, 41, 23, 0, 1, -1)),
'datetime3': xmlrpclib.DateTime(
datetime.datetime(2005, 2, 10, 11, 41, 23)),
}]
class XMLRPCTestCase(unittest.TestCase):
def test_dump_load(self):
dump = xmlrpclib.dumps((alist,))
load = xmlrpclib.loads(dump)
self.assertEqual(alist, load[0][0])
def test_dump_bare_datetime(self):
# This checks that an unwrapped datetime.date object can be handled
# by the marshalling code. This can't be done via test_dump_load()
# since with use_builtin_types set to 1 the unmarshaller would create
# datetime objects for the 'datetime[123]' keys as well
dt = datetime.datetime(2005, 2, 10, 11, 41, 23)
self.assertEqual(dt, xmlrpclib.DateTime('20050210T11:41:23'))
s = xmlrpclib.dumps((dt,))
result, m = xmlrpclib.loads(s, use_builtin_types=True)
(newdt,) = result
self.assertEqual(newdt, dt)
self.assertIs(type(newdt), datetime.datetime)
self.assertIsNone(m)
result, m = xmlrpclib.loads(s, use_builtin_types=False)
(newdt,) = result
self.assertEqual(newdt, dt)
self.assertIs(type(newdt), xmlrpclib.DateTime)
self.assertIsNone(m)
result, m = xmlrpclib.loads(s, use_datetime=True)
(newdt,) = result
self.assertEqual(newdt, dt)
self.assertIs(type(newdt), datetime.datetime)
self.assertIsNone(m)
result, m = xmlrpclib.loads(s, use_datetime=False)
(newdt,) = result
self.assertEqual(newdt, dt)
self.assertIs(type(newdt), xmlrpclib.DateTime)
self.assertIsNone(m)
def test_datetime_before_1900(self):
# same as before but with a date before 1900
dt = datetime.datetime(1, 2, 10, 11, 41, 23)
self.assertEqual(dt, xmlrpclib.DateTime('00010210T11:41:23'))
s = xmlrpclib.dumps((dt,))
result, m = xmlrpclib.loads(s, use_builtin_types=True)
(newdt,) = result
self.assertEqual(newdt, dt)
self.assertIs(type(newdt), datetime.datetime)
self.assertIsNone(m)
result, m = xmlrpclib.loads(s, use_builtin_types=False)
(newdt,) = result
self.assertEqual(newdt, dt)
self.assertIs(type(newdt), xmlrpclib.DateTime)
self.assertIsNone(m)
def test_bug_1164912 (self):
d = xmlrpclib.DateTime()
((new_d,), dummy) = xmlrpclib.loads(xmlrpclib.dumps((d,),
methodresponse=True))
self.assertIsInstance(new_d.value, str)
# Check that the output of dumps() is still an 8-bit string
s = xmlrpclib.dumps((new_d,), methodresponse=True)
self.assertIsInstance(s, str)
def test_newstyle_class(self):
class T(object):
pass
t = T()
t.x = 100
t.y = "Hello"
((t2,), dummy) = xmlrpclib.loads(xmlrpclib.dumps((t,)))
self.assertEqual(t2, t.__dict__)
def test_dump_big_long(self):
self.assertRaises(OverflowError, xmlrpclib.dumps, (2**99,))
def test_dump_bad_dict(self):
self.assertRaises(TypeError, xmlrpclib.dumps, ({(1,2,3): 1},))
def test_dump_recursive_seq(self):
l = [1,2,3]
t = [3,4,5,l]
l.append(t)
self.assertRaises(TypeError, xmlrpclib.dumps, (l,))
def test_dump_recursive_dict(self):
d = {'1':1, '2':1}
t = {'3':3, 'd':d}
d['t'] = t
self.assertRaises(TypeError, xmlrpclib.dumps, (d,))
def test_dump_big_int(self):
if sys.maxsize > 2**31-1:
self.assertRaises(OverflowError, xmlrpclib.dumps,
(int(2**34),))
xmlrpclib.dumps((xmlrpclib.MAXINT, xmlrpclib.MININT))
self.assertRaises(OverflowError, xmlrpclib.dumps,
(xmlrpclib.MAXINT+1,))
self.assertRaises(OverflowError, xmlrpclib.dumps,
(xmlrpclib.MININT-1,))
def dummy_write(s):
pass
m = xmlrpclib.Marshaller()
m.dump_int(xmlrpclib.MAXINT, dummy_write)
m.dump_int(xmlrpclib.MININT, dummy_write)
self.assertRaises(OverflowError, m.dump_int,
xmlrpclib.MAXINT+1, dummy_write)
self.assertRaises(OverflowError, m.dump_int,
xmlrpclib.MININT-1, dummy_write)
def test_dump_double(self):
xmlrpclib.dumps((float(2 ** 34),))
xmlrpclib.dumps((float(xmlrpclib.MAXINT),
float(xmlrpclib.MININT)))
xmlrpclib.dumps((float(xmlrpclib.MAXINT + 42),
float(xmlrpclib.MININT - 42)))
def dummy_write(s):
pass
m = xmlrpclib.Marshaller()
m.dump_double(xmlrpclib.MAXINT, dummy_write)
m.dump_double(xmlrpclib.MININT, dummy_write)
m.dump_double(xmlrpclib.MAXINT + 42, dummy_write)
m.dump_double(xmlrpclib.MININT - 42, dummy_write)
def test_dump_none(self):
value = alist + [None]
arg1 = (alist + [None],)
strg = xmlrpclib.dumps(arg1, allow_none=True)
self.assertEqual(value,
xmlrpclib.loads(strg)[0][0])
self.assertRaises(TypeError, xmlrpclib.dumps, (arg1,))
def test_dump_bytes(self):
sample = b"my dog has fleas"
self.assertEqual(sample, xmlrpclib.Binary(sample))
for type_ in bytes, bytearray, xmlrpclib.Binary:
value = type_(sample)
s = xmlrpclib.dumps((value,))
result, m = xmlrpclib.loads(s, use_builtin_types=True)
(newvalue,) = result
self.assertEqual(newvalue, sample)
self.assertIs(type(newvalue), bytes)
self.assertIsNone(m)
result, m = xmlrpclib.loads(s, use_builtin_types=False)
(newvalue,) = result
self.assertEqual(newvalue, sample)
self.assertIs(type(newvalue), xmlrpclib.Binary)
self.assertIsNone(m)
def test_get_host_info(self):
# see bug #3613, this raised a TypeError
transp = xmlrpc.client.Transport()
self.assertEqual(transp.get_host_info("user@host.tld"),
('host.tld',
[('Authorization', 'Basic dXNlcg==')], {}))
def test_ssl_presence(self):
try:
import ssl
except ImportError:
has_ssl = False
else:
has_ssl = True
try:
xmlrpc.client.ServerProxy('https://localhost:9999').bad_function()
except NotImplementedError:
self.assertFalse(has_ssl, "xmlrpc client's error with SSL support")
except socket.error:
self.assertTrue(has_ssl)
class HelperTestCase(unittest.TestCase):
def test_escape(self):
self.assertEqual(xmlrpclib.escape("a&b"), "a&b")
self.assertEqual(xmlrpclib.escape("a<b"), "a<b")
self.assertEqual(xmlrpclib.escape("a>b"), "a>b")
class FaultTestCase(unittest.TestCase):
def test_repr(self):
f = xmlrpclib.Fault(42, 'Test Fault')
self.assertEqual(repr(f), "<Fault 42: 'Test Fault'>")
self.assertEqual(repr(f), str(f))
def test_dump_fault(self):
f = xmlrpclib.Fault(42, 'Test Fault')
s = xmlrpclib.dumps((f,))
(newf,), m = xmlrpclib.loads(s)
self.assertEqual(newf, {'faultCode': 42, 'faultString': 'Test Fault'})
self.assertEqual(m, None)
s = xmlrpclib.Marshaller().dumps(f)
self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, s)
def test_dotted_attribute(self):
        # This raises AttributeError because the dispatcher refuses to
        # resolve private methods.
self.assertRaises(AttributeError,
xmlrpc.server.resolve_dotted_attribute, str, '__add')
self.assertTrue(xmlrpc.server.resolve_dotted_attribute(str, 'title'))
class DateTimeTestCase(unittest.TestCase):
def test_default(self):
with mock.patch('time.localtime') as localtime_mock:
time_struct = time.struct_time(
[2013, 7, 15, 0, 24, 49, 0, 196, 0])
localtime_mock.return_value = time_struct
localtime = time.localtime()
t = xmlrpclib.DateTime()
self.assertEqual(str(t),
time.strftime("%Y%m%dT%H:%M:%S", localtime))
def test_time(self):
d = 1181399930.036952
t = xmlrpclib.DateTime(d)
self.assertEqual(str(t),
time.strftime("%Y%m%dT%H:%M:%S", time.localtime(d)))
def test_time_tuple(self):
d = (2007,6,9,10,38,50,5,160,0)
t = xmlrpclib.DateTime(d)
self.assertEqual(str(t), '20070609T10:38:50')
def test_time_struct(self):
d = time.localtime(1181399930.036952)
t = xmlrpclib.DateTime(d)
self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", d))
def test_datetime_datetime(self):
d = datetime.datetime(2007,1,2,3,4,5)
t = xmlrpclib.DateTime(d)
self.assertEqual(str(t), '20070102T03:04:05')
def test_repr(self):
d = datetime.datetime(2007,1,2,3,4,5)
t = xmlrpclib.DateTime(d)
val ="<DateTime '20070102T03:04:05' at %x>" % id(t)
self.assertEqual(repr(t), val)
def test_decode(self):
d = ' 20070908T07:11:13 '
t1 = xmlrpclib.DateTime()
t1.decode(d)
tref = xmlrpclib.DateTime(datetime.datetime(2007,9,8,7,11,13))
self.assertEqual(t1, tref)
t2 = xmlrpclib._datetime(d)
self.assertEqual(t2, tref)
def test_comparison(self):
now = datetime.datetime.now()
dtime = xmlrpclib.DateTime(now.timetuple())
# datetime vs. DateTime
self.assertTrue(dtime == now)
self.assertTrue(now == dtime)
then = now + datetime.timedelta(seconds=4)
self.assertTrue(then >= dtime)
self.assertTrue(dtime < then)
# str vs. DateTime
dstr = now.strftime("%Y%m%dT%H:%M:%S")
self.assertTrue(dtime == dstr)
self.assertTrue(dstr == dtime)
dtime_then = xmlrpclib.DateTime(then.timetuple())
self.assertTrue(dtime_then >= dstr)
self.assertTrue(dstr < dtime_then)
# some other types
dbytes = dstr.encode('ascii')
dtuple = now.timetuple()
with self.assertRaises(TypeError):
dtime == 1970
with self.assertRaises(TypeError):
dtime != dbytes
with self.assertRaises(TypeError):
dtime == bytearray(dbytes)
with self.assertRaises(TypeError):
dtime != dtuple
with self.assertRaises(TypeError):
dtime < float(1970)
with self.assertRaises(TypeError):
dtime > dbytes
with self.assertRaises(TypeError):
dtime <= bytearray(dbytes)
with self.assertRaises(TypeError):
dtime >= dtuple
class BinaryTestCase(unittest.TestCase):
    # XXX What should str(Binary(b"\xff")) return? I'm choosing "\xff"
# for now (i.e. interpreting the binary data as Latin-1-encoded
# text). But this feels very unsatisfactory. Perhaps we should
# only define repr(), and return r"Binary(b'\xff')" instead?
def test_default(self):
t = xmlrpclib.Binary()
self.assertEqual(str(t), '')
def test_string(self):
d = b'\x01\x02\x03abc123\xff\xfe'
t = xmlrpclib.Binary(d)
self.assertEqual(str(t), str(d, "latin-1"))
def test_decode(self):
d = b'\x01\x02\x03abc123\xff\xfe'
de = base64.encodebytes(d)
t1 = xmlrpclib.Binary()
t1.decode(de)
self.assertEqual(str(t1), str(d, "latin-1"))
t2 = xmlrpclib._binary(de)
self.assertEqual(str(t2), str(d, "latin-1"))
ADDR = PORT = URL = None
# The evt is set twice. First when the server is ready to serve.
# Second when the server has been shutdown. The user must clear
# the event after it has been set the first time to catch the second set.
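# Sketch of the handshake implied above (mirrors BaseServerTestCase.setUp):
#
#     evt = threading.Event()
#     threading.Thread(target=http_server, args=(evt, 1)).start()
#     evt.wait(); evt.clear()   # first set: the server is ready
#     # ...issue requests...
#     evt.wait()                # second set: the server has shut down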
def http_server(evt, numrequests, requestHandler=None):
class TestInstanceClass:
def div(self, x, y):
return x // y
def _methodHelp(self, name):
if name == 'div':
return 'This is the div function'
def my_function():
'''This is my function'''
return True
class MyXMLRPCServer(xmlrpc.server.SimpleXMLRPCServer):
def get_request(self):
            # Ensure the accepted socket is always blocking. On Linux, socket
            # attributes are not inherited like they are on *BSD and Windows.
s, port = self.socket.accept()
s.setblocking(True)
return s, port
if not requestHandler:
requestHandler = xmlrpc.server.SimpleXMLRPCRequestHandler
serv = MyXMLRPCServer(("localhost", 0), requestHandler,
logRequests=False, bind_and_activate=False)
try:
serv.server_bind()
global ADDR, PORT, URL
ADDR, PORT = serv.socket.getsockname()
#connect to IP address directly. This avoids socket.create_connection()
#trying to connect to "localhost" using all address families, which
        #causes slowdown e.g. on Vista, which supports AF_INET6. The server listens
#on AF_INET only.
URL = "http://%s:%d"%(ADDR, PORT)
serv.server_activate()
serv.register_introspection_functions()
serv.register_multicall_functions()
serv.register_function(pow)
serv.register_function(lambda x,y: x+y, 'add')
serv.register_function(my_function)
serv.register_instance(TestInstanceClass())
evt.set()
# handle up to 'numrequests' requests
while numrequests > 0:
serv.handle_request()
numrequests -= 1
except socket.timeout:
pass
finally:
serv.socket.close()
PORT = None
evt.set()
def http_multi_server(evt, numrequests, requestHandler=None):
class TestInstanceClass:
def div(self, x, y):
return x // y
def _methodHelp(self, name):
if name == 'div':
return 'This is the div function'
def my_function():
'''This is my function'''
return True
class MyXMLRPCServer(xmlrpc.server.MultiPathXMLRPCServer):
def get_request(self):
            # Ensure the accepted socket is always blocking. On Linux, socket
            # attributes are not inherited like they are on *BSD and Windows.
s, port = self.socket.accept()
s.setblocking(True)
return s, port
if not requestHandler:
requestHandler = xmlrpc.server.SimpleXMLRPCRequestHandler
class MyRequestHandler(requestHandler):
rpc_paths = []
class BrokenDispatcher:
def _marshaled_dispatch(self, data, dispatch_method=None, path=None):
raise RuntimeError("broken dispatcher")
serv = MyXMLRPCServer(("localhost", 0), MyRequestHandler,
logRequests=False, bind_and_activate=False)
serv.socket.settimeout(3)
serv.server_bind()
try:
global ADDR, PORT, URL
ADDR, PORT = serv.socket.getsockname()
#connect to IP address directly. This avoids socket.create_connection()
#trying to connect to "localhost" using all address families, which
        #causes slowdown e.g. on Vista, which supports AF_INET6. The server listens
#on AF_INET only.
URL = "http://%s:%d"%(ADDR, PORT)
serv.server_activate()
paths = ["/foo", "/foo/bar"]
for path in paths:
d = serv.add_dispatcher(path, xmlrpc.server.SimpleXMLRPCDispatcher())
d.register_introspection_functions()
d.register_multicall_functions()
serv.get_dispatcher(paths[0]).register_function(pow)
serv.get_dispatcher(paths[1]).register_function(lambda x,y: x+y, 'add')
serv.add_dispatcher("/is/broken", BrokenDispatcher())
evt.set()
# handle up to 'numrequests' requests
while numrequests > 0:
serv.handle_request()
numrequests -= 1
except socket.timeout:
pass
finally:
serv.socket.close()
PORT = None
evt.set()
# This function prevents errors like:
# <ProtocolError for localhost:57527/RPC2: 500 Internal Server Error>
def is_unavailable_exception(e):
'''Returns True if the given ProtocolError is the product of a server-side
exception caused by the 'temporarily unavailable' response sometimes
given by operations on non-blocking sockets.'''
# sometimes we get a -1 error code and/or empty headers
try:
if e.errcode == -1 or e.headers is None:
return True
exc_mess = e.headers.get('X-exception')
except AttributeError:
# Ignore socket.errors here.
exc_mess = str(e)
if exc_mess and 'temporarily unavailable' in exc_mess.lower():
return True
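# Illustrative check (not part of the suite): a ProtocolError carrying the
# sentinel -1 errcode counts as "temporarily unavailable", e.g.
# is_unavailable_exception(
#     xmlrpclib.ProtocolError('http://x/RPC2', -1, 'err', {})) is True.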
def make_request_and_skipIf(condition, reason):
    # If we skip the test, we still have to make a request because the
    # server created in setUp blocks waiting for one to come in.
if not condition:
return lambda func: func
def decorator(func):
def make_request_and_skip(self):
try:
xmlrpclib.ServerProxy(URL).my_function()
except (xmlrpclib.ProtocolError, socket.error) as e:
if not is_unavailable_exception(e):
raise
raise unittest.SkipTest(reason)
return make_request_and_skip
return decorator
@unittest.skipUnless(threading, 'Threading required for this test.')
class BaseServerTestCase(unittest.TestCase):
requestHandler = None
request_count = 1
threadFunc = staticmethod(http_server)
def setUp(self):
# enable traceback reporting
xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = True
self.evt = threading.Event()
# start server thread to handle requests
serv_args = (self.evt, self.request_count, self.requestHandler)
threading.Thread(target=self.threadFunc, args=serv_args).start()
# wait for the server to be ready
self.evt.wait()
self.evt.clear()
def tearDown(self):
# wait on the server thread to terminate
self.evt.wait()
# disable traceback reporting
xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = False
class SimpleServerTestCase(BaseServerTestCase):
def test_simple1(self):
try:
p = xmlrpclib.ServerProxy(URL)
self.assertEqual(p.pow(6,8), 6**8)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_nonascii(self):
start_string = 'P\N{LATIN SMALL LETTER Y WITH CIRCUMFLEX}t'
end_string = 'h\N{LATIN SMALL LETTER O WITH HORN}n'
try:
p = xmlrpclib.ServerProxy(URL)
self.assertEqual(p.add(start_string, end_string),
start_string + end_string)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
# [ch] The test 404 is causing lots of false alarms.
def XXXtest_404(self):
# send POST with http.client, it should return 404 header and
# 'Not Found' message.
        conn = http.client.HTTPConnection(ADDR, PORT)
conn.request('POST', '/this-is-not-valid')
response = conn.getresponse()
conn.close()
self.assertEqual(response.status, 404)
self.assertEqual(response.reason, 'Not Found')
def test_introspection1(self):
expected_methods = set(['pow', 'div', 'my_function', 'add',
'system.listMethods', 'system.methodHelp',
'system.methodSignature', 'system.multicall'])
try:
p = xmlrpclib.ServerProxy(URL)
meth = p.system.listMethods()
self.assertEqual(set(meth), expected_methods)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_introspection2(self):
try:
# test _methodHelp()
p = xmlrpclib.ServerProxy(URL)
divhelp = p.system.methodHelp('div')
self.assertEqual(divhelp, 'This is the div function')
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
@make_request_and_skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_introspection3(self):
try:
# test native doc
p = xmlrpclib.ServerProxy(URL)
myfunction = p.system.methodHelp('my_function')
self.assertEqual(myfunction, 'This is my function')
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_introspection4(self):
# the SimpleXMLRPCServer doesn't support signatures, but
# at least check that we can try making the call
try:
p = xmlrpclib.ServerProxy(URL)
divsig = p.system.methodSignature('div')
self.assertEqual(divsig, 'signatures not supported')
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_multicall(self):
try:
p = xmlrpclib.ServerProxy(URL)
multicall = xmlrpclib.MultiCall(p)
multicall.add(2,3)
multicall.pow(6,8)
multicall.div(127,42)
add_result, pow_result, div_result = multicall()
self.assertEqual(add_result, 2+3)
self.assertEqual(pow_result, 6**8)
self.assertEqual(div_result, 127//42)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_non_existing_multicall(self):
try:
p = xmlrpclib.ServerProxy(URL)
multicall = xmlrpclib.MultiCall(p)
multicall.this_is_not_exists()
result = multicall()
# result.results contains;
# [{'faultCode': 1, 'faultString': '<class \'exceptions.Exception\'>:'
# 'method "this_is_not_exists" is not supported'>}]
self.assertEqual(result.results[0]['faultCode'], 1)
self.assertEqual(result.results[0]['faultString'],
'<class \'Exception\'>:method "this_is_not_exists" '
'is not supported')
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_dotted_attribute(self):
# Raises an AttributeError because private methods are not allowed.
self.assertRaises(AttributeError,
xmlrpc.server.resolve_dotted_attribute, str, '__add')
self.assertTrue(xmlrpc.server.resolve_dotted_attribute(str, 'title'))
# Get the test to run faster by sending a request with test_simple1.
# This avoids waiting for the socket timeout.
self.test_simple1()
def test_unicode_host(self):
server = xmlrpclib.ServerProxy("http://%s:%d/RPC2" % (ADDR, PORT))
self.assertEqual(server.add("a", "\xe9"), "a\xe9")
def test_partial_post(self):
# Check that a partial POST doesn't make the server loop: issue #14001.
conn = http.client.HTTPConnection(ADDR, PORT)
conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye')
conn.close()
class MultiPathServerTestCase(BaseServerTestCase):
threadFunc = staticmethod(http_multi_server)
request_count = 2
def test_path1(self):
p = xmlrpclib.ServerProxy(URL+"/foo")
self.assertEqual(p.pow(6,8), 6**8)
self.assertRaises(xmlrpclib.Fault, p.add, 6, 8)
def test_path2(self):
p = xmlrpclib.ServerProxy(URL+"/foo/bar")
self.assertEqual(p.add(6,8), 6+8)
self.assertRaises(xmlrpclib.Fault, p.pow, 6, 8)
def test_path3(self):
p = xmlrpclib.ServerProxy(URL+"/is/broken")
self.assertRaises(xmlrpclib.Fault, p.add, 6, 8)
#A test case that verifies that a server using the HTTP/1.1 keep-alive mechanism
#does indeed serve subsequent requests on the same connection
class BaseKeepaliveServerTestCase(BaseServerTestCase):
#a request handler that supports keep-alive and logs requests into a
#class variable
class RequestHandler(xmlrpc.server.SimpleXMLRPCRequestHandler):
parentClass = xmlrpc.server.SimpleXMLRPCRequestHandler
protocol_version = 'HTTP/1.1'
myRequests = []
def handle(self):
self.myRequests.append([])
self.reqidx = len(self.myRequests)-1
return self.parentClass.handle(self)
def handle_one_request(self):
result = self.parentClass.handle_one_request(self)
self.myRequests[self.reqidx].append(self.raw_requestline)
return result
requestHandler = RequestHandler
def setUp(self):
#clear request log
self.RequestHandler.myRequests = []
return BaseServerTestCase.setUp(self)
#A test case that verifies that a server using the HTTP/1.1 keep-alive mechanism
#does indeed serve subsequent requests on the same connection
class KeepaliveServerTestCase1(BaseKeepaliveServerTestCase):
def test_two(self):
p = xmlrpclib.ServerProxy(URL)
#do three requests.
self.assertEqual(p.pow(6,8), 6**8)
self.assertEqual(p.pow(6,8), 6**8)
self.assertEqual(p.pow(6,8), 6**8)
p("close")()
#they should have all been handled by a single request handler
self.assertEqual(len(self.RequestHandler.myRequests), 1)
        #check that we did at least two (the third may not have been appended
        #yet due to thread scheduling)
self.assertGreaterEqual(len(self.RequestHandler.myRequests[-1]), 2)
#test special attribute access on the serverproxy, through the __call__
#function.
class KeepaliveServerTestCase2(BaseKeepaliveServerTestCase):
#ask for two keepalive requests to be handled.
request_count=2
def test_close(self):
p = xmlrpclib.ServerProxy(URL)
#do some requests with close.
self.assertEqual(p.pow(6,8), 6**8)
self.assertEqual(p.pow(6,8), 6**8)
self.assertEqual(p.pow(6,8), 6**8)
p("close")() #this should trigger a new keep-alive request
self.assertEqual(p.pow(6,8), 6**8)
self.assertEqual(p.pow(6,8), 6**8)
self.assertEqual(p.pow(6,8), 6**8)
p("close")()
        #all requests should have been handled by two request handlers, each
        #having logged at least two complete requests
self.assertEqual(len(self.RequestHandler.myRequests), 2)
self.assertGreaterEqual(len(self.RequestHandler.myRequests[-1]), 2)
self.assertGreaterEqual(len(self.RequestHandler.myRequests[-2]), 2)
def test_transport(self):
p = xmlrpclib.ServerProxy(URL)
#do some requests with close.
self.assertEqual(p.pow(6,8), 6**8)
p("transport").close() #same as above, really.
self.assertEqual(p.pow(6,8), 6**8)
p("close")()
self.assertEqual(len(self.RequestHandler.myRequests), 2)
#A test case that verifies that gzip encoding works in both directions
#(for a request and the response)
class GzipServerTestCase(BaseServerTestCase):
#a request handler that supports keep-alive and logs requests into a
#class variable
class RequestHandler(xmlrpc.server.SimpleXMLRPCRequestHandler):
parentClass = xmlrpc.server.SimpleXMLRPCRequestHandler
protocol_version = 'HTTP/1.1'
def do_POST(self):
#store content of last request in class
self.__class__.content_length = int(self.headers["content-length"])
return self.parentClass.do_POST(self)
requestHandler = RequestHandler
class Transport(xmlrpclib.Transport):
#custom transport, stores the response length for our perusal
fake_gzip = False
def parse_response(self, response):
self.response_length=int(response.getheader("content-length", 0))
return xmlrpclib.Transport.parse_response(self, response)
def send_content(self, connection, body):
if self.fake_gzip:
#add a lone gzip header to induce decode error remotely
connection.putheader("Content-Encoding", "gzip")
return xmlrpclib.Transport.send_content(self, connection, body)
def setUp(self):
BaseServerTestCase.setUp(self)
def test_gzip_request(self):
t = self.Transport()
t.encode_threshold = None
p = xmlrpclib.ServerProxy(URL, transport=t)
self.assertEqual(p.pow(6,8), 6**8)
a = self.RequestHandler.content_length
t.encode_threshold = 0 #turn on request encoding
self.assertEqual(p.pow(6,8), 6**8)
b = self.RequestHandler.content_length
self.assertTrue(a>b)
p("close")()
def test_bad_gzip_request(self):
t = self.Transport()
t.encode_threshold = None
t.fake_gzip = True
p = xmlrpclib.ServerProxy(URL, transport=t)
cm = self.assertRaisesRegex(xmlrpclib.ProtocolError,
re.compile(r"\b400\b"))
with cm:
p.pow(6, 8)
p("close")()
    def test_gzip_response(self):
t = self.Transport()
p = xmlrpclib.ServerProxy(URL, transport=t)
old = self.requestHandler.encode_threshold
self.requestHandler.encode_threshold = None #no encoding
self.assertEqual(p.pow(6,8), 6**8)
a = t.response_length
self.requestHandler.encode_threshold = 0 #always encode
self.assertEqual(p.pow(6,8), 6**8)
p("close")()
b = t.response_length
self.requestHandler.encode_threshold = old
self.assertTrue(a>b)
#Test special attributes of the ServerProxy object
class ServerProxyTestCase(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
if threading:
self.url = URL
else:
# Without threading, http_server() and http_multi_server() will not
            # be executed and URL is still equal to None. 'http://' is just
            # enough to select the scheme (HTTP).
self.url = 'http://'
def test_close(self):
p = xmlrpclib.ServerProxy(self.url)
self.assertEqual(p('close')(), None)
def test_transport(self):
t = xmlrpclib.Transport()
p = xmlrpclib.ServerProxy(self.url, transport=t)
self.assertEqual(p('transport'), t)
# This is a contrived way to make a failure occur on the server side
# in order to test the _send_traceback_header flag on the server
class FailingMessageClass(http.client.HTTPMessage):
def get(self, key, failobj=None):
key = key.lower()
if key == 'content-length':
return 'I am broken'
return super().get(key, failobj)
@unittest.skipUnless(threading, 'Threading required for this test.')
class FailingServerTestCase(unittest.TestCase):
def setUp(self):
self.evt = threading.Event()
# start server thread to handle requests
serv_args = (self.evt, 1)
threading.Thread(target=http_server, args=serv_args).start()
# wait for the server to be ready
self.evt.wait()
self.evt.clear()
def tearDown(self):
# wait on the server thread to terminate
self.evt.wait()
# reset flag
xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = False
# reset message class
default_class = http.client.HTTPMessage
xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = default_class
def test_basic(self):
# check that flag is false by default
flagval = xmlrpc.server.SimpleXMLRPCServer._send_traceback_header
self.assertEqual(flagval, False)
# enable traceback reporting
xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = True
# test a call that shouldn't fail just as a smoke test
try:
p = xmlrpclib.ServerProxy(URL)
self.assertEqual(p.pow(6,8), 6**8)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e):
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
def test_fail_no_info(self):
# use the broken message class
xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = FailingMessageClass
try:
p = xmlrpclib.ServerProxy(URL)
p.pow(6,8)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e) and hasattr(e, "headers"):
# The two server-side error headers shouldn't be sent back in this case
self.assertTrue(e.headers.get("X-exception") is None)
self.assertTrue(e.headers.get("X-traceback") is None)
else:
self.fail('ProtocolError not raised')
def test_fail_with_info(self):
# use the broken message class
xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = FailingMessageClass
# Check that errors in the server send back exception/traceback
# info when flag is set
xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = True
try:
p = xmlrpclib.ServerProxy(URL)
p.pow(6,8)
except (xmlrpclib.ProtocolError, socket.error) as e:
# ignore failures due to non-blocking socket 'unavailable' errors
if not is_unavailable_exception(e) and hasattr(e, "headers"):
# We should get error info in the response
expected_err = "invalid literal for int() with base 10: 'I am broken'"
self.assertEqual(e.headers.get("X-exception"), expected_err)
self.assertTrue(e.headers.get("X-traceback") is not None)
else:
self.fail('ProtocolError not raised')
@contextlib.contextmanager
def captured_stdout(encoding='utf-8'):
"""A variation on support.captured_stdout() which gives a text stream
having a `buffer` attribute.
"""
import io
orig_stdout = sys.stdout
sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding=encoding)
try:
yield sys.stdout
finally:
sys.stdout = orig_stdout
class CGIHandlerTestCase(unittest.TestCase):
def setUp(self):
self.cgi = xmlrpc.server.CGIXMLRPCRequestHandler()
def tearDown(self):
self.cgi = None
def test_cgi_get(self):
with support.EnvironmentVarGuard() as env:
env['REQUEST_METHOD'] = 'GET'
            # if the method is GET and no request_text is given, it runs handle_get
            # capture stdout output
with captured_stdout(encoding=self.cgi.encoding) as data_out:
self.cgi.handle_request()
# parse Status header
data_out.seek(0)
handle = data_out.read()
status = handle.split()[1]
message = ' '.join(handle.split()[2:4])
self.assertEqual(status, '400')
self.assertEqual(message, 'Bad Request')
def test_cgi_xmlrpc_response(self):
data = """<?xml version='1.0'?>
<methodCall>
<methodName>test_method</methodName>
<params>
<param>
<value><string>foo</string></value>
</param>
<param>
<value><string>bar</string></value>
</param>
</params>
</methodCall>
"""
with support.EnvironmentVarGuard() as env, \
captured_stdout(encoding=self.cgi.encoding) as data_out, \
support.captured_stdin() as data_in:
data_in.write(data)
data_in.seek(0)
env['CONTENT_LENGTH'] = str(len(data))
self.cgi.handle_request()
data_out.seek(0)
            # The server responds with a Fault; if it does, our goal is achieved.
            handle = data_out.read()
            # Start at the 44th char to skip the HTTP header; we only need
            # the XML payload.
self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, handle[44:])
            # Also test the content-length returned by handle_request.
            # The same test method is reused in order to avoid all the
            # data-passing boilerplate.
# Test for bug: http://bugs.python.org/issue5040
content = handle[handle.find("<?xml"):]
self.assertEqual(
                int(re.search(r'Content-Length: (\d+)', handle).group(1)),
len(content))
class UseBuiltinTypesTestCase(unittest.TestCase):
def test_use_builtin_types(self):
# SimpleXMLRPCDispatcher.__init__ accepts use_builtin_types, which
# makes all dispatch of binary data as bytes instances, and all
# dispatch of datetime argument as datetime.datetime instances.
self.log = []
expected_bytes = b"my dog has fleas"
expected_date = datetime.datetime(2008, 5, 26, 18, 25, 12)
marshaled = xmlrpclib.dumps((expected_bytes, expected_date), 'foobar')
def foobar(*args):
self.log.extend(args)
handler = xmlrpc.server.SimpleXMLRPCDispatcher(
allow_none=True, encoding=None, use_builtin_types=True)
handler.register_function(foobar)
handler._marshaled_dispatch(marshaled)
self.assertEqual(len(self.log), 2)
mybytes, mydate = self.log
self.assertEqual(self.log, [expected_bytes, expected_date])
self.assertIs(type(mydate), datetime.datetime)
self.assertIs(type(mybytes), bytes)
def test_cgihandler_has_use_builtin_types_flag(self):
handler = xmlrpc.server.CGIXMLRPCRequestHandler(use_builtin_types=True)
self.assertTrue(handler.use_builtin_types)
def test_xmlrpcserver_has_use_builtin_types_flag(self):
server = xmlrpc.server.SimpleXMLRPCServer(("localhost", 0),
use_builtin_types=True)
server.server_close()
self.assertTrue(server.use_builtin_types)
@support.reap_threads
def test_main():
xmlrpc_tests = [XMLRPCTestCase, HelperTestCase, DateTimeTestCase,
BinaryTestCase, FaultTestCase]
xmlrpc_tests.append(UseBuiltinTypesTestCase)
xmlrpc_tests.append(SimpleServerTestCase)
xmlrpc_tests.append(KeepaliveServerTestCase1)
xmlrpc_tests.append(KeepaliveServerTestCase2)
try:
import gzip
xmlrpc_tests.append(GzipServerTestCase)
except ImportError:
pass #gzip not supported in this build
xmlrpc_tests.append(MultiPathServerTestCase)
xmlrpc_tests.append(ServerProxyTestCase)
xmlrpc_tests.append(FailingServerTestCase)
xmlrpc_tests.append(CGIHandlerTestCase)
support.run_unittest(*xmlrpc_tests)
if __name__ == "__main__":
test_main()
|
SCOAP3/invenio
|
refs/heads/master
|
invenio/modules/formatter/format_elements/bfe_title_brief.py
|
13
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints short title
"""
__revision__ = "$Id$"
def format_element(bfo, highlight="no", multilang='no'):
"""
Prints a short title, suitable for brief format.
@param highlight: highlights the words corresponding to search query if set to 'yes'
"""
if multilang == 'yes':
if bfo.lang == 'fr':
title = bfo.field('246_1a')
else:
title = bfo.field('245__a')
else:
title = bfo.field('245__a')
title_remainder = bfo.field('245__b')
title_tome = bfo.field('245__n')
title_part = bfo.field('245__p')
edition_statement = bfo.field('250__a')
out = title
if len(title_remainder) > 0:
out += ": " + title_remainder
if len(edition_statement) > 0:
out += "; " + edition_statement
if len(title_tome) > 0:
out += ", " + title_tome
if len(title_part) > 0:
out += ": " + title_part
#Try to display 'Conference' title if other titles were not found
if out == '':
out += bfo.field('111__a')
if highlight == 'yes':
from invenio.modules.formatter import utils as bibformat_utils
out = bibformat_utils.highlight(out, bfo.search_pattern,
prefix_tag="<span style='font-weight: bolder'>",
                                        suffix_tag='</span>')
return out
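# Illustrative output (field values assumed, not from the original module):
# for a record with 245__a "Python", 245__b "an introduction" and
# 250__a "2nd ed.", the element renders "Python: an introduction; 2nd ed.".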
|
sorenk/ansible
|
refs/heads/devel
|
lib/ansible/utils/module_docs_fragments/vyos.py
|
58
|
#
# (c) 2015, Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
provider:
description:
- B(Deprecated)
- "Starting with Ansible 2.5 we recommend using C(connection: network_cli)."
- For more information please see the L(Network Guide, ../network/getting_started/network_differences.html#multiple-communication-protocols).
- HORIZONTALLINE
- A dict object containing connection details.
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device.
default: 22
username:
description:
- Configures the username to use to authenticate the connection to
the remote device. This value is used to authenticate
the SSH session. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
password:
description:
- Specifies the password to use to authenticate the connection to
the remote device. This value is used to authenticate
the SSH session. If the value is not specified in the task, the
value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
timeout:
description:
- Specifies the timeout in seconds for communicating with the network device
for either connecting or sending commands. If the timeout is
exceeded before the operation is completed, the module will error.
default: 10
ssh_keyfile:
description:
- Specifies the SSH key to use to authenticate the connection to
the remote device. This value is the path to the
key used to authenticate the SSH session. If the value is not specified
in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
will be used instead.
notes:
- For more information on using Ansible to manage network devices see the :ref:`Ansible Network Guide <network_guide>`
"""
|
bankonme/python-keepkey
|
refs/heads/master
|
tests/test_msg_simplesigntx.py
|
3
|
# tx 4a7b7e0403ae5607e473949cfa03f09f2cd8b0f404bf99ce10b7303d86280bf7
# 100 UTXOs for spending in unittests
import unittest
import common
import binascii
import keepkeylib.messages_pb2 as proto
import keepkeylib.types_pb2 as proto_types
from keepkeylib.client import CallException
from keepkeylib.tx_api import TXAPITestnet
class TestMsgSimplesigntx(common.KeepKeyTest):
def test_one_one_fee(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882
# input 0: 0.0039 BTC
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=390000,
prev_hash=binascii.unhexlify('d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='1MJ2tj2ThBE62zXbBYA5ZaN3fdve5CPAz1',
amount=390000 - 10000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, ], [out1, ])
# Accepted by network: tx fd79435246dee76b2f159d2db08032d666c95adc544de64c8c49f474df4a7fee
self.assertEqual(binascii.hexlify(serialized_tx), '010000000182488650ef25a58fef6788bd71b8212038d7f2bbe4750bc7bcb44701e85ef6d5000000006b4830450221009a0b7be0d4ed3146ee262b42202841834698bb3ee39c24e7437df208b8b7077102202b79ab1e7736219387dffe8d615bbdba87e11477104b867ef47afed1a5ede7810121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff0160cc0500000000001976a914de9b2a8da088824e8fe51debea566617d851537888ac00000000')
def test_testnet_one_two_fee(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: 6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54
# input 1: 10.00000000 BTC
inp1 = proto_types.TxInputType(address_n=[0], # mirio8q3gtv7fhdnmb3TpZ4EuafdzSs7zL
# amount=1000000000,
prev_hash=binascii.unhexlify('6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54'),
prev_index=1,
)
out1 = proto_types.TxOutputType(address='mfiGQVPcRcaEvQPYDErR34DcCovtxYvUUV',
amount=1000000000 - 500000000 - 10000000,
script_type=proto_types.PAYTOADDRESS,
)
out2 = proto_types.TxOutputType(address_n=[2],
amount=500000000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_tx_api(TXAPITestnet())
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
# proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput), # don't confirm change
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Testnet', [inp1, ], [out1, out2])
self.assertEqual(binascii.hexlify(serialized_tx), '0100000001549d2977998f899a63c0a9da30dedb2841e33fef561097b05822eccbc7f3906f010000006b4830450221009c2d30385519fdb13dce13d5ac038be07d7b2dad0b0f7b2c1c339d7255bcf553022056a2f5bceab3cd0ffed4d388387e631f419d67ff9ce7798e3d7dfe6a6d6ec4bd0121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff0280ce341d000000001976a9140223b1a09138753c9cb0baf95a0a62c82711567a88ac0065cd1d000000001976a9142db345c36563122e2fd0f5485fb7ea9bbf7cb5a288ac00000000')
def test_testnet_fee_too_high(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: 6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54
# input 1: 10.00000000 BTC
inp1 = proto_types.TxInputType(address_n=[0], # mirio8q3gtv7fhdnmb3TpZ4EuafdzSs7zL
# amount=1000000000,
prev_hash=binascii.unhexlify('6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54'),
prev_index=1,
)
out1 = proto_types.TxOutputType(address='mfiGQVPcRcaEvQPYDErR34DcCovtxYvUUV',
amount=1000000000 - 500000000 - 100000000,
script_type=proto_types.PAYTOADDRESS,
)
out2 = proto_types.TxOutputType(address_n=[2],
amount=500000000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_tx_api(TXAPITestnet())
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
# proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput), # don't confirm change
proto.ButtonRequest(code=proto_types.ButtonRequest_FeeOverThreshold),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Testnet', [inp1, ], [out1, out2])
self.assertEqual(binascii.hexlify(serialized_tx), '0100000001549d2977998f899a63c0a9da30dedb2841e33fef561097b05822eccbc7f3906f010000006a47304402205ea68e9d52d4be14420ccecf7f2e11489d49b86bedb79ee99b5e9b7188884150022056219cb3384a5df8048cca286a9533403dbda1571afd84b51379cdaee6a6dea80121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff020084d717000000001976a9140223b1a09138753c9cb0baf95a0a62c82711567a88ac0065cd1d000000001976a9142db345c36563122e2fd0f5485fb7ea9bbf7cb5a288ac00000000')
def test_one_two_fee(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882
# input 0: 0.0039 BTC
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=390000,
prev_hash=binascii.unhexlify('d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='1MJ2tj2ThBE62zXbBYA5ZaN3fdve5CPAz1',
amount=390000 - 80000 - 10000,
script_type=proto_types.PAYTOADDRESS,
)
out2 = proto_types.TxOutputType(address_n=[1],
amount=80000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
# proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput), # don't confirm change
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, ], [out1, out2])
self.assertEqual(binascii.hexlify(serialized_tx), '010000000182488650ef25a58fef6788bd71b8212038d7f2bbe4750bc7bcb44701e85ef6d5000000006b483045022100c1400d8485d3bdcae7413e123148f35ece84806fc387ab88c66b469df89aef1702201d481d04216b319dc549ffe2333143629ba18828a4e2d1783ab719a6aa263eb70121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff02e0930400000000001976a914de9b2a8da088824e8fe51debea566617d851537888ac80380100000000001976a9140223b1a09138753c9cb0baf95a0a62c82711567a88ac00000000')
def test_one_three_fee(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882
# input 0: 0.0039 BTC
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=390000,
prev_hash=binascii.unhexlify('d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='1MJ2tj2ThBE62zXbBYA5ZaN3fdve5CPAz1',
amount=390000 - 80000 - 12000 - 10000,
script_type=proto_types.PAYTOADDRESS,
)
out2 = proto_types.TxOutputType(address='13uaUYn6XAooo88QvAqAVsiVvr2mAXutqP',
amount=12000,
script_type=proto_types.PAYTOADDRESS,
)
out3 = proto_types.TxOutputType(address_n=[1],
amount=80000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
# proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput), # don't confirm change
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, ], [out1, out2, out3])
self.assertEqual(binascii.hexlify(serialized_tx), '010000000182488650ef25a58fef6788bd71b8212038d7f2bbe4750bc7bcb44701e85ef6d5000000006b483045022100e695e2c530c7c0fc32e6b79b7cff56a7f70a8c9da787534f46b4204070f914fc02207b0879a81408a11e23b11d4c7965c62b5fc6d5c2d92340f5ee2da7b40e99314a0121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff0300650400000000001976a914de9b2a8da088824e8fe51debea566617d851537888ace02e0000000000001976a9141fe1d337fb81afca42818051e12fd18245d1b17288ac80380100000000001976a9140223b1a09138753c9cb0baf95a0a62c82711567a88ac00000000')
def test_two_two(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: c6be22d34946593bcad1d2b013e12f74159e69574ffea21581dad115572e031c
# input 1: 0.0010 BTC
# tx: 58497a7757224d1ff1941488d23087071103e5bf855f4c1c44e5c8d9d82ca46e
# input 1: 0.0011 BTC
inp1 = proto_types.TxInputType(address_n=[1], # 1CK7SJdcb8z9HuvVft3D91HLpLC6KSsGb
# amount=100000,
prev_hash=binascii.unhexlify('c6be22d34946593bcad1d2b013e12f74159e69574ffea21581dad115572e031c'),
prev_index=1,
)
inp2 = proto_types.TxInputType(address_n=[2], # 15AeAhtNJNKyowK8qPHwgpXkhsokzLtUpG
# amount=110000,
prev_hash=binascii.unhexlify('58497a7757224d1ff1941488d23087071103e5bf855f4c1c44e5c8d9d82ca46e'),
prev_index=1,
)
out1 = proto_types.TxOutputType(address='15Jvu3nZNP7u2ipw2533Q9VVgEu2Lu9F2B',
amount=210000 - 100000 - 10000,
script_type=proto_types.PAYTOADDRESS,
)
out2 = proto_types.TxOutputType(address_n=[3], # 1CmzyJp9w3NafXMSEFH4SLYUPAVCSUrrJ5
amount=100000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
# proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput), # don't confirm change
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, inp2], [out1, out2])
# Accepted by network: tx c63e24ed820c5851b60c54613fbc4bcb37df6cd49b4c96143e99580a472f79fb
self.assertEqual(binascii.hexlify(serialized_tx), '01000000021c032e5715d1da8115a2fe4f57699e15742fe113b0d2d1ca3b594649d322bec6010000006b483045022100f773c403b2f85a5c1d6c9c4ad69c43de66930fff4b1bc818eb257af98305546a0220443bde4be439f276a6ce793664b463580e210ec6c9255d68354449ac0443c76501210338d78612e990f2eea0c426b5e48a8db70b9d7ed66282b3b26511e0b1c75515a6ffffffff6ea42cd8d9c8e5441c4c5f85bfe50311078730d2881494f11f4d2257777a4958010000006b48304502210090cff1c1911e771605358a8cddd5ae94c7b60cc96e50275908d9bf9d6367c79f02202bfa72e10260a146abd59d0526e1335bacfbb2b4401780e9e3a7441b0480c8da0121038caebd6f753bbbd2bb1f3346a43cd32140648583673a31d62f2dfb56ad0ab9e3ffffffff02a0860100000000001976a9142f4490d5263906e4887ca2996b9e207af3e7824088aca0860100000000001976a914812c13d97f9159e54e326b481b8f88a73df8507a88ac00000000')
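    # Illustrative fee note (added comment, not original code): the two inputs
    # above total 100000 + 110000 = 210000 satoshi, the outputs spend
    # 100000 + 100000 = 200000, and the remaining 10000 satoshi is the fee.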
'''
def test_255_outputs(self):
self.setup_mnemonic_nopin_nopassphrase()
# Tests if device implements serialization of len(outputs) correctly
# tx: c63e24ed820c5851b60c54613fbc4bcb37df6cd49b4c96143e99580a472f79fb
# index 1: 0.0010 BTC
# tx: 39a29e954977662ab3879c66fb251ef753e0912223a83d1dcb009111d28265e5
# index 1: 0.0254 BTC
inp1 = proto_types.TxInputType(address_n=[3], # 1CmzyJp9w3NafXMSEFH4SLYUPAVCSUrrJ5
# amount=100000,
prev_hash=binascii.unhexlify('c63e24ed820c5851b60c54613fbc4bcb37df6cd49b4c96143e99580a472f79fb'),
prev_index=1,
)
inp2 = proto_types.TxInputType(address_n=[3], # 1CmzyJp9w3NafXMSEFH4SLYUPAVCSUrrJ5
# amount=2540000,
prev_hash=binascii.unhexlify('39a29e954977662ab3879c66fb251ef753e0912223a83d1dcb009111d28265e5'),
prev_index=1,
)
outputs = []
for _ in range(255):
out = proto_types.TxOutputType(address='1NwN6UduuVkJi6sw3gSiKZaCY5rHgVXC2h',
amount=10200,
script_type=proto_types.PAYTOADDRESS,
)
outputs.append(out)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput), ] * 255 + \
[proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, inp2], outputs)
self.assertEqual(binascii.hexlify(serialized_tx), '0100000002fb792f470a58993e14964c9bd46cdf37cb4bbc3f61540cb651580c82ed243ec6010000006b483045022100969da46f94a81f34f3717b014e0c3e1826eda1b0022ec2f9ce39f3d750ab9235022026da269770993211a1503413566a339bbb4389a482fffcf8e1f76713fc3b94f5012103477b9f0f34ae85434ce795f0c5e1e90c9420e5b5fad084d7cce9a487b94a7902ffffffffe56582d2119100cb1d3da8232291e053f71e25fb669c87b32a667749959ea239010000006a473044022052e1419bb237b9db400ab5e3df16db6355619d545fde9030924a360763ae9ad40220704beab04d72ecaeb42eca7d98faca7a0941e65f2e1341f183be2b83e6b09e1c012103477b9f0f34ae85434ce795f0c5e1e90c9420e5b5fad084d7cce9a487b94a7902fffffffffdff00' + 'd8270000000000001976a914f0a2b64e56ee2ff57126232f84af6e3a41d4055088ac' * 255 + '00000000')
'''
def test_fee_too_high(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882
# input 0: 0.0039 BTC
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=390000,
prev_hash=binascii.unhexlify('d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='1MJ2tj2ThBE62zXbBYA5ZaN3fdve5CPAz1',
amount=390000 - 250000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.ButtonRequest(code=proto_types.ButtonRequest_FeeOverThreshold),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, ], [out1, ])
self.assertEqual(binascii.hexlify(serialized_tx), '010000000182488650ef25a58fef6788bd71b8212038d7f2bbe4750bc7bcb44701e85ef6d5000000006b483045022100a3b17b37de3bfecca47f0d49f7bb0d0f68d45df7defe45713d57e83731f5e3d902205404b14630cea6a88b23a5f7c3a1b88494757a8ca5e1c0b0b93cf3c38231c3bd0121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff01e0220200000000001976a914de9b2a8da088824e8fe51debea566617d851537888ac00000000')
def test_not_enough_funds(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882
# input 0: 0.0039 BTC
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=390000,
prev_hash=binascii.unhexlify('d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='1MJ2tj2ThBE62zXbBYA5ZaN3fdve5CPAz1',
amount=400000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.Failure(code=proto_types.Failure_NotEnoughFunds)])
try:
self.client.simple_sign_tx('Bitcoin', [inp1, ], [out1, ])
except CallException as e:
self.assertEqual(e.args[0], proto_types.Failure_NotEnoughFunds)
else:
self.assert_(False, "types.Failure_NotEnoughFunds expected")
def test_p2sh(self):
self.setup_mnemonic_nopin_nopassphrase()
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=400000,
prev_hash=binascii.unhexlify('54aa5680dea781f45ebb536e53dffc526d68c0eb5c00547e323b2c32382dfba3'),
prev_index=1,
)
out1 = proto_types.TxOutputType(address='3DKGE1pvPpBAgZj94MbCinwmksewUNNYVR', # p2sh
amount=400000 - 10000,
script_type=proto_types.PAYTOSCRIPTHASH,
)
with self.client:
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Bitcoin', [inp1, ], [out1, ])
# Accepted by network: tx 8cc1f4adf7224ce855cf535a5104594a0004cb3b640d6714fdb00b9128832dd5
self.assertEqual(binascii.hexlify(serialized_tx), '0100000001a3fb2d38322c3b327e54005cebc0686d52fcdf536e53bb5ef481a7de8056aa54010000006b4830450221009e020b0390ccad533b73b552f8a99a9d827212c558e4f755503674d07c92ad4502202d606f7316990e0461c51d4add25054f19c697aa3e3c2ced4d568f0b2c57e62f0121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff0170f305000000000017a9147f844bdb0b8fd54b64e3d16c85dc1170f1ff97c18700000000')
def test_spend_coinbase(self):
# 25 TEST generated to m/1 (mfiGQVPcRcaEvQPYDErR34DcCovtxYvUUV)
# tx: d6da21677d7cca5f42fbc7631d062c9ae918a0254f7c6c22de8e8cb7fd5b8236
# input 0: 25.0027823 BTC
self.setup_mnemonic_nopin_nopassphrase()
inp1 = proto_types.TxInputType(address_n=[1], # mfiGQVPcRcaEvQPYDErR34DcCovtxYvUUV
# amount=390000,
prev_hash=binascii.unhexlify('d6da21677d7cca5f42fbc7631d062c9ae918a0254f7c6c22de8e8cb7fd5b8236'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='mm6FM31rM5Vc3sw5D7kztiBg3jHUzyqF1g',
amount=2500278230 - 10000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_tx_api(TXAPITestnet())
self.client.set_expected_responses([proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXFINISHED)])
serialized_tx = self.client.simple_sign_tx('Testnet', [inp1, ], [out1, ])
# Accepted by network: tx
self.assertEqual(binascii.hexlify(serialized_tx), '010000000136825bfdb78c8ede226c7c4f25a018e99a2c061d63c7fb425fca7c7d6721dad6000000006a473044022047845c366eb24f40be315c7815a154513c444c7989eb80f7ce7ff6aeb703d26a022007c1f5efadf67c5889634fd7ac39a7ce78bffac291673e8772ecd8389c901d9f01210338d78612e990f2eea0c426b5e48a8db70b9d7ed66282b3b26511e0b1c75515a6ffffffff01c6100795000000001976a9143d2496e67f5f57a924353da42d4725b318e7a8ea88ac00000000')
if __name__ == '__main__':
unittest.main()
|
Eficent/odoomrp-wip
|
refs/heads/8.0
|
stock_picking_wave_delivery/__openerp__.py
|
25
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c)
# 2015 Serv. Tec. Avanzados - Pedro M. Baeza (http://www.serviciosbaeza.com)
# 2015 AvanzOsc (http://www.avanzosc.es)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Picking Wave Delivery',
'version': "1.0",
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
'website': "http://www.odoomrp.com",
'category': 'Warehouse Management',
'depends': ["delivery",
"stock_picking_wave_management",
],
'data': ["views/stock_picking_wave_view.xml",
],
'installable': True,
}
|
spallavolu/scikit-learn
|
refs/heads/master
|
sklearn/cluster/affinity_propagation_.py
|
224
|
""" Algorithms for clustering : Meanshift, Affinity propagation and spectral
clustering.
"""
# Author: Alexandre Gramfort alexandre.gramfort@inria.fr
# Gael Varoquaux gael.varoquaux@normalesup.org
# License: BSD 3 clause
import numpy as np
from ..base import BaseEstimator, ClusterMixin
from ..utils import as_float_array, check_array
from ..utils.validation import check_is_fitted
from ..metrics import euclidean_distances
from ..metrics import pairwise_distances_argmin
def affinity_propagation(S, preference=None, convergence_iter=15, max_iter=200,
damping=0.5, copy=True, verbose=False,
return_n_iter=False):
"""Perform Affinity Propagation Clustering of data
Read more in the :ref:`User Guide <affinity_propagation>`.
Parameters
----------
S : array-like, shape (n_samples, n_samples)
Matrix of similarities between points
preference : array-like, shape (n_samples,) or float, optional
Preferences for each point - points with larger values of
preferences are more likely to be chosen as exemplars. The number of
exemplars, i.e. of clusters, is influenced by the input preferences
value. If the preferences are not passed as arguments, they will be
set to the median of the input similarities (resulting in a moderate
number of clusters). For a smaller amount of clusters, this can be set
to the minimum value of the similarities.
convergence_iter : int, optional, default: 15
Number of iterations with no change in the number
of estimated clusters that stops the convergence.
max_iter : int, optional, default: 200
Maximum number of iterations
damping : float, optional, default: 0.5
Damping factor between 0.5 and 1.
copy : boolean, optional, default: True
If copy is False, the affinity matrix is modified inplace by the
algorithm, for memory efficiency
verbose : boolean, optional, default: False
The verbosity level
return_n_iter : bool, default False
Whether or not to return the number of iterations.
Returns
-------
cluster_centers_indices : array, shape (n_clusters,)
index of clusters centers
labels : array, shape (n_samples,)
cluster labels for each point
n_iter : int
number of iterations run. Returned only if `return_n_iter` is
set to True.
Notes
-----
See examples/cluster/plot_affinity_propagation.py for an example.
References
----------
Brendan J. Frey and Delbert Dueck, "Clustering by Passing Messages
Between Data Points", Science Feb. 2007
"""
S = as_float_array(S, copy=copy)
n_samples = S.shape[0]
if S.shape[0] != S.shape[1]:
raise ValueError("S must be a square array (shape=%s)" % repr(S.shape))
if preference is None:
preference = np.median(S)
if damping < 0.5 or damping >= 1:
raise ValueError('damping must be >= 0.5 and < 1')
random_state = np.random.RandomState(0)
# Place preference on the diagonal of S
S.flat[::(n_samples + 1)] = preference
A = np.zeros((n_samples, n_samples))
R = np.zeros((n_samples, n_samples)) # Initialize messages
# Intermediate results
tmp = np.zeros((n_samples, n_samples))
# Remove degeneracies
S += ((np.finfo(np.double).eps * S + np.finfo(np.double).tiny * 100) *
random_state.randn(n_samples, n_samples))
# Execute parallel affinity propagation updates
e = np.zeros((n_samples, convergence_iter))
ind = np.arange(n_samples)
for it in range(max_iter):
# tmp = A + S; compute responsibilities
np.add(A, S, tmp)
I = np.argmax(tmp, axis=1)
Y = tmp[ind, I] # np.max(A + S, axis=1)
tmp[ind, I] = -np.inf
Y2 = np.max(tmp, axis=1)
# tmp = Rnew
np.subtract(S, Y[:, None], tmp)
tmp[ind, I] = S[ind, I] - Y2
# Damping
tmp *= 1 - damping
R *= damping
R += tmp
# tmp = Rp; compute availabilities
np.maximum(R, 0, tmp)
tmp.flat[::n_samples + 1] = R.flat[::n_samples + 1]
# tmp = -Anew
tmp -= np.sum(tmp, axis=0)
dA = np.diag(tmp).copy()
tmp.clip(0, np.inf, tmp)
tmp.flat[::n_samples + 1] = dA
# Damping
tmp *= 1 - damping
A *= damping
A -= tmp
# Check for convergence
E = (np.diag(A) + np.diag(R)) > 0
e[:, it % convergence_iter] = E
K = np.sum(E, axis=0)
if it >= convergence_iter:
se = np.sum(e, axis=1)
unconverged = (np.sum((se == convergence_iter) + (se == 0))
!= n_samples)
if (not unconverged and (K > 0)) or (it == max_iter):
if verbose:
print("Converged after %d iterations." % it)
break
else:
if verbose:
print("Did not converge")
I = np.where(np.diag(A + R) > 0)[0]
K = I.size # Identify exemplars
if K > 0:
c = np.argmax(S[:, I], axis=1)
c[I] = np.arange(K) # Identify clusters
# Refine the final set of exemplars and clusters and return results
for k in range(K):
ii = np.where(c == k)[0]
j = np.argmax(np.sum(S[ii[:, np.newaxis], ii], axis=0))
I[k] = ii[j]
c = np.argmax(S[:, I], axis=1)
c[I] = np.arange(K)
labels = I[c]
# Reduce labels to a sorted, gapless, list
cluster_centers_indices = np.unique(labels)
labels = np.searchsorted(cluster_centers_indices, labels)
else:
labels = np.empty((n_samples, 1))
cluster_centers_indices = None
labels.fill(np.nan)
if return_n_iter:
return cluster_centers_indices, labels, it + 1
else:
return cluster_centers_indices, labels
###############################################################################
class AffinityPropagation(BaseEstimator, ClusterMixin):
"""Perform Affinity Propagation Clustering of data.
Read more in the :ref:`User Guide <affinity_propagation>`.
Parameters
----------
damping : float, optional, default: 0.5
Damping factor between 0.5 and 1.
convergence_iter : int, optional, default: 15
Number of iterations with no change in the number
of estimated clusters that stops the convergence.
max_iter : int, optional, default: 200
Maximum number of iterations.
copy : boolean, optional, default: True
Make a copy of input data.
preference : array-like, shape (n_samples,) or float, optional
Preferences for each point - points with larger values of
preferences are more likely to be chosen as exemplars. The number
of exemplars, ie of clusters, is influenced by the input
preferences value. If the preferences are not passed as arguments,
they will be set to the median of the input similarities.
affinity : string, optional, default=``euclidean``
Which affinity to use. At the moment ``precomputed`` and
``euclidean`` are supported. ``euclidean`` uses the
negative squared euclidean distance between points.
verbose : boolean, optional, default: False
Whether to be verbose.
Attributes
----------
cluster_centers_indices_ : array, shape (n_clusters,)
Indices of cluster centers
cluster_centers_ : array, shape (n_clusters, n_features)
Cluster centers (if affinity != ``precomputed``).
labels_ : array, shape (n_samples,)
Labels of each point
affinity_matrix_ : array, shape (n_samples, n_samples)
Stores the affinity matrix used in ``fit``.
n_iter_ : int
Number of iterations taken to converge.
Notes
-----
See examples/cluster/plot_affinity_propagation.py for an example.
The algorithmic complexity of affinity propagation is quadratic
in the number of points.
References
----------
Brendan J. Frey and Delbert Dueck, "Clustering by Passing Messages
Between Data Points", Science Feb. 2007
"""
def __init__(self, damping=.5, max_iter=200, convergence_iter=15,
copy=True, preference=None, affinity='euclidean',
verbose=False):
self.damping = damping
self.max_iter = max_iter
self.convergence_iter = convergence_iter
self.copy = copy
self.verbose = verbose
self.preference = preference
self.affinity = affinity
@property
def _pairwise(self):
return self.affinity == "precomputed"
def fit(self, X, y=None):
""" Create affinity matrix from negative euclidean distances, then
apply affinity propagation clustering.
Parameters
----------
X: array-like, shape (n_samples, n_features) or (n_samples, n_samples)
Data matrix or, if affinity is ``precomputed``, matrix of
similarities / affinities.
"""
X = check_array(X, accept_sparse='csr')
if self.affinity == "precomputed":
self.affinity_matrix_ = X
elif self.affinity == "euclidean":
self.affinity_matrix_ = -euclidean_distances(X, squared=True)
else:
raise ValueError("Affinity must be 'precomputed' or "
"'euclidean'. Got %s instead"
% str(self.affinity))
self.cluster_centers_indices_, self.labels_, self.n_iter_ = \
affinity_propagation(
self.affinity_matrix_, self.preference, max_iter=self.max_iter,
convergence_iter=self.convergence_iter, damping=self.damping,
copy=self.copy, verbose=self.verbose, return_n_iter=True)
if self.affinity != "precomputed":
self.cluster_centers_ = X[self.cluster_centers_indices_].copy()
return self
def predict(self, X):
"""Predict the closest cluster each sample in X belongs to.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
New data to predict.
Returns
-------
labels : array, shape (n_samples,)
Index of the cluster each sample belongs to.
"""
check_is_fitted(self, "cluster_centers_indices_")
if not hasattr(self, "cluster_centers_"):
raise ValueError("Predict method is not supported when "
"affinity='precomputed'.")
return pairwise_distances_argmin(X, self.cluster_centers_)
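
# --- Hedged usage sketch (appended for illustration; not part of the original
# module). It exercises the public estimator API documented above; the toy
# data and the preference value are assumptions chosen only for this demo.
# Because of the relative imports at the top, run it with
# "python -m sklearn.cluster.affinity_propagation_" rather than directly.
if __name__ == "__main__":
    _X = np.array([[1.0, 2.0], [1.2, 1.9], [8.0, 8.0], [8.1, 7.9]])
    _model = AffinityPropagation(preference=-50).fit(_X)
    print(_model.cluster_centers_indices_)   # indices of the exemplar rows
    print(_model.labels_)                    # cluster label for each sample
    print(_model.predict(np.array([[1.1, 2.0]])))  # nearest-exemplar lookup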
|
weareua/MarkIT
|
refs/heads/master
|
accounts/models.py
|
1
|
# -*- coding: utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
# user mapping
user = models.OneToOneField(User)
class Meta(object):
verbose_name = (u"User Profile")
# extra user data
mobile_phone = models.CharField(
max_length=12,
blank=True,
verbose_name=(u"Mobile Phone"),
default='')
def __unicode__(self):
return self.user.username
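
# Hedged usage sketch (added comment, not original code; the user name,
# e-mail, and phone number are made up):
#
#   user = User.objects.create_user('fiera', 'fiera@example.com', 's3cret')
#   profile = Profile.objects.create(user=user, mobile_phone='380501234567')
#   print(profile)  # -> 'fiera', via __unicode__ above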
|
marissazhou/django
|
refs/heads/master
|
tests/forms_tests/widget_tests/test_checkboxselectmultiple.py
|
161
|
from django.forms import CheckboxSelectMultiple
from .base import WidgetTest
class CheckboxSelectMultipleTest(WidgetTest):
widget = CheckboxSelectMultiple()
def test_render_value(self):
self.check_html(self.widget, 'beatles', ['J'], choices=self.beatles, html=(
"""<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>"""
))
def test_render_value_multiple(self):
self.check_html(self.widget, 'beatles', ['J', 'P'], choices=self.beatles, html=(
"""<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>"""
))
def test_render_none(self):
"""
If the value is None, none of the options are selected.
"""
self.check_html(self.widget, 'beatles', None, choices=self.beatles, html=(
"""<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>"""
))
def test_nested_choices(self):
nested_choices = (
('unknown', 'Unknown'),
('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))),
('Video', (('vhs', 'VHS'), ('dvd', 'DVD'))),
)
html = """
<ul id="media">
<li>
<label for="media_0"><input id="media_0" name="nestchoice" type="checkbox" value="unknown" /> Unknown</label>
</li>
<li>Audio<ul id="media_1">
<li>
<label for="media_1_0">
<input checked="checked" id="media_1_0" name="nestchoice" type="checkbox" value="vinyl" /> Vinyl
</label>
</li>
<li>
<label for="media_1_1"><input id="media_1_1" name="nestchoice" type="checkbox" value="cd" /> CD</label>
</li>
</ul></li>
<li>Video<ul id="media_2">
<li>
<label for="media_2_0"><input id="media_2_0" name="nestchoice" type="checkbox" value="vhs" /> VHS</label>
</li>
<li>
<label for="media_2_1">
<input checked="checked" id="media_2_1" name="nestchoice" type="checkbox" value="dvd" /> DVD
</label>
</li>
</ul></li>
</ul>
"""
self.check_html(
self.widget, 'nestchoice', ('vinyl', 'dvd'),
choices=nested_choices, attrs={'id': 'media'}, html=html,
)
def test_separate_ids(self):
"""
Each input gets a separate ID.
"""
choices = [('a', 'A'), ('b', 'B'), ('c', 'C')]
html = """
<ul id="abc">
<li>
<label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label>
</li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li>
<label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label>
</li>
</ul>
"""
self.check_html(self.widget, 'letters', ['a', 'c'], choices=choices, attrs={'id': 'abc'}, html=html)
def test_separate_ids_constructor(self):
"""
Each input gets a separate ID when the ID is passed to the constructor.
"""
widget = CheckboxSelectMultiple(attrs={'id': 'abc'})
choices = [('a', 'A'), ('b', 'B'), ('c', 'C')]
html = """
<ul id="abc">
<li>
<label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label>
</li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li>
<label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label>
</li>
</ul>
"""
self.check_html(widget, 'letters', ['a', 'c'], choices=choices, html=html)
|
kylehogan/hil
|
refs/heads/master
|
tests/deployment/vlan_networks.py
|
4
|
# Copyright 2013-2015 Massachusetts Open Cloud Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""Deployment Unit Tests - These tests are intended for our
internal setup only and will most likely not work on
other HaaS configurations."""
from haas import api, model, deferred, server
from haas.test_common import *
import pytest
@pytest.fixture
def configure():
config_testsuite()
config.load_extensions()
fail_on_log_warnings = pytest.fixture(autouse=True)(fail_on_log_warnings)
fresh_database = pytest.fixture(fresh_database)
@pytest.fixture
def server_init():
server.register_drivers()
server.validate_state()
with_request_context = pytest.yield_fixture(with_request_context)
site_layout = pytest.fixture(site_layout)
pytestmark = pytest.mark.usefixtures('configure',
'server_init',
'fresh_database',
'with_request_context',
'site_layout')
class TestNetworkVlan(NetworkTest):
def test_isolated_networks(self):
def get_legal_channels(network):
response_body = api.show_network(network)
response_body = json.loads(response_body)
return response_body['channels']
def create_networks():
nodes = self.collect_nodes()
# Create two networks
network_create_simple('net-0', 'anvil-nextgen')
network_create_simple('net-1', 'anvil-nextgen')
ports = self.get_all_ports(nodes)
# Assert that n0 and n1 are not on any network
port_networks = self.get_port_networks(ports)
assert self.get_network(nodes[0].nics[0].port, port_networks) == \
set()
assert self.get_network(nodes[1].nics[0].port, port_networks) == \
set()
# Get the channel ids for the tagged versions of the networks:
net_tag = {}
net_tag[0] = get_legal_channels('net-0')[1]
net_tag[1] = get_legal_channels('net-1')[1]
# Connect node 0 to net-0 (native mode)
api.node_connect_network(nodes[0].label,
nodes[0].nics[0].label,
'net-0')
# Connect node 1 to net-1 (tagged mode)
api.node_connect_network(nodes[1].label,
nodes[1].nics[0].label,
'net-1',
channel=net_tag[1])
deferred.apply_networking()
# Assert that n0 and n1 are on isolated networks
port_networks = self.get_port_networks(ports)
assert self.get_network(nodes[0].nics[0].port, port_networks) == \
set([nodes[0].nics[0].port])
assert self.get_network(nodes[1].nics[0].port, port_networks) == \
set([nodes[1].nics[0].port])
# Add n2 and n3 to the same networks as n0 and n1 respectively, but
# with different channels (native vs. tagged)
api.node_connect_network(nodes[2].label,
nodes[2].nics[0].label,
'net-0',
channel=net_tag[0])
api.node_connect_network(nodes[3].label,
nodes[3].nics[0].label,
'net-1')
deferred.apply_networking()
# Assert that n2 and n3 have been added to n0 and n1's networks
# respectively
port_networks = self.get_port_networks(ports)
assert self.get_network(nodes[0].nics[0].port, port_networks) == \
set([nodes[0].nics[0].port, nodes[2].nics[0].port])
assert self.get_network(nodes[1].nics[0].port, port_networks) == \
set([nodes[1].nics[0].port, nodes[3].nics[0].port])
# Verify that we can put nodes on more than one network, with
# different channels:
api.node_connect_network(nodes[2].label,
nodes[2].nics[0].label,
'net-1')
deferred.apply_networking()
port_networks = self.get_port_networks(ports)
assert self.get_network(nodes[1].nics[0].port, port_networks) == \
set([nodes[1].nics[0].port,
nodes[2].nics[0].port,
nodes[3].nics[0].port])
def delete_networks():
# Query the DB for nodes on this project
project = api._must_find(model.Project, 'anvil-nextgen')
nodes = project.nodes
ports = self.get_all_ports(nodes)
# Remove all nodes from their networks. We first build up a list of
# the arguments to the API calls, which has no direct references to
# database objects, and then make the API calls and invoke
# deferred.apply_networking after. This is important --
# The API calls and apply_networking normally run in their own
# transaction. We get away with not doing this in the tests because
# we serialize everything ourselves, so there's no risk of
# interference. If we were to hang on to references to database
            # objects across such calls, however, things could get hairy.
all_attachments = []
for node in nodes:
attachments = model.NetworkAttachment.query \
.filter_by(nic=node.nics[0]).all()
for attachment in attachments:
all_attachments.append((node.label,
node.nics[0].label,
attachment.network.label))
for attachment in all_attachments:
api.node_detach_network(*attachment)
deferred.apply_networking()
# Assert that none of the nodes are on any network
port_networks = self.get_port_networks(ports)
for node in nodes:
assert self.get_network(node.nics[0].port, port_networks) == \
set()
# Delete the networks
api.network_delete('net-0')
api.network_delete('net-1')
# Create a project
api.project_create('anvil-nextgen')
create_networks()
delete_networks()
|
houzhenggang/hiwifi-openwrt-HC5661-HC5761
|
refs/heads/master
|
staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/json/tests/test_encode_basestring_ascii.py
|
143
|
from collections import OrderedDict
from json.tests import PyTest, CTest
CASES = [
(u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
(u'controls', '"controls"'),
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
(u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
(u' s p a c e d ', '" s p a c e d "'),
(u'\U0001d120', '"\\ud834\\udd20"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
]
class TestEncodeBasestringAscii(object):
def test_encode_basestring_ascii(self):
fname = self.json.encoder.encode_basestring_ascii.__name__
for input_string, expect in CASES:
result = self.json.encoder.encode_basestring_ascii(input_string)
self.assertEqual(result, expect,
'{0!r} != {1!r} for {2}({3!r})'.format(
result, expect, fname, input_string))
def test_ordered_dict(self):
# See issue 6105
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
s = self.dumps(OrderedDict(items))
self.assertEqual(s, '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
class TestPyEncodeBasestringAscii(TestEncodeBasestringAscii, PyTest): pass
class TestCEncodeBasestringAscii(TestEncodeBasestringAscii, CTest): pass
|
IMSGlobal/openbadges-validator-core
|
refs/heads/develop
|
openbadges/verifier/verifier.py
|
2
|
import json
from openbadges_bakery import unbake
from pydux import create_store
import traceback
from .actions.input import set_input_type, store_input
from .actions.tasks import add_task, report_message, resolve_task, trigger_condition
from .exceptions import SkipTask, TaskPrerequisitesError
from .logger import logger
from .openbadges_context import OPENBADGES_CONTEXT_V2_URI
from .reducers import main_reducer
from .state import (filter_active_tasks, filter_messages_for_report, format_message,
INITIAL_STATE, MESSAGE_LEVEL_ERROR, MESSAGE_LEVEL_WARNING,)
from . import tasks
from .tasks.task_types import INTAKE_JSON, JSONLD_COMPACT_DATA, VALIDATE_EXTENSION_NODE
from .tasks.validation import OBClasses
from .utils import list_of, CachableDocumentLoader, jsonld_use_cache
DEFAULT_OPTIONS = {
'include_original_json': False, # Return the original JSON strings fetched from HTTP
'use_cache': True,
'cache_backend': 'memory',
'cache_expire_after': 300,
'jsonld_options': jsonld_use_cache
}
def _get_options(options):
if options:
selected = DEFAULT_OPTIONS.copy()
selected.update(options)
else:
selected = DEFAULT_OPTIONS
if selected['use_cache']:
doc_loader = CachableDocumentLoader(
use_cache=selected['use_cache'],
backend=selected['cache_backend'],
expire_after=selected['cache_expire_after']
)
else:
doc_loader = CachableDocumentLoader(use_cache=False)
selected['jsonld_options'] = {'documentLoader': doc_loader}
return selected
def call_task(task_func, task_meta, store, options=DEFAULT_OPTIONS):
"""
Calls and resolves a task function in response to a queued task. May result
in additional actions added to the queue.
:param task_func: func
:param task_meta: dict (single entry in tasks state)
:param store: pydux store
:return:
"""
actions = []
try:
success, message, actions = task_func(store.get_state(), task_meta, **options)
except SkipTask:
        raise NotImplementedError("Implement SkipTask handling in call_task")
except TaskPrerequisitesError:
message = "Task could not run due to unmet prerequisites."
store.dispatch(resolve_task(task_meta.get('task_id'), success=False, result=message))
except Exception as e:
error_message = traceback.format_exception_only(type(e), e)
logger.error(traceback.format_exc())
message = "{} {}".format(e.__class__, error_message)
store.dispatch(resolve_task(task_meta.get('task_id'), success=False, result=message))
else:
store.dispatch(resolve_task(task_meta.get('task_id'), success=success, result=message))
if success:
for trigger in list_of(task_meta.get('triggers_completion', [])):
store.dispatch(trigger_condition(trigger, 'Completed by {}: {}'.format(
task_meta.get('task_id'), task_meta.get('name')
)))
# Make updates and queue up next tasks.
for action in actions:
if not isinstance(action, dict):
raise TypeError("Task {} returned actions of an unreadable type. Task details: {}".format(
task_meta.get('name'), json.dumps(task_meta)
))
store.dispatch(action)
def verification_store(badge_input, recipient_profile=None, store=None, options=DEFAULT_OPTIONS):
if store is None:
store = create_store(main_reducer, INITIAL_STATE)
try:
if hasattr(badge_input, 'read') and hasattr(badge_input, 'seek'):
badge_input.seek(0)
badge_data = unbake(badge_input)
if not badge_data:
raise ValueError("Could not find Open Badges metadata in file.")
else:
badge_data = badge_input
except ValueError as e:
# Could not obtain badge data from input. Set the result as a failed DETECT_INPUT_TYPE task.
store.dispatch(store_input(badge_input.name))
store.dispatch(add_task(tasks.DETECT_INPUT_TYPE))
store.dispatch(set_input_type('file'))
task = store.get_state()['tasks'][0]
store.dispatch(resolve_task(task.get('task_id'), success=False, result=e.message))
else:
store.dispatch(store_input(badge_data))
store.dispatch(add_task(tasks.DETECT_INPUT_TYPE))
if recipient_profile:
profile_id = recipient_profile.get('id')
recipient_profile['@context'] = recipient_profile.get('@context', OPENBADGES_CONTEXT_V2_URI)
task = add_task(
JSONLD_COMPACT_DATA,
data=json.dumps(recipient_profile),
expected_class=OBClasses.ExpectedRecipientProfile)
if profile_id:
task['node_id'] = profile_id
store.dispatch(task)
last_task_id = 0
while len(filter_active_tasks(store.get_state())):
active_tasks = filter_active_tasks(store.get_state())
task_meta = active_tasks[0]
task_func = tasks.task_named(task_meta['name'])
if task_meta['task_id'] == last_task_id:
break
last_task_id = task_meta['task_id']
call_task(task_func, task_meta, store, options)
return store
def generate_report(store, options=DEFAULT_OPTIONS):
"""
Returns a report of validity information based on a store and its tasks.
"""
state = store.get_state()
processed_input = state['input'].copy()
if not options.get('include_original_json'):
try:
del processed_input['original_json']
except KeyError:
pass
tasks_for_messages_list = filter_messages_for_report(state)
report = state['report'].copy()
report['messages'] = []
for task in tasks_for_messages_list:
report['messages'].append(format_message(task))
report['errorCount'] = len([m for m in report['messages'] if m['messageLevel'] == MESSAGE_LEVEL_ERROR])
report['warningCount'] = len([m for m in report['messages'] if m['messageLevel'] == MESSAGE_LEVEL_WARNING])
is_valid = True # Assume to be true at first
if bool(report['errorCount']) or len(state.get('graph', [])) == 0:
is_valid = False
report['valid'] = is_valid
ret = {
'graph': state['graph'],
'input': processed_input,
'report': report
}
return ret
def verify(badge_input, recipient_profile=None, **options):
"""
Verify and validate Open Badges
:param badge_input: str (url or json) or python file-like object (baked badge image)
:param recipient_profile: dict of a trusted Profile describing the entity assumed to be recipient
:param options: dict of options. See DEFAULT_OPTIONS for values
:return: dict
"""
selected_options = _get_options(options)
store = verification_store(badge_input, recipient_profile, options=selected_options)
return generate_report(store, options=selected_options)
def extension_validation_store(extension_input, store=None, options=DEFAULT_OPTIONS):
if store is None:
store = create_store(main_reducer, INITIAL_STATE)
if not isinstance(extension_input, dict):
        raise ValueError("extension_input must be a dict of extension properties")
store.dispatch(store_input(extension_input.copy()))
extension_input['@context'] = extension_input.get('@context', OPENBADGES_CONTEXT_V2_URI)
extension_input['id'] = extension_input.get('id', '_:extension_validation_input')
compact_task = add_task(JSONLD_COMPACT_DATA, detectAndValidateClass=False, data=json.dumps(extension_input))
store.dispatch(compact_task)
tasks_remaining = True
while tasks_remaining:
active_tasks = filter_active_tasks(store.get_state())
if len(active_tasks) < 1:
tasks_remaining = False
break
task_meta = active_tasks[0]
task_func = tasks.task_named(task_meta['name'])
call_task(task_func, task_meta, store, options)
all_tasks = store.get_state()['tasks']
try:
first_extension_node_validation_task = [t for t in all_tasks if t['name'] == VALIDATE_EXTENSION_NODE][0]
except IndexError:
store.dispatch(report_message(
"No extensions were found to test. Check for proper use of context and type to declare an extension.",
message_level=MESSAGE_LEVEL_ERROR, success=False
))
return store
def validate_extensions(extension_input, **options):
"""
Validate Open Badges Extensions
:param extension_input: object with openbadges extension properites
:param options: dict of options. See DEFAULT_OPTIONS for values
:return: dict
"""
selected_options = _get_options(options)
store = extension_validation_store(extension_input, options=selected_options)
return generate_report(store, selected_options)
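
# Hedged usage sketch (appended for illustration; not part of the original
# module). The assertion URL is a made-up placeholder; any Open Badges
# assertion URL, JSON string, or baked badge file object works the same way.
if __name__ == "__main__":
    result = verify('https://example.org/assertions/123')  # hypothetical URL
    print(json.dumps(result['report'], indent=2))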
|
digitalocean/netbox
|
refs/heads/develop
|
netbox/tenancy/api/__init__.py
|
12133432
| |
ciudadanointeligente/votainteligente-portal-electoral
|
refs/heads/master
|
preguntales/tests/ranking_tests.py
|
1
|
# coding=utf-8
from elections.tests import VotaInteligenteTestCase as TestCase
from elections.models import Election, Candidate
from preguntales.models import Message, Answer
from preguntales.views import RankingMixin
from django.core.urlresolvers import reverse
class RankingTestCaseBase(TestCase):
def setUp(self):
self.election = Election.objects.get(id=1)
self.candidate1 = Candidate.objects.get(id=4)
self.candidate2 = Candidate.objects.get(id=5)
self.candidate3 = Candidate.objects.get(id=6)
self.candidate4 = Candidate.objects.create(name="Fiera")
self.candidate4.elections.add(self.election)
self.message = Message.objects.\
create(election=self.election,
author_name='author',
author_email='author@email.com',
subject='subject',
content='content',
slug=u'subject-slugified',
accepted=True
)
self.message.people.add(self.candidate1)
self.message.people.add(self.candidate2)
self.message.people.add(self.candidate3)
self.message.people.add(self.candidate4)
self.ans11 = Answer.objects.create(content=u'a11',
message=self.message,
person=self.candidate1
)
self.message2 = Message.objects\
.create(election=self.election,
author_name='author',
author_email='author@email.com',
subject='subject',
content='content',
slug=u'subject-slugified',
accepted=True
)
self.message2.people.add(self.candidate1)
self.message2.people.add(self.candidate2)
self.message2.people.add(self.candidate3)
self.message2.people.add(self.candidate4)
self.ans21 = Answer.objects.create(content=u'a21',
message=self.message2,
person=self.candidate1
)
self.ans22 = Answer.objects.create(content=u'a22',
message=self.message2,
person=self.candidate2
)
self.message3 = Message.objects\
.create(election=self.election,
author_name='author',
author_email='author@email.com',
subject='subject',
content='content',
slug='subject-slugified'
)
self.message3.people.add(self.candidate1)
self.message3.people.add(self.candidate2)
self.message3.people.add(self.candidate3)
self.message3.people.add(self.candidate4)
self.ans31 = Answer.objects.create(content=u'a31',
message=self.message3,
person=self.candidate1
)
self.ans32 = Answer.objects.create(content=u'a32',
message=self.message3,
person=self.candidate2
)
self.ans34 = Answer.objects.create(content=u'a34',
message=self.message3,
person=self.candidate4
)
self.message4 = Message.objects\
.create(election=self.election,
author_name='author',
author_email='author@email.com',
subject='subject',
content='content',
slug='subject-slugified'
)
self.message4.people.add(self.candidate1)
self.message4.people.add(self.candidate2)
self.message4.people.add(self.candidate3)
# this question wasn't asked to candidate 4
# self.message4.people.add(self.candidate4)
class RankingTestCase(RankingTestCaseBase):
def test_mixin(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
ranking = ranking_view.get_ranking()
self.assertEquals(len(ranking), 4)
def test_get_all_messages(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
self.assertEquals(ranking_view.all_messages().count(), 4)
self.assertIn(self.message, ranking_view.all_messages())
self.assertIn(self.message2, ranking_view.all_messages())
self.assertIn(self.message3, ranking_view.all_messages())
self.assertIn(self.message4, ranking_view.all_messages())
def test_get_all_possible_answers(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
self.assertEquals(ranking_view.all_possible_answers(), 15)
def test_get_actual_answered_questions(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
self.assertEquals(ranking_view.actual_answers(), 6)
def test_get_index(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = Candidate.objects.all()
expected_index = float(15)/float(6)
self.assertEquals(ranking_view.success_index(), expected_index)
def test_get_clasified_answered_and_questions_num(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
clasified = ranking_view.get_clasified()
self.assertEquals(clasified[0]['id'], self.candidate1.id)
self.assertEquals(clasified[0]['name'], self.candidate1.name)
self.assertEquals(clasified[0]['candidate'], self.candidate1)
self.assertEquals(clasified[0]['possible_answers'], 4)
self.assertEquals(clasified[1]['name'], self.candidate2.name)
self.assertEquals(clasified[1]['id'], self.candidate2.id)
self.assertEquals(clasified[1]['candidate'], self.candidate2)
self.assertEquals(clasified[1]['possible_answers'], 4)
self.assertEquals(clasified[2]['name'], self.candidate3.name)
self.assertEquals(clasified[2]['id'], self.candidate3.id)
self.assertEquals(clasified[2]['candidate'], self.candidate3)
self.assertEquals(clasified[2]['possible_answers'], 4)
self.assertEquals(clasified[3]['name'], self.candidate4.name)
self.assertEquals(clasified[3]['id'], self.candidate4.id)
self.assertEquals(clasified[3]['candidate'], self.candidate4)
self.assertEquals(clasified[3]['possible_answers'], 3)
self.assertEquals(clasified[0]['actual_answers'], 3)
self.assertEquals(clasified[1]['actual_answers'], 2)
self.assertEquals(clasified[2]['actual_answers'], 0)
self.assertEquals(clasified[3]['actual_answers'], 1)
def test_clasified_points(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
clasified = ranking_view.get_clasified()
success_index = ranking_view.success_index()
possible_answers = clasified[0]["possible_answers"]
actual_answers = clasified[0]["actual_answers"]
expected_points1 = (success_index*actual_answers -
(possible_answers-actual_answers))*possible_answers
self.assertEquals(clasified[0]['points'], expected_points1)
possible_answers = clasified[1]["possible_answers"]
actual_answers = clasified[1]["actual_answers"]
expected_points1 = (success_index*actual_answers -
(possible_answers-actual_answers))*possible_answers
self.assertEquals(clasified[1]['points'], expected_points1)
possible_answers = clasified[2]["possible_answers"]
actual_answers = clasified[2]["actual_answers"]
expected_points1 = (success_index*actual_answers -
(possible_answers-actual_answers))*possible_answers
self.assertEquals(clasified[2]['points'], expected_points1)
possible_answers = clasified[3]["possible_answers"]
actual_answers = clasified[3]["actual_answers"]
expected_points1 = (success_index*actual_answers -
(possible_answers-actual_answers))*possible_answers
self.assertEquals(clasified[3]['points'], expected_points1)
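    # Worked example of the formula above (added comment, using the fixture
    # numbers from setUp): success_index = 15/6 = 2.5 and candidate1 answered
    # 3 of its 4 questions, so points = (2.5*3 - (4 - 3)) * 4 = 6.5 * 4 = 26.0.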
def test_get_ordered_clasified(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
ordered = ranking_view.get_ordered()
self.assertEquals(ordered[0]['candidate'], self.candidate1)
self.assertEquals(ordered[3]['candidate'], self.candidate3)
def test_get_good_ones(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
good = ranking_view.get_good()
self.assertEquals(len(good), 2)
self.assertEquals(good[0]['candidate'], self.candidate1)
is_candidate2_or_4 = good[1]['candidate'] == self.candidate2 \
or good[1]['candidate'] == self.candidate4
self.assertTrue(is_candidate2_or_4)
def test_get_bad_ones(self):
ranking_view = RankingMixin()
ranking_view.candidate_queryset = self.election.candidates.all()
bad = ranking_view.get_bad()
self.assertEquals(len(bad), 2)
self.assertEquals(bad[0]['candidate'], self.candidate3)
is_candidate2_or_4 = bad[1]['candidate'] == self.candidate2 \
or bad[1]['candidate'] == self.candidate4
self.assertTrue(is_candidate2_or_4)
def test_reach_url_and_has_good_and_bad(self):
url = reverse('ranking_view', kwargs={'slug': self.election.slug})
response = self.client.get(url)
self.assertEquals(response.status_code, 200)
self.assertEquals(response.context['election'], self.election)
self.assertIn('good', response.context)
self.assertEquals(len(response.context['good']), 2)
goods = response.context['good']
bads = response.context['bad']
is_candidate2_or_4 = goods[1]['candidate'] == self.candidate2 \
            or goods[1]['candidate'] == self.candidate4
self.assertEquals(goods[0]['candidate'], self.candidate1)
self.assertTrue(is_candidate2_or_4)
self.assertIn('bad', response.context)
self.assertEquals(len(bads), 2)
is_candidate2_or_4 = bads[1]['candidate'] == self.candidate2 \
or bads[1]['candidate'] == self.candidate4
        self.assertEquals(bads[0]['candidate'], self.candidate3)
        self.assertTrue(is_candidate2_or_4)
class QuestionsPerCandidateViewTestCase(RankingTestCaseBase):
def test_it_is_reachable(self):
reverse_url = reverse('questions_per_candidate',
kwargs={'election_slug': self.election.slug,
'slug': self.candidate1.slug})
response = self.client.get(reverse_url)
self.assertEquals(response.status_code, 200)
self.assertIn('candidate', response.context)
self.assertEquals(response.context['candidate'], self.candidate1)
self.assertTemplateUsed(response,
'elections/questions_per_candidate.html')
self.assertIn('questions', response.context)
expected_messages = list(Message.objects.
filter(people=self.candidate1))
actual_messages = list(response.context['questions'])
self.assertEquals(actual_messages, expected_messages)
|
jasimpson/gnuradio-jasimpson
|
refs/heads/master
|
gr-msdd6000/src/python_test/test_tcp.py
|
16
|
#!/usr/bin/python
from socket import *
import string
import time
import struct;
import random;
myport = random.randint(1025,65535);
port = 10000
host = "10.45.4.43"
myaddr = ("10.45.1.229",myport);
buf = 100000;
TCPSock = socket(AF_INET,SOCK_STREAM);
#TCPSock = socket(AF_INET,SOCK_DGRAM);
TCPSock.bind(myaddr);
TCPSock.connect((host,port));
f_mhz = 2400;
f_hz = 0;
gain = 2;
window = 3; #0=rect, 1=hanning, 2=hamming, 3=blackman
#samples = 0xffffffff; #8-15 fft:(returns 2^number[8-15]) raw:(returns number)
samples = 2; #8-15 fft:(returns 2^number[8-15]) raw:(returns number)
decim = 2; #0-8
#decim = decim+16; # +16 to use 16bit instead of 32 bit
mode = 1; #0=IQ, 1=MAG, 2=MAGDB
sets = 0xffffffff;
#sets = 1;
fft_data = struct.pack("<IIIIIIIIII", 0x02, 0x20, f_mhz, f_hz, gain,window, samples, decim, mode,sets);
raw_data = struct.pack("<IIIIIIII", 0x01, 0x18, f_mhz, f_hz, gain,samples, decim,sets);
stat_data = struct.pack("!II", 0x0000, 0x0000)
data = raw_data;
#TCPSock.sendto(data, (host,port))
TCPSock.send(data);
print "sent"
count = 0;
while(1):
data,addr = TCPSock.recvfrom(buf);
print "got response"
print "Data length: %d bytes."%(len(data));
if(len(data)==12):
a,b,c = struct.unpack("!III",data);
print "%x,%x,%x"%(a,b,c);
datavector = [];
for d in data:
a = struct.unpack("<b",d);
datavector.append(a);
print datavector;
count = count + 1;
if(count > 1):
sets = 3;
raw_data_2 = struct.pack("<IIIIIIII", 0x01, 0x18, f_mhz, f_hz, gain,samples, decim,sets);
TCPSock.send(raw_data_2);
TCPSock.close();
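# Hedged worked note (added comment, not original code): "<IIIIIIII" packs
# eight little-endian uint32 fields, so raw_data is 32 bytes on the wire --
# presumably an opcode (0x01) and a length word (0x18 = 24, the bytes that
# follow the two header words), then f_mhz, f_hz, gain, samples, decim, sets.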
|
msiedlarek/qtwebkit
|
refs/heads/5.4
|
Source/WebCore/make-file-arrays.py
|
127
|
#!/usr/bin/env python
# Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Usage: make-file-arrays.py [--condition=condition-string] --out-h=<header-file-name> --out-cpp=<cpp-file-name> <input-file>...
import os.path
import re
import sys
from optparse import OptionParser
def make_variable_name_and_read(file_name):
result = re.match(r"([\w\d_]+)\.([\w\d_]+)", os.path.basename(file_name))
if not result:
print "Invalid input file name:", os.path.basename(file_name)
sys.exit(1)
variable_name = result.group(1)[0].lower() + result.group(1)[1:] + result.group(2).capitalize()
file = open(file_name, "rb")
content = file.read()
file.close()
return (variable_name, content)
def strip_whitespace_and_comments(file_name, content):
result = re.match(r".*\.([^.]+)", file_name)
if not result:
print "The file name has no extension:", file_name
sys.exit(1)
extension = result.group(1).lower()
multi_line_comment = re.compile(r"/\*.*?\*/", re.MULTILINE | re.DOTALL)
single_line_comment = re.compile(r"//.*$", re.MULTILINE)
repeating_space = re.compile(r"[ \t]+", re.MULTILINE)
leading_space = re.compile(r"^[ \t]+", re.MULTILINE)
trailing_space = re.compile(r"[ \t]+$", re.MULTILINE)
empty_line = re.compile(r"\n+")
if extension == "js":
content = multi_line_comment.sub("", content)
content = single_line_comment.sub("", content)
content = repeating_space.sub(" ", content)
content = leading_space.sub("", content)
content = trailing_space.sub("", content)
content = empty_line.sub("\n", content)
elif extension == "css":
content = multi_line_comment.sub("", content)
content = repeating_space.sub(" ", content)
content = leading_space.sub("", content)
content = trailing_space.sub("", content)
content = empty_line.sub("\n", content)
return content
def main():
parser = OptionParser()
parser.add_option("--out-h", dest="out_header")
parser.add_option("--out-cpp", dest="out_cpp")
parser.add_option("--condition", dest="flag")
(options, args) = parser.parse_args()
if len(args) < 1:
parser.error("Need one or more input files")
if not options.out_header:
parser.error("Need to specify --out-h=filename")
if not options.out_cpp:
parser.error("Need to specify --out-cpp=filename")
if options.flag:
options.flag = options.flag.replace(" AND ", " && ")
options.flag = options.flag.replace(" OR ", " || ")
header_file = open(options.out_header, "w")
if options.flag:
header_file.write("#if " + options.flag + "\n")
header_file.write("namespace WebCore {\n")
cpp_file = open(options.out_cpp, "w")
cpp_file.write("#include \"config.h\"\n")
cpp_file.write("#include \"" + os.path.basename(options.out_header) + "\"\n")
if options.flag:
cpp_file.write("#if " + options.flag + "\n")
cpp_file.write("namespace WebCore {\n")
for file_name in args:
(variable_name, content) = make_variable_name_and_read(file_name)
content = strip_whitespace_and_comments(file_name, content)
size = len(content)
header_file.write("extern const char %s[%d];\n" % (variable_name, size))
cpp_file.write("const char %s[%d] = {\n" % (variable_name, size))
for index in range(size):
char_code = ord(content[index])
if char_code < 128:
cpp_file.write("%d" % char_code)
else:
cpp_file.write("'\\x%02x'" % char_code)
cpp_file.write("," if index != len(content) - 1 else "};\n")
if index % 20 == 19:
cpp_file.write("\n")
cpp_file.write("\n")
header_file.write("}\n")
if options.flag:
header_file.write("#endif\n")
header_file.close()
cpp_file.write("}\n")
if options.flag:
cpp_file.write("#endif\n")
cpp_file.close()
if __name__ == "__main__":
main()
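# A hedged usage sketch (file names below are illustrative, not from this
# script):
#
#   python make-file-arrays.py --condition="ENABLE(INSPECTOR)" \
#       --out-h=InjectedScriptSource.h --out-cpp=InjectedScriptSource.cpp \
#       InjectedScriptSource.js
#
# would emit a header declaring `extern const char injectedScriptSourceJs[N];`
# (N is the stripped content length) and a .cpp file defining the array with
# JS comments and extra whitespace removed, both guarded by the condition.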
|
ralphbean/mongrel2
|
refs/heads/master
|
docs/manual/inputs/parsing_mongrel2_reqs.py
|
94
|
import json
def parse_netstring(ns):
    length, rest = ns.split(':', 1)
    length = int(length)
    assert rest[length] == ',', "Netstring did not end in ','"
    return rest[:length], rest[length+1:]
def parse(msg):
sender, conn_id, path, rest = msg.split(' ', 3)
headers, rest = parse_netstring(rest)
body, _ = parse_netstring(rest)
headers = json.loads(headers)
    return sender, conn_id, path, headers, body
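# A minimal round-trip check (values are illustrative) of the
# "SENDER CONN_ID PATH HLEN:HEADERS,BLEN:BODY," framing parsed above:
if __name__ == '__main__':
    headers = json.dumps({"PATH": "/"})
    body = "hello"
    msg = "54c6755b-9628 42 / %d:%s,%d:%s," % (len(headers), headers,
                                               len(body), body)
    print(parse(msg))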
|
b4be1/ball_catcher
|
refs/heads/master
|
src/model_dubin.py
|
2
|
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 26 21:32:22 2015
Initialization
@author: plim
"""
import casadi as ca
import casadi.tools as cat
import numpy as np
# %% =========================================================================
# Parameters
# ============================================================================
# Simulation
T_sim = 5.0 # simulation time
N_sim = 15 # time steps
dt = T_sim/N_sim # time-step length
N_rk = 5 # number of RK4 steps
# MPC
N_ctrl = 2 # mpc replan horizon
N_delay = 3 # delay in perception
# Model
nx, nu, nz = 7, 2, 5
v_max = 10 # max speed
w_max = 1 # max rotation speed
a, b = 7.5, 2.5 # max speed restriction constants
# %% =========================================================================
# Variables
# ============================================================================
# State
state = cat.struct_symSX(['x_b','y_b','vx_b','vy_b',
'x_c','y_c','phi'])
# Control
control = cat.struct_symSX(['v','w'])
# Observation
observation = cat.struct_SX([
cat.entry('x_b', expr = state['x_b']),
cat.entry('y_b', expr = state['y_b']),
cat.entry('x_c', expr = state['x_c']),
cat.entry('y_c', expr = state['y_c']),
cat.entry('phi', expr = state['phi'])
])
# Belief state (mu, Sigma)
belief = cat.struct_symSX([
cat.entry('m',struct=state),
cat.entry('S',shapestruct=(state,state))
])
# Extended belief (mu, Sigma, Lambda) for plotting
ext_belief = cat.struct_symSX([
cat.entry('m',struct=state),
cat.entry('S',shapestruct=(state,state)),
cat.entry('L',shapestruct=(state,state))
])
# %% =========================================================================
# Initial conditions
# ============================================================================
# State
m0 = ca.DMatrix([0., 0., 5., 4.,
15., 0., ca.pi/2])
S0 = ca.diagcat([1., 1., 1., 1.,
1., 1., 1e-2]) * 0.25
L0 = ca.DMatrix.eye(nx) * 1e-3
L0[-1,-1] = 1e-5
mu0 = np.array(m0).ravel() # to get samples from normal distribution
b0 = belief()
b0['m'] = m0
b0['S'] = ca.densify(S0)
eb0 = ext_belief()
eb0['m'] = m0
eb0['S'] = ca.densify(S0)
eb0['L'] = ca.densify(L0)
# Covariances
Q = ca.DMatrix.eye(nx) * 1e-3 # does not change
Q[-1,-1] = 1e-5
# Nominal controls
u_ = control.repeated(ca.DMatrix.zeros(nu,N_sim))
u_[:,'v'] = 5.
u_[:,'w'] = 0.2
# %% =========================================================================
# Continuous-time dynamics
# ============================================================================
def continuous_dynamics(state, control):
# Unpack arguments
[x_b,y_b,vx_b,vy_b,x_c,y_c,phi] = state[...]
[v,w] = control[...]
# Define right-hand side
rhs = cat.struct_SX(state)
rhs['x_b'] = vx_b
rhs['y_b'] = vy_b
rhs['vx_b'] = 0
rhs['vy_b'] = 0
#rhs['x_c'] = v * (ca.cos(psi) * ca.cos(phi) - \
# ca.sin(psi) * ca.sin(phi))
#rhs['y_c'] = v * (ca.cos(psi) * ca.sin(phi) + \
# ca.sin(psi) * ca.cos(phi))
rhs['phi'] = w
rhs['x_c'] = v * ca.cos(phi)
rhs['y_c'] = v * ca.sin(phi)
return ca.SXFunction('Continuous dynamics',[state,control],[rhs])
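# A hedged sketch (not part of the original model) of how the dynamics above
# are typically discretized: N_rk RK4 sub-steps of length h = dt/N_rk per
# control interval. `dyn` is assumed to be the SXFunction returned by
# continuous_dynamics, called casadi-2.x style as dyn([x, u]) -> [xdot].
def rk4_step(dyn, x, u, h):
    k1 = dyn([x, u])[0]
    k2 = dyn([x + h / 2.0 * k1, u])[0]
    k3 = dyn([x + h / 2.0 * k2, u])[0]
    k4 = dyn([x + h * k3, u])[0]
    return x + h / 6.0 * (k1 + 2 * k2 + 2 * k3 + k4)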
# %% =========================================================================
# Observation covariance function
# ============================================================================
def observation_covariance(state, observation):
d = ca.veccat([ca.cos(state['phi']), ca.sin(state['phi'])])
r = ca.veccat([state['x_b']-state['x_c'], state['y_b']-state['y_c']])
r_cos_theta = ca.mul(d.T,r)
cos_theta = r_cos_theta / (ca.norm_2(r_cos_theta) + 1e-2)
# Look at the ball and be close to the ball
nz = observation.size
R = observation.squared(ca.SX.zeros(nz,nz))
R['x_b','x_b'] = ca.mul(r.T,r) * (1 - cos_theta) + 1e-2
R['y_b','y_b'] = ca.mul(r.T,r) * (1 - cos_theta) + 1e-2
# state -> R
return ca.SXFunction('Observation covariance',[state],[R])
|
runt18/mojo
|
refs/heads/master
|
mojo/public/c/PRESUBMIT.py
|
42
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for mojo/public/c.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckChangeHasOnlyOneEol(input_api,
output_api)
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
|
maxalbert/ansible
|
refs/heads/devel
|
lib/ansible/executor/task_queue_manager.py
|
46
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import multiprocessing
import os
import socket
import sys
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.play_iterator import PlayIterator
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.process.result import ResultProcess
from ansible.executor.stats import AggregateStats
from ansible.playbook.play_context import PlayContext
from ansible.plugins import callback_loader, strategy_loader
from ansible.template import Templar
__all__ = ['TaskQueueManager']
class TaskQueueManager:
'''
This class handles the multiprocessing requirements of Ansible by
creating a pool of worker forks, a result handler fork, and a
manager object with shared datastructures/queues for coordinating
work between all processes.
The queue manager is responsible for loading the play strategy plugin,
which dispatches the Play's tasks to hosts.
'''
def __init__(self, inventory, variable_manager, loader, display, options, passwords, stdout_callback=None):
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self._display = display
self._options = options
self._stats = AggregateStats()
self.passwords = passwords
self._stdout_callback = stdout_callback
self._callbacks_loaded = False
self._callback_plugins = []
# a special flag to help us exit cleanly
self._terminated = False
# this dictionary is used to keep track of notified handlers
self._notified_handlers = dict()
# dictionaries to keep track of failed/unreachable hosts
self._failed_hosts = dict()
self._unreachable_hosts = dict()
self._final_q = multiprocessing.Queue()
        # create the pool of worker processes, based on the number of forks specified
try:
fileno = sys.stdin.fileno()
except ValueError:
fileno = None
self._workers = []
for i in range(self._options.forks):
main_q = multiprocessing.Queue()
rslt_q = multiprocessing.Queue()
prc = WorkerProcess(self, main_q, rslt_q, loader)
prc.start()
self._workers.append((prc, main_q, rslt_q))
self._result_prc = ResultProcess(self._final_q, self._workers)
self._result_prc.start()
def _initialize_notified_handlers(self, handlers):
'''
Clears and initializes the shared notified handlers dict with entries
for each handler in the play, which is an empty array that will contain
inventory hostnames for those hosts triggering the handler.
'''
# Zero the dictionary first by removing any entries there.
# Proxied dicts don't support iteritems, so we have to use keys()
for key in self._notified_handlers.keys():
del self._notified_handlers[key]
# FIXME: there is a block compile helper for this...
handler_list = []
for handler_block in handlers:
for handler in handler_block.block:
handler_list.append(handler)
# then initialize it with the handler names from the handler list
for handler in handler_list:
self._notified_handlers[handler.get_name()] = []
def load_callbacks(self):
'''
Loads all available callbacks, with the exception of those which
utilize the CALLBACK_TYPE option. When CALLBACK_TYPE is set to 'stdout',
only one such callback plugin will be loaded.
'''
if self._callbacks_loaded:
return
stdout_callback_loaded = False
if self._stdout_callback is None:
self._stdout_callback = C.DEFAULT_STDOUT_CALLBACK
if self._stdout_callback not in callback_loader:
raise AnsibleError("Invalid callback for stdout specified: %s" % self._stdout_callback)
for callback_plugin in callback_loader.all(class_only=True):
if hasattr(callback_plugin, 'CALLBACK_VERSION') and callback_plugin.CALLBACK_VERSION >= 2.0:
# we only allow one callback of type 'stdout' to be loaded, so check
# the name of the current plugin and type to see if we need to skip
# loading this callback plugin
callback_type = getattr(callback_plugin, 'CALLBACK_TYPE', None)
(callback_name, _) = os.path.splitext(os.path.basename(callback_plugin._original_path))
if callback_type == 'stdout':
if callback_name != self._stdout_callback or stdout_callback_loaded:
continue
stdout_callback_loaded = True
elif C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST:
continue
self._callback_plugins.append(callback_plugin(self._display))
else:
self._callback_plugins.append(callback_plugin())
self._callbacks_loaded = True
def run(self, play):
'''
Iterates over the roles/tasks in a play, using the given (or default)
strategy for queueing tasks. The default is the linear strategy, which
operates like classic Ansible by keeping all hosts in lock-step with
a given task (meaning no hosts move on to the next task until all hosts
are done with the current task).
'''
if not self._callbacks_loaded:
self.load_callbacks()
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars)
new_play = play.copy()
new_play.post_validate(templar)
play_context = PlayContext(new_play, self._options, self.passwords)
for callback_plugin in self._callback_plugins:
if hasattr(callback_plugin, 'set_play_context'):
callback_plugin.set_play_context(play_context)
self.send_callback('v2_playbook_on_play_start', new_play)
# initialize the shared dictionary containing the notified handlers
self._initialize_notified_handlers(new_play.handlers)
# load the specified strategy (or the default linear one)
strategy = strategy_loader.get(new_play.strategy, self)
if strategy is None:
raise AnsibleError("Invalid play strategy specified: %s" % new_play.strategy, obj=play._ds)
# build the iterator
iterator = PlayIterator(inventory=self._inventory, play=new_play, play_context=play_context, all_vars=all_vars)
# and run the play using the strategy
return strategy.run(iterator, play_context)
def cleanup(self):
self._display.debug("RUNNING CLEANUP")
self.terminate()
self._final_q.close()
self._result_prc.terminate()
for (worker_prc, main_q, rslt_q) in self._workers:
rslt_q.close()
main_q.close()
worker_prc.terminate()
def get_inventory(self):
return self._inventory
def get_variable_manager(self):
return self._variable_manager
def get_loader(self):
return self._loader
def get_notified_handlers(self):
return self._notified_handlers
def get_workers(self):
return self._workers[:]
def terminate(self):
self._terminated = True
def send_callback(self, method_name, *args, **kwargs):
for callback_plugin in self._callback_plugins:
# a plugin that set self.disabled to True will not be called
# see osx_say.py example for such a plugin
if getattr(callback_plugin, 'disabled', False):
continue
methods = [
getattr(callback_plugin, method_name, None),
getattr(callback_plugin, 'v2_on_any', None)
]
for method in methods:
if method is not None:
try:
method(*args, **kwargs)
except Exception as e:
self._display.warning('Error when using %s: %s' % (method, str(e)))
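# A hedged usage sketch (mirrors how a playbook executor typically drives
# this class; the surrounding objects are assumed to already exist):
#
#   tqm = TaskQueueManager(inventory, variable_manager, loader, display,
#                          options, passwords)
#   try:
#       result = tqm.run(play)
#   finally:
#       tqm.cleanup()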
|
winklerand/pandas
|
refs/heads/master
|
pandas/tests/indexes/timedeltas/test_indexing.py
|
2
|
from datetime import timedelta
import pytest
import numpy as np
import pandas as pd
import pandas.util.testing as tm
from pandas import TimedeltaIndex, timedelta_range, compat, Index, Timedelta
class TestTimedeltaIndex(object):
_multiprocess_can_split_ = True
def test_insert(self):
idx = TimedeltaIndex(['4day', '1day', '2day'], name='idx')
result = idx.insert(2, timedelta(days=5))
exp = TimedeltaIndex(['4day', '1day', '5day', '2day'], name='idx')
tm.assert_index_equal(result, exp)
# insertion of non-datetime should coerce to object index
result = idx.insert(1, 'inserted')
expected = Index([Timedelta('4day'), 'inserted', Timedelta('1day'),
Timedelta('2day')], name='idx')
assert not isinstance(result, TimedeltaIndex)
tm.assert_index_equal(result, expected)
assert result.name == expected.name
idx = timedelta_range('1day 00:00:01', periods=3, freq='s', name='idx')
# preserve freq
expected_0 = TimedeltaIndex(['1day', '1day 00:00:01', '1day 00:00:02',
'1day 00:00:03'],
name='idx', freq='s')
expected_3 = TimedeltaIndex(['1day 00:00:01', '1day 00:00:02',
'1day 00:00:03', '1day 00:00:04'],
name='idx', freq='s')
# reset freq to None
expected_1_nofreq = TimedeltaIndex(['1day 00:00:01', '1day 00:00:01',
'1day 00:00:02', '1day 00:00:03'],
name='idx', freq=None)
expected_3_nofreq = TimedeltaIndex(['1day 00:00:01', '1day 00:00:02',
'1day 00:00:03', '1day 00:00:05'],
name='idx', freq=None)
cases = [(0, Timedelta('1day'), expected_0),
(-3, Timedelta('1day'), expected_0),
(3, Timedelta('1day 00:00:04'), expected_3),
(1, Timedelta('1day 00:00:01'), expected_1_nofreq),
(3, Timedelta('1day 00:00:05'), expected_3_nofreq)]
for n, d, expected in cases:
result = idx.insert(n, d)
tm.assert_index_equal(result, expected)
assert result.name == expected.name
assert result.freq == expected.freq
# GH 18295 (test missing)
expected = TimedeltaIndex(['1day', pd.NaT, '2day', '3day'])
for na in (np.nan, pd.NaT, None):
result = timedelta_range('1day', '3day').insert(1, na)
tm.assert_index_equal(result, expected)
def test_delete(self):
idx = timedelta_range(start='1 Days', periods=5, freq='D', name='idx')
        # preserve freq
expected_0 = timedelta_range(start='2 Days', periods=4, freq='D',
name='idx')
expected_4 = timedelta_range(start='1 Days', periods=4, freq='D',
name='idx')
# reset freq to None
expected_1 = TimedeltaIndex(
['1 day', '3 day', '4 day', '5 day'], freq=None, name='idx')
cases = {0: expected_0,
-5: expected_0,
-1: expected_4,
4: expected_4,
1: expected_1}
for n, expected in compat.iteritems(cases):
result = idx.delete(n)
tm.assert_index_equal(result, expected)
assert result.name == expected.name
assert result.freq == expected.freq
with pytest.raises((IndexError, ValueError)):
            # either, depending on numpy version
result = idx.delete(5)
def test_delete_slice(self):
idx = timedelta_range(start='1 days', periods=10, freq='D', name='idx')
        # preserve freq
expected_0_2 = timedelta_range(start='4 days', periods=7, freq='D',
name='idx')
expected_7_9 = timedelta_range(start='1 days', periods=7, freq='D',
name='idx')
# reset freq to None
expected_3_5 = TimedeltaIndex(['1 d', '2 d', '3 d',
'7 d', '8 d', '9 d', '10d'],
freq=None, name='idx')
cases = {(0, 1, 2): expected_0_2,
(7, 8, 9): expected_7_9,
(3, 4, 5): expected_3_5}
for n, expected in compat.iteritems(cases):
result = idx.delete(n)
tm.assert_index_equal(result, expected)
assert result.name == expected.name
assert result.freq == expected.freq
result = idx.delete(slice(n[0], n[-1] + 1))
tm.assert_index_equal(result, expected)
assert result.name == expected.name
assert result.freq == expected.freq
def test_getitem(self):
idx1 = timedelta_range('1 day', '31 day', freq='D', name='idx')
for idx in [idx1]:
result = idx[0]
assert result == Timedelta('1 day')
result = idx[0:5]
expected = timedelta_range('1 day', '5 day', freq='D',
name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx[0:10:2]
expected = timedelta_range('1 day', '9 day', freq='2D',
name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx[-20:-5:3]
expected = timedelta_range('12 day', '24 day', freq='3D',
name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx[4::-1]
expected = TimedeltaIndex(['5 day', '4 day', '3 day',
'2 day', '1 day'],
freq='-1D', name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
def test_take(self):
# GH 10295
idx1 = timedelta_range('1 day', '31 day', freq='D', name='idx')
for idx in [idx1]:
result = idx.take([0])
assert result == Timedelta('1 day')
result = idx.take([-1])
assert result == Timedelta('31 day')
result = idx.take([0, 1, 2])
expected = timedelta_range('1 day', '3 day', freq='D',
name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx.take([0, 2, 4])
expected = timedelta_range('1 day', '5 day', freq='2D',
name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx.take([7, 4, 1])
expected = timedelta_range('8 day', '2 day', freq='-3D',
name='idx')
tm.assert_index_equal(result, expected)
assert result.freq == expected.freq
result = idx.take([3, 2, 5])
expected = TimedeltaIndex(['4 day', '3 day', '6 day'], name='idx')
tm.assert_index_equal(result, expected)
assert result.freq is None
result = idx.take([-3, 2, 5])
expected = TimedeltaIndex(['29 day', '3 day', '6 day'], name='idx')
tm.assert_index_equal(result, expected)
assert result.freq is None
def test_take_invalid_kwargs(self):
idx = timedelta_range('1 day', '31 day', freq='D', name='idx')
indices = [1, 6, 5, 9, 10, 13, 15, 3]
msg = r"take\(\) got an unexpected keyword argument 'foo'"
tm.assert_raises_regex(TypeError, msg, idx.take,
indices, foo=2)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, idx.take,
indices, out=indices)
msg = "the 'mode' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, idx.take,
indices, mode='clip')
# TODO: This method came from test_timedelta; de-dup with version above
def test_take2(self):
tds = ['1day 02:00:00', '1 day 04:00:00', '1 day 10:00:00']
idx = TimedeltaIndex(start='1d', end='2d', freq='H', name='idx')
expected = TimedeltaIndex(tds, freq=None, name='idx')
taken1 = idx.take([2, 4, 10])
taken2 = idx[[2, 4, 10]]
for taken in [taken1, taken2]:
tm.assert_index_equal(taken, expected)
assert isinstance(taken, TimedeltaIndex)
assert taken.freq is None
assert taken.name == expected.name
def test_take_fill_value(self):
# GH 12631
idx = TimedeltaIndex(['1 days', '2 days', '3 days'],
name='xxx')
result = idx.take(np.array([1, 0, -1]))
expected = TimedeltaIndex(['2 days', '1 days', '3 days'],
name='xxx')
tm.assert_index_equal(result, expected)
# fill_value
result = idx.take(np.array([1, 0, -1]), fill_value=True)
expected = TimedeltaIndex(['2 days', '1 days', 'NaT'],
name='xxx')
tm.assert_index_equal(result, expected)
# allow_fill=False
result = idx.take(np.array([1, 0, -1]), allow_fill=False,
fill_value=True)
expected = TimedeltaIndex(['2 days', '1 days', '3 days'],
name='xxx')
tm.assert_index_equal(result, expected)
msg = ('When allow_fill=True and fill_value is not None, '
'all indices must be >= -1')
with tm.assert_raises_regex(ValueError, msg):
idx.take(np.array([1, 0, -2]), fill_value=True)
with tm.assert_raises_regex(ValueError, msg):
idx.take(np.array([1, 0, -5]), fill_value=True)
with pytest.raises(IndexError):
idx.take(np.array([1, -5]))
def test_get_loc(self):
idx = pd.to_timedelta(['0 days', '1 days', '2 days'])
for method in [None, 'pad', 'backfill', 'nearest']:
assert idx.get_loc(idx[1], method) == 1
assert idx.get_loc(idx[1].to_pytimedelta(), method) == 1
assert idx.get_loc(str(idx[1]), method) == 1
assert idx.get_loc(idx[1], 'pad',
tolerance=Timedelta(0)) == 1
assert idx.get_loc(idx[1], 'pad',
tolerance=np.timedelta64(0, 's')) == 1
assert idx.get_loc(idx[1], 'pad',
tolerance=timedelta(0)) == 1
with tm.assert_raises_regex(ValueError,
'unit abbreviation w/o a number'):
idx.get_loc(idx[1], method='nearest', tolerance='foo')
with pytest.raises(
ValueError,
match='tolerance size must match'):
idx.get_loc(idx[1], method='nearest',
tolerance=[Timedelta(0).to_timedelta64(),
Timedelta(0).to_timedelta64()])
for method, loc in [('pad', 1), ('backfill', 2), ('nearest', 1)]:
assert idx.get_loc('1 day 1 hour', method) == loc
# GH 16909
assert idx.get_loc(idx[1].to_timedelta64()) == 1
# GH 16896
assert idx.get_loc('0 days') == 0
def test_get_loc_nat(self):
tidx = TimedeltaIndex(['1 days 01:00:00', 'NaT', '2 days 01:00:00'])
assert tidx.get_loc(pd.NaT) == 1
assert tidx.get_loc(None) == 1
assert tidx.get_loc(float('nan')) == 1
assert tidx.get_loc(np.nan) == 1
def test_get_indexer(self):
idx = pd.to_timedelta(['0 days', '1 days', '2 days'])
tm.assert_numpy_array_equal(idx.get_indexer(idx),
np.array([0, 1, 2], dtype=np.intp))
target = pd.to_timedelta(['-1 hour', '12 hours', '1 day 1 hour'])
tm.assert_numpy_array_equal(idx.get_indexer(target, 'pad'),
np.array([-1, 0, 1], dtype=np.intp))
tm.assert_numpy_array_equal(idx.get_indexer(target, 'backfill'),
np.array([0, 1, 2], dtype=np.intp))
tm.assert_numpy_array_equal(idx.get_indexer(target, 'nearest'),
np.array([0, 1, 1], dtype=np.intp))
res = idx.get_indexer(target, 'nearest',
tolerance=Timedelta('1 hour'))
tm.assert_numpy_array_equal(res, np.array([0, -1, 1], dtype=np.intp))
|
nhomar/odoo
|
refs/heads/8.0
|
addons/portal_claim/__init__.py
|
346
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import portal_claim
|
blueboxgroup/nova
|
refs/heads/master
|
nova/cells/rpc_driver.py
|
63
|
# Copyright (c) 2012 Rackspace Hosting
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cells RPC Communication Driver
"""
from oslo_config import cfg
import oslo_messaging as messaging
from nova.cells import driver
from nova import rpc
cell_rpc_driver_opts = [
cfg.StrOpt('rpc_driver_queue_base',
default='cells.intercell',
help="Base queue name to use when communicating between "
"cells. Various topics by message type will be "
"appended to this.")]
CONF = cfg.CONF
CONF.register_opts(cell_rpc_driver_opts, group='cells')
CONF.import_opt('call_timeout', 'nova.cells.opts', group='cells')
rpcapi_cap_opt = cfg.StrOpt('intercell',
help='Set a version cap for messages sent between cells services')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
class CellsRPCDriver(driver.BaseCellsDriver):
"""Driver for cell<->cell communication via RPC. This is used to
setup the RPC consumers as well as to send a message to another cell.
One instance of this class will be created for every neighbor cell
that we find in the DB and it will be associated with the cell in
its CellState.
One instance is also created by the cells manager for setting up
the consumers.
"""
def __init__(self, *args, **kwargs):
super(CellsRPCDriver, self).__init__(*args, **kwargs)
self.rpc_servers = []
self.intercell_rpcapi = InterCellRPCAPI()
def start_servers(self, msg_runner):
"""Start RPC servers.
Start up 2 separate servers for handling inter-cell
communication via RPC. Both handle the same types of
messages, but requests/replies are separated to solve
potential deadlocks. (If we used the same queue for both,
it's possible to exhaust the RPC thread pool while we wait
for replies.. such that we'd never consume a reply.)
"""
topic_base = CONF.cells.rpc_driver_queue_base
proxy_manager = InterCellRPCDispatcher(msg_runner)
for msg_type in msg_runner.get_message_types():
target = messaging.Target(topic='%s.%s' % (topic_base, msg_type),
server=CONF.host)
# NOTE(comstud): We do not need to use the object serializer
# on this because object serialization is taken care for us in
# the nova.cells.messaging module.
server = rpc.get_server(target, endpoints=[proxy_manager])
server.start()
self.rpc_servers.append(server)
def stop_servers(self):
"""Stop RPC servers.
NOTE: Currently there's no hooks when stopping services
to have managers cleanup, so this is not currently called.
"""
for server in self.rpc_servers:
server.stop()
def send_message_to_cell(self, cell_state, message):
"""Use the IntercellRPCAPI to send a message to a cell."""
self.intercell_rpcapi.send_message_to_cell(cell_state, message)
class InterCellRPCAPI(object):
"""Client side of the Cell<->Cell RPC API.
The CellsRPCDriver uses this to make calls to another cell.
API version history:
1.0 - Initial version.
... Grizzly supports message version 1.0. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 1.0.
"""
VERSION_ALIASES = {
'grizzly': '1.0',
}
def __init__(self):
super(InterCellRPCAPI, self).__init__()
self.version_cap = (
self.VERSION_ALIASES.get(CONF.upgrade_levels.intercell,
CONF.upgrade_levels.intercell))
self.transports = {}
def _get_client(self, next_hop, topic):
"""Turn the DB information for a cell into a messaging.RPCClient."""
transport = self._get_transport(next_hop)
target = messaging.Target(topic=topic, version='1.0')
serializer = rpc.RequestContextSerializer(None)
return messaging.RPCClient(transport,
target,
version_cap=self.version_cap,
serializer=serializer)
def _get_transport(self, next_hop):
"""NOTE(belliott) Each Transport object contains connection pool
state. Maintain references to them to avoid continual reconnects
to the message broker.
"""
transport_url = next_hop.db_info['transport_url']
if transport_url not in self.transports:
transport = messaging.get_transport(cfg.CONF, transport_url,
rpc.TRANSPORT_ALIASES)
self.transports[transport_url] = transport
else:
transport = self.transports[transport_url]
return transport
def send_message_to_cell(self, cell_state, message):
"""Send a message to another cell by JSON-ifying the message and
making an RPC cast to 'process_message'. If the message says to
fanout, do it. The topic that is used will be
'CONF.rpc_driver_queue_base.<message_type>'.
"""
topic_base = CONF.cells.rpc_driver_queue_base
topic = '%s.%s' % (topic_base, message.message_type)
cctxt = self._get_client(cell_state, topic)
if message.fanout:
cctxt = cctxt.prepare(fanout=message.fanout)
return cctxt.cast(message.ctxt, 'process_message',
message=message.to_json())
class InterCellRPCDispatcher(object):
"""RPC Dispatcher to handle messages received from other cells.
All messages received here have come from a sibling cell. Depending
on the ultimate target and type of message, we may process the message
in this cell, relay the message to another sibling cell, or both. This
logic is defined by the message class in the nova.cells.messaging module.
"""
target = messaging.Target(version='1.0')
def __init__(self, msg_runner):
"""Init the Intercell RPC Dispatcher."""
self.msg_runner = msg_runner
def process_message(self, _ctxt, message):
"""We received a message from another cell. Use the MessageRunner
to turn this from JSON back into an instance of the correct
Message class. Then process it!
"""
message = self.msg_runner.message_from_json(message)
message.process()
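# For illustration: with the default rpc_driver_queue_base of
# 'cells.intercell', a message type of 'targeted' (one of the types
# reported by msg_runner.get_message_types()) yields the RPC topic
# 'cells.intercell.targeted' for both the server started above and the
# client-side cast in InterCellRPCAPI.send_message_to_cell().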
|
wangsai/oppia
|
refs/heads/master
|
extensions/__init__.py
|
12133432
| |
paulrouget/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/webdriver/tests/find_element_from_element/__init__.py
|
12133432
| |
laurentgo/pants
|
refs/heads/master
|
tests/python/pants_test/backend/python/__init__.py
|
12133432
| |
silenci/neutron
|
refs/heads/master
|
neutron/scheduler/__init__.py
|
12133432
| |
ujenmr/ansible
|
refs/heads/devel
|
lib/ansible/modules/windows/win_wait_for_process.py
|
14
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub, actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_wait_for_process
version_added: '2.7'
short_description: Waits for a process to exist or not exist before continuing.
description:
- Waiting for a process to start or stop.
- This is useful when Windows services behave poorly and do not enumerate external dependencies in their manifest.
options:
process_name_exact:
description:
- The name of the process(es) for which to wait.
type: str
process_name_pattern:
description:
- RegEx pattern matching desired process(es).
type: str
sleep:
description:
- Number of seconds to sleep between checks.
- Only applies when waiting for a process to start. Waiting for a process to start
does not have a native non-polling mechanism. Waiting for a stop uses native PowerShell
and does not require polling.
type: int
default: 1
process_min_count:
description:
      - Minimum number of processes matching the supplied pattern to satisfy the C(present) condition.
- Only applies to C(present).
type: int
default: 1
pid:
description:
- The PID of the process.
type: int
owner:
description:
- The owner of the process.
- Requires PowerShell version 4.0 or newer.
type: str
pre_wait_delay:
description:
- Seconds to wait before checking processes.
type: int
default: 0
post_wait_delay:
description:
- Seconds to wait after checking for processes.
type: int
default: 0
state:
description:
- When checking for a running process C(present) will block execution
until the process exists, or until the timeout has been reached.
      C(absent) will block execution until the process no longer exists,
or until the timeout has been reached.
- When waiting for C(present), the module will return changed only if
the process was not present on the initial check but became present on
subsequent checks.
- If, while waiting for C(absent), new processes matching the supplied
pattern are started, these new processes will not be included in the
action.
type: str
default: present
choices: [ absent, present ]
timeout:
description:
      - The maximum number of seconds to wait for a process to start or stop
before erroring out.
type: int
default: 300
seealso:
- module: wait_for
- module: win_wait_for
author:
- Charles Crossan (@crossan007)
'''
EXAMPLES = r'''
- name: Wait 300 seconds for all Oracle VirtualBox processes to stop. (VBoxHeadless, VirtualBox, VBoxSVC)
win_wait_for_process:
process_name_pattern: 'v(irtual)?box(headless|svc)?'
state: absent
timeout: 500
- name: Wait 300 seconds for 3 instances of cmd to start, waiting 5 seconds between each check
win_wait_for_process:
process_name_exact: cmd
state: present
timeout: 500
sleep: 5
process_min_count: 3
'''
RETURN = r'''
elapsed:
description: The elapsed seconds between the start of poll and the end of the module.
returned: always
type: float
sample: 3.14159265
matched_processes:
description: List of matched processes (either stopped or started).
returned: always
type: complex
contains:
name:
description: The name of the matched process.
returned: always
type: str
sample: svchost
owner:
description: The owner of the matched process.
returned: when supported by PowerShell
type: str
sample: NT AUTHORITY\SYSTEM
pid:
description: The PID of the matched process.
returned: always
type: int
sample: 7908
'''
|
geodrinx/gearthview
|
refs/heads/master
|
ext-libs/twisted/test/ssl_helpers.py
|
45
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Helper classes for twisted.test.test_ssl.
They are in a separate module so they will not prevent test_ssl importing if
pyOpenSSL is unavailable.
"""
from __future__ import division, absolute_import
from twisted.python.compat import nativeString
from twisted.internet import ssl
from twisted.python.filepath import FilePath
from OpenSSL import SSL
certPath = nativeString(FilePath(__file__.encode("utf-8")
).sibling(b"server.pem").path)
class ClientTLSContext(ssl.ClientContextFactory):
isClient = 1
def getContext(self):
return SSL.Context(SSL.TLSv1_METHOD)
class ServerTLSContext:
isClient = 0
def __init__(self, filename=certPath):
self.filename = filename
def getContext(self):
ctx = SSL.Context(SSL.TLSv1_METHOD)
ctx.use_certificate_file(self.filename)
ctx.use_privatekey_file(self.filename)
return ctx
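# A hedged usage sketch (assumes a running reactor and existing protocol
# factories; not part of this helper module):
#
#   from twisted.internet import reactor
#   reactor.listenSSL(4433, serverFactory, ServerTLSContext())
#   reactor.connectSSL('localhost', 4433, clientFactory, ClientTLSContext())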
|
jkthompson/nupic
|
refs/heads/master
|
examples/opf/experiments/multistep/hotgym/permutations.py
|
5
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by ExpGenerator to generate the actual
permutations.py file by replacing $XXXXXXXX tokens with desired values.
This permutations.py file was generated by:
'~/nta/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.swarming.permutationhelpers import *
# The name of the field being predicted. Any allowed permutation MUST contain
# the prediction field.
# (generated from PREDICTION_FIELD)
predictedField = 'consumption'
permutations = {
'modelParams': {
'inferenceType': PermuteChoices(['NontemporalMultiStep', 'TemporalMultiStep']),
'sensorParams': {
'encoders': {
'timestamp_dayOfWeek': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.dayOfWeek', radius=PermuteFloat(1.000000, 6.000000), w=21),
'timestamp_timeOfDay': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.timeOfDay', radius=PermuteFloat(0.500000, 12.000000), w=21),
'consumption': PermuteEncoder(fieldName='consumption', encoderClass='AdaptiveScalarEncoder', n=PermuteInt(28, 521), w=21, clipInput=True),
'timestamp_weekend': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.weekend', radius=PermuteChoices([1]), w=21),
},
},
'tpParams': {
'minThreshold': PermuteInt(9, 12),
'activationThreshold': PermuteInt(12, 16),
'pamLength': PermuteInt(1, 5),
},
}
}
# Fields selected for final hypersearch report;
# NOTE: These values are used as regular expressions by RunPermutations.py's
# report generator
# (fieldname values generated from PERM_PREDICTED_FIELD_NAME)
report = [
'.*consumption.*',
]
# Permutation optimization setting: either minimize or maximize metric
# used by RunPermutations.
# NOTE: The value is used as a regular expressions by RunPermutations.py's
# report generator
# (generated from minimize = 'prediction:aae:window=1000:field=consumption')
minimize = "multiStepBestPredictions:multiStep:errorMetric='aae':steps=1:window=1000:field=consumption"
#############################################################################
def permutationFilter(perm):
""" This function can be used to selectively filter out specific permutation
combinations. It is called by RunPermutations for every possible permutation
  of the variables in the permutations dict. It should return True for a valid
combination of permutation values and False for an invalid one.
Parameters:
---------------------------------------------------------
perm: dict of one possible combination of name:value
pairs chosen from permutations.
"""
# An example of how to use this
#if perm['__consumption_encoder']['maxval'] > 300:
# return False;
#
return True
|
robdennis/sideboard
|
refs/heads/master
|
tests/plugins/manypackages/multi/env/lib/python2.7/sre.py
|
4
|
/usr/lib/python2.7/sre.py
|
pabelanger/did
|
refs/heads/master
|
docs/conf.py
|
11
|
# -*- coding: utf-8 -*-
#
# did documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 27 17:44:03 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
try:
# use bootstrap theme if user has it installed
import sphinx_bootstrap_theme
HTML_THEME = 'bootstrap'
html_theme_path = [sphinx_bootstrap_theme.get_html_theme_path()]
except ImportError:
try:
# fall back to sphinx_rtd_theme if available
import sphinx_rtd_theme
HTML_THEME = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
# and fall back to 'default' if neither of those are available
HTML_THEME = 'default'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_man = 'man.1'
# General information about the project.
project = u'did'
copyright = u'2015, Petr Šplíchal'
author = u'Petr Šplíchal'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = HTML_THEME
# 1.3.1 sphinx READTHEDOCS build compat
# SEE:
# https://github.com/shabda/pysaml2/commit/d55bfeebe
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:  # force the RTD theme when building on Read the Docs
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'diddoc'
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_man, 'did', u'did Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
|
dosiecki/NewsBlur
|
refs/heads/master
|
apps/search/management/commands/index_stories.py
|
15
|
import re
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from apps.rss_feeds.models import Feed, MStory
from apps.reader.models import UserSubscription
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-u", "--user", dest="user", nargs=1, help="Specify user id or username"),
make_option("-R", "--reindex", dest="reindex", action="store_true", help="Drop index and reindex all stories."),
)
def handle(self, *args, **options):
if options['reindex']:
MStory.index_all_for_search()
return
if not options['user']:
print "Missing user. Did you want to reindex everything? Use -R."
return
if re.match(r"([0-9]+)", options['user']):
user = User.objects.get(pk=int(options['user']))
else:
user = User.objects.get(username=options['user'])
subscriptions = UserSubscription.objects.filter(user=user)
print " ---> Indexing %s feeds..." % subscriptions.count()
for sub in subscriptions:
try:
sub.feed.index_stories_for_search()
except Feed.DoesNotExist:
print " ***> Couldn't find %s" % sub.feed_id
|
westinedu/similarinterest
|
refs/heads/master
|
zinnia/management/commands/spam_cleanup.py
|
2
|
"""Spam cleanup command module for Zinnia"""
from django.contrib import comments
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import NoArgsCommand
from zinnia.models import Entry
class Command(NoArgsCommand):
"""Command object for removing comments
flagged as spam"""
help = "Remove entry's comments flagged as spam."
def handle_noargs(self, **options):
verbosity = int(options.get('verbosity', 1))
content_type = ContentType.objects.get_for_model(Entry)
spams = comments.get_model().objects.filter(is_public=False,
content_type=content_type,
flags__flag='spam')
spams_count = spams.count()
spams.delete()
if verbosity:
print '%i spam comments deleted.' % spams_count
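# A hedged invocation sketch:
#
#   ./manage.py spam_cleanup --verbosity=1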
|
lingthio/Flask-User
|
refs/heads/master
|
flask_user/email_adapters/email_adapter_interface.py
|
1
|
"""This module defines the EmailAdapter interface.
"""
# Author: Ling Thio <ling.thio@gmail.com>
# Copyright (c) 2013 Ling Thio
from __future__ import print_function
from flask_user import ConfigError
class EmailAdapterInterface(object):
""" Define the EmailAdapter interface to send emails through various email services."""
def __init__(self, app):
"""
Args:
app(Flask): The Flask application instance.
"""
pass
def send_email_message(self, recipient, subject, html_message, text_message, sender_email, sender_name):
""" Send email message via an email mailer.
Args:
recipient: Email address or tuple of (Name, Email-address).
subject: Subject line.
html_message: The message body in HTML.
            text_message: The message body in plain text.
            sender_email: Sender email address.
            sender_name: Sender name.
        """
raise NotImplementedError
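# A hedged sketch (not part of Flask-User) of a minimal concrete adapter
# satisfying the interface above; useful as a stub in tests:
class NullEmailAdapter(EmailAdapterInterface):
    """An EmailAdapter that prints messages instead of sending them."""
    def send_email_message(self, recipient, subject, html_message,
                           text_message, sender_email, sender_name):
        print('Would send %r to %r from %s <%s>'
              % (subject, recipient, sender_name, sender_email))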
|
fredhusser/scikit-learn
|
refs/heads/master
|
benchmarks/bench_glm.py
|
297
|
"""
A comparison of different methods in GLM
Data comes from a random square matrix.
"""
from datetime import datetime
import numpy as np
from sklearn import linear_model
from sklearn.utils.bench import total_seconds
if __name__ == '__main__':
import pylab as pl
n_iter = 40
time_ridge = np.empty(n_iter)
time_ols = np.empty(n_iter)
time_lasso = np.empty(n_iter)
dimensions = 500 * np.arange(1, n_iter + 1)
for i in range(n_iter):
print('Iteration %s of %s' % (i, n_iter))
n_samples, n_features = 10 * i + 3, 10 * i + 3
X = np.random.randn(n_samples, n_features)
Y = np.random.randn(n_samples)
start = datetime.now()
ridge = linear_model.Ridge(alpha=1.)
ridge.fit(X, Y)
time_ridge[i] = total_seconds(datetime.now() - start)
start = datetime.now()
ols = linear_model.LinearRegression()
ols.fit(X, Y)
time_ols[i] = total_seconds(datetime.now() - start)
start = datetime.now()
lasso = linear_model.LassoLars()
lasso.fit(X, Y)
time_lasso[i] = total_seconds(datetime.now() - start)
pl.figure('scikit-learn GLM benchmark results')
pl.xlabel('Dimensions')
pl.ylabel('Time (s)')
pl.plot(dimensions, time_ridge, color='r')
pl.plot(dimensions, time_ols, color='g')
pl.plot(dimensions, time_lasso, color='b')
pl.legend(['Ridge', 'OLS', 'LassoLars'], loc='upper left')
pl.axis('tight')
pl.show()
|
anbasile/flask_sample
|
refs/heads/master
|
flask/lib/python2.7/site-packages/pycparser/c_lexer.py
|
42
|
#------------------------------------------------------------------------------
# pycparser: c_lexer.py
#
# CLexer class: lexer for the C language
#
# Copyright (C) 2008-2015, Eli Bendersky
# License: BSD
#------------------------------------------------------------------------------
import re
import sys
from .ply import lex
from .ply.lex import TOKEN
class CLexer(object):
""" A lexer for the C language. After building it, set the
input text with input(), and call token() to get new
tokens.
The public attribute filename can be set to an initial
        filename, but the lexer will update it upon #line
directives.
"""
def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
type_lookup_func):
""" Create a new Lexer.
error_func:
An error function. Will be called with an error
message, line and column as arguments, in case of
an error during lexing.
on_lbrace_func, on_rbrace_func:
Called when an LBRACE or RBRACE is encountered
(likely to push/pop type_lookup_func's scope)
type_lookup_func:
A type lookup function. Given a string, it must
return True IFF this string is a name of a type
that was defined with a typedef earlier.
"""
self.error_func = error_func
self.on_lbrace_func = on_lbrace_func
self.on_rbrace_func = on_rbrace_func
self.type_lookup_func = type_lookup_func
self.filename = ''
# Keeps track of the last token returned from self.token()
self.last_token = None
# Allow either "# line" or "# <num>" to support GCC's
# cpp output
#
self.line_pattern = re.compile('([ \t]*line\W)|([ \t]*\d+)')
self.pragma_pattern = re.compile('[ \t]*pragma\W')
def build(self, **kwargs):
""" Builds the lexer from the specification. Must be
called after the lexer object is created.
This method exists separately, because the PLY
manual warns against calling lex.lex inside
__init__
"""
self.lexer = lex.lex(object=self, **kwargs)
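    # A hedged usage sketch (the callbacks below are no-op placeholders):
    #
    #   clx = CLexer(error_func, lambda: None, lambda: None,
    #                lambda name: False)
    #   clx.build()
    #   clx.input('int x = 3;')
    #   # successive clx.token() calls yield INT, ID, EQUALS,
    #   # INT_CONST_DEC, SEMI, then None at end of input.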
def reset_lineno(self):
""" Resets the internal line number counter of the lexer.
"""
self.lexer.lineno = 1
def input(self, text):
self.lexer.input(text)
def token(self):
self.last_token = self.lexer.token()
return self.last_token
def find_tok_column(self, token):
""" Find the column of the token in its line.
"""
last_cr = self.lexer.lexdata.rfind('\n', 0, token.lexpos)
return token.lexpos - last_cr
######################-- PRIVATE --######################
##
## Internal auxiliary methods
##
def _error(self, msg, token):
location = self._make_tok_location(token)
self.error_func(msg, location[0], location[1])
self.lexer.skip(1)
def _make_tok_location(self, token):
return (token.lineno, self.find_tok_column(token))
##
## Reserved keywords
##
keywords = (
'_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
'REGISTER', 'OFFSETOF',
'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
'VOLATILE', 'WHILE',
)
keyword_map = {}
for keyword in keywords:
if keyword == '_BOOL':
keyword_map['_Bool'] = keyword
elif keyword == '_COMPLEX':
keyword_map['_Complex'] = keyword
else:
keyword_map[keyword.lower()] = keyword
##
## All the tokens recognized by the lexer
##
tokens = keywords + (
# Identifiers
'ID',
# Type identifiers (identifiers previously defined as
# types with typedef)
'TYPEID',
# constants
'INT_CONST_DEC', 'INT_CONST_OCT', 'INT_CONST_HEX', 'INT_CONST_BIN',
'FLOAT_CONST', 'HEX_FLOAT_CONST',
'CHAR_CONST',
'WCHAR_CONST',
# String literals
'STRING_LITERAL',
'WSTRING_LITERAL',
# Operators
'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
'LOR', 'LAND', 'LNOT',
'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
# Assignment
'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL',
'PLUSEQUAL', 'MINUSEQUAL',
'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL',
'OREQUAL',
# Increment/decrement
'PLUSPLUS', 'MINUSMINUS',
# Structure dereference (->)
'ARROW',
# Conditional operator (?)
'CONDOP',
        # Delimiters
'LPAREN', 'RPAREN', # ( )
'LBRACKET', 'RBRACKET', # [ ]
'LBRACE', 'RBRACE', # { }
        'COMMA', 'PERIOD',      # , .
'SEMI', 'COLON', # ; :
# Ellipsis (...)
'ELLIPSIS',
# pre-processor
'PPHASH', # '#'
)
##
## Regexes for use in tokens
##
##
# valid C identifiers (K&R2: A.2.3), plus '$' (supported by some compilers)
identifier = r'[a-zA-Z_$][0-9a-zA-Z_$]*'
hex_prefix = '0[xX]'
hex_digits = '[0-9a-fA-F]+'
bin_prefix = '0[bB]'
bin_digits = '[01]+'
# integer constants (K&R2: A.2.5.1)
integer_suffix_opt = r'(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?'
decimal_constant = '(0'+integer_suffix_opt+')|([1-9][0-9]*'+integer_suffix_opt+')'
octal_constant = '0[0-7]*'+integer_suffix_opt
hex_constant = hex_prefix+hex_digits+integer_suffix_opt
bin_constant = bin_prefix+bin_digits+integer_suffix_opt
bad_octal_constant = '0[0-7]*[89]'
# character constants (K&R2: A.2.5.2)
# Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
# directives with Windows paths as filenames (..\..\dir\file)
    # For the same reason, decimal_escape allows all digit sequences. We want
    # to parse all correct code, even if that means sometimes accepting
    # incorrect code as well.
#
simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
decimal_escape = r"""(\d+)"""
hex_escape = r"""(x[0-9a-fA-F]+)"""
bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
char_const = "'"+cconst_char+"'"
wchar_const = 'L'+char_const
unmatched_quote = "('"+cconst_char+"*\\n)|('"+cconst_char+"*$)"
bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
# string literals (K&R2: A.2.6)
string_char = r"""([^"\\\n]|"""+escape_sequence+')'
string_literal = '"'+string_char+'*"'
wstring_literal = 'L'+string_literal
bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"'
# floating constants (K&R2: A.2.5.3)
exponent_part = r"""([eE][-+]?[0-9]+)"""
fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
floating_constant = '(((('+fractional_constant+')'+exponent_part+'?)|([0-9]+'+exponent_part+'))[FfLl]?)'
binary_exponent_part = r'''([pP][+-]?[0-9]+)'''
hex_fractional_constant = '((('+hex_digits+r""")?\."""+hex_digits+')|('+hex_digits+r"""\.))"""
hex_floating_constant = '('+hex_prefix+'('+hex_digits+'|'+hex_fractional_constant+')'+binary_exponent_part+'[FfLl]?)'
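    # Hedged examples (not in the original source) of literals these regexes
    # accept: decimal_constant matches '42' and '42UL'; hex_constant matches
    # '0x1fU'; floating_constant matches '1.5e-3f'; hex_floating_constant
    # matches '0x1.8p3'.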
##
## Lexer states: used for preprocessor \n-terminated directives
##
states = (
# ppline: preprocessor line directives
#
('ppline', 'exclusive'),
# pppragma: pragma
#
('pppragma', 'exclusive'),
)
def t_PPHASH(self, t):
r'[ \t]*\#'
if self.line_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
t.lexer.begin('ppline')
self.pp_line = self.pp_filename = None
elif self.pragma_pattern.match(t.lexer.lexdata, pos=t.lexer.lexpos):
t.lexer.begin('pppragma')
else:
t.type = 'PPHASH'
return t
##
## Rules for the ppline state
##
@TOKEN(string_literal)
def t_ppline_FILENAME(self, t):
if self.pp_line is None:
self._error('filename before line number in #line', t)
else:
self.pp_filename = t.value.lstrip('"').rstrip('"')
@TOKEN(decimal_constant)
def t_ppline_LINE_NUMBER(self, t):
if self.pp_line is None:
self.pp_line = t.value
else:
# Ignore: GCC's cpp sometimes inserts a numeric flag
# after the file name
pass
def t_ppline_NEWLINE(self, t):
r'\n'
if self.pp_line is None:
self._error('line number missing in #line', t)
else:
self.lexer.lineno = int(self.pp_line)
if self.pp_filename is not None:
self.filename = self.pp_filename
t.lexer.begin('INITIAL')
def t_ppline_PPLINE(self, t):
r'line'
pass
t_ppline_ignore = ' \t'
def t_ppline_error(self, t):
self._error('invalid #line directive', t)
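    # Hedged walkthrough (not in the original source): feeding the directive
    # '#line 20 "foo.c"\n' through the rules above consumes 'line' via
    # t_ppline_PPLINE, stores pp_line='20' and pp_filename='foo.c', and on
    # the newline sets lexer.lineno to 20 and self.filename to 'foo.c'
    # before returning to the INITIAL state.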
##
## Rules for the pppragma state
##
def t_pppragma_NEWLINE(self, t):
r'\n'
t.lexer.lineno += 1
t.lexer.begin('INITIAL')
def t_pppragma_PPPRAGMA(self, t):
r'pragma'
pass
t_pppragma_ignore = ' \t<>.-{}();=+-*/$%@&^~!?:,0123456789'
@TOKEN(string_literal)
def t_pppragma_STR(self, t): pass
@TOKEN(identifier)
def t_pppragma_ID(self, t): pass
def t_pppragma_error(self, t):
self._error('invalid #pragma directive', t)
##
## Rules for the normal state
##
t_ignore = ' \t'
# Newlines
def t_NEWLINE(self, t):
r'\n+'
t.lexer.lineno += t.value.count("\n")
# Operators
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='
# Assignment operators
t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'\^='
# Increment/decrement
t_PLUSPLUS = r'\+\+'
t_MINUSMINUS = r'--'
# ->
t_ARROW = r'->'
# ?
t_CONDOP = r'\?'
    # Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'
# Scope delimiters
# To see why on_lbrace_func is needed, consider:
# typedef char TT;
# void foo(int TT) { TT = 10; }
# TT x = 5;
# Outside the function, TT is a typedef, but inside (starting and ending
# with the braces) it's a parameter. The trouble begins with yacc's
# lookahead token. If we open a new scope in brace_open, then TT has
# already been read and incorrectly interpreted as TYPEID. So, we need
# to open and close scopes from within the lexer.
    # The same applies to the TT immediately after the end of the function.
#
@TOKEN(r'\{')
def t_LBRACE(self, t):
self.on_lbrace_func()
return t
@TOKEN(r'\}')
def t_RBRACE(self, t):
self.on_rbrace_func()
return t
t_STRING_LITERAL = string_literal
# The following floating and integer constants are defined as
# functions to impose a strict order (otherwise, decimal
# is placed before the others because its regex is longer,
# and this is bad)
#
@TOKEN(floating_constant)
def t_FLOAT_CONST(self, t):
return t
@TOKEN(hex_floating_constant)
def t_HEX_FLOAT_CONST(self, t):
return t
@TOKEN(hex_constant)
def t_INT_CONST_HEX(self, t):
return t
@TOKEN(bin_constant)
def t_INT_CONST_BIN(self, t):
return t
@TOKEN(bad_octal_constant)
def t_BAD_CONST_OCT(self, t):
msg = "Invalid octal constant"
self._error(msg, t)
@TOKEN(octal_constant)
def t_INT_CONST_OCT(self, t):
return t
@TOKEN(decimal_constant)
def t_INT_CONST_DEC(self, t):
return t
# Must come before bad_char_const, to prevent it from
# catching valid char constants as invalid
#
@TOKEN(char_const)
def t_CHAR_CONST(self, t):
return t
@TOKEN(wchar_const)
def t_WCHAR_CONST(self, t):
return t
@TOKEN(unmatched_quote)
def t_UNMATCHED_QUOTE(self, t):
msg = "Unmatched '"
self._error(msg, t)
@TOKEN(bad_char_const)
def t_BAD_CHAR_CONST(self, t):
msg = "Invalid char constant %s" % t.value
self._error(msg, t)
@TOKEN(wstring_literal)
def t_WSTRING_LITERAL(self, t):
return t
# unmatched string literals are caught by the preprocessor
@TOKEN(bad_string_literal)
def t_BAD_STRING_LITERAL(self, t):
msg = "String contains invalid escape code"
self._error(msg, t)
@TOKEN(identifier)
def t_ID(self, t):
t.type = self.keyword_map.get(t.value, "ID")
if t.type == 'ID' and self.type_lookup_func(t.value):
t.type = "TYPEID"
return t
def t_error(self, t):
msg = 'Illegal character %s' % repr(t.value[0])
self._error(msg, t)
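# --- Hedged usage sketch (not part of the original file) ---
# Assuming the enclosing class is pycparser's CLexer, whose constructor takes
# the callbacks referenced above (error_func, on_lbrace_func, on_rbrace_func,
# type_lookup_func), tokenizing a snippet would look roughly like:
#
#   def err(msg, line, col):
#       raise RuntimeError('%s at %d:%d' % (msg, line, col))
#   clex = CLexer(err, lambda: None, lambda: None, lambda name: False)
#   clex.build()
#   clex.input('int x = 0x1f;')
#   tok = clex.token()
#   while tok:
#       print(tok.type, tok.value)
#       tok = clex.token()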
|
tangyibin/goblin-core
|
refs/heads/master
|
riscv/llvm/3.5/llvm-3.5.0.src/utils/lit/tests/test-output.py
|
26
|
# XFAIL: python2.5
# RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
# RUN: FileCheck < %t.results.out %s
# CHECK: {
# CHECK: "__version__"
# CHECK: "elapsed"
# CHECK-NEXT: "tests": [
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK-NEXT: "name": "test-data :: metrics.ini",
# CHECK-NEXT: "output": "Test passed."
# CHECK-NEXT: }
# CHECK-NEXT: ]
# CHECK-NEXT: }
|
KokareIITP/django
|
refs/heads/master
|
django/db/backends/mysql/base.py
|
323
|
"""
MySQL database backend for Django.
Requires mysqlclient: https://pypi.python.org/pypi/mysqlclient/
MySQLdb is supported for Python 2 only: http://sourceforge.net/projects/mysql-python
"""
from __future__ import unicode_literals
import datetime
import re
import sys
import warnings
from django.conf import settings
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import SafeBytes, SafeText
try:
import MySQLdb as Database
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
from MySQLdb.constants import CLIENT, FIELD_TYPE # isort:skip
from MySQLdb.converters import Thing2Literal, conversions # isort:skip
# Some of these import MySQLdb, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
from .validation import DatabaseValidation # isort:skip
# We want version (1, 2, 1, 'final', 2) or later. We can't just use
# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
# inadvertently passes the version test.
version = Database.version_info
if (version < (1, 2, 1) or (version[:3] == (1, 2, 1) and
(len(version) < 5 or version[3] != 'final' or version[4] < 2))):
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
def adapt_datetime_warn_on_aware_datetime(value, conv):
# Remove this function and rely on the default adapter in Django 2.0.
if settings.USE_TZ and timezone.is_aware(value):
warnings.warn(
"The MySQL database adapter received an aware datetime (%s), "
"probably from cursor.execute(). Update your code to pass a "
"naive datetime in the database connection's time zone (UTC by "
"default).", RemovedInDjango20Warning)
# This doesn't account for the database connection's timezone,
# which isn't known. (That's why this adapter is deprecated.)
value = value.astimezone(timezone.utc).replace(tzinfo=None)
return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S.%f"), conv)
# MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like
# timedelta in terms of actual behavior as they are signed and include days --
# and Django expects time, so we still need to override that. We also need to
# add special handling for SafeText and SafeBytes as MySQLdb's type
# checking is too tight to catch those (see Django ticket #6052).
django_conversions = conversions.copy()
django_conversions.update({
FIELD_TYPE.TIME: backend_utils.typecast_time,
FIELD_TYPE.DECIMAL: backend_utils.typecast_decimal,
FIELD_TYPE.NEWDECIMAL: backend_utils.typecast_decimal,
datetime.datetime: adapt_datetime_warn_on_aware_datetime,
})
# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same). Based on the list of versions
# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
# http://dev.mysql.com/doc/refman/5.0/en/news.html .
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
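# Hedged example (not in the original module): server_version_re.match(
# '5.5.40-0ubuntu0.14.04.1') captures ('5', '5', '40'), which mysql_version
# below turns into the tuple (5, 5, 40); any vendor suffix after the numeric
# part is deliberately ignored.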
# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
# point is to raise Warnings as exceptions, this can be done with the Python
# warning module, and this is set up when the connection is created, and the
# standard backend_utils.CursorDebugWrapper can be used. Also, using sql_mode
# TRADITIONAL will automatically cause most warnings to be treated as errors.
class CursorWrapper(object):
"""
A thin wrapper around MySQLdb's normal cursor class so that we can catch
particular exception instances and reraise them with the right types.
Implemented as a wrapper, rather than a subclass, so that we aren't stuck
to the particular underlying representation returned by Connection.cursor().
"""
codes_for_integrityerror = (1048,)
def __init__(self, cursor):
self.cursor = cursor
def execute(self, query, args=None):
try:
# args is None means no string interpolation
return self.cursor.execute(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def executemany(self, query, args):
try:
return self.cursor.executemany(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Ticket #17671 - Close instead of passing thru to avoid backend
# specific behavior.
self.close()
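# Hedged illustration (not in the original module): because CursorWrapper
# implements __enter__/__exit__, call sites can close cursors automatically:
#
#   with CursorWrapper(raw_connection.cursor()) as cursor:
#       cursor.execute("SELECT 1")
#       row = cursor.fetchone()
#
# ('raw_connection' stands for a hypothetical, already-open MySQLdb
# connection.)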
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'mysql'
# This dictionary maps Field objects to their associated MySQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
_data_types = {
'AutoField': 'integer AUTO_INCREMENT',
'BinaryField': 'longblob',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
'DurationField': 'bigint',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'double precision',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'char(15)',
'GenericIPAddressField': 'char(39)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer UNSIGNED',
'PositiveSmallIntegerField': 'smallint UNSIGNED',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'longtext',
'TimeField': 'time',
'UUIDField': 'char(32)',
}
@cached_property
def data_types(self):
if self.features.supports_microsecond_precision:
return dict(self._data_types, DateTimeField='datetime(6)', TimeField='time(6)')
else:
return self._data_types
operators = {
'exact': '= %s',
'iexact': 'LIKE %s',
'contains': 'LIKE BINARY %s',
'icontains': 'LIKE %s',
'regex': 'REGEXP BINARY %s',
'iregex': 'REGEXP %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE BINARY %s',
'endswith': 'LIKE BINARY %s',
'istartswith': 'LIKE %s',
'iendswith': 'LIKE %s',
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
'contains': "LIKE BINARY CONCAT('%%', {}, '%%')",
'icontains': "LIKE CONCAT('%%', {}, '%%')",
'startswith': "LIKE BINARY CONCAT({}, '%%')",
'istartswith': "LIKE CONCAT({}, '%%')",
'endswith': "LIKE BINARY CONCAT('%%', {})",
'iendswith': "LIKE CONCAT('%%', {})",
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = DatabaseValidation(self)
def get_connection_params(self):
kwargs = {
'conv': django_conversions,
'charset': 'utf8',
}
if six.PY2:
kwargs['use_unicode'] = True
settings_dict = self.settings_dict
if settings_dict['USER']:
kwargs['user'] = settings_dict['USER']
if settings_dict['NAME']:
kwargs['db'] = settings_dict['NAME']
if settings_dict['PASSWORD']:
kwargs['passwd'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST'].startswith('/'):
kwargs['unix_socket'] = settings_dict['HOST']
elif settings_dict['HOST']:
kwargs['host'] = settings_dict['HOST']
if settings_dict['PORT']:
kwargs['port'] = int(settings_dict['PORT'])
# We need the number of potentially affected rows after an
# "UPDATE", not the number of changed rows.
kwargs['client_flag'] = CLIENT.FOUND_ROWS
kwargs.update(settings_dict['OPTIONS'])
return kwargs
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.encoders[SafeText] = conn.encoders[six.text_type]
conn.encoders[SafeBytes] = conn.encoders[bytes]
return conn
def init_connection_state(self):
with self.cursor() as cursor:
# SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column
# on a recently-inserted row will return when the field is tested for
# NULL. Disabling this value brings this aspect of MySQL in line with
# SQL standards.
cursor.execute('SET SQL_AUTO_IS_NULL = 0')
def create_cursor(self):
cursor = self.connection.cursor()
return CursorWrapper(cursor)
def _rollback(self):
try:
BaseDatabaseWrapper._rollback(self)
except Database.NotSupportedError:
pass
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit(autocommit)
def disable_constraint_checking(self):
"""
Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
to indicate constraint checks need to be re-enabled.
"""
self.cursor().execute('SET foreign_key_checks=0')
return True
def enable_constraint_checking(self):
"""
Re-enable foreign key checks after they have been disabled.
"""
# Override needs_rollback in case constraint_checks_disabled is
# nested inside transaction.atomic.
self.needs_rollback, needs_rollback = False, self.needs_rollback
try:
self.cursor().execute('SET foreign_key_checks=1')
finally:
self.needs_rollback = needs_rollback
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raises an IntegrityError on the first invalid foreign key reference
encountered (if any) and provides detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute("""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
% (primary_key_column_name, column_name, table_name, referenced_table_name,
column_name, referenced_column_name, column_name, referenced_column_name))
for bad_row in cursor.fetchall():
raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
"foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
% (table_name, bad_row[0],
table_name, column_name, bad_row[1],
referenced_table_name, referenced_column_name))
def is_usable(self):
try:
self.connection.ping()
except Database.Error:
return False
else:
return True
@cached_property
def mysql_version(self):
with self.temporary_connection():
server_info = self.connection.get_server_info()
match = server_version_re.match(server_info)
if not match:
raise Exception('Unable to determine MySQL version from version string %r' % server_info)
return tuple(int(x) for x in match.groups())
|
javachengwc/hue
|
refs/heads/master
|
desktop/core/ext-py/django-extensions-1.5.0/django_extensions/templatetags/indent_text.py
|
71
|
from django import template
register = template.Library()
class IndentByNode(template.Node):
def __init__(self, nodelist, indent_level, if_statement):
self.nodelist = nodelist
self.indent_level = template.Variable(indent_level)
if if_statement:
self.if_statement = template.Variable(if_statement)
else:
self.if_statement = None
def render(self, context):
indent_level = self.indent_level.resolve(context)
if self.if_statement:
try:
if_statement = bool(self.if_statement.resolve(context))
except template.VariableDoesNotExist:
if_statement = False
else:
if_statement = True
output = self.nodelist.render(context)
if if_statement:
indent = " " * indent_level
output = indent + indent.join(output.splitlines(True))
return output
def indentby(parser, token):
"""
Adds indentation to text between the tags by the given indentation level.
{% indentby <indent_level> [if <statement>] %}
...
{% endindentby %}
Arguments:
indent_level - Number of spaces to indent text with.
        statement - Only apply indent_level if the boolean statement evaluates to True.
"""
args = token.split_contents()
largs = len(args)
if largs not in (2, 4):
        raise template.TemplateSyntaxError("%r tag requires 1 or 3 arguments" % args[0])
indent_level = args[1]
if_statement = None
if largs == 4:
if_statement = args[3]
nodelist = parser.parse(('endindentby', ))
parser.delete_first_token()
return IndentByNode(nodelist, indent_level, if_statement)
indentby = register.tag(indentby)
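# Hedged usage sketch (not in the original file). With this library loaded,
# a template fragment such as
#
#   {% indentby 4 if pretty %}
#   line one
#   line two
#   {% endindentby %}
#
# renders each enclosed line prefixed with four spaces when the context
# variable 'pretty' (a hypothetical name) is truthy, and unchanged otherwise.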
|
hujiajie/chromium-crosswalk
|
refs/heads/master
|
tools/telemetry/telemetry/testing/__init__.py
|
1201
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
|
golden1232004/webrtc_new
|
refs/heads/master
|
chromium/src/build/android/gyp/write_build_config.py
|
2
|
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Writes a build_config file.
The build_config file for a target is a json file containing information about
how to build that target based on the target's dependencies. This includes
things like: the javac classpath, the list of android resources dependencies,
etc. It also includes the information needed to create the build_config for
other targets that depend on that one.
Android build scripts should not refer to the build_config directly, and the
build specification should instead pass information in using the special
file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
of values in a json dict in a file and looks like this:
--python-arg=@FileArg(build_config_path:javac:classpath)
Note: If paths to input files are passed in this way, it is important that:
1. inputs/deps of the action ensure that the files are available the first
time the action runs.
2. Either (a) or (b)
a. inputs/deps ensure that the action runs whenever one of the files changes
b. the files are added to the action's depfile
"""
import itertools
import optparse
import os
import sys
import xml.dom.minidom
from util import build_utils
from util import md5_check
import write_ordered_libraries
# Types that should never be used as a dependency of another build config.
_ROOT_TYPES = ('android_apk', 'deps_dex', 'java_binary', 'resource_rewriter')
# Types that should not allow code deps to pass through.
_RESOURCE_TYPES = ('android_assets', 'android_resources')
class AndroidManifest(object):
def __init__(self, path):
self.path = path
dom = xml.dom.minidom.parse(path)
manifests = dom.getElementsByTagName('manifest')
assert len(manifests) == 1
self.manifest = manifests[0]
def GetInstrumentation(self):
instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
if len(instrumentation_els) == 0:
return None
if len(instrumentation_els) != 1:
raise Exception(
'More than one <instrumentation> element found in %s' % self.path)
return instrumentation_els[0]
def CheckInstrumentation(self, expected_package):
instr = self.GetInstrumentation()
if not instr:
raise Exception('No <instrumentation> elements found in %s' % self.path)
instrumented_package = instr.getAttributeNS(
'http://schemas.android.com/apk/res/android', 'targetPackage')
if instrumented_package != expected_package:
raise Exception(
'Wrong instrumented package. Expected %s, got %s'
% (expected_package, instrumented_package))
def GetPackageName(self):
return self.manifest.getAttribute('package')
dep_config_cache = {}
def GetDepConfig(path):
  if path not in dep_config_cache:
dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
return dep_config_cache[path]
def DepsOfType(wanted_type, configs):
return [c for c in configs if c['type'] == wanted_type]
def GetAllDepsConfigsInOrder(deps_config_paths):
def GetDeps(path):
return set(GetDepConfig(path)['deps_configs'])
return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
def ResolveGroups(configs):
while True:
groups = DepsOfType('group', configs)
if not groups:
return configs
for config in groups:
index = configs.index(config)
expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
configs[index:index + 1] = expanded_configs
class Deps(object):
def __init__(self, direct_deps_config_paths):
self.all_deps_config_paths = GetAllDepsConfigsInOrder(
direct_deps_config_paths)
self.direct_deps_configs = ResolveGroups(
[GetDepConfig(p) for p in direct_deps_config_paths])
self.all_deps_configs = [
GetDepConfig(p) for p in self.all_deps_config_paths]
self.direct_deps_config_paths = direct_deps_config_paths
def All(self, wanted_type=None):
    if wanted_type is None:
return self.all_deps_configs
return DepsOfType(wanted_type, self.all_deps_configs)
def Direct(self, wanted_type=None):
if wanted_type is None:
return self.direct_deps_configs
return DepsOfType(wanted_type, self.direct_deps_configs)
def AllConfigPaths(self):
return self.all_deps_config_paths
def RemoveNonDirectDep(self, path):
if path in self.direct_deps_config_paths:
raise Exception('Cannot remove direct dep.')
self.all_deps_config_paths.remove(path)
self.all_deps_configs.remove(GetDepConfig(path))
def _MergeAssets(all_assets):
"""Merges all assets from the given deps.
Returns:
A tuple of lists: (compressed, uncompressed)
Each tuple entry is a list of "srcPath:zipPath". srcPath is the path of the
asset to add, and zipPath is the location within the zip (excluding assets/
prefix)
"""
compressed = {}
uncompressed = {}
for asset_dep in all_assets:
entry = asset_dep['assets']
disable_compression = entry.get('disable_compression', False)
dest_map = uncompressed if disable_compression else compressed
other_map = compressed if disable_compression else uncompressed
outputs = entry.get('outputs', [])
for src, dest in itertools.izip_longest(entry['sources'], outputs):
if not dest:
dest = os.path.basename(src)
# Merge so that each path shows up in only one of the lists, and that
# deps of the same target override previous ones.
other_map.pop(dest, 0)
dest_map[dest] = src
def create_list(asset_map):
ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()]
# Sort to ensure deterministic ordering.
ret.sort()
return ret
return create_list(compressed), create_list(uncompressed)
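# Hedged example (not in the original script) of the merge semantics above:
# if one dep lists 'a/icon.png' as a compressed asset and a later dep lists
# the same basename with disable_compression set, the uncompressed map wins
# and the final output contains the single entry 'a/icon.png:icon.png'.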
def _FilterUnwantedDepsPaths(dep_paths, target_type):
# Don't allow root targets to be considered as a dep.
ret = [p for p in dep_paths if GetDepConfig(p)['type'] not in _ROOT_TYPES]
# Don't allow java libraries to cross through assets/resources.
if target_type in _RESOURCE_TYPES:
ret = [p for p in ret if GetDepConfig(p)['type'] in _RESOURCE_TYPES]
return ret
def _AsInterfaceJar(jar_path):
return jar_path[:-3] + 'interface.jar'
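# Hedged example (not in the original script):
#   _AsInterfaceJar('out/obj/foo.jar') == 'out/obj/foo.interface.jar'
# The slice drops only the trailing 'jar', so callers must pass paths that
# end in '.jar'.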
def main(argv):
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--build-config', help='Path to build_config output.')
parser.add_option(
'--type',
help='Type of this target (e.g. android_library).')
parser.add_option(
'--possible-deps-configs',
help='List of paths for dependency\'s build_config files. Some '
'dependencies may not write build_config files. Missing build_config '
'files are handled differently based on the type of this target.')
# android_resources options
parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
parser.add_option('--r-text', help='Path to target\'s R.txt file.')
parser.add_option('--package-name',
help='Java package name for these resources.')
parser.add_option('--android-manifest', help='Path to android manifest.')
parser.add_option('--is-locale-resource', action='store_true',
                    help='Whether it is a locale resource.')
# android_assets options
parser.add_option('--asset-sources', help='List of asset sources.')
parser.add_option('--asset-renaming-sources',
help='List of asset sources with custom destinations.')
parser.add_option('--asset-renaming-destinations',
help='List of asset custom destinations.')
parser.add_option('--disable-asset-compression', action='store_true',
help='Whether to disable asset compression.')
# java library options
parser.add_option('--jar-path', help='Path to target\'s jar output.')
parser.add_option('--supports-android', action='store_true',
help='Whether this library supports running on the Android platform.')
parser.add_option('--requires-android', action='store_true',
help='Whether this library requires running on the Android platform.')
parser.add_option('--bypass-platform-checks', action='store_true',
help='Bypass checks for support/require Android platform.')
# android library options
parser.add_option('--dex-path', help='Path to target\'s dex output.')
# native library options
parser.add_option('--native-libs', help='List of top-level native libs.')
parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
# apk options
parser.add_option('--apk-path', help='Path to the target\'s apk output.')
parser.add_option('--incremental-apk-path',
help="Path to the target's incremental apk output.")
parser.add_option('--incremental-install-script-path',
help="Path to the target's generated incremental install "
"script.")
parser.add_option('--tested-apk-config',
help='Path to the build config of the tested apk (for an instrumentation '
'test apk).')
parser.add_option('--proguard-enabled', action='store_true',
help='Whether proguard is enabled for this apk.')
parser.add_option('--proguard-info',
help='Path to the proguard .info output for this apk.')
parser.add_option('--has-alternative-locale-resource', action='store_true',
                    help='Whether there is an alternative locale resource in direct deps.')
options, args = parser.parse_args(argv)
if args:
parser.error('No positional arguments should be given.')
required_options_map = {
'java_binary': ['build_config', 'jar_path'],
'java_library': ['build_config', 'jar_path'],
'android_assets': ['build_config'],
'android_resources': ['build_config', 'resources_zip'],
'android_apk': ['build_config', 'jar_path', 'dex_path', 'resources_zip'],
'deps_dex': ['build_config', 'dex_path'],
'resource_rewriter': ['build_config'],
'group': ['build_config'],
}
required_options = required_options_map.get(options.type)
if not required_options:
raise Exception('Unknown type: <%s>' % options.type)
if options.native_libs:
required_options.append('readelf_path')
build_utils.CheckOptions(options, parser, required_options)
if options.type == 'java_library':
if options.supports_android and not options.dex_path:
raise Exception('java_library that supports Android requires a dex path.')
if options.requires_android and not options.supports_android:
raise Exception(
'--supports-android is required when using --requires-android')
possible_deps_config_paths = build_utils.ParseGypList(
options.possible_deps_configs)
allow_unknown_deps = (options.type in
('android_apk', 'android_assets', 'android_resources'))
unknown_deps = [
c for c in possible_deps_config_paths if not os.path.exists(c)]
if unknown_deps and not allow_unknown_deps:
raise Exception('Unknown deps: ' + str(unknown_deps))
direct_deps_config_paths = [
      c for c in possible_deps_config_paths if c not in unknown_deps]
direct_deps_config_paths = _FilterUnwantedDepsPaths(direct_deps_config_paths,
options.type)
deps = Deps(direct_deps_config_paths)
all_inputs = deps.AllConfigPaths() + build_utils.GetPythonDependencies()
# Remove other locale resources if there is alternative_locale_resource in
# direct deps.
if options.has_alternative_locale_resource:
alternative = [r['path'] for r in deps.Direct('android_resources')
if r.get('is_locale_resource')]
    # We can have only one locale resource in direct deps.
    if len(alternative) != 1:
      raise Exception('Expected exactly one locale resource in direct deps, '
          'got %d' % len(alternative))
unwanted = [r['path'] for r in deps.All('android_resources')
if r.get('is_locale_resource') and r['path'] not in alternative]
for p in unwanted:
deps.RemoveNonDirectDep(p)
direct_library_deps = deps.Direct('java_library')
all_library_deps = deps.All('java_library')
direct_resources_deps = deps.Direct('android_resources')
all_resources_deps = deps.All('android_resources')
# Resources should be ordered with the highest-level dependency first so that
# overrides are done correctly.
all_resources_deps.reverse()
if options.type == 'android_apk' and options.tested_apk_config:
tested_apk_deps = Deps([options.tested_apk_config])
tested_apk_resources_deps = tested_apk_deps.All('android_resources')
all_resources_deps = [
        d for d in all_resources_deps if d not in tested_apk_resources_deps]
# Initialize some common config.
config = {
'deps_info': {
'name': os.path.basename(options.build_config),
'path': options.build_config,
'type': options.type,
'deps_configs': direct_deps_config_paths
}
}
deps_info = config['deps_info']
if (options.type in ('java_binary', 'java_library') and
not options.bypass_platform_checks):
deps_info['requires_android'] = options.requires_android
deps_info['supports_android'] = options.supports_android
deps_require_android = (all_resources_deps +
[d['name'] for d in all_library_deps if d['requires_android']])
deps_not_support_android = (
[d['name'] for d in all_library_deps if not d['supports_android']])
if deps_require_android and not options.requires_android:
raise Exception('Some deps require building for the Android platform: ' +
str(deps_require_android))
if deps_not_support_android and options.supports_android:
raise Exception('Not all deps support the Android platform: ' +
str(deps_not_support_android))
if options.type in ('java_binary', 'java_library', 'android_apk'):
javac_classpath = [c['jar_path'] for c in direct_library_deps]
java_full_classpath = [c['jar_path'] for c in all_library_deps]
deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
deps_info['jar_path'] = options.jar_path
if options.type == 'android_apk' or options.supports_android:
deps_info['dex_path'] = options.dex_path
if options.type == 'android_apk':
deps_info['apk_path'] = options.apk_path
deps_info['incremental_apk_path'] = options.incremental_apk_path
deps_info['incremental_install_script_path'] = (
options.incremental_install_script_path)
# Classpath values filled in below (after applying tested_apk_config).
config['javac'] = {}
if options.type in ('java_binary', 'java_library'):
# Only resources might have srcjars (normal srcjar targets are listed in
# srcjar_deps). A resource's srcjar contains the R.java file for those
# resources, and (like Android's default build system) we allow a library to
# refer to the resources in any of its dependents.
config['javac']['srcjars'] = [
c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
if options.type == 'android_apk':
# Apks will get their resources srcjar explicitly passed to the java step.
config['javac']['srcjars'] = []
if options.type == 'android_assets':
all_asset_sources = []
if options.asset_renaming_sources:
all_asset_sources.extend(
build_utils.ParseGypList(options.asset_renaming_sources))
if options.asset_sources:
all_asset_sources.extend(build_utils.ParseGypList(options.asset_sources))
deps_info['assets'] = {
'sources': all_asset_sources
}
if options.asset_renaming_destinations:
deps_info['assets']['outputs'] = (
build_utils.ParseGypList(options.asset_renaming_destinations))
if options.disable_asset_compression:
deps_info['assets']['disable_compression'] = True
if options.type == 'android_resources':
deps_info['resources_zip'] = options.resources_zip
if options.srcjar:
deps_info['srcjar'] = options.srcjar
if options.android_manifest:
manifest = AndroidManifest(options.android_manifest)
deps_info['package_name'] = manifest.GetPackageName()
if options.package_name:
deps_info['package_name'] = options.package_name
if options.r_text:
deps_info['r_text'] = options.r_text
if options.is_locale_resource:
deps_info['is_locale_resource'] = True
if options.type in ('android_resources','android_apk', 'resource_rewriter'):
config['resources'] = {}
config['resources']['dependency_zips'] = [
c['resources_zip'] for c in all_resources_deps]
config['resources']['extra_package_names'] = []
config['resources']['extra_r_text_files'] = []
if options.type == 'android_apk' or options.type == 'resource_rewriter':
config['resources']['extra_package_names'] = [
c['package_name'] for c in all_resources_deps if 'package_name' in c]
config['resources']['extra_r_text_files'] = [
c['r_text'] for c in all_resources_deps if 'r_text' in c]
if options.type in ['android_apk', 'deps_dex']:
deps_dex_files = [c['dex_path'] for c in all_library_deps]
proguard_enabled = options.proguard_enabled
if options.type == 'android_apk':
deps_info['proguard_enabled'] = proguard_enabled
if proguard_enabled:
deps_info['proguard_info'] = options.proguard_info
config['proguard'] = {}
proguard_config = config['proguard']
proguard_config['input_paths'] = [options.jar_path] + java_full_classpath
# An instrumentation test apk should exclude the dex files that are in the apk
# under test.
if options.type == 'android_apk' and options.tested_apk_config:
tested_apk_library_deps = tested_apk_deps.All('java_library')
tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
# Include in the classpath classes that are added directly to the apk under
# test (those that are not a part of a java_library).
tested_apk_config = GetDepConfig(options.tested_apk_config)
javac_classpath.append(tested_apk_config['jar_path'])
# Exclude dex files from the test apk that exist within the apk under test.
deps_dex_files = [
        p for p in deps_dex_files if p not in tested_apk_deps_dex_files]
expected_tested_package = tested_apk_config['package_name']
AndroidManifest(options.android_manifest).CheckInstrumentation(
expected_tested_package)
if tested_apk_config['proguard_enabled']:
assert proguard_enabled, ('proguard must be enabled for instrumentation'
' apks if it\'s enabled for the tested apk')
# Dependencies for the final dex file of an apk or a 'deps_dex'.
if options.type in ['android_apk', 'deps_dex']:
config['final_dex'] = {}
dex_config = config['final_dex']
dex_config['dependency_dex_files'] = deps_dex_files
if options.type in ('java_binary', 'java_library', 'android_apk'):
config['javac']['classpath'] = javac_classpath
config['javac']['interface_classpath'] = [
_AsInterfaceJar(p) for p in javac_classpath]
config['java'] = {
'full_classpath': java_full_classpath
}
if options.type == 'android_apk':
dependency_jars = [c['jar_path'] for c in all_library_deps]
all_interface_jars = [
_AsInterfaceJar(p) for p in dependency_jars + [options.jar_path]]
config['dist_jar'] = {
'dependency_jars': dependency_jars,
'all_interface_jars': all_interface_jars,
}
manifest = AndroidManifest(options.android_manifest)
deps_info['package_name'] = manifest.GetPackageName()
if not options.tested_apk_config and manifest.GetInstrumentation():
# This must then have instrumentation only for itself.
manifest.CheckInstrumentation(manifest.GetPackageName())
library_paths = []
java_libraries_list_holder = [None]
libraries = build_utils.ParseGypList(options.native_libs or '[]')
if libraries:
def recompute_ordered_libraries():
libraries_dir = os.path.dirname(libraries[0])
write_ordered_libraries.SetReadelfPath(options.readelf_path)
write_ordered_libraries.SetLibraryDirs([libraries_dir])
all_deps = (
write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
libraries))
# Create a java literal array with the "base" library names:
# e.g. libfoo.so -> foo
java_libraries_list_holder[0] = ('{%s}' % ','.join(
['"%s"' % s[3:-3] for s in all_deps]))
library_paths.extend(
write_ordered_libraries.FullLibraryPath(x) for x in all_deps)
# This step takes about 600ms on a z620 for chrome_apk, so it's worth
# caching.
md5_check.CallAndRecordIfStale(
recompute_ordered_libraries,
record_path=options.build_config + '.nativelibs.md5.stamp',
input_paths=libraries,
output_paths=[options.build_config])
if not library_paths:
prev_config = build_utils.ReadJson(options.build_config)
java_libraries_list_holder[0] = (
prev_config['native']['java_libraries_list'])
library_paths.extend(prev_config['native']['libraries'])
all_inputs.extend(library_paths)
config['native'] = {
'libraries': library_paths,
'java_libraries_list': java_libraries_list_holder[0],
}
config['assets'], config['uncompressed_assets'] = (
_MergeAssets(deps.All('android_assets')))
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
build_utils.WriteDepfile(options.depfile, all_inputs)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Wafflespeanut/servo
|
refs/heads/master
|
tests/wpt/css-tests/tools/py/doc/example/genxml.py
|
217
|
import py
class ns(py.xml.Namespace):
pass
doc = ns.books(
ns.book(
ns.author("May Day"),
ns.title("python for java programmers"),),
ns.book(
ns.author("why", class_="somecssclass"),
ns.title("Java for Python programmers"),),
publisher="N.N",
)
print doc.unicode(indent=2).encode('utf8')
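# Hedged note (not in the original script): run under Python 2, this prints
# the nested <books>/<book> markup; doc.unicode(indent=2) pretty-prints the
# tree with two-space indentation before it is encoded as UTF-8.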
|
uweschmitt/emzed
|
refs/heads/master
|
patched_modules/sitecustomize.py
|
1
|
# -*- coding: utf-8 -*-
# Spyder's ExternalPythonShell sitecustomize
import sys
import os
import os.path as osp
import pdb
import bdb
import __builtin__
############ EMZED ADDONS BEGIN ##############################
print "run patched sitecustomize"
sys.path.insert(0, os.environ.get("EMZED_HOME",""))
try:
import external_shell_patches
external_shell_patches.patch_external_shell()
except Exception, e:
print e
print "patches applied"
############ EMZED ADDONS END ################################
# Colorization of sys.stderr (standard Python interpreter)
if os.environ.get("COLORIZE_SYS_STDERR", "").lower() == "true"\
and not os.environ.get('IPYTHON', False):
class StderrProxy(object):
"""Proxy to sys.stderr file object overriding only the `write` method
to provide red colorization for the whole stream, and blue-underlined
for traceback file links"""
def __init__(self):
self.old_stderr = sys.stderr
self.__buffer = ''
sys.stderr = self
def __getattr__(self, name):
return getattr(self.old_stderr, name)
def write(self, text):
if os.name == 'nt' and '\n' not in text:
self.__buffer += text
return
for text in (self.__buffer+text).splitlines(True):
if text.startswith(' File') \
and not text.startswith(' File "<'):
# Show error links in blue underlined text
colored_text = ' '+'\x1b[4;34m'+text[2:]+'\x1b[0m'
else:
# Show error messages in red
colored_text = '\x1b[31m'+text+'\x1b[0m'
self.old_stderr.write(colored_text)
self.__buffer = ''
stderrproxy = StderrProxy()
# Prepending this spyderlib package's path to sys.path to be sure
# that another version of spyderlib won't be imported instead:
if os.environ.get("SPYDER_PARENT_DIR") is None:
spyderlib_path = osp.dirname(__file__)
while not osp.isdir(osp.join(spyderlib_path, 'spyder')):
print spyderlib_path
spyderlib_path = osp.abspath(osp.join(spyderlib_path, os.pardir))
if not spyderlib_path.startswith(sys.prefix):
# Spyder is not installed: moving its parent directory to the top of
# sys.path to be sure that this spyderlib package will be imported in
# the remote process (instead of another installed version of Spyder)
while spyderlib_path in sys.path:
sys.path.remove(spyderlib_path)
sys.path.insert(0, spyderlib_path)
print os.environ["EMZED_HOME"]
os.environ['SPYDER_PARENT_DIR'] = spyderlib_path
else:
spyderlib_path = os.environ.get("SPYDER_PARENT_DIR")
# Set PyQt4 API to #1 or #2
pyqt_api = int(os.environ.get("PYQT_API", "0"))
if pyqt_api:
try:
import sip
try:
for qtype in ('QString', 'QVariant'):
sip.setapi(qtype, pyqt_api)
except AttributeError:
# Old version of sip
pass
except ImportError:
pass
mpl_backend = os.environ.get("MATPLOTLIB_BACKEND")
if mpl_backend:
try:
import matplotlib
matplotlib.use(mpl_backend)
except ImportError:
pass
if os.environ.get("MATPLOTLIB_PATCH", "").lower() == "true":
try:
from spyderlib import mpl_patch
mpl_patch.apply()
except ImportError:
pass
if os.name == 'nt': # Windows platforms
# Setting console encoding (otherwise Python does not recognize encoding)
try:
import locale, ctypes
_t, _cp = locale.getdefaultlocale('LANG')
try:
_cp = int(_cp[2:])
ctypes.windll.kernel32.SetConsoleCP(_cp)
ctypes.windll.kernel32.SetConsoleOutputCP(_cp)
except (ValueError, TypeError):
# Code page number in locale is not valid
pass
except ImportError:
pass
# Workaround for IPython thread issues with win32 comdlg32
if os.environ.get('IPYTHON', False):
try:
import win32gui, win32api
try:
win32gui.GetOpenFileNameW(File=win32api.GetSystemDirectory()[:2])
except win32gui.error:
# This error is triggered intentionally
pass
except ImportError:
# Unfortunately, pywin32 is not installed...
pass
# Set standard outputs encoding:
# (otherwise, for example, print u"é" will fail)
encoding = None
try:
import locale
except ImportError:
pass
else:
loc = locale.getdefaultlocale()
if loc[1]:
encoding = loc[1]
if encoding is None:
encoding = "UTF-8"
sys.setdefaultencoding(encoding)
os.environ['SPYDER_ENCODING'] = encoding
try:
import sitecustomize #analysis:ignore
except ImportError:
pass
print "install __appemzed__"
import guidata
__builtin__.__appemzed__ = guidata.qapplication()
# Communication between Spyder and the remote process
if os.environ.get('SPYDER_SHELL_ID') is None:
monitor = None
else:
from spyderlib.widgets.externalshell.monitor import Monitor
monitor = Monitor("127.0.0.1",
int(os.environ['SPYDER_I_PORT']),
int(os.environ['SPYDER_N_PORT']),
os.environ['SPYDER_SHELL_ID'],
float(os.environ['SPYDER_AR_TIMEOUT']),
os.environ["SPYDER_AR_STATE"].lower() == "true")
monitor.start()
def open_in_spyder(source, lineno=1):
"""Open in Spyder's editor the source file
(may be a filename or a Python module/package)"""
if not isinstance(source, basestring):
try:
source = source.__file__
except AttributeError:
raise ValueError("source argument must be either "
"a string or a module object")
if source.endswith('.pyc'):
source = source[:-1]
monitor.notify_open_file(source, lineno=lineno)
__builtin__.open_in_spyder = open_in_spyder
# * PyQt4:
#   * Removing PyQt4's input hook, which does not work well on Windows since
#     opening a subprocess does not attach a real console to it
# (with keyboard events...)
# * Replacing it with our own input hook
# * PySide:
# * Installing an input hook: this feature is not yet supported
# natively by PySide
if os.environ.get("INSTALL_QT_INPUTHOOK", "").lower() == "true"\
and not os.environ.get('IPYTHON', False):
# For now, the Spyder's input hook does not work with IPython:
# with IPython v0.10 or non-Windows platforms, this is not a
# problem. However, with IPython v0.11 on Windows, this will be
# fixed by patching IPython to force it to use our inputhook.
if os.environ["QT_API"] == 'pyqt':
from PyQt4 import QtCore
# Removing PyQt's PyOS_InputHook implementation:
QtCore.pyqtRemoveInputHook()
elif os.environ["QT_API"] == 'pyside':
from PySide import QtCore
# XXX: when PySide will implement an input hook, we will have to
# remove it here
else:
assert False
def qt_inputhook():
"""Qt input hook for Spyder's console
This input hook wait for available stdin data (notified by
ExternalPythonShell through the monitor's inputhook_flag
attribute), and in the meantime it processes Qt events."""
# Refreshing variable explorer, except on first input hook call:
# (otherwise, on slow machines, this may freeze Spyder)
monitor.refresh_from_inputhook()
if os.name == 'nt':
try:
# This call fails for Python without readline support
# (or on Windows platforms) when PyOS_InputHook is called
# for the second consecutive time, because the 100-bytes
# stdin buffer is full.
# For more details, see the `PyOS_StdioReadline` function
# in Python source code (Parser/myreadline.c)
sys.stdin.tell()
except IOError:
return 0
app = QtCore.QCoreApplication.instance()
if app and app.thread() is QtCore.QThread.currentThread():
timer = QtCore.QTimer()
QtCore.QObject.connect(timer, QtCore.SIGNAL('timeout()'),
app, QtCore.SLOT('quit()'))
monitor.toggle_inputhook_flag(False)
while not monitor.inputhook_flag:
timer.start(50)
QtCore.QCoreApplication.exec_()
timer.stop()
# # Socket-based alternative:
# socket = QtNetwork.QLocalSocket()
# socket.connectToServer(os.environ['SPYDER_SHELL_ID'])
# socket.waitForConnected(-1)
# while not socket.waitForReadyRead(10):
# timer.start(50)
# QtCore.QCoreApplication.exec_()
# timer.stop()
# socket.read(3)
# socket.disconnectFromServer()
return 0
# Installing Spyder's PyOS_InputHook implementation:
import ctypes
cb_pyfunctype = ctypes.PYFUNCTYPE(ctypes.c_int)(qt_inputhook)
pyos_ih = ctypes.c_void_p.in_dll(ctypes.pythonapi, "PyOS_InputHook")
pyos_ih.value = ctypes.cast(cb_pyfunctype, ctypes.c_void_p).value
else:
# Quite limited feature: notify only when a result is displayed in
# console (does not notify at every prompt)
def displayhook(obj):
sys.__displayhook__(obj)
monitor.refresh()
sys.displayhook = displayhook
#===============================================================================
# Monkey-patching pdb
#===============================================================================
class SpyderPdb(pdb.Pdb):
def set_spyder_breakpoints(self):
self.clear_all_breaks()
#------Really deleting all breakpoints:
for bp in bdb.Breakpoint.bpbynumber:
if bp:
bp.deleteMe()
bdb.Breakpoint.next = 1
bdb.Breakpoint.bplist = {}
bdb.Breakpoint.bpbynumber = [None]
#------
from spyderlib.config import CONF
CONF.load_from_ini()
if CONF.get('run', 'breakpoints/enabled', True):
breakpoints = CONF.get('run', 'breakpoints', {})
i = 0
for fname, data in breakpoints.iteritems():
for linenumber, condition in data:
i += 1
self.set_break(self.canonic(fname), linenumber,
cond=condition)
def notify_spyder(self, frame):
if not frame:
return
fname = self.canonic(frame.f_code.co_filename)
lineno = frame.f_lineno
if isinstance(fname, basestring) and isinstance(lineno, int):
if osp.isfile(fname) and monitor is not None:
monitor.notify_pdb_step(fname, lineno)
pdb.Pdb = SpyderPdb
def monkeypatch_method(cls, patch_name):
# This function's code was inspired from the following thread:
# "[Python-Dev] Monkeypatching idioms -- elegant or ugly?"
# by Robert Brewer <fumanchu at aminus.org>
# (Tue Jan 15 19:13:25 CET 2008)
"""
Add the decorated method to the given class; replace as needed.
If the named method already exists on the given class, it will
be replaced, and a reference to the old method is created as
    cls._old_<patch_name>_<name>. If the "_old_<patch_name>_<name>" attribute
already exists, KeyError is raised.
"""
def decorator(func):
fname = func.__name__
old_func = getattr(cls, fname, None)
if old_func is not None:
# Add the old func to a list of old funcs.
old_ref = "_old_%s_%s" % (patch_name, fname)
#print old_ref, old_func
old_attr = getattr(cls, old_ref, None)
if old_attr is None:
setattr(cls, old_ref, old_func)
else:
raise KeyError("%s.%s already exists."
% (cls.__name__, old_ref))
setattr(cls, fname, func)
return func
return decorator
@monkeypatch_method(pdb.Pdb, 'Pdb')
def user_return(self, frame, return_value):
"""This function is called when a return trap is set here."""
# This is useful when debugging in an active interpreter (otherwise,
# the debugger will stop before reaching the target file)
if self._wait_for_mainpyfile:
if (self.mainpyfile != self.canonic(frame.f_code.co_filename)
or frame.f_lineno<= 0):
return
self._wait_for_mainpyfile = 0
self._old_Pdb_user_return(frame, return_value)
@monkeypatch_method(pdb.Pdb, 'Pdb')
def interaction(self, frame, traceback):
self.setup(frame, traceback)
self.notify_spyder(frame) #-----Spyder-specific-------------------------
self.print_stack_entry(self.stack[self.curindex])
self.cmdloop()
self.forget()
@monkeypatch_method(pdb.Pdb, 'Pdb')
def reset(self):
self._old_Pdb_reset()
if monitor is not None:
monitor.register_pdb_session(self)
self.set_spyder_breakpoints()
#XXX: notify spyder on any pdb command (is that good or too lazy? i.e. is more
# specific behaviour desired?)
@monkeypatch_method(pdb.Pdb, 'Pdb')
def postcmd(self, stop, line):
self.notify_spyder(self.curframe)
return self._old_Pdb_postcmd(stop, line)
# Restoring (almost) original sys.path:
# (Note: do not remove spyderlib_path from sys.path because if Spyder has been
# installed using python setup.py install, then this could remove the
# 'site-packages' directory from sys.path!)
try:
sys.path.remove(osp.join(spyderlib_path,
"spyderlib", "widgets", "externalshell"))
except ValueError:
pass
# Ignore PyQt4's sip API changes (this should be used wisely -e.g. for
# debugging- as dynamic API change is not supported by PyQt)
if os.environ.get("IGNORE_SIP_SETAPI_ERRORS", "").lower() == "true":
try:
import sip
from sip import setapi as original_setapi
def patched_setapi(name, no):
try:
original_setapi(name, no)
except ValueError, msg:
print >>sys.stderr, "Warning/PyQt4-Spyder (%s)" % str(msg)
sip.setapi = patched_setapi
except ImportError:
pass
# Workaround #1 to make the HDF5 I/O variable explorer plugin work:
# we import h5py without IPython support (otherwise, Spyder will crash
# when initializing IPython in startup.py).
# (see startup.py for the Workaround #2)
if monitor and not os.environ.get('IPYTHON', False):
sys.modules['IPython'] = None
try:
import h5py #@UnusedImport
except ImportError:
pass
del sys.modules['IPython']
# The following classes and functions are mainly intended to be used from
# an interactive Python/IPython session
class UserModuleDeleter(object):
"""
User Module Deleter (UMD) aims at deleting user modules
to force Python to deeply reload them during import
pathlist [list]: blacklist in terms of module path
namelist [list]: blacklist in terms of module name
"""
def __init__(self, namelist=None, pathlist=None):
if namelist is None:
namelist = []
self.namelist = namelist+['sitecustomize', 'spyderlib', 'spyderplugins']
if pathlist is None:
pathlist = []
self.pathlist = pathlist
self.previous_modules = sys.modules.keys()
def is_module_blacklisted(self, modname, modpath):
for path in [sys.prefix]+self.pathlist:
if modpath.startswith(path):
return True
else:
return set(modname.split('.')) & set(self.namelist)
def run(self, verbose=False):
"""
Del user modules to force Python to deeply reload them
Do not del modules which are considered as system modules, i.e.
modules installed in subdirectories of Python interpreter's binary
Do not del C modules
"""
log = []
for modname, module in sys.modules.items():
if modname not in self.previous_modules:
modpath = getattr(module, '__file__', None)
if modpath is None:
# *module* is a C module that is statically linked into the
# interpreter. There is no way to know its path, so we
# choose to ignore it.
continue
if not self.is_module_blacklisted(modname, modpath):
log.append(modname)
del sys.modules[modname]
if verbose and log:
print "\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted",
": "+", ".join(log))
__umd__ = None
def _get_globals():
"""Return current Python/IPython interpreter globals namespace"""
from __main__ import __dict__ as namespace
if hasattr(__builtin__, '__IPYTHON__'):
# IPython 0.10
shell = __builtin__.__IPYTHON__
else:
# IPython 0.11+
shell = namespace.get('__ipythonshell__')
if shell is not None and hasattr(shell, 'user_ns'):
# IPython
return shell.user_ns
else:
return namespace
def runfile(filename, args=None, wdir=None, namespace=None):
"""
Run filename
args: command line arguments (string)
wdir: working directory
"""
global __umd__
if os.environ.get("UMD_ENABLED", "").lower() == "true":
if __umd__ is None:
namelist = os.environ.get("UMD_NAMELIST", None)
if namelist is not None:
namelist = namelist.split(',')
__umd__ = UserModuleDeleter(namelist=namelist)
else:
verbose = os.environ.get("UMD_VERBOSE", "").lower() == "true"
__umd__.run(verbose=verbose)
if args is not None and not isinstance(args, basestring):
raise TypeError("expected a character buffer object")
if namespace is None:
namespace = _get_globals()
namespace['__file__'] = filename
sys.argv = [filename]
if args is not None:
for arg in args.split():
sys.argv.append(arg)
if wdir is not None:
os.chdir(wdir)
execfile(filename, namespace)
sys.argv = ['']
namespace.pop('__file__')
__builtin__.runfile = runfile
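# Illustrative console call (file name and arguments are hypothetical):
#     runfile('analysis.py', args='--fast 1', wdir='/tmp/project')
# runs analysis.py with sys.argv == ['analysis.py', '--fast', '1'] in the
# interpreter's global namespace, optionally applying UMD first.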
def debugfile(filename, args=None, wdir=None):
"""
Debug filename
args: command line arguments (string)
wdir: working directory
"""
debugger = pdb.Pdb()
filename = debugger.canonic(filename)
debugger._wait_for_mainpyfile = 1
debugger.mainpyfile = filename
debugger._user_requested_quit = 0
debugger.run("runfile(%r, args=%r, wdir=%r)" % (filename, args, wdir))
__builtin__.debugfile = debugfile
def evalsc(command):
"""Evaluate special commands
(analog to IPython's magic commands but far less powerful/complete)"""
assert command.startswith(('%', '!'))
system_command = command.startswith('!')
command = command[1:].strip()
if system_command:
# System command
if command.startswith('cd '):
evalsc('%'+command)
else:
from subprocess import Popen, PIPE
Popen(command, shell=True, stdin=PIPE)
print '\n'
else:
# General command
namespace = _get_globals()
import re
clear_match = re.match(r"^clear ([a-zA-Z0-9_, ]+)", command)
cd_match = re.match(r"^cd \"?\'?([a-zA-Z0-9_\ \:\\\/\.]+)", command)
if cd_match:
os.chdir(eval('r"%s"' % cd_match.groups()[0].strip()))
elif clear_match:
varnames = clear_match.groups()[0].replace(' ', '').split(',')
for varname in varnames:
try:
namespace.pop(varname)
except KeyError:
pass
elif command in ('cd', 'pwd'):
print os.getcwdu()
elif command == 'ls':
if os.name == 'nt':
evalsc('!dir')
else:
evalsc('!ls')
elif command == 'scientific':
from spyderlib import baseconfig
execfile(baseconfig.SCIENTIFIC_STARTUP, namespace)
else:
raise NotImplementedError, "Unsupported command: '%s'" % command
__builtin__.evalsc = evalsc
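# Examples of the special commands evalsc() accepts (paths hypothetical):
#     evalsc('%cd /tmp')      # change the working directory
#     evalsc('%clear x, y')   # pop variables from the namespace
#     evalsc('%pwd')          # print the current directory
#     evalsc('!ls')           # run a shell command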
## Restoring original PYTHONPATH
#try:
# os.environ['PYTHONPATH'] = os.environ['OLD_PYTHONPATH']
# del os.environ['OLD_PYTHONPATH']
#except KeyError:
# if os.environ.get('PYTHONPATH') is not None:
# del os.environ['PYTHONPATH']
|
archlinux/archweb
|
refs/heads/master
|
mirrors/views/mirrorlist.py
|
1
|
from operator import attrgetter, itemgetter
from django import forms
from django.db.models import Q
from django.forms.widgets import SelectMultiple, CheckboxSelectMultiple
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.csrf import csrf_exempt
from django_countries import countries
from ..models import MirrorUrl, MirrorProtocol
from ..utils import get_mirror_statuses
import random
class MirrorlistForm(forms.Form):
country = forms.MultipleChoiceField(required=False, widget=SelectMultiple(attrs={'size': '12'}))
protocol = forms.MultipleChoiceField(required=False, widget=CheckboxSelectMultiple)
ip_version = forms.MultipleChoiceField(
required=False, label="IP version", choices=(('4', 'IPv4'), ('6', 'IPv6')),
widget=CheckboxSelectMultiple)
use_mirror_status = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(MirrorlistForm, self).__init__(*args, **kwargs)
fields = self.fields
fields['country'].choices = [('all', 'All')] + self.get_countries()
fields['country'].initial = ['all']
protos = [(p.protocol, p.protocol) for p in MirrorProtocol.objects.filter(is_download=True)]
initial = MirrorProtocol.objects.filter(is_download=True, default=True)
fields['protocol'].choices = protos
fields['protocol'].initial = [p.protocol for p in initial]
fields['ip_version'].initial = ['4']
def get_countries(self):
country_codes = set()
country_codes.update(MirrorUrl.objects.filter(
active=True, mirror__active=True).exclude(country='').values_list(
'country', flat=True).order_by().distinct())
code_list = [(code, countries.name(code)) for code in country_codes]
return sorted(code_list, key=itemgetter(1))
def as_div(self):
"Returns this form rendered as HTML <divs>s."
return self._html_output(
normal_row=u'<div%(html_class_attr)s>%(label)s %(field)s%(help_text)s</div>',
error_row=u'%s',
row_ender='</div>',
help_text_html=u' <span class="helptext">%s</span>',
errors_on_separate_row=True)
@csrf_exempt
def generate_mirrorlist(request):
if request.method == 'POST' or len(request.GET) > 0:
data = request.POST if request.method == 'POST' else request.GET
form = MirrorlistForm(data=data)
if form.is_valid():
countries = form.cleaned_data['country']
protocols = form.cleaned_data['protocol']
use_status = form.cleaned_data['use_mirror_status']
ipv4 = '4' in form.cleaned_data['ip_version']
ipv6 = '6' in form.cleaned_data['ip_version']
return find_mirrors(request, countries, protocols,
use_status, ipv4, ipv6)
else:
form = MirrorlistForm()
return render(request, 'mirrors/mirrorlist_generate.html',
{'mirrorlist_form': form})
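# An illustrative request this view serves (query values hypothetical);
# repeated 'country' parameters select several countries, and omitting
# use_mirror_status keeps the alphabetically sorted plain-text output:
#     GET /mirrorlist/?country=US&country=SE&protocol=http&ip_version=4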
def status_filter(original_urls):
status_info = get_mirror_statuses()
scores = {u.id: u.score for u in status_info['urls']}
urls = []
for u in original_urls:
u.score = scores.get(u.id, None)
# also include mirrors that don't have an up to date score
# (as opposed to those that have been set with no score)
if (u.id not in scores) or (u.score and u.score < 100.0):
urls.append(u)
# randomize list to prevent users from overloading the first mirror in the returned list
random.shuffle(urls)
return urls
def find_mirrors(request, countries=None, protocols=None, use_status=False,
ipv4_supported=True, ipv6_supported=True):
if not protocols:
protocols = MirrorProtocol.objects.filter(is_download=True)
elif hasattr(protocols, 'model') and protocols.model == MirrorProtocol:
# we already have a queryset, no need to query again
pass
else:
protocols = MirrorProtocol.objects.filter(protocol__in=protocols)
qset = MirrorUrl.objects.select_related().filter(
protocol__in=protocols, active=True,
mirror__public=True, mirror__active=True)
if countries and 'all' not in countries:
qset = qset.filter(country__in=countries)
ip_version = Q()
if ipv4_supported:
ip_version |= Q(has_ipv4=True)
if ipv6_supported:
ip_version |= Q(has_ipv6=True)
qset = qset.filter(ip_version)
if not use_status:
sort_key = attrgetter('country.name', 'mirror.name', 'url')
urls = sorted(qset, key=sort_key)
template = 'mirrors/mirrorlist.txt'
else:
urls = status_filter(qset)
template = 'mirrors/mirrorlist_status.txt'
context = {
'mirror_urls': urls,
}
return render(request, template, context, content_type='text/plain')
def find_mirrors_simple(request, protocol):
if protocol == 'smart':
return redirect('mirrorlist_simple', 'http', permanent=True)
proto = get_object_or_404(MirrorProtocol, protocol=protocol)
return find_mirrors(request, protocols=[proto])
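# Illustrative URLs for the simple view (the URL paths are assumptions;
# the actual routing lives in the project's urlconf):
#     .../http/   -> only HTTP mirrors, via find_mirrors()
#     .../smart/  -> permanent redirect to the http list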
# vim: set ts=4 sw=4 et:
|
zanderle/django
|
refs/heads/master
|
tests/admin_registration/tests.py
|
266
|
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.admin.decorators import register
from django.contrib.admin.sites import site
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase
from .models import Location, Person, Place, Traveler
class NameAdmin(admin.ModelAdmin):
list_display = ['name']
save_on_top = True
class CustomSite(admin.AdminSite):
pass
class TestRegistration(SimpleTestCase):
def setUp(self):
self.site = admin.AdminSite()
def test_bare_registration(self):
self.site.register(Person)
self.assertTrue(
isinstance(self.site._registry[Person], admin.options.ModelAdmin)
)
def test_registration_with_model_admin(self):
self.site.register(Person, NameAdmin)
self.assertTrue(
isinstance(self.site._registry[Person], NameAdmin)
)
def test_prevent_double_registration(self):
self.site.register(Person)
self.assertRaises(admin.sites.AlreadyRegistered,
self.site.register,
Person)
def test_registration_with_star_star_options(self):
self.site.register(Person, search_fields=['name'])
self.assertEqual(self.site._registry[Person].search_fields, ['name'])
def test_star_star_overrides(self):
self.site.register(Person, NameAdmin,
search_fields=["name"], list_display=['__str__'])
self.assertEqual(self.site._registry[Person].search_fields, ['name'])
self.assertEqual(self.site._registry[Person].list_display,
['__str__'])
self.assertTrue(self.site._registry[Person].save_on_top)
def test_iterable_registration(self):
self.site.register([Person, Place], search_fields=['name'])
self.assertTrue(
isinstance(self.site._registry[Person], admin.options.ModelAdmin)
)
self.assertEqual(self.site._registry[Person].search_fields, ['name'])
self.assertTrue(
isinstance(self.site._registry[Place], admin.options.ModelAdmin)
)
self.assertEqual(self.site._registry[Place].search_fields, ['name'])
def test_abstract_model(self):
"""
Exception is raised when trying to register an abstract model.
Refs #12004.
"""
self.assertRaises(ImproperlyConfigured, self.site.register, Location)
def test_is_registered_model(self):
"Checks for registered models should return true."
self.site.register(Person)
self.assertTrue(self.site.is_registered(Person))
def test_is_registered_not_registered_model(self):
"Checks for unregistered models should return false."
self.assertFalse(self.site.is_registered(Person))
class TestRegistrationDecorator(SimpleTestCase):
"""
Tests the register decorator in admin.decorators
For clarity:
@register(Person)
class AuthorAdmin(ModelAdmin):
pass
is functionally equal to (the way it is written in these tests):
AuthorAdmin = register(Person)(AuthorAdmin)
"""
def setUp(self):
self.default_site = site
self.custom_site = CustomSite()
def test_basic_registration(self):
register(Person)(NameAdmin)
self.assertTrue(
isinstance(self.default_site._registry[Person],
admin.options.ModelAdmin)
)
self.default_site.unregister(Person)
def test_custom_site_registration(self):
register(Person, site=self.custom_site)(NameAdmin)
self.assertTrue(
isinstance(self.custom_site._registry[Person],
admin.options.ModelAdmin)
)
def test_multiple_registration(self):
register(Traveler, Place)(NameAdmin)
self.assertTrue(
isinstance(self.default_site._registry[Traveler],
admin.options.ModelAdmin)
)
self.default_site.unregister(Traveler)
self.assertTrue(
isinstance(self.default_site._registry[Place],
admin.options.ModelAdmin)
)
self.default_site.unregister(Place)
def test_wrapped_class_not_a_model_admin(self):
self.assertRaisesMessage(ValueError, 'Wrapped class must subclass ModelAdmin.',
register(Person), CustomSite)
def test_custom_site_not_an_admin_site(self):
self.assertRaisesMessage(ValueError, 'site must subclass AdminSite',
register(Person, site=Traveler), NameAdmin)
|
PaulSec/theHarvester
|
refs/heads/master
|
htmlExport.py
|
19
|
from lib import markup
from lib import graphs
import re
class htmlExport():
def __init__(self, users, hosts, vhosts, dnsres,
dnsrev, file, domain, shodan, tldres):
self.users = users
self.hosts = hosts
self.vhost = vhosts
self.fname = file
self.dnsres = dnsres
self.dnsrev = dnsrev
self.domain = domain
self.shodan = shodan
self.tldres = tldres
self.style = ""
def styler(self):
a = """<style type='text/css'>body {
background: #FFFFFF top no-repeat;
}
h1 { font-family: arial, Times New Roman, times-roman, georgia, serif;
color: #680000;
margin: 0;
padding: 0px 0px 6px 0px;
font-size: 51px;
line-height: 44px;
letter-spacing: -2px;
font-weight: bold;
}
h3 { font-family: arial, Times New Roman, times-roman, georgia, serif;
color: #444;
margin: 0;
padding: 0px 0px 6px 0px;
font-size: 30px;
line-height: 44px;
letter-spacing: -2px;
font-weight: bold;
}
li { font-family: arial, Times New Roman, times-roman, georgia, serif;
color: #444;
margin: 0;
padding: 0px 0px 6px 0px;
font-size: 15px;
line-height: 15px;
letter-spacing: 0.4px;
}
h2{
font-family: arial, Times New Roman, times-roman, georgia, serif;
font-size: 48px;
line-height: 40px;
letter-spacing: -1px;
color: #680000 ;
margin: 0 0 0 0;
padding: 0 0 0 0;
font-weight: 100;
}
pre {
overflow: auto;
padding-left: 15px;
padding-right: 15px;
font-size: 11px;
line-height: 15px;
margin-top: 10px;
width: 93%;
display: block;
background-color: #eeeeee;
color: #000000;
max-height: 300px;
}
</style>
"""
self.style = a
def writehtml(self):
page = markup.page()
# page.init (title="theHarvester
# Results",css=('edge.css'),footer="Edge-security 2011")A
page.html()
self.styler()
page.head(self.style)
page.body()
page.h1("theHarvester results")
page.h2("for :" + self.domain)
page.h3("Dashboard:")
graph = graphs.BarGraph('vBar')
graph.values = [len(
self.users),
len(self.hosts),
len(self.vhost),
len(self.tldres),
len(self.shodan)]
        graph.labels = ['Emails', 'Hosts', 'Vhosts', 'TLD', 'Shodan']
graph.showValues = 1
page.body(graph.create())
page.h3("E-mails names found:")
if self.users != []:
page.ul(class_="userslist")
page.li(self.users, class_="useritem")
page.ul.close()
else:
page.h2("No emails found")
page.h3("Hosts found:")
if self.hosts != []:
page.ul(class_="softlist")
page.li(self.hosts, class_="softitem")
page.ul.close()
else:
page.h2("No hosts found")
if self.tldres != []:
page.h3("TLD domains found in TLD expansion:")
page.ul(class_="tldlist")
page.li(self.tldres, class_="tlditem")
page.ul.close()
if self.dnsres != []:
page.h3("Hosts found in DNS brute force:")
page.ul(class_="dnslist")
page.li(self.dnsres, class_="dnsitem")
page.ul.close()
if self.dnsrev != []:
page.h3("Hosts found with reverse lookup :")
page.ul(class_="dnsrevlist")
page.li(self.dnsrev, class_="dnsrevitem")
page.ul.close()
if self.vhost != []:
page.h3("Virtual hosts found:")
page.ul(class_="pathslist")
page.li(self.vhost, class_="pathitem")
page.ul.close()
if self.shodan != []:
shodanalysis = []
page.h3("Shodan results:")
for x in self.shodan:
res = x.split("SAPO")
page.h3(res[0])
page.a("Port :" + res[2])
page.pre(res[1])
page.pre.close()
ban = res[1]
reg_server = re.compile('Server:.*')
temp = reg_server.findall(res[1])
if temp != []:
shodanalysis.append(res[0] + ":" + temp[0])
if shodanalysis != []:
page.h3("Server technologies:")
repeated = []
for x in shodanalysis:
if x not in repeated:
page.pre(x)
page.pre.close()
repeated.append(x)
page.body.close()
page.html.close()
        file = open(self.fname, 'w')
        for x in page.content:
            try:
                file.write(x)
            except:
                print "Exception: " + x  # send to logs
        file.close()
return "ok"
|
GDGND/evm
|
refs/heads/master
|
allauth/socialaccount/providers/facebook/south_migrations/__init__.py
|
12133432
| |
LouisChen1905/OneAnalyser
|
refs/heads/master
|
src/one_analyse/test/argument/__init__.py
|
12133432
| |
BehavioralInsightsTeam/edx-platform
|
refs/heads/release-bit
|
openedx/core/djangoapps/course_groups/migrations/__init__.py
|
12133432
| |
andmos/ansible
|
refs/heads/devel
|
lib/ansible/modules/messaging/rabbitmq/__init__.py
|
12133432
| |
bowang/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/decode_png_op_test.py
|
134
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for DecodePngOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import image_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class DecodePngOpTest(test.TestCase):
def test16bit(self):
img_bytes = [[0, 255], [1024, 1024 + 255]]
# Encoded PNG bytes resulting from encoding the above img_bytes
# using go's image/png encoder.
encoded_bytes = [
137, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82, 0, 0, 0,
2, 0, 0, 0, 2, 16, 0, 0, 0, 0, 7, 77, 142, 187, 0, 0, 0, 21, 73, 68, 65,
84, 120, 156, 98, 98, 96, 96, 248, 207, 194, 2, 36, 1, 1, 0, 0, 255,
255, 6, 60, 1, 10, 68, 160, 26, 131, 0, 0, 0, 0, 73, 69, 78, 68, 174,
66, 96, 130
]
byte_string = bytes(bytearray(encoded_bytes))
img_in = constant_op.constant(byte_string, dtype=dtypes.string)
decode = array_ops.squeeze(
image_ops.decode_png(
img_in, dtype=dtypes.uint16))
with self.test_session():
decoded = decode.eval()
self.assertAllEqual(decoded, img_bytes)
if __name__ == "__main__":
test.main()
|
Venturi/cms
|
refs/heads/master
|
env/lib/python2.7/site-packages/cms/models/placeholdermodel.py
|
29
|
# -*- coding: utf-8 -*-
from cms.utils.compat import DJANGO_1_7
from django.contrib import admin
from django.contrib.auth import get_permission_codename
from django.db import models
from django.template.defaultfilters import title
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from cms.exceptions import LanguageError
from cms.utils.helpers import reversion_register
from cms.utils.i18n import get_language_object
from cms.utils.placeholder import PlaceholderNoAction, get_placeholder_conf
from cms.utils.urlutils import admin_reverse
@python_2_unicode_compatible
class Placeholder(models.Model):
"""
Attributes:
is_static Set to "True" for static placeholders by the template tag
is_editable If False the content of the placeholder is not editable in the frontend
"""
slot = models.CharField(_("slot"), max_length=255, db_index=True, editable=False)
default_width = models.PositiveSmallIntegerField(_("width"), null=True, editable=False)
cache_placeholder = True
is_static = False
is_editable = True
class Meta:
app_label = 'cms'
permissions = (
(u"use_structure", u"Can use Structure mode"),
)
def __str__(self):
return self.slot
def clear(self, language=None):
if language:
qs = self.cmsplugin_set.filter(language=language)
else:
qs = self.cmsplugin_set.all()
qs = qs.order_by('-depth').select_related()
for plugin in qs:
inst, cls = plugin.get_plugin_instance()
if inst and getattr(inst, 'cmsplugin_ptr', False):
inst.cmsplugin_ptr._no_reorder = True
inst._no_reorder = True
inst.delete(no_mp=True)
else:
plugin._no_reorder = True
plugin.delete(no_mp=True)
def get_label(self):
name = get_placeholder_conf("name", self.slot, default=title(self.slot))
name = _(name)
return name
def get_add_url(self):
return self._get_url('add_plugin')
def get_edit_url(self, plugin_pk):
return self._get_url('edit_plugin', plugin_pk)
def get_move_url(self):
return self._get_url('move_plugin')
def get_delete_url(self, plugin_pk):
return self._get_url('delete_plugin', plugin_pk)
def get_changelist_url(self):
return self._get_url('changelist')
def get_clear_url(self):
return self._get_url('clear_placeholder', self.pk)
def get_copy_url(self):
return self._get_url('copy_plugins')
def get_extra_menu_items(self):
from cms.plugin_pool import plugin_pool
return plugin_pool.get_extra_placeholder_menu_items(self)
def _get_url(self, key, pk=None):
model = self._get_attached_model()
args = []
if pk:
args.append(pk)
if not model:
return admin_reverse('cms_page_%s' % key, args=args)
else:
app_label = model._meta.app_label
model_name = model.__name__.lower()
return admin_reverse('%s_%s_%s' % (app_label, model_name, key), args=args)
def _get_permission(self, request, key):
"""
        Generic method to check the permissions for a request for a given key;
        the key can be 'add', 'change' or 'delete'. For each attached object,
        permission has to be granted either on the attached model or on the
        attached object:
        * 'add' and 'change' on the placeholder require either the add or
          the change permission on the attached object.
        * 'delete' requires either the add, change or delete permission.
"""
if getattr(request, 'user', None) and request.user.is_superuser:
return True
perm_keys = {
'add': ('add', 'change',),
'change': ('add', 'change',),
'delete': ('add', 'change', 'delete'),
}
if key not in perm_keys:
raise Exception("%s is not a valid perm key. "
"'Only 'add', 'change' and 'delete' are allowed" % key)
objects = [self.page] if self.page else self._get_attached_objects()
obj_perm = None
for obj in objects:
obj_perm = False
            # use a distinct loop variable so *key* keeps its value for the
            # next attached object
            for perm_key in perm_keys[key]:
                if self._get_object_permission(obj, request, perm_key):
obj_perm = True
break
if not obj_perm:
return False
return obj_perm
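    # Illustrative check (the request object here is hypothetical):
    #     placeholder._get_permission(request, 'change')
    # returns True only if every attached object grants add or change.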
def _get_object_permission(self, obj, request, key):
if not getattr(request, 'user', None):
return False
opts = obj._meta
perm_code = '%s.%s' % (opts.app_label, get_permission_codename(key, opts))
return request.user.has_perm(perm_code) or request.user.has_perm(perm_code, obj)
def has_change_permission(self, request):
return self._get_permission(request, 'change')
def has_add_permission(self, request):
return self._get_permission(request, 'add')
def has_delete_permission(self, request):
return self._get_permission(request, 'delete')
def render(self, context, width, lang=None, editable=True, use_cache=True):
'''
Set editable = False to disable front-end rendering for this render.
'''
from cms.plugin_rendering import render_placeholder
        if 'request' not in context:
return '<!-- missing request -->'
width = width or self.default_width
if width:
context['width'] = width
return render_placeholder(self, context, lang=lang, editable=editable,
use_cache=use_cache)
def _get_attached_fields(self):
"""
Returns an ITERATOR of all non-cmsplugin reverse foreign key related fields.
"""
from cms.models import CMSPlugin
if not hasattr(self, '_attached_fields_cache'):
self._attached_fields_cache = []
for rel in self._meta.get_all_related_objects():
if issubclass(rel.model, CMSPlugin):
continue
from cms.admin.placeholderadmin import PlaceholderAdminMixin
if DJANGO_1_7:
parent = rel.model
else:
parent = rel.related_model
if parent in admin.site._registry and isinstance(admin.site._registry[parent], PlaceholderAdminMixin):
field = getattr(self, rel.get_accessor_name())
try:
if field.count():
self._attached_fields_cache.append(rel.field)
except:
pass
return self._attached_fields_cache
def _get_attached_field(self):
from cms.models import CMSPlugin, StaticPlaceholder, Page
if not hasattr(self, '_attached_field_cache'):
self._attached_field_cache = None
relations = self._meta.get_all_related_objects()
for rel in relations:
if DJANGO_1_7:
parent = rel.model
else:
parent = rel.related_model
if parent == Page or parent == StaticPlaceholder:
relations.insert(0, relations.pop(relations.index(rel)))
for rel in relations:
if issubclass(rel.model, CMSPlugin):
continue
from cms.admin.placeholderadmin import PlaceholderAdminMixin
if DJANGO_1_7:
parent = rel.model
else:
parent = rel.related_model
if parent in admin.site._registry and isinstance(admin.site._registry[parent], PlaceholderAdminMixin):
field = getattr(self, rel.get_accessor_name())
try:
if field.count():
self._attached_field_cache = rel.field
break
except:
pass
return self._attached_field_cache
def _get_attached_field_name(self):
field = self._get_attached_field()
if field:
return field.name
return None
def _get_attached_model(self):
if hasattr(self, '_attached_model_cache'):
return self._attached_model_cache
if self.page or self.page_set.all().count():
from cms.models import Page
self._attached_model_cache = Page
return Page
field = self._get_attached_field()
if field:
self._attached_model_cache = field.model
return field.model
self._attached_model_cache = None
return None
def _get_attached_models(self):
"""
        Returns a list of models attached to this placeholder.
"""
if hasattr(self, '_attached_models_cache'):
return self._attached_models_cache
self._attached_models_cache = [field.model for field in self._get_attached_fields()]
return self._attached_models_cache
def _get_attached_objects(self):
"""
Returns a list of objects attached to this placeholder.
"""
return [obj for field in self._get_attached_fields()
for obj in getattr(self, field.related.get_accessor_name()).all()]
def page_getter(self):
if not hasattr(self, '_page'):
from cms.models.pagemodel import Page
try:
self._page = Page.objects.get(placeholders=self)
except (Page.DoesNotExist, Page.MultipleObjectsReturned,):
self._page = None
return self._page
def page_setter(self, value):
self._page = value
page = property(page_getter, page_setter)
def get_plugins_list(self, language=None):
return list(self.get_plugins(language))
def get_plugins(self, language=None):
if language:
return self.cmsplugin_set.filter(language=language).order_by('path')
else:
return self.cmsplugin_set.all().order_by('path')
def get_filled_languages(self):
"""
Returns language objects for every language for which the placeholder
has plugins.
        This is not cached as it's meant to be used in the frontend editor.
"""
languages = []
for lang_code in set(self.get_plugins().values_list('language', flat=True)):
try:
languages.append(get_language_object(lang_code))
except LanguageError:
pass
return languages
def get_cached_plugins(self):
return getattr(self, '_plugins_cache', [])
@property
def actions(self):
if not hasattr(self, '_actions_cache'):
field = self._get_attached_field()
self._actions_cache = getattr(field, 'actions', PlaceholderNoAction())
return self._actions_cache
reversion_register(Placeholder)  # follow=["cmsplugin_set"] not following plugins since they are a special case
|
aconrad/webassets
|
refs/heads/master
|
src/webassets/filter/cssrewrite/__init__.py
|
19
|
import os
from os.path import join
from webassets.utils import common_path_prefix
from webassets.utils import urlparse
from . import urlpath
try:
from collections import OrderedDict
except ImportError:
# Use an ordered dict when available, otherwise we simply don't
# support ordering - it's just a nice bonus.
OrderedDict = dict
from .base import CSSUrlRewriter, addsep, path2url
__all__ = ('CSSRewrite',)
class CSSRewrite(CSSUrlRewriter):
"""Source filter that rewrites relative urls in CSS files.
CSS allows you to specify urls relative to the location of the CSS file.
However, you may want to store your compressed assets in a different place
than source files, or merge source files from different locations. This
would then break these relative CSS references, since the base URL changed.
This filter transparently rewrites CSS ``url()`` instructions in the source
files to make them relative to the location of the output path. It works as
a *source filter*, i.e. it is applied individually to each source file
before they are merged.
No configuration is necessary.
The filter also supports a manual mode::
get_filter('cssrewrite', replace={'old_directory':'/custom/path/'})
This will rewrite all urls that point to files within ``old_directory`` to
use ``/custom/path`` as a prefix instead.
You may plug in your own replace function::
get_filter('cssrewrite', replace=lambda url: re.sub(r'^/?images/', '/images/', url))
get_filter('cssrewrite', replace=lambda url: '/images/'+url[7:] if url.startswith('images/') else url)
"""
# TODO: If we want to support inline assets, this needs to be
# updated to optionally convert URLs to absolute ones based on
# MEDIA_URL.
name = 'cssrewrite'
max_debug_level = 'merge'
def __init__(self, replace=False):
super(CSSRewrite, self).__init__()
self.replace = replace
def unique(self):
# Allow mixing the standard version of this filter, and replace mode.
return self.replace
def input(self, _in, out, **kw):
if self.replace not in (False, None) and not callable(self.replace):
# For replace mode, make sure we have all the directories to be
# rewritten in form of a url, so we can later easily match it
# against the urls encountered in the CSS.
            root = addsep(self.ctx.directory)
            replace_dict = OrderedDict()
for repldir, sub in self.replace.items():
repldir = addsep(os.path.normpath(join(root, repldir)))
replurl = path2url(repldir[len(common_path_prefix([root, repldir])):])
replace_dict[replurl] = sub
self.replace_dict = replace_dict
return super(CSSRewrite, self).input(_in, out, **kw)
def replace_url(self, url):
# Replace mode: manually adjust the location of files
if callable(self.replace):
return self.replace(url)
elif self.replace is not False:
for to_replace, sub in self.replace_dict.items():
targeturl = urlparse.urljoin(self.source_url, url)
if targeturl.startswith(to_replace):
url = "%s%s" % (sub, targeturl[len(to_replace):])
# Only apply the first match
break
# Default mode: auto correct relative urls
else:
# If path is an absolute one, keep it
parsed = urlparse.urlparse(url)
if not parsed.scheme and not parsed.path.startswith('/'):
abs_source_url = urlparse.urljoin(self.source_url, url)
# relpath() will not detect this case
if urlparse.urlparse(abs_source_url).scheme:
return abs_source_url
# rewritten url: relative path from new location (output)
# to location of referenced file (source + current url)
url = urlpath.relpath(self.output_url, abs_source_url)
return url
def get_additional_cache_keys(self, **kw):
if 'output_path' in kw:
return [os.path.dirname(kw['output_path'])]
return []
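# A minimal sketch of wiring this filter into a bundle (paths hypothetical;
# Bundle is webassets' public entry point):
#     from webassets import Bundle
#     css = Bundle('css/site.css', filters='cssrewrite',
#                  output='gen/packed.css')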
|
simartin/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/third_party/hyper/hyper/httplib_compat.py
|
48
|
# -*- coding: utf-8 -*-
"""
hyper/httplib_compat
~~~~~~~~~~~~~~~~~~~~
This file defines the publicly-accessible API for hyper. This API also
constitutes the abstraction layer between HTTP/1.1 and HTTP/2.
This API doesn't currently work, and is a lower priority than the HTTP/2
stack at this time.
"""
import socket
try:
import http.client as httplib
except ImportError:
import httplib
from .compat import ssl
from .http20.tls import wrap_socket
# If there's no NPN support, we're going to drop all support for HTTP/2.
try:
support_20 = ssl.HAS_NPN
except AttributeError:
support_20 = False
# The HTTPConnection object is currently always the underlying one.
HTTPConnection = httplib.HTTPConnection
HTTPSConnection = httplib.HTTPSConnection
# If we have NPN support, define our custom one, otherwise just use the
# default.
if support_20:
class HTTPSConnection(object):
"""
An object representing a single HTTPS connection, whether HTTP/1.1 or
HTTP/2.
More specifically, this object represents an abstraction over the
distinction. This object encapsulates a connection object for one of
the specific types of connection, and delegates most of the work to
that object.
"""
def __init__(self, *args, **kwargs):
# Whatever arguments and keyword arguments are passed to this
# object need to be saved off for when we initialise one of our
# subsidiary objects.
self._original_args = args
self._original_kwargs = kwargs
# Set up some variables we're going to use later.
self._sock = None
self._conn = None
# Prepare our backlog of method calls.
self._call_queue = []
def __getattr__(self, name):
# Anything that can't be found on this instance is presumably a
# property of underlying connection object.
# We need to be a little bit careful here. There are a few methods
            # that can act on a HTTPSConnection before it actually connects to
            # the remote server. We don't want to change the semantics of the
            # HTTPSConnection, so we need to spot these and queue them up. When
# we actually create the backing Connection, we'll apply them
# immediately. These methods can't throw exceptions, so we should
# be fine.
delay_methods = ["set_tunnel", "set_debuglevel"]
if self._conn is None and name in delay_methods:
# Return a little closure that saves off the method call to
# apply later.
def capture(obj, *args, **kwargs):
self._call_queue.append((name, args, kwargs))
return capture
elif self._conn is None:
# We're being told to do something! We can now connect to the
# remote server and build the connection object.
self._delayed_connect()
# Call through to the underlying object.
return getattr(self._conn, name)
def _delayed_connect(self):
"""
Called when we need to work out what kind of HTTPS connection we're
actually going to use.
"""
            # As a quick shortcut, we create a throwaway HTTPConnection
            # object to parse the args and kwargs for us, and grab the
            # values out.
tempconn = httplib.HTTPConnection(*self._original_args,
**self._original_kwargs)
host = tempconn.host
port = tempconn.port
timeout = tempconn.timeout
source_address = tempconn.source_address
# Connect to the remote server.
sock = socket.create_connection(
(host, port),
timeout,
source_address
)
# Wrap it in TLS. This needs to be looked at in future when I pull
# in the TLS verification logic from urllib3, but right now we
# accept insecurity because no-one's using this anyway.
sock = wrap_socket(sock, host)
# At this early stage the library can't do HTTP/2, so who cares?
tempconn.sock = sock
self._sock = sock
self._conn = tempconn
return
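# Illustrative use of the compat layer (host is hypothetical); attribute
# access transparently triggers _delayed_connect() on first real use:
#     conn = HTTPSConnection('example.com', 443)
#     conn.request('GET', '/')
#     resp = conn.getresponse()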
|
seadsystem/website
|
refs/heads/ddavisscott-patch-2
|
web2py/gluon/contrib/pyrtf/Styles.py
|
44
|
"""
A Style is a collection of PropertySets that can be applied to a particular RTF element.
At present there are only two, Text and Paragraph, but ListStyles will be added soon too.
"""
from copy import deepcopy  # used by the Copy() methods below
from PropertySets import *
class TextStyle :
def __init__( self, text_props, name=None, shading_props=None ) :
self.SetTextPropertySet ( text_props )
self.SetName ( name )
self.SetShadingPropertySet( shading_props )
def Copy( self ) :
return deepcopy( self )
def SetName( self, value ) :
self.Name = value
return self
def SetTextPropertySet( self, value ) :
assert isinstance( value, TextPropertySet )
self.TextPropertySet = value
return self
def SetShadingPropertySet( self, value ) :
assert value is None or isinstance( value, ShadingPropertySet )
self.ShadingPropertySet = value or ShadingPropertySet()
return self
class ParagraphStyle :
def __init__( self, name, text_style, paragraph_props=None, frame_props=None, shading_props=None ) :
        # A style must have a Font and a Font Size, but the Text property set
        # doesn't make these mandatory (so that it can also be used for
        # overrides), so at this point we need to make sure we have these values set
if not text_style.TextPropertySet.Font : raise Exception( 'Paragraph Styles must have a Font specified.' )
if not text_style.TextPropertySet.Size : raise Exception( 'Paragraph Styles must have a Font Size specified.' )
self.SetName ( name )
self.SetTextStyle ( text_style )
self.SetParagraphPropertySet( paragraph_props )
self.SetFramePropertySet ( frame_props )
self.SetShadingPropertySet ( shading_props )
self.SetBasedOn( None )
self.SetNext ( None )
def Copy( self ) :
return deepcopy( self )
def SetName( self, value ) :
self.Name = value
return self
def SetTextStyle( self, value ) :
assert isinstance( value, TextStyle )
self.TextStyle = value
return self
def SetParagraphPropertySet( self, value ) :
assert value is None or isinstance( value, ParagraphPropertySet )
self.ParagraphPropertySet = value or ParagraphPropertySet()
return self
def SetFramePropertySet( self, value ) :
assert value is None or isinstance( value, FramePropertySet )
self.FramePropertySet = value or FramePropertySet()
return self
def SetShadingPropertySet( self, value ) :
"""Set the background shading for the paragraph."""
assert value is None or isinstance( value, ShadingPropertySet )
self.ShadingPropertySet = value or ShadingPropertySet()
return self
def SetBasedOn( self, value ) :
"""Set the Paragraph Style that this one is based on."""
assert not value or isinstance( value, ParagraphStyle )
self.BasedOn = value
return self
def SetNext( self, value ) :
"""Set the Paragraph Style that should follow this one."""
assert not value or isinstance( value, ParagraphStyle )
self.Next = value
return self
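# A minimal construction sketch (the TextPropertySet arguments are
# assumptions inferred from the Font/Size checks above, not a verified
# signature):
#     normal_text = TextStyle( TextPropertySet( some_font, 22 ) )
#     normal = ParagraphStyle( 'Normal', normal_text )
#     heading = ParagraphStyle( 'Heading 1', normal_text.Copy() ).SetBasedOn( normal )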
|
bitifirefly/edx-platform
|
refs/heads/master
|
common/djangoapps/student/views.py
|
12
|
"""
Student Views
"""
import datetime
import logging
import uuid
import json
import warnings
from collections import defaultdict
from pytz import UTC
from requests import HTTPError
from ipware.ip import get_ip
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.contrib import messages
from django.core.context_processors import csrf
from django.core import mail
from django.core.urlresolvers import reverse
from django.core.validators import validate_email, ValidationError
from django.db import IntegrityError, transaction
from django.http import (HttpResponse, HttpResponseBadRequest, HttpResponseForbidden,
HttpResponseServerError, Http404)
from django.shortcuts import redirect
from django.utils.translation import ungettext
from django.utils.http import base36_to_int
from django.utils.translation import ugettext as _, get_language
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST, require_GET
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.template.response import TemplateResponse
from ratelimitbackend.exceptions import RateLimitException
from social.apps.django_app import utils as social_utils
from social.backends import oauth as social_oauth
from social.exceptions import AuthException, AuthAlreadyAssociated
from edxmako.shortcuts import render_to_response, render_to_string
from course_modes.models import CourseMode
from shoppingcart.api import order_history
from student.models import (
Registration, UserProfile,
PendingEmailChange, CourseEnrollment, CourseEnrollmentAttribute, unique_id_for_user,
CourseEnrollmentAllowed, UserStanding, LoginFailures,
create_comments_service_user, PasswordHistory, UserSignupSource,
DashboardConfiguration, LinkedInAddToProfileConfiguration, ManualEnrollmentAudit, ALLOWEDTOENROLL_TO_ENROLLED)
from student.forms import AccountCreationForm, PasswordResetFormNoActive
from verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from certificates.models import CertificateStatuses, certificate_status_for_student
from certificates.api import ( # pylint: disable=import-error
get_certificate_url,
has_html_certificates_enabled,
)
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore import ModuleStoreEnum
from collections import namedtuple
from courseware.courses import get_courses, sort_by_announcement, sort_by_start_date # pylint: disable=import-error
from courseware.access import has_access
from django_comment_common.models import Role
from external_auth.models import ExternalAuthMap
import external_auth.views
from external_auth.login_and_register import (
login as external_auth_login,
register as external_auth_register
)
from bulk_email.models import Optout, CourseAuthorization
from lang_pref import LANGUAGE_KEY
import track.views
import dogstats_wrapper as dog_stats_api
from util.db import commit_on_success_with_read_committed
from util.json_request import JsonResponse
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.milestones_helpers import (
get_pre_requisite_courses_not_completed,
)
from microsite_configuration import microsite
from util.password_policy_validators import (
validate_password_length, validate_password_complexity,
validate_password_dictionary
)
import third_party_auth
from third_party_auth import pipeline, provider
from student.helpers import (
check_verify_status_by_course,
auth_pipeline_urls, get_next_url_for_login_page
)
from student.cookies import set_logged_in_cookies, delete_logged_in_cookies
from student.models import anonymous_id_for_user
from shoppingcart.models import DonationConfiguration, CourseRegistrationCode
from embargo import api as embargo_api
import analytics
from eventtracking import tracker
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifications
# Note that this lives in openedx, so this dependency should be refactored.
from openedx.core.djangoapps.user_api.preferences import api as preferences_api
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display') # pylint: disable=invalid-name
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
def csrf_token(context):
"""A csrf token that can be included in a form."""
token = context.get('csrf_token', '')
if token == 'NOTPROVIDED':
return ''
return (u'<div style="display:none"><input type="hidden"'
' name="csrfmiddlewaretoken" value="%s" /></div>' % (token))
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
"""
Render the edX main page.
    extra_context is used to allow immediate display of certain modal windows, e.g. signup,
as used by external_auth.
"""
if extra_context is None:
extra_context = {}
# The course selection work is done in courseware.courses.
domain = settings.FEATURES.get('FORCE_UNIVERSITY_DOMAIN') # normally False
# do explicit check, because domain=None is valid
if domain is False:
domain = request.META.get('HTTP_HOST')
courses = get_courses(user, domain=domain)
if microsite.get_value("ENABLE_COURSE_SORTING_BY_START_DATE",
settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"]):
courses = sort_by_start_date(courses)
else:
courses = sort_by_announcement(courses)
context = {'courses': courses}
context.update(extra_context)
return render_to_response('index.html', context)
def process_survey_link(survey_link, user):
"""
If {UNIQUE_ID} appears in the link, replace it with a unique id for the user.
Currently, this is sha1(user.username). Otherwise, return survey_link.
"""
return survey_link.format(UNIQUE_ID=unique_id_for_user(user))
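# Illustrative call (URL is hypothetical):
#     process_survey_link('http://example.com/survey?uid={UNIQUE_ID}', user)
# returns the link with {UNIQUE_ID} replaced by the user's unique hash;
# links without the placeholder pass through unchanged.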
def cert_info(user, course_overview, course_mode):
"""
Get the certificate info needed to render the dashboard section for the given
student and course.
Arguments:
user (User): A user.
course_overview (CourseOverview): A course.
course_mode (str): The enrollment mode (honor, verified, audit, etc.)
Returns:
dict: A dictionary with keys:
'status': one of 'generating', 'ready', 'notpassing', 'processing', 'restricted'
'show_download_url': bool
'download_url': url, only present if show_download_url is True
'show_disabled_download_button': bool -- true if state is 'generating'
'show_survey_button': bool
'survey_url': url, only if show_survey_button is True
'grade': if status is not 'processing'
"""
if not course_overview.may_certify():
return {}
return _cert_info(
user,
course_overview,
certificate_status_for_student(user, course_overview.id),
course_mode
)
def reverification_info(statuses):
"""
    Returns reverification-related information for *all* of the user's enrollments whose
reverification status is in statuses.
Args:
statuses (list): a list of reverification statuses we want information for
example: ["must_reverify", "denied"]
Returns:
dictionary of lists: dictionary with one key per status, e.g.
dict["must_reverify"] = []
dict["must_reverify"] = [some information]
"""
reverifications = defaultdict(list)
# Sort the data by the reverification_end_date
for status in statuses:
if reverifications[status]:
reverifications[status].sort(key=lambda x: x.date)
return reverifications
def get_course_enrollments(user, org_to_include, orgs_to_exclude):
"""
Given a user, return a filtered set of his or her course enrollments.
Arguments:
user (User): the user in question.
org_to_include (str): for use in Microsites. If not None, ONLY courses
of this org will be returned.
orgs_to_exclude (list[str]): If org_to_include is not None, this
argument is ignored. Else, courses of this org will be excluded.
Returns:
generator[CourseEnrollment]: a sequence of enrollments to be displayed
on the user's dashboard.
"""
for enrollment in CourseEnrollment.enrollments_for_user(user):
# If the course is missing or broken, log an error and skip it.
course_overview = enrollment.course_overview
if not course_overview:
log.error(
"User %s enrolled in broken or non-existent course %s",
user.username,
enrollment.course_id
)
continue
# If we are in a Microsite, then filter out anything that is not
# attributed (by ORG) to that Microsite.
if org_to_include and course_overview.location.org != org_to_include:
continue
# Conversely, if we are not in a Microsite, then filter out any enrollments
# with courses attributed (by ORG) to Microsites.
elif course_overview.location.org in orgs_to_exclude:
continue
# Else, include the enrollment.
else:
yield enrollment
def _cert_info(user, course_overview, cert_status, course_mode): # pylint: disable=unused-argument
"""
Implements the logic for cert_info -- split out for testing.
Arguments:
user (User): A user.
course_overview (CourseOverview): A course.
course_mode (str): The enrollment mode (honor, verified, audit, etc.)
"""
# simplify the status for the template using this lookup table
template_state = {
CertificateStatuses.generating: 'generating',
CertificateStatuses.regenerating: 'generating',
CertificateStatuses.downloadable: 'ready',
CertificateStatuses.notpassing: 'notpassing',
CertificateStatuses.restricted: 'restricted',
}
default_status = 'processing'
default_info = {'status': default_status,
'show_disabled_download_button': False,
'show_download_url': False,
'show_survey_button': False,
}
if cert_status is None:
return default_info
is_hidden_status = cert_status['status'] in ('unavailable', 'processing', 'generating', 'notpassing')
if course_overview.certificates_display_behavior == 'early_no_info' and is_hidden_status:
return None
status = template_state.get(cert_status['status'], default_status)
status_dict = {
'status': status,
'show_download_url': status == 'ready',
'show_disabled_download_button': status == 'generating',
'mode': cert_status.get('mode', None),
'linked_in_url': None
}
if (status in ('generating', 'ready', 'notpassing', 'restricted') and
course_overview.end_of_course_survey_url is not None):
status_dict.update({
'show_survey_button': True,
'survey_url': process_survey_link(course_overview.end_of_course_survey_url, user)})
else:
status_dict['show_survey_button'] = False
if status == 'ready':
# showing the certificate web view button if certificate is ready state and feature flags are enabled.
if has_html_certificates_enabled(course_overview.id, course_overview):
if course_overview.has_any_active_web_certificate:
certificate_url = get_certificate_url(
user_id=user.id,
course_id=unicode(course_overview.id),
)
status_dict.update({
'show_cert_web_view': True,
'cert_web_view_url': u'{url}'.format(url=certificate_url)
})
else:
# don't show download certificate button if we don't have an active certificate for course
status_dict['show_download_url'] = False
elif 'download_url' not in cert_status:
log.warning(
u"User %s has a downloadable cert for %s, but no download url",
user.username,
course_overview.id
)
return default_info
else:
status_dict['download_url'] = cert_status['download_url']
# If enabled, show the LinkedIn "add to profile" button
# Clicking this button sends the user to LinkedIn where they
# can add the certificate information to their profile.
linkedin_config = LinkedInAddToProfileConfiguration.current()
if linkedin_config.enabled:
status_dict['linked_in_url'] = linkedin_config.add_to_profile_url(
course_overview.id,
course_overview.display_name,
cert_status.get('mode'),
cert_status['download_url']
)
if status in ('generating', 'ready', 'notpassing', 'restricted'):
if 'grade' not in cert_status:
# Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,
# who need to be regraded (we weren't tracking 'notpassing' at first).
# We can add a log.warning here once we think it shouldn't happen.
return default_info
else:
status_dict['grade'] = cert_status['grade']
return status_dict
@ensure_csrf_cookie
def signin_user(request):
"""Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
external_auth_response = external_auth_login(request)
if external_auth_response is not None:
return external_auth_response
# Determine the URL to redirect to following login:
redirect_to = get_next_url_for_login_page(request)
if request.user.is_authenticated():
return redirect(redirect_to)
third_party_auth_error = None
for msg in messages.get_messages(request):
if msg.extra_tags.split()[0] == "social-auth":
# msg may or may not be translated. Try translating [again] in case we are able to:
third_party_auth_error = _(unicode(msg)) # pylint: disable=translation-of-non-string
break
context = {
'login_redirect_url': redirect_to, # This gets added to the query string of the "Sign In" button in the header
# Bool injected into JS to submit form if we're inside a running third-
# party auth pipeline; distinct from the actual instance of the running
# pipeline, if any.
'pipeline_running': 'true' if pipeline.running(request) else 'false',
'pipeline_url': auth_pipeline_urls(pipeline.AUTH_ENTRY_LOGIN, redirect_url=redirect_to),
'platform_name': microsite.get_value(
'platform_name',
settings.PLATFORM_NAME
),
'third_party_auth_error': third_party_auth_error
}
return render_to_response('login.html', context)
@ensure_csrf_cookie
def register_user(request, extra_context=None):
"""Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
# Determine the URL to redirect to following login:
redirect_to = get_next_url_for_login_page(request)
if request.user.is_authenticated():
return redirect(redirect_to)
external_auth_response = external_auth_register(request)
if external_auth_response is not None:
return external_auth_response
context = {
'login_redirect_url': redirect_to, # This gets added to the query string of the "Sign In" button in the header
'email': '',
'name': '',
'running_pipeline': None,
'pipeline_urls': auth_pipeline_urls(pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to),
'platform_name': microsite.get_value(
'platform_name',
settings.PLATFORM_NAME
),
'selected_provider': '',
'username': '',
}
if extra_context is not None:
context.update(extra_context)
if context.get("extauth_domain", '').startswith(external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
return render_to_response('register-shib.html', context)
# If third-party auth is enabled, prepopulate the form with data from the
# selected provider.
if third_party_auth.is_enabled() and pipeline.running(request):
running_pipeline = pipeline.get(request)
current_provider = provider.Registry.get_from_pipeline(running_pipeline)
if current_provider is not None:
overrides = current_provider.get_register_form_data(running_pipeline.get('kwargs'))
overrides['running_pipeline'] = running_pipeline
overrides['selected_provider'] = current_provider.name
context.update(overrides)
return render_to_response('register.html', context)
def complete_course_mode_info(course_id, enrollment, modes=None):
"""
    Compute some more information from the given course modes
    and the user's current enrollment
    Returns the following information:
    - whether to show the course upsell information
    - number of days until they can't upsell anymore
"""
if modes is None:
modes = CourseMode.modes_for_course_dict(course_id)
mode_info = {'show_upsell': False, 'days_for_upsell': None}
# we want to know if the user is already verified and if verified is an
# option
if 'verified' in modes and enrollment.mode != 'verified':
mode_info['show_upsell'] = True
# if there is an expiration date, find out how long from now it is
if modes['verified'].expiration_datetime:
today = datetime.datetime.now(UTC).date()
mode_info['days_for_upsell'] = (modes['verified'].expiration_datetime.date() - today).days
return mode_info
def is_course_blocked(request, redeemed_registration_codes, course_key):
"""Checking either registration is blocked or not ."""
blocked = False
for redeemed_registration in redeemed_registration_codes:
        # registration codes may be generated via the Bulk Purchase scenario;
        # we only have to check invoice-generated registration codes and
        # verify that their invoice is still valid
if redeemed_registration.invoice_item:
if not getattr(redeemed_registration.invoice_item.invoice, 'is_valid'):
blocked = True
# disabling email notifications for unpaid registration courses
Optout.objects.get_or_create(user=request.user, course_id=course_key)
log.info(
u"User %s (%s) opted out of receiving emails from course %s",
request.user.username,
request.user.email,
course_key
)
track.views.server_track(request, "change-email1-settings", {"receive_emails": "no", "course": course_key.to_deprecated_string()}, page='dashboard')
break
return blocked
@login_required
@ensure_csrf_cookie
def dashboard(request):
user = request.user
platform_name = microsite.get_value("platform_name", settings.PLATFORM_NAME)
# for microsites, we want to filter and only show enrollments for courses within
# the microsites 'ORG'
course_org_filter = microsite.get_value('course_org_filter')
# Let's filter out any courses in an "org" that has been declared to be
# in a Microsite
org_filter_out_set = microsite.get_all_orgs()
# remove our current Microsite from the "filter out" list, if applicable
if course_org_filter:
org_filter_out_set.remove(course_org_filter)
# Build our (course, enrollment) list for the user, but ignore any courses that no
# longer exist (because the course IDs have changed). Still, we don't delete those
# enrollments, because it could have been a data push snafu.
course_enrollments = list(get_course_enrollments(user, course_org_filter, org_filter_out_set))
# sort the enrollment pairs by the enrollment date
course_enrollments.sort(key=lambda x: x.created, reverse=True)
# Retrieve the course modes for each course
enrolled_course_ids = [enrollment.course_id for enrollment in course_enrollments]
__, unexpired_course_modes = CourseMode.all_and_unexpired_modes_for_courses(enrolled_course_ids)
course_modes_by_course = {
course_id: {
mode.slug: mode
for mode in modes
}
for course_id, modes in unexpired_course_modes.iteritems()
}
# Check to see if the student has recently enrolled in a course.
# If so, display a notification message confirming the enrollment.
enrollment_message = _create_recent_enrollment_message(
course_enrollments, course_modes_by_course
)
course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)
message = ""
if not user.is_active:
message = render_to_string(
'registration/activate_account_notice.html',
{'email': user.email, 'platform_name': platform_name}
)
# Global staff can see what courses errored on their dashboard
staff_access = False
errored_courses = {}
if has_access(user, 'staff', 'global'):
# Show any courses that errored on load
staff_access = True
errored_courses = modulestore().get_errored_courses()
show_courseware_links_for = frozenset(
enrollment.course_id for enrollment in course_enrollments
if has_access(request.user, 'load', enrollment.course_overview)
and has_access(request.user, 'view_courseware_with_prerequisites', enrollment.course_overview)
)
# Construct a dictionary of course mode information
# used to render the course list. We re-use the course modes dict
# we loaded earlier to avoid hitting the database.
course_mode_info = {
enrollment.course_id: complete_course_mode_info(
enrollment.course_id, enrollment,
modes=course_modes_by_course[enrollment.course_id]
)
for enrollment in course_enrollments
}
# Determine the per-course verification status
# This is a dictionary in which the keys are course locators
# and the values are one of:
#
# VERIFY_STATUS_NEED_TO_VERIFY
# VERIFY_STATUS_SUBMITTED
# VERIFY_STATUS_APPROVED
# VERIFY_STATUS_MISSED_DEADLINE
#
# Each of which correspond to a particular message to display
# next to the course on the dashboard.
#
# If a course is not included in this dictionary,
# there is no verification messaging to display.
verify_status_by_course = check_verify_status_by_course(user, course_enrollments)
cert_statuses = {
enrollment.course_id: cert_info(request.user, enrollment.course_overview, enrollment.mode)
for enrollment in course_enrollments
}
# only show email settings for Mongo course and when bulk email is turned on
show_email_settings_for = frozenset(
enrollment.course_id for enrollment in course_enrollments if (
settings.FEATURES['ENABLE_INSTRUCTOR_EMAIL'] and
modulestore().get_modulestore_type(enrollment.course_id) != ModuleStoreEnum.Type.xml and
CourseAuthorization.instructor_email_enabled(enrollment.course_id)
)
)
# Verification Attempts
# Used to generate the "you must reverify for course x" banner
verification_status, verification_msg = SoftwareSecurePhotoVerification.user_status(user)
# Gets data for midcourse reverifications, if any are necessary or have failed
statuses = ["approved", "denied", "pending", "must_reverify"]
reverifications = reverification_info(statuses)
show_refund_option_for = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.refundable()
)
block_courses = frozenset(
enrollment.course_id for enrollment in course_enrollments
if is_course_blocked(
request,
CourseRegistrationCode.objects.filter(
course_id=enrollment.course_id,
registrationcoderedemption__redeemed_by=request.user
),
enrollment.course_id
)
)
enrolled_courses_either_paid = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.is_paid_course()
)
# If there are *any* denied reverifications that have not been toggled off,
# we'll display the banner
denied_banner = any(item.display for item in reverifications["denied"])
# Populate the Order History for the side-bar.
    order_history_list = order_history(
        user,
        course_org_filter=course_org_filter,
        org_filter_out_set=org_filter_out_set
    )
# get list of courses having pre-requisites yet to be completed
courses_having_prerequisites = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.course_overview.pre_requisite_courses
)
courses_requirements_not_met = get_pre_requisite_courses_not_completed(user, courses_having_prerequisites)
if 'notlive' in request.GET:
redirect_message = _("The course you are looking for does not start until {date}.").format(
date=request.GET['notlive']
)
else:
redirect_message = ''
context = {
'enrollment_message': enrollment_message,
'redirect_message': redirect_message,
'course_enrollments': course_enrollments,
'course_optouts': course_optouts,
'message': message,
'staff_access': staff_access,
'errored_courses': errored_courses,
'show_courseware_links_for': show_courseware_links_for,
'all_course_modes': course_mode_info,
'cert_statuses': cert_statuses,
'credit_statuses': _credit_statuses(user, course_enrollments),
'show_email_settings_for': show_email_settings_for,
'reverifications': reverifications,
'verification_status': verification_status,
'verification_status_by_course': verify_status_by_course,
'verification_msg': verification_msg,
'show_refund_option_for': show_refund_option_for,
'block_courses': block_courses,
'denied_banner': denied_banner,
'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
'user': user,
'logout_url': reverse(logout_user),
'platform_name': platform_name,
'enrolled_courses_either_paid': enrolled_courses_either_paid,
'provider_states': [],
'order_history_list': order_history_list,
'courses_requirements_not_met': courses_requirements_not_met,
'nav_hidden': True,
}
return render_to_response('dashboard.html', context)
def _create_recent_enrollment_message(course_enrollments, course_modes): # pylint: disable=invalid-name
"""
Builds a recent course enrollment message.
Constructs a new message template based on any recent course enrollments
for the student.
Args:
course_enrollments (list[CourseEnrollment]): a list of course enrollments.
course_modes (dict): Mapping of course ID's to course mode dictionaries.
Returns:
A string representing the HTML message output from the message template.
None if there are no recently enrolled courses.
"""
recently_enrolled_courses = _get_recently_enrolled_courses(course_enrollments)
if recently_enrolled_courses:
messages = [
{
"course_id": enrollment.course_overview.id,
"course_name": enrollment.course_overview.display_name,
"allow_donation": _allow_donation(course_modes, enrollment.course_overview.id, enrollment)
}
for enrollment in recently_enrolled_courses
]
platform_name = microsite.get_value('platform_name', settings.PLATFORM_NAME)
return render_to_string(
'enrollment/course_enrollment_message.html',
{'course_enrollment_messages': messages, 'platform_name': platform_name}
)
def _get_recently_enrolled_courses(course_enrollments):
"""
Given a list of enrollments, filter out all but recent enrollments.
Args:
course_enrollments (list[CourseEnrollment]): A list of course enrollments.
Returns:
list[CourseEnrollment]: A list of recent course enrollments.
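    Example (illustrative; assumes the configured recent-enrollment window
    covers the enrollments' creation dates):
        >>> recent = _get_recently_enrolled_courses(course_enrollments)
        >>> all(enrollment.is_active for enrollment in recent)
        True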
"""
seconds = DashboardConfiguration.current().recent_enrollment_time_delta
time_delta = (datetime.datetime.now(UTC) - datetime.timedelta(seconds=seconds))
return [
enrollment for enrollment in course_enrollments
# If the enrollment has no created date, we are explicitly excluding the course
# from the list of recent enrollments.
if enrollment.is_active and enrollment.created > time_delta
]
def _allow_donation(course_modes, course_id, enrollment):
"""Determines if the dashboard will request donations for the given course.
Check if donations are configured for the platform, and if the current course is accepting donations.
Args:
course_modes (dict): Mapping of course ID's to course mode dictionaries.
course_id (str): The unique identifier for the course.
enrollment(CourseEnrollment): The enrollment object in which the user is enrolled
Returns:
True if the course is allowing donations.
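    Example (illustrative sketch; hypothetical course id and a stub mode
    object, assuming donations are enabled and enrollment.mode == 'honor'):
        >>> class FakeMode(object):
        ...     min_price = 0
        >>> modes = {'edX/DemoX/Demo_Course': {'honor': FakeMode()}}
        >>> _allow_donation(modes, 'edX/DemoX/Demo_Course', enrollment)
        True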
"""
donations_enabled = DonationConfiguration.current().enabled
    return (
        donations_enabled and
        enrollment.mode in course_modes[course_id] and
        course_modes[course_id][enrollment.mode].min_price == 0
    )
def _update_email_opt_in(request, org):
"""Helper function used to hit the profile API if email opt-in is enabled."""
email_opt_in = request.POST.get('email_opt_in')
if email_opt_in is not None:
email_opt_in_boolean = email_opt_in == 'true'
preferences_api.update_email_opt_in(request.user, org, email_opt_in_boolean)
def _credit_statuses(user, course_enrollments):
"""
Retrieve the status for credit courses.
    A credit course is a course for which a user can purchase
college credit. The current flow is:
1. User becomes eligible for credit (submits verifications, passes the course, etc.)
2. User purchases credit from a particular credit provider.
3. User requests credit from the provider, usually creating an account on the provider's site.
4. The credit provider notifies us whether the user's request for credit has been accepted or rejected.
The dashboard is responsible for communicating the user's state in this flow.
Arguments:
user (User): The currently logged-in user.
course_enrollments (list[CourseEnrollment]): List of enrollments for the
user.
Returns: dict
The returned dictionary has keys that are `CourseKey`s and values that
are dictionaries with:
* eligible (bool): True if the user is eligible for credit in this course.
* deadline (datetime): The deadline for purchasing and requesting credit for this course.
* purchased (bool): Whether the user has purchased credit for this course.
* provider_name (string): The display name of the credit provider.
* provider_status_url (string): A URL the user can visit to check on their credit request status.
* request_status (string): Either "pending", "approved", or "rejected"
* error (bool): If true, an unexpected error occurred when retrieving the credit status,
so the user should contact the support team.
Example:
>>> _credit_statuses(user, course_enrollments)
{
CourseKey.from_string("edX/DemoX/Demo_Course"): {
"course_key": "edX/DemoX/Demo_Course",
"eligible": True,
"deadline": 2015-11-23 00:00:00 UTC,
"purchased": True,
"provider_name": "Hogwarts",
"provider_status_url": "http://example.com/status",
"request_status": "pending",
"error": False
}
}
"""
from openedx.core.djangoapps.credit import api as credit_api
# Feature flag off
if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY"):
return {}
request_status_by_course = {
request["course_key"]: request["status"]
for request in credit_api.get_credit_requests_for_user(user.username)
}
credit_enrollments = {
enrollment.course_id: enrollment
for enrollment in course_enrollments
if enrollment.mode == "credit"
}
# When a user purchases credit in a course, the user's enrollment
# mode is set to "credit" and an enrollment attribute is set
# with the ID of the credit provider. We retrieve *all* such attributes
# here to minimize the number of database queries.
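    # Illustrative shape of the resulting dict (hypothetical provider id):
    #     {CourseKey.from_string("edX/DemoX/Demo_Course"): u"hogwarts"}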
purchased_credit_providers = {
attribute.enrollment.course_id: attribute.value
for attribute in CourseEnrollmentAttribute.objects.filter(
namespace="credit",
name="provider_id",
enrollment__in=credit_enrollments.values()
).select_related("enrollment")
}
provider_info_by_id = {
provider["id"]: provider
for provider in credit_api.get_credit_providers()
}
statuses = {}
for eligibility in credit_api.get_eligibilities_for_user(user.username):
course_key = CourseKey.from_string(unicode(eligibility["course_key"]))
status = {
"course_key": unicode(course_key),
"eligible": True,
"deadline": eligibility["deadline"],
"purchased": course_key in credit_enrollments,
"provider_name": None,
"provider_status_url": None,
"provider_id": None,
"request_status": request_status_by_course.get(course_key),
"error": False,
}
# If the user has purchased credit, then include information about the credit
# provider from which the user purchased credit.
        # We retrieve the provider's ID from an "enrollment attribute" set on the user's
# enrollment when the user's order for credit is fulfilled by the E-Commerce service.
if status["purchased"]:
provider_id = purchased_credit_providers.get(course_key)
if provider_id is None:
status["error"] = True
log.error(
u"Could not find credit provider associated with credit enrollment "
u"for user %s in course %s. The user will not be able to see his or her "
u"credit request status on the student dashboard. This attribute should "
u"have been set when the user purchased credit in the course.",
user.id, course_key
)
else:
provider_info = provider_info_by_id.get(provider_id, {})
status["provider_name"] = provider_info.get("display_name")
status["provider_status_url"] = provider_info.get("status_url")
status["provider_id"] = provider_id
statuses[course_key] = status
return statuses
@require_POST
@commit_on_success_with_read_committed
def change_enrollment(request, check_access=True):
"""
Modify the enrollment status for the logged-in user.
The request parameter must be a POST request (other methods return 405)
that specifies course_id and enrollment_action parameters. If course_id or
enrollment_action is not specified, if course_id is not valid, if
enrollment_action is something other than "enroll" or "unenroll", if
enrollment_action is "enroll" and enrollment is closed for the course, or
if enrollment_action is "unenroll" and the user is not enrolled in the
course, a 400 error will be returned. If the user is not logged in, 403
will be returned; it is important that only this case return 403 so the
front end can redirect the user to a registration or login page when this
happens. This function should only be called from an AJAX request, so
the error messages in the responses should never actually be user-visible.
Args:
request (`Request`): The Django request object
Keyword Args:
check_access (boolean): If True, we check that an accessible course actually
exists for the given course_key before we enroll the student.
            The default is True; pass False only for legacy code or code with
            non-standard flows (e.g. beta tester invitations), since any
            standard enrollment flow should check access.
Returns:
Response
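    Example (illustrative AJAX request; hypothetical course id):
        POST /change_enrollment
            course_id=edX/DemoX/Demo_Course
            enrollment_action=enroll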
"""
# Get the user
user = request.user
# Ensure the user is authenticated
if not user.is_authenticated():
return HttpResponseForbidden()
# Ensure we received a course_id
action = request.POST.get("enrollment_action")
if 'course_id' not in request.POST:
return HttpResponseBadRequest(_("Course id not specified"))
try:
course_id = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get("course_id"))
except InvalidKeyError:
log.warning(
u"User %s tried to %s with invalid course id: %s",
user.username,
action,
request.POST.get("course_id"),
)
return HttpResponseBadRequest(_("Invalid course id"))
if action == "enroll":
# Make sure the course exists
        # We skip this check on unenroll so that enrollments in courses that no longer exist can still be removed
if not modulestore().has_course(course_id):
log.warning(
u"User %s tried to enroll in non-existent course %s",
user.username,
course_id
)
return HttpResponseBadRequest(_("Course id is invalid"))
# Record the user's email opt-in preference
if settings.FEATURES.get('ENABLE_MKTG_EMAIL_OPT_IN'):
_update_email_opt_in(request, course_id.org)
available_modes = CourseMode.modes_for_course_dict(course_id)
# Check whether the user is blocked from enrolling in this course
# This can occur if the user's IP is on a global blacklist
# or if the user is enrolling in a country in which the course
# is not available.
redirect_url = embargo_api.redirect_if_blocked(
course_id, user=user, ip_address=get_ip(request),
url=request.path
)
if redirect_url:
return HttpResponse(redirect_url)
# Check that auto enrollment is allowed for this course
# (= the course is NOT behind a paywall)
if CourseMode.can_auto_enroll(course_id):
# Enroll the user using the default mode (honor)
# We're assuming that users of the course enrollment table
# will NOT try to look up the course enrollment model
# by its slug. If they do, it's possible (based on the state of the database)
# for no such model to exist, even though we've set the enrollment type
# to "honor".
try:
CourseEnrollment.enroll(user, course_id, check_access=check_access)
except Exception:
return HttpResponseBadRequest(_("Could not enroll"))
# If we have more than one course mode or professional ed is enabled,
# then send the user to the choose your track page.
# (In the case of no-id-professional/professional ed, this will redirect to a page that
# funnels users directly into the verification / payment flow)
if CourseMode.has_verified_mode(available_modes) or CourseMode.has_professional_mode(available_modes):
return HttpResponse(
reverse("course_modes_choose", kwargs={'course_id': unicode(course_id)})
)
# Otherwise, there is only one mode available (the default)
return HttpResponse()
elif action == "unenroll":
if not CourseEnrollment.is_enrolled(user, course_id):
return HttpResponseBadRequest(_("You are not enrolled in this course"))
CourseEnrollment.unenroll(user, course_id)
return HttpResponse()
else:
return HttpResponseBadRequest(_("Enrollment action is invalid"))
# Need different levels of logging
@ensure_csrf_cookie
def login_user(request, error=""): # pylint: disable=too-many-statements,unused-argument
"""AJAX request to log in the user."""
backend_name = None
email = None
password = None
redirect_url = None
response = None
running_pipeline = None
third_party_auth_requested = third_party_auth.is_enabled() and pipeline.running(request)
third_party_auth_successful = False
trumped_by_first_party_auth = bool(request.POST.get('email')) or bool(request.POST.get('password'))
user = None
platform_name = microsite.get_value("platform_name", settings.PLATFORM_NAME)
if third_party_auth_requested and not trumped_by_first_party_auth:
# The user has already authenticated via third-party auth and has not
# asked to do first party auth by supplying a username or password. We
# now want to put them through the same logging and cookie calculation
# logic as with first-party auth.
running_pipeline = pipeline.get(request)
username = running_pipeline['kwargs'].get('username')
backend_name = running_pipeline['backend']
third_party_uid = running_pipeline['kwargs']['uid']
requested_provider = provider.Registry.get_from_pipeline(running_pipeline)
try:
user = pipeline.get_authenticated_user(requested_provider, username, third_party_uid)
third_party_auth_successful = True
except User.DoesNotExist:
AUDIT_LOG.warning(
u'Login failed - user with username {username} has no social auth with backend_name {backend_name}'.format(
username=username, backend_name=backend_name))
return HttpResponse(
_("You've successfully logged into your {provider_name} account, but this account isn't linked with an {platform_name} account yet.").format(
platform_name=platform_name, provider_name=requested_provider.name
)
+ "<br/><br/>" +
_("Use your {platform_name} username and password to log into {platform_name} below, "
"and then link your {platform_name} account with {provider_name} from your dashboard.").format(
platform_name=platform_name, provider_name=requested_provider.name
)
+ "<br/><br/>" +
_("If you don't have an {platform_name} account yet, "
"click <strong>Register</strong> at the top of the page.").format(
platform_name=platform_name),
content_type="text/plain",
status=403
)
else:
if 'email' not in request.POST or 'password' not in request.POST:
return JsonResponse({
"success": False,
"value": _('There was an error receiving your login information. Please email us.'), # TODO: User error message
}) # TODO: this should be status code 400 # pylint: disable=fixme
email = request.POST['email']
password = request.POST['password']
try:
user = User.objects.get(email=email)
except User.DoesNotExist:
if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
AUDIT_LOG.warning(u"Login failed - Unknown user email")
else:
AUDIT_LOG.warning(u"Login failed - Unknown user email: {0}".format(email))
# check if the user has a linked shibboleth account, if so, redirect the user to shib-login
# This behavior is pretty much like what gmail does for shibboleth. Try entering some @stanford.edu
# address into the Gmail login.
if settings.FEATURES.get('AUTH_USE_SHIB') and user:
try:
eamap = ExternalAuthMap.objects.get(user=user)
if eamap.external_domain.startswith(external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
return JsonResponse({
"success": False,
"redirect": reverse('shib-login'),
}) # TODO: this should be status code 301 # pylint: disable=fixme
except ExternalAuthMap.DoesNotExist:
# This is actually the common case, logging in user without external linked login
AUDIT_LOG.info(u"User %s w/o external auth attempting login", user)
# see if account has been locked out due to excessive login failures
user_found_by_email_lookup = user
if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
if LoginFailures.is_user_locked_out(user_found_by_email_lookup):
return JsonResponse({
"success": False,
"value": _('This account has been temporarily locked due to excessive login failures. Try again later.'),
}) # TODO: this should be status code 429 # pylint: disable=fixme
# see if the user must reset his/her password due to any policy settings
if user_found_by_email_lookup and PasswordHistory.should_user_reset_password_now(user_found_by_email_lookup):
return JsonResponse({
"success": False,
"value": _('Your password has expired due to password policy on this account. You must '
'reset your password before you can log in again. Please click the '
'"Forgot Password" link on this page to reset your password before logging in again.'),
}) # TODO: this should be status code 403 # pylint: disable=fixme
# if the user doesn't exist, we want to set the username to an invalid
# username so that authentication is guaranteed to fail and we can take
# advantage of the ratelimited backend
username = user.username if user else ""
if not third_party_auth_successful:
try:
user = authenticate(username=username, password=password, request=request)
# this occurs when there are too many attempts from the same IP address
except RateLimitException:
return JsonResponse({
"success": False,
"value": _('Too many failed login attempts. Try again later.'),
}) # TODO: this should be status code 429 # pylint: disable=fixme
if user is None:
# tick the failed login counters if the user exists in the database
if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
LoginFailures.increment_lockout_counter(user_found_by_email_lookup)
# if we didn't find this username earlier, the account for this email
# doesn't exist, and doesn't have a corresponding password
if username != "":
if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
loggable_id = user_found_by_email_lookup.id if user_found_by_email_lookup else "<unknown>"
AUDIT_LOG.warning(u"Login failed - password for user.id: {0} is invalid".format(loggable_id))
else:
AUDIT_LOG.warning(u"Login failed - password for {0} is invalid".format(email))
return JsonResponse({
"success": False,
"value": _('Email or password is incorrect.'),
}) # TODO: this should be status code 400 # pylint: disable=fixme
# successful login, clear failed login attempts counters, if applicable
if LoginFailures.is_feature_enabled():
LoginFailures.clear_lockout_counter(user)
# Track the user's sign in
if settings.FEATURES.get('SEGMENT_IO_LMS') and hasattr(settings, 'SEGMENT_IO_LMS_KEY'):
tracking_context = tracker.get_tracker().resolve_context()
analytics.identify(user.id, {
'email': email,
'username': username,
})
analytics.track(
user.id,
"edx.bi.user.account.authenticated",
{
'category': "conversion",
'label': request.POST.get('course_id'),
'provider': None
},
context={
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
)
if user is not None and user.is_active:
try:
# We do not log here, because we have a handler registered
# to perform logging on successful logins.
login(request, user)
if request.POST.get('remember') == 'true':
request.session.set_expiry(604800)
log.debug("Setting user session to never expire")
else:
request.session.set_expiry(0)
except Exception as exc: # pylint: disable=broad-except
AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
log.critical("Login failed - Could not create session. Is memcached running?")
log.exception(exc)
raise
redirect_url = None # The AJAX method calling should know the default destination upon success
if third_party_auth_successful:
redirect_url = pipeline.get_complete_url(backend_name)
response = JsonResponse({
"success": True,
"redirect_url": redirect_url,
})
# Ensure that the external marketing site can
# detect that the user is logged in.
return set_logged_in_cookies(request, response, user)
if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
AUDIT_LOG.warning(u"Login failed - Account not active for user.id: {0}, resending activation".format(user.id))
else:
AUDIT_LOG.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))
reactivation_email_for_user(user)
not_activated_msg = _("This account has not been activated. We have sent another activation message. Please check your email for the activation instructions.")
return JsonResponse({
"success": False,
"value": not_activated_msg,
}) # TODO: this should be status code 400 # pylint: disable=fixme
@csrf_exempt
@require_POST
@social_utils.strategy("social:complete")
def login_oauth_token(request, backend):
"""
Authenticate the client using an OAuth access token by using the token to
retrieve information from a third party and matching that information to an
existing user.
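    Example (illustrative request; the backend name comes from the URL and the
    token is whatever the provider issued):
        POST /login_oauth_token/facebook/
            access_token=<provider-issued token>
    Returns 204 on success, 401 for an invalid token, and 400 for a malformed
    request.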
"""
warnings.warn("Please use AccessTokenExchangeView instead.", DeprecationWarning)
backend = request.backend
if isinstance(backend, social_oauth.BaseOAuth1) or isinstance(backend, social_oauth.BaseOAuth2):
if "access_token" in request.POST:
# Tell third party auth pipeline that this is an API call
request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_LOGIN_API
user = None
try:
user = backend.do_auth(request.POST["access_token"])
except (HTTPError, AuthException):
pass
# do_auth can return a non-User object if it fails
if user and isinstance(user, User):
login(request, user)
return JsonResponse(status=204)
else:
# Ensure user does not re-enter the pipeline
request.social_strategy.clean_partial_pipeline()
return JsonResponse({"error": "invalid_token"}, status=401)
else:
return JsonResponse({"error": "invalid_request"}, status=400)
raise Http404
@ensure_csrf_cookie
def logout_user(request):
"""
HTTP request to log out the user. Redirects to marketing page.
Deletes both the CSRF and sessionid cookies so the marketing
site can determine the logged in state of the user
"""
# We do not log here, because we have a handler registered
# to perform logging on successful logouts.
logout(request)
if settings.FEATURES.get('AUTH_USE_CAS'):
target = reverse('cas-logout')
else:
target = '/'
response = redirect(target)
delete_logged_in_cookies(response)
return response
@require_GET
@login_required
@ensure_csrf_cookie
def manage_user_standing(request):
"""
Renders the view used to manage user standing. Also displays a table
of user accounts that have been disabled and who disabled them.
"""
if not request.user.is_staff:
raise Http404
all_disabled_accounts = UserStanding.objects.filter(
account_status=UserStanding.ACCOUNT_DISABLED
)
all_disabled_users = [standing.user for standing in all_disabled_accounts]
headers = ['username', 'account_changed_by']
rows = []
for user in all_disabled_users:
row = [user.username, user.standing.all()[0].changed_by]
rows.append(row)
context = {'headers': headers, 'rows': rows}
return render_to_response("manage_user_standing.html", context)
@require_POST
@login_required
@ensure_csrf_cookie
def disable_account_ajax(request):
"""
Ajax call to change user standing. Endpoint of the form
in manage_user_standing.html
"""
if not request.user.is_staff:
raise Http404
username = request.POST.get('username')
context = {}
if username is None or username.strip() == '':
context['message'] = _('Please enter a username')
return JsonResponse(context, status=400)
account_action = request.POST.get('account_action')
if account_action is None:
context['message'] = _('Please choose an option')
return JsonResponse(context, status=400)
username = username.strip()
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
context['message'] = _("User with username {} does not exist").format(username)
return JsonResponse(context, status=400)
else:
user_account, _success = UserStanding.objects.get_or_create(
user=user, defaults={'changed_by': request.user},
)
if account_action == 'disable':
user_account.account_status = UserStanding.ACCOUNT_DISABLED
context['message'] = _("Successfully disabled {}'s account").format(username)
log.info(u"%s disabled %s's account", request.user, username)
elif account_action == 'reenable':
user_account.account_status = UserStanding.ACCOUNT_ENABLED
context['message'] = _("Successfully reenabled {}'s account").format(username)
log.info(u"%s reenabled %s's account", request.user, username)
else:
context['message'] = _("Unexpected account status")
return JsonResponse(context, status=400)
user_account.changed_by = request.user
user_account.standing_last_changed_at = datetime.datetime.now(UTC)
user_account.save()
return JsonResponse(context)
@login_required
@ensure_csrf_cookie
def change_setting(request):
"""JSON call to change a profile setting: Right now, location"""
# TODO (vshnayder): location is no longer used
u_prof = UserProfile.objects.get(user=request.user) # request.user.profile_cache
if 'location' in request.POST:
u_prof.location = request.POST['location']
u_prof.save()
return JsonResponse({
"success": True,
"location": u_prof.location,
})
class AccountValidationError(Exception):
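    """
    Validation error raised during account creation, tied to the specific
    form field (e.g. username or email) that failed, so callers can report it.
    """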
def __init__(self, message, field):
super(AccountValidationError, self).__init__(message)
self.field = field
@receiver(post_save, sender=User)
def user_signup_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
    Handler that saves the user's signup source when the user is created.
"""
if 'created' in kwargs and kwargs['created']:
site = microsite.get_value('SITE_NAME')
if site:
user_signup_source = UserSignupSource(user=kwargs['instance'], site=site)
user_signup_source.save()
log.info(u'user {} originated from a white labeled "Microsite"'.format(kwargs['instance'].id))
def _do_create_account(form):
"""
Given cleaned post variables, create the User and UserProfile objects, as well as the
registration for this user.
Returns a tuple (User, UserProfile, Registration).
Note: this function is also used for creating test users.
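    Example (illustrative, mirroring the call in auto_auth below):
        >>> form = AccountCreationForm(
        ...     data={'username': 'jdoe', 'email': 'jdoe@example.com',
        ...           'password': 'secret', 'name': 'Jane Doe'},
        ...     tos_required=False,
        ... )
        >>> user, profile, registration = _do_create_account(form)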
"""
if not form.is_valid():
raise ValidationError(form.errors)
user = User(
username=form.cleaned_data["username"],
email=form.cleaned_data["email"],
is_active=False
)
user.set_password(form.cleaned_data["password"])
registration = Registration()
# TODO: Rearrange so that if part of the process fails, the whole process fails.
# Right now, we can have e.g. no registration e-mail sent out and a zombie account
try:
user.save()
except IntegrityError:
# Figure out the cause of the integrity error
if len(User.objects.filter(username=user.username)) > 0:
raise AccountValidationError(
_("An account with the Public Username '{username}' already exists.").format(username=user.username),
field="username"
)
elif len(User.objects.filter(email=user.email)) > 0:
raise AccountValidationError(
_("An account with the Email '{email}' already exists.").format(email=user.email),
field="email"
)
else:
raise
# add this account creation to password history
# NOTE, this will be a NOP unless the feature has been turned on in configuration
password_history_entry = PasswordHistory()
password_history_entry.create(user)
registration.register(user)
profile_fields = [
"name", "level_of_education", "gender", "mailing_address", "city", "country", "goals",
"year_of_birth"
]
profile = UserProfile(
user=user,
**{key: form.cleaned_data.get(key) for key in profile_fields}
)
extended_profile = form.cleaned_extended_profile
if extended_profile:
profile.meta = json.dumps(extended_profile)
try:
profile.save()
except Exception: # pylint: disable=broad-except
log.exception("UserProfile creation failed for user {id}.".format(id=user.id))
raise
return (user, profile, registration)
def create_account_with_params(request, params):
"""
Given a request and a dict of parameters (which may or may not have come
from the request), create an account for the requesting user, including
creating a comments service user object and sending an activation email.
This also takes external/third-party auth into account, updates that as
necessary, and authenticates the user for the request's session.
Does not return anything.
Raises AccountValidationError if an account with the username or email
specified by params already exists, or ValidationError if any of the given
parameters is invalid for any other reason.
Issues with this code:
* It is not transactional. If there is a failure part-way, an incomplete
account will be created and left in the database.
* Third-party auth passwords are not verified. There is a comment that
they are unused, but it would be helpful to have a sanity check that
they are sane.
    * It is over 300 lines long (!) and includes disparate functionality, from
registration e-mails to all sorts of other things. It should be broken
up into semantically meaningful functions.
* The user-facing text is rather unfriendly (e.g. "Username must be a
minimum of two characters long" rather than "Please use a username of
at least two characters").
"""
# Copy params so we can modify it; we can't just do dict(params) because if
# params is request.POST, that results in a dict containing lists of values
params = dict(params.items())
# allow for microsites to define their own set of required/optional/hidden fields
extra_fields = microsite.get_value(
'REGISTRATION_EXTRA_FIELDS',
getattr(settings, 'REGISTRATION_EXTRA_FIELDS', {})
)
# Boolean of whether a 3rd party auth provider and credentials were provided in
# the API so the newly created account can link with the 3rd party account.
#
# Note: this is orthogonal to the 3rd party authentication pipeline that occurs
# when the account is created via the browser and redirect URLs.
should_link_with_social_auth = third_party_auth.is_enabled() and 'provider' in params
if should_link_with_social_auth or (third_party_auth.is_enabled() and pipeline.running(request)):
params["password"] = pipeline.make_random_password()
# if doing signup for an external authorization, then get email, password, name from the eamap
# don't use the ones from the form, since the user could have hacked those
# unless originally we didn't get a valid email or name from the external auth
# TODO: We do not check whether these values meet all necessary criteria, such as email length
do_external_auth = 'ExternalAuthMap' in request.session
if do_external_auth:
eamap = request.session['ExternalAuthMap']
try:
validate_email(eamap.external_email)
params["email"] = eamap.external_email
except ValidationError:
pass
if eamap.external_name.strip() != '':
params["name"] = eamap.external_name
params["password"] = eamap.internal_password
log.debug(u'In create_account with external_auth: user = %s, email=%s', params["name"], params["email"])
extended_profile_fields = microsite.get_value('extended_profile_fields', [])
enforce_password_policy = (
settings.FEATURES.get("ENFORCE_PASSWORD_POLICY", False) and
not do_external_auth
)
# Can't have terms of service for certain SHIB users, like at Stanford
tos_required = (
not settings.FEATURES.get("AUTH_USE_SHIB") or
not settings.FEATURES.get("SHIB_DISABLE_TOS") or
not do_external_auth or
not eamap.external_domain.startswith(
external_auth.views.SHIBBOLETH_DOMAIN_PREFIX
)
)
form = AccountCreationForm(
data=params,
extra_fields=extra_fields,
extended_profile_fields=extended_profile_fields,
enforce_username_neq_password=True,
enforce_password_policy=enforce_password_policy,
tos_required=tos_required,
)
# Perform operations within a transaction that are critical to account creation
with transaction.commit_on_success():
# first, create the account
(user, profile, registration) = _do_create_account(form)
# next, link the account with social auth, if provided via the API.
# (If the user is using the normal register page, the social auth pipeline does the linking, not this code)
if should_link_with_social_auth:
backend_name = params['provider']
request.social_strategy = social_utils.load_strategy(request)
redirect_uri = reverse('social:complete', args=(backend_name, ))
request.backend = social_utils.load_backend(request.social_strategy, backend_name, redirect_uri)
social_access_token = params.get('access_token')
if not social_access_token:
raise ValidationError({
'access_token': [
_("An access_token is required when passing value ({}) for provider.").format(
params['provider']
)
]
})
request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_REGISTER_API
pipeline_user = None
error_message = ""
try:
pipeline_user = request.backend.do_auth(social_access_token, user=user)
except AuthAlreadyAssociated:
error_message = _("The provided access_token is already associated with another user.")
except (HTTPError, AuthException):
error_message = _("The provided access_token is not valid.")
if not pipeline_user or not isinstance(pipeline_user, User):
# Ensure user does not re-enter the pipeline
request.social_strategy.clean_partial_pipeline()
raise ValidationError({'access_token': [error_message]})
# Perform operations that are non-critical parts of account creation
preferences_api.set_user_preference(user, LANGUAGE_KEY, get_language())
if settings.FEATURES.get('ENABLE_DISCUSSION_EMAIL_DIGEST'):
try:
enable_notifications(user)
except Exception:
log.exception("Enable discussion notifications failed for user {id}.".format(id=user.id))
dog_stats_api.increment("common.student.account_created")
# If the user is registering via 3rd party auth, track which provider they use
third_party_provider = None
running_pipeline = None
if third_party_auth.is_enabled() and pipeline.running(request):
running_pipeline = pipeline.get(request)
third_party_provider = provider.Registry.get_from_pipeline(running_pipeline)
# Track the user's registration
if settings.FEATURES.get('SEGMENT_IO_LMS') and hasattr(settings, 'SEGMENT_IO_LMS_KEY'):
tracking_context = tracker.get_tracker().resolve_context()
analytics.identify(user.id, {
'email': user.email,
'username': user.username,
})
analytics.track(
user.id,
"edx.bi.user.account.registered",
{
'category': 'conversion',
'label': params.get('course_id'),
'provider': third_party_provider.name if third_party_provider else None
},
context={
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
)
create_comments_service_user(user)
# Don't send email if we are:
#
# 1. Doing load testing.
# 2. Random user generation for other forms of testing.
# 3. External auth bypassing activation.
# 4. Have the platform configured to not require e-mail activation.
# 5. Registering a new user using a trusted third party provider (with skip_email_verification=True)
#
# Note that this feature is only tested as a flag set one way or
    # the other for *new* systems. We need to be careful about
# changing settings on a running system to make sure no users are
# left in an inconsistent state (or doing a migration if they are).
send_email = (
not settings.FEATURES.get('SKIP_EMAIL_VALIDATION', None) and
not settings.FEATURES.get('AUTOMATIC_AUTH_FOR_TESTING') and
not (do_external_auth and settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH')) and
not (
third_party_provider and third_party_provider.skip_email_verification and
user.email == running_pipeline['kwargs'].get('details', {}).get('email')
)
)
if send_email:
context = {
'name': profile.name,
'key': registration.activation_key,
}
# composes activation email
subject = render_to_string('emails/activation_email_subject.txt', context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('emails/activation_email.txt', context)
from_address = microsite.get_value(
'email_from_address',
settings.DEFAULT_FROM_EMAIL
)
try:
if settings.FEATURES.get('REROUTE_ACTIVATION_EMAIL'):
dest_addr = settings.FEATURES['REROUTE_ACTIVATION_EMAIL']
message = ("Activation for %s (%s): %s\n" % (user, user.email, profile.name) +
'-' * 80 + '\n\n' + message)
mail.send_mail(subject, message, from_address, [dest_addr], fail_silently=False)
else:
user.email_user(subject, message, from_address)
except Exception: # pylint: disable=broad-except
log.error(u'Unable to send activation email to user from "%s"', from_address, exc_info=True)
else:
registration.activate()
# Immediately after a user creates an account, we log them in. They are only
# logged in until they close the browser. They can't log in again until they click
# the activation link from the email.
new_user = authenticate(username=user.username, password=params['password'])
login(request, new_user)
request.session.set_expiry(0)
# TODO: there is no error checking here to see that the user actually logged in successfully,
# and is not yet an active user.
if new_user is not None:
AUDIT_LOG.info(u"Login success on new account creation - {0}".format(new_user.username))
if do_external_auth:
eamap.user = new_user
eamap.dtsignup = datetime.datetime.now(UTC)
eamap.save()
AUDIT_LOG.info(u"User registered with external_auth %s", new_user.username)
AUDIT_LOG.info(u'Updated ExternalAuthMap for %s to be %s', new_user.username, eamap)
if settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
log.info('bypassing activation email')
new_user.is_active = True
new_user.save()
AUDIT_LOG.info(u"Login activated on extauth account - {0} ({1})".format(new_user.username, new_user.email))
return new_user
@csrf_exempt
def create_account(request, post_override=None):
"""
JSON call to create new edX account.
Used by form in signup_modal.html, which is included into navigation.html
"""
warnings.warn("Please use RegistrationView instead.", DeprecationWarning)
try:
user = create_account_with_params(request, post_override or request.POST)
except AccountValidationError as exc:
return JsonResponse({'success': False, 'value': exc.message, 'field': exc.field}, status=400)
except ValidationError as exc:
field, error_list = next(exc.message_dict.iteritems())
return JsonResponse(
{
"success": False,
"field": field,
"value": error_list[0],
},
status=400
)
redirect_url = None # The AJAX method calling should know the default destination upon success
# Resume the third-party-auth pipeline if necessary.
if third_party_auth.is_enabled() and pipeline.running(request):
running_pipeline = pipeline.get(request)
redirect_url = pipeline.get_complete_url(running_pipeline['backend'])
response = JsonResponse({
'success': True,
'redirect_url': redirect_url,
})
set_logged_in_cookies(request, response, user)
return response
def auto_auth(request):
"""
Create or configure a user account, then log in as that user.
Enabled only when
settings.FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] is true.
Accepts the following querystring parameters:
* `username`, `email`, and `password` for the user account
* `full_name` for the user profile (the user's full name; defaults to the username)
* `staff`: Set to "true" to make the user global staff.
* `course_id`: Enroll the student in the course with `course_id`
* `roles`: Comma-separated list of roles to grant the student in the course with `course_id`
    * `no_login`: Define this to create the user but not log in
If username, email, or password are not provided, use
randomly generated credentials.
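    Example (illustrative request; hypothetical values):
        GET /auto_auth?username=jdoe&email=jdoe@example.com&staff=true&course_id=edX/DemoX/Demo_Course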
"""
# Generate a unique name to use if none provided
unique_name = uuid.uuid4().hex[0:30]
# Use the params from the request, otherwise use these defaults
username = request.GET.get('username', unique_name)
password = request.GET.get('password', unique_name)
email = request.GET.get('email', unique_name + "@example.com")
full_name = request.GET.get('full_name', username)
is_staff = request.GET.get('staff', None)
course_id = request.GET.get('course_id', None)
# mode has to be one of 'honor'/'professional'/'verified'/'audit'/'no-id-professional'/'credit'
enrollment_mode = request.GET.get('enrollment_mode', 'honor')
course_key = None
if course_id:
course_key = CourseLocator.from_string(course_id)
role_names = [v.strip() for v in request.GET.get('roles', '').split(',') if v.strip()]
login_when_done = 'no_login' not in request.GET
form = AccountCreationForm(
data={
'username': username,
'email': email,
'password': password,
'name': full_name,
},
tos_required=False
)
# Attempt to create the account.
# If successful, this will return a tuple containing
# the new user object.
try:
user, profile, reg = _do_create_account(form)
except AccountValidationError:
# Attempt to retrieve the existing user.
user = User.objects.get(username=username)
user.email = email
user.set_password(password)
user.save()
profile = UserProfile.objects.get(user=user)
reg = Registration.objects.get(user=user)
# Set the user's global staff bit
if is_staff is not None:
user.is_staff = (is_staff == "true")
user.save()
# Activate the user
reg.activate()
reg.save()
# ensure parental consent threshold is met
year = datetime.date.today().year
age_limit = settings.PARENTAL_CONSENT_AGE_LIMIT
profile.year_of_birth = (year - age_limit) - 1
profile.save()
# Enroll the user in a course
if course_key is not None:
CourseEnrollment.enroll(user, course_key, mode=enrollment_mode)
# Apply the roles
for role_name in role_names:
role = Role.objects.get(name=role_name, course_id=course_key)
user.roles.add(role)
# Log in as the user
if login_when_done:
user = authenticate(username=username, password=password)
login(request, user)
create_comments_service_user(user)
# Provide the user with a valid CSRF token
# then return a 200 response
if request.META.get('HTTP_ACCEPT') == 'application/json':
response = JsonResponse({
'created_status': u"Logged in" if login_when_done else "Created",
'username': username,
'email': email,
'password': password,
'user_id': user.id, # pylint: disable=no-member
'anonymous_id': anonymous_id_for_user(user, None),
})
else:
success_msg = u"{} user {} ({}) with password {} and user_id {}".format(
u"Logged in" if login_when_done else "Created",
username, email, password, user.id # pylint: disable=no-member
)
response = HttpResponse(success_msg)
response.set_cookie('csrftoken', csrf(request)['csrf_token'])
return response
@ensure_csrf_cookie
def activate_account(request, key):
"""When link in activation e-mail is clicked"""
regs = Registration.objects.filter(activation_key=key)
if len(regs) == 1:
user_logged_in = request.user.is_authenticated()
already_active = True
if not regs[0].user.is_active:
regs[0].activate()
already_active = False
# Enroll student in any pending courses he/she may have if auto_enroll flag is set
student = User.objects.filter(id=regs[0].user_id)
if student:
ceas = CourseEnrollmentAllowed.objects.filter(email=student[0].email)
for cea in ceas:
if cea.auto_enroll:
enrollment = CourseEnrollment.enroll(student[0], cea.course_id)
manual_enrollment_audit = ManualEnrollmentAudit.get_manual_enrollment_by_email(student[0].email)
if manual_enrollment_audit is not None:
# get the enrolled by user and reason from the ManualEnrollmentAudit table.
# then create a new ManualEnrollmentAudit table entry for the same email
# different transition state.
ManualEnrollmentAudit.create_manual_enrollment_audit(
manual_enrollment_audit.enrolled_by, student[0].email, ALLOWEDTOENROLL_TO_ENROLLED,
manual_enrollment_audit.reason, enrollment
)
resp = render_to_response(
"registration/activation_complete.html",
{
'user_logged_in': user_logged_in,
'already_active': already_active
}
)
return resp
if len(regs) == 0:
return render_to_response(
"registration/activation_invalid.html",
{'csrf': csrf(request)['csrf_token']}
)
return HttpResponseServerError(_("Unknown error. Please e-mail us to let us know how it happened."))
@csrf_exempt
@require_POST
def password_reset(request):
""" Attempts to send a password reset e-mail. """
# Add some rate limiting here by re-using the RateLimitMixin as a helper class
limiter = BadRequestRateLimiter()
if limiter.is_rate_limit_exceeded(request):
AUDIT_LOG.warning("Rate limit exceeded in password_reset")
return HttpResponseForbidden()
form = PasswordResetFormNoActive(request.POST)
if form.is_valid():
form.save(use_https=request.is_secure(),
from_email=microsite.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL),
request=request,
domain_override=request.get_host())
# When password change is complete, a "edx.user.settings.changed" event will be emitted.
# But because changing the password is multi-step, we also emit an event here so that we can
# track where the request was initiated.
tracker.emit(
SETTING_CHANGE_INITIATED,
{
"setting": "password",
"old": None,
"new": None,
"user_id": request.user.id,
}
)
else:
# bad user? tick the rate limiter counter
AUDIT_LOG.info("Bad password_reset user passed in.")
limiter.tick_bad_request_counter(request)
return JsonResponse({
'success': True,
'value': render_to_string('registration/password_reset_done.html', {}),
})
def password_reset_confirm_wrapper(
request,
uidb36=None,
token=None,
):
""" A wrapper around django.contrib.auth.views.password_reset_confirm.
Needed because we want to set the user as active at this step.
"""
# cribbed from django.contrib.auth.views.password_reset_confirm
try:
uid_int = base36_to_int(uidb36)
user = User.objects.get(id=uid_int)
user.is_active = True
user.save()
except (ValueError, User.DoesNotExist):
pass
# tie in password strength enforcement as an optional level of
# security protection
err_msg = None
if request.method == 'POST':
password = request.POST['new_password1']
if settings.FEATURES.get('ENFORCE_PASSWORD_POLICY', False):
try:
validate_password_length(password)
validate_password_complexity(password)
validate_password_dictionary(password)
            except ValidationError as err:
err_msg = _('Password: ') + '; '.join(err.messages)
# also, check the password reuse policy
if not PasswordHistory.is_allowable_password_reuse(user, password):
if user.is_staff:
num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STAFF_PASSWORDS_BEFORE_REUSE']
else:
num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STUDENT_PASSWORDS_BEFORE_REUSE']
err_msg = ungettext(
"You are re-using a password that you have used recently. You must have {num} distinct password before reusing a previous password.",
"You are re-using a password that you have used recently. You must have {num} distinct passwords before reusing a previous password.",
num_distinct
).format(num=num_distinct)
        # also, check to see if passwords are getting reset too frequently
if PasswordHistory.is_password_reset_too_soon(user):
num_days = settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']
err_msg = ungettext(
"You are resetting passwords too frequently. Due to security policies, {num} day must elapse between password resets.",
"You are resetting passwords too frequently. Due to security policies, {num} days must elapse between password resets.",
num_days
).format(num=num_days)
if err_msg:
        # We have a password reset attempt that violates a security policy; use the
        # existing Django template to communicate this back to the user
context = {
'validlink': True,
'form': None,
'title': _('Password reset unsuccessful'),
'err_msg': err_msg,
'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
}
return TemplateResponse(request, 'registration/password_reset_confirm.html', context)
else:
# we also want to pass settings.PLATFORM_NAME in as extra_context
extra_context = {"platform_name": microsite.get_value('platform_name', settings.PLATFORM_NAME)}
if request.method == 'POST':
# remember what the old password hash is before we call down
old_password_hash = user.password
result = password_reset_confirm(
request, uidb36=uidb36, token=token, extra_context=extra_context
)
# get the updated user
updated_user = User.objects.get(id=uid_int)
            # if the password hash changed, record it in the PasswordHistory
if updated_user.password != old_password_hash:
entry = PasswordHistory()
entry.create(updated_user)
return result
else:
return password_reset_confirm(
request, uidb36=uidb36, token=token, extra_context=extra_context
)
def reactivation_email_for_user(user):
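    """
    Send a fresh activation email to ``user`` and return a JsonResponse
    describing success or failure.
    """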
try:
reg = Registration.objects.get(user=user)
except Registration.DoesNotExist:
return JsonResponse({
"success": False,
"error": _('No inactive user with this e-mail exists'),
}) # TODO: this should be status code 400 # pylint: disable=fixme
context = {
'name': user.profile.name,
'key': reg.activation_key,
}
subject = render_to_string('emails/activation_email_subject.txt', context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/activation_email.txt', context)
try:
user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
except Exception: # pylint: disable=broad-except
log.error(u'Unable to send reactivation email from "%s"', settings.DEFAULT_FROM_EMAIL, exc_info=True)
return JsonResponse({
"success": False,
"error": _('Unable to send reactivation email')
}) # TODO: this should be status code 500 # pylint: disable=fixme
return JsonResponse({"success": True})
def validate_new_email(user, new_email):
"""
    Given a new email for a user, does some basic verification of the new address.
    If any issues are encountered during verification, a ValueError will be thrown.
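    Example (illustrative; assumes the default English locale for the message):
        >>> validate_new_email(user, user.email)
        Traceback (most recent call last):
            ...
        ValueError: Old email is the same as the new email.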
"""
try:
validate_email(new_email)
except ValidationError:
raise ValueError(_('Valid e-mail address required.'))
if new_email == user.email:
raise ValueError(_('Old email is the same as the new email.'))
if User.objects.filter(email=new_email).count() != 0:
raise ValueError(_('An account with this e-mail already exists.'))
def do_email_change_request(user, new_email, activation_key=None):
"""
Given a new email for a user, does some basic verification of the new address and sends an activation message
to the new address. If any issues are encountered with verification or sending the message, a ValueError will
be thrown.
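    Example (illustrative; hypothetical address). This sends an activation
    message to the new address and records a PendingEmailChange for the user:
        >>> do_email_change_request(user, 'new.address@example.com')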
"""
pec_list = PendingEmailChange.objects.filter(user=user)
if len(pec_list) == 0:
pec = PendingEmailChange()
pec.user = user
else:
pec = pec_list[0]
    # if activation_key is not passed as an argument, generate a random key
if not activation_key:
activation_key = uuid.uuid4().hex
pec.new_email = new_email
pec.activation_key = activation_key
pec.save()
context = {
'key': pec.activation_key,
'old_email': user.email,
'new_email': pec.new_email
}
subject = render_to_string('emails/email_change_subject.txt', context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/email_change.txt', context)
from_address = microsite.get_value(
'email_from_address',
settings.DEFAULT_FROM_EMAIL
)
try:
mail.send_mail(subject, message, from_address, [pec.new_email])
except Exception: # pylint: disable=broad-except
log.error(u'Unable to send email activation link to user from "%s"', from_address, exc_info=True)
raise ValueError(_('Unable to send email activation link. Please try again later.'))
# When the email address change is complete, a "edx.user.settings.changed" event will be emitted.
# But because changing the email address is multi-step, we also emit an event here so that we can
# track where the request was initiated.
tracker.emit(
SETTING_CHANGE_INITIATED,
{
"setting": "email",
"old": context['old_email'],
"new": context['new_email'],
"user_id": user.id,
}
)
@ensure_csrf_cookie
@transaction.commit_manually
def confirm_email_change(request, key): # pylint: disable=unused-argument
"""
User requested a new e-mail. This is called when the activation
    link is clicked. We confirm with the old e-mail, and update the account to use the new address.
"""
try:
try:
pec = PendingEmailChange.objects.get(activation_key=key)
except PendingEmailChange.DoesNotExist:
response = render_to_response("invalid_email_key.html", {})
transaction.rollback()
return response
user = pec.user
address_context = {
'old_email': user.email,
'new_email': pec.new_email
}
if len(User.objects.filter(email=pec.new_email)) != 0:
response = render_to_response("email_exists.html", {})
transaction.rollback()
return response
subject = render_to_string('emails/email_change_subject.txt', address_context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/confirm_email_change.txt', address_context)
u_prof = UserProfile.objects.get(user=user)
meta = u_prof.get_meta()
if 'old_emails' not in meta:
meta['old_emails'] = []
meta['old_emails'].append([user.email, datetime.datetime.now(UTC).isoformat()])
u_prof.set_meta(meta)
u_prof.save()
# Send it to the old email...
try:
user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
except Exception: # pylint: disable=broad-except
log.warning('Unable to send confirmation email to old address', exc_info=True)
response = render_to_response("email_change_failed.html", {'email': user.email})
transaction.rollback()
return response
user.email = pec.new_email
user.save()
pec.delete()
# And send it to the new email...
try:
user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
except Exception: # pylint: disable=broad-except
log.warning('Unable to send confirmation email to new address', exc_info=True)
response = render_to_response("email_change_failed.html", {'email': pec.new_email})
transaction.rollback()
return response
response = render_to_response("email_change_successful.html", address_context)
transaction.commit()
return response
except Exception: # pylint: disable=broad-except
# If we get an unexpected exception, be sure to rollback the transaction
transaction.rollback()
raise
@require_POST
@login_required
@ensure_csrf_cookie
def change_email_settings(request):
"""Modify logged-in user's setting for receiving emails from a course."""
user = request.user
course_id = request.POST.get("course_id")
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
receive_emails = request.POST.get("receive_emails")
if receive_emails:
optout_object = Optout.objects.filter(user=user, course_id=course_key)
if optout_object:
optout_object.delete()
log.info(
u"User %s (%s) opted in to receive emails from course %s",
user.username,
user.email,
course_id
)
track.views.server_track(request, "change-email-settings", {"receive_emails": "yes", "course": course_id}, page='dashboard')
else:
Optout.objects.get_or_create(user=user, course_id=course_key)
log.info(
u"User %s (%s) opted out of receiving emails from course %s",
user.username,
user.email,
course_id
)
track.views.server_track(request, "change-email-settings", {"receive_emails": "no", "course": course_id}, page='dashboard')
return JsonResponse({"success": True})