repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
py4a/peewee
|
refs/heads/master
|
setup.py
|
3
|
"""Setup script for the peewee ORM package.

Reads the long description from README.rst and optionally builds the
Cython speedups extension when Cython is available.
"""
import os
from setuptools import find_packages
from setuptools import setup

# Read the long description from the README that sits next to this file.
# A context manager guarantees the handle is closed even if reading fails
# (the original open()/read()/close() leaked the handle on error).
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
    readme = f.read()

# The Cython speedups are optional: fall back to the pure-Python
# implementation when Cython is not installed.
try:
    from Cython.Build import cythonize
except ImportError:
    ext_modules = None
else:
    ext_modules = cythonize('playhouse/speedups.pyx')

setup(
    name='peewee',
    # Import the package being installed to pick up its __version__,
    # keeping the version defined in exactly one place.
    version=__import__('peewee').__version__,
    description='a little orm',
    long_description=readme,
    author='Charles Leifer',
    author_email='coleifer@gmail.com',
    url='http://github.com/coleifer/peewee/',
    packages=['playhouse'],
    py_modules=['peewee', 'pwiz'],
    ext_modules=ext_modules,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
    ],
    test_suite='tests',
    scripts=['pwiz.py'],
)
|
japeto/Vigtech-Services
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/conf/app_template/tests.py
|
24123
|
from django.test import TestCase
# Create your tests here.
|
geminy/aidear
|
refs/heads/master
|
oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/build/android/adb_logcat_printer.py
|
27
|
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Shutdown adb_logcat_monitor and print accumulated logs.
To test, call './adb_logcat_printer.py <base_dir>' where
<base_dir> contains 'adb logcat -v threadtime' files named as
logcat_<deviceID>_<sequenceNum>
The script will print the files to out, and will combine multiple
logcats from a single device if there is overlap.
Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
will attempt to terminate the contained PID by sending a SIGTERM and
monitoring for the deletion of the aforementioned file.
"""
# pylint: disable=W0702
import cStringIO
import logging
import optparse
import os
import re
import signal
import sys
import time
# Set this to debug for more verbose output
LOG_LEVEL = logging.INFO
def CombineLogFiles(list_of_lists, logger):
  """Splices together multiple logcats from the same device.

  Args:
    list_of_lists: list of pairs (filename, list of timestamped lines)
    logger: handler to log events

  Returns:
    list of lines with duplicates removed
  """
  # Timestamp prefix of an 'adb logcat -v threadtime' line, e.g.
  # '01-02 03:04:05.678 '. The dot before the milliseconds is escaped so
  # only a literal '.' matches (the original pattern's bare '.' matched
  # any character there).
  timestamp_re = re.compile(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} ')
  cur_device_log = ['']
  for cur_file, cur_file_lines in list_of_lists:
    # Ignore files with just the logcat header
    if len(cur_file_lines) < 2:
      continue
    common_index = 0
    # Skip the splice step if the list just has the initial empty string.
    if len(cur_device_log) > 1:
      try:
        line = cur_device_log[-1]
        # Only splice on a timestamped line; anything else is unreliable
        # as an anchor for finding the overlap point.
        if timestamp_re.match(line):
          common_index = cur_file_lines.index(line)
        else:
          logger.warning('splice error - no timestamp in "%s"?', line.strip())
      except ValueError:
        # The last line was valid but wasn't found in the next file
        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)

    cur_device_log += ['*' * 30 + '  %s' % cur_file]
    cur_device_log.extend(cur_file_lines[common_index:])
  return cur_device_log
def FindLogFiles(base_dir):
  """Search a directory for logcat files.

  Args:
    base_dir: directory to search

  Returns:
    Mapping of device_id to a sorted list of file paths for a given device
  """
  name_pattern = re.compile(r'^logcat_(\S+)_(\d+)$')
  # Collect (<device_id>, <seq num>, <full file path>) tuples so that a
  # plain tuple sort orders each device's files by sequence number.
  entries = []
  for entry in os.listdir(base_dir):
    match = name_pattern.match(entry)
    if match:
      entries.append((match.group(1), int(match.group(2)),
                      os.path.join(base_dir, entry)))

  file_map = {}
  for device_id, _, path in sorted(entries):
    file_map.setdefault(device_id, []).append(path)
  return file_map
def GetDeviceLogs(log_filenames, logger):
  """Read log files, combine and format.

  Args:
    log_filenames: mapping of device_id to sorted list of file paths
    logger: logger handle for logging events

  Returns:
    list of formatted device logs, one for each device.
  """
  device_logs = []

  # dict.items() (rather than the Python 2-only iteritems()) keeps this
  # function working under both Python 2 and Python 3.
  for device, device_files in log_filenames.items():
    logger.debug('%s: %s', device, str(device_files))
    device_file_lines = []
    for cur_file in device_files:
      with open(cur_file) as f:
        device_file_lines += [(cur_file, f.read().splitlines())]
    combined_lines = CombineLogFiles(device_file_lines, logger)
    # Prepend each line with a short unique ID so it's easy to see
    # when the device changes.  We don't use the start of the device
    # ID because it can be the same among devices.  Example lines:
    #   AB324:  foo
    #   AB324:  blah
    device_logs += [('\n' + device[-5:] + ': ').join(combined_lines)]
  return device_logs
def ShutdownLogcatMonitor(base_dir, logger):
  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
  try:
    # The monitor process wrote its PID to this file at startup.
    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
    with open(monitor_pid_path) as f:
      monitor_pid = int(f.readline())
    logger.info('Sending SIGTERM to %d', monitor_pid)
    os.kill(monitor_pid, signal.SIGTERM)
    i = 0
    while True:
      time.sleep(.2)
      if not os.path.exists(monitor_pid_path):
        # Monitor removed its PID file: clean shutdown.
        return
      if not os.path.exists('/proc/%d' % monitor_pid):
        # PID is gone but the PID file remains, so the monitor most
        # likely died without cleaning up.  (Linux-specific /proc check.)
        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
        return
      logger.info('Waiting for logcat process to terminate.')
      i += 1
      if i >= 10:
        # Give up after ~2 seconds (10 * 0.2s) rather than block forever.
        logger.warning('Monitor pid did not terminate. Continuing anyway.')
        return
  except (ValueError, IOError, OSError):
    # A missing/corrupt PID file or a failed kill() is non-fatal: log it
    # and carry on so the accumulated logs still get printed.
    logger.exception('Error signaling logcat monitor - continuing')
def main(argv):
  # Parse the single positional argument (the logcat directory) plus an
  # optional output path.
  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
  parser.add_option('--output-path',
                    help='Output file path (if unspecified, prints to stdout)')
  options, args = parser.parse_args(argv)
  if len(args) != 1:
    parser.error('Wrong number of unparsed args')
  base_dir = args[0]

  # This script's own log events are accumulated in memory and appended
  # to the output after the device logs (see the end of this function).
  log_stringio = cStringIO.StringIO()
  logger = logging.getLogger('LogcatPrinter')
  logger.setLevel(LOG_LEVEL)
  sh = logging.StreamHandler(log_stringio)
  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
                                    ' %(message)s'))
  logger.addHandler(sh)

  if options.output_path:
    if not os.path.exists(os.path.dirname(options.output_path)):
      logger.warning('Output dir %s doesn\'t exist. Creating it.',
                     os.path.dirname(options.output_path))
      os.makedirs(os.path.dirname(options.output_path))
    output_file = open(options.output_path, 'w')
    logger.info('Dumping logcat to local file %s. If running in a build, '
                'this file will likely will be uploaded to google storage '
                'in a later step. It can be downloaded from there.',
                options.output_path)
  else:
    output_file = sys.stdout

  try:
    # Wait at least 5 seconds after base_dir is created before printing.
    #
    # The idea is that 'adb logcat > file' output consists of 2 phases:
    #  1 Dump all the saved logs to the file
    #  2 Stream log messages as they are generated
    #
    # We want to give enough time for phase 1 to complete.  There's no
    # good method to tell how long to wait, but it usually only takes a
    # second.  On most bots, this code path won't occur at all, since
    # adb_logcat_monitor.py command will have spawned more than 5 seconds
    # prior to calling this script.
    try:
      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
    except OSError:
      # getctime failed (e.g. base_dir missing); fall back to a full wait.
      sleep_time = 5
    if sleep_time > 0:
      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
      time.sleep(sleep_time)

    assert os.path.exists(base_dir), '%s does not exist' % base_dir
    ShutdownLogcatMonitor(base_dir, logger)
    separator = '\n' + '*' * 80 + '\n\n'
    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
      output_file.write(log)
      output_file.write(separator)
    with open(os.path.join(base_dir, 'eventlog')) as f:
      output_file.write('\nLogcat Monitor Event Log\n')
      output_file.write(f.read())
  except:
    # Deliberately broad: any failure is recorded in the event log below
    # rather than aborting, so partial logs still get written.
    logger.exception('Unexpected exception')

  logger.info('Done.')
  sh.flush()
  output_file.write('\nLogcat Printer Event Log\n')
  output_file.write(log_stringio.getvalue())

if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
|
muntasirsyed/intellij-community
|
refs/heads/master
|
python/lib/Lib/doctest.py
|
81
|
# Module doctest.
# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
# Major enhancements and refactoring by:
# Jim Fulton
# Edward Loper
# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
r"""Module doctest -- a framework for running examples in docstrings.
In simplest use, end each module M to be tested with:
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
Then running the module as a script will cause the examples in the
docstrings to get executed and verified:
python M.py
This won't display anything unless an example fails, in which case the
failing example(s) and the cause(s) of the failure(s) are printed to stdout
(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
line of output is "Test failed.".
Run it with the -v switch instead:
python M.py -v
and a detailed report of all examples tried is printed to stdout, along
with assorted summaries at the end.
You can force verbose mode by passing "verbose=True" to testmod, or prohibit
it by passing "verbose=False". In either of those cases, sys.argv is not
examined by testmod.
There are a variety of other ways to run doctests, including integration
with the unittest framework, and support for running non-Python text
files containing doctests. There are also many ways to override parts
of doctest's default behaviors. See the Library Reference Manual for
details.
"""
__docformat__ = 'reStructuredText en'

# Names exported by `from doctest import *`; grouped to mirror the
# table of contents further down in this file.
__all__ = [
    # 0, Option Flags
    'register_optionflag',
    'DONT_ACCEPT_TRUE_FOR_1',
    'DONT_ACCEPT_BLANKLINE',
    'NORMALIZE_WHITESPACE',
    'ELLIPSIS',
    'SKIP',
    'IGNORE_EXCEPTION_DETAIL',
    'COMPARISON_FLAGS',
    'REPORT_UDIFF',
    'REPORT_CDIFF',
    'REPORT_NDIFF',
    'REPORT_ONLY_FIRST_FAILURE',
    'REPORTING_FLAGS',
    # 1. Utility Functions
    # 2. Example & DocTest
    'Example',
    'DocTest',
    # 3. Doctest Parser
    'DocTestParser',
    # 4. Doctest Finder
    'DocTestFinder',
    # 5. Doctest Runner
    'DocTestRunner',
    'OutputChecker',
    'DocTestFailure',
    'UnexpectedException',
    'DebugRunner',
    # 6. Test Functions
    'testmod',
    'testfile',
    'run_docstring_examples',
    # 7. Tester
    'Tester',
    # 8. Unittest Support
    'DocTestSuite',
    'DocFileSuite',
    'set_unittest_reportflags',
    # 9. Debugging Support
    'script_from_examples',
    'testsource',
    'debug_src',
    'debug',
]
import __future__
import sys, traceback, inspect, linecache, os, re
import unittest, difflib, pdb, tempfile
import warnings
from StringIO import StringIO
# There are 4 basic classes:
# - Example: a <source, want> pair, plus an intra-docstring line number.
# - DocTest: a collection of examples, parsed from a docstring, plus
# info about where the docstring came from (name, filename, lineno).
# - DocTestFinder: extracts DocTests from a given object's docstring and
# its contained objects' docstrings.
# - DocTestRunner: runs DocTest cases, and accumulates statistics.
#
# So the basic picture is:
#
# list of:
# +------+ +---------+ +-------+
# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
# +------+ +---------+ +-------+
# | Example |
# | ... |
# | Example |
# +---------+
# Option constants.
# Registry mapping option-flag names to their bit values.
OPTIONFLAGS_BY_NAME = {}

def register_optionflag(name):
    """Return the flag value for `name`, registering a new bit if unseen."""
    if name not in OPTIONFLAGS_BY_NAME:
        # Each new flag gets the next unused bit.
        OPTIONFLAGS_BY_NAME[name] = 1 << len(OPTIONFLAGS_BY_NAME)
    return OPTIONFLAGS_BY_NAME[name]
# The standard option flags; each call allocates the next free bit.
DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
ELLIPSIS = register_optionflag('ELLIPSIS')
SKIP = register_optionflag('SKIP')
IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')

# Mask of all flags that affect how actual output is compared to expected.
COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
                    DONT_ACCEPT_BLANKLINE |
                    NORMALIZE_WHITESPACE |
                    ELLIPSIS |
                    SKIP |
                    IGNORE_EXCEPTION_DETAIL)

REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')

# Mask of all flags that affect how failures are reported.
REPORTING_FLAGS = (REPORT_UDIFF |
                   REPORT_CDIFF |
                   REPORT_NDIFF |
                   REPORT_ONLY_FIRST_FAILURE)

# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
ELLIPSIS_MARKER = '...'
######################################################################
## Table of Contents
######################################################################
# 1. Utility Functions
# 2. Example & DocTest -- store test cases
# 3. DocTest Parser -- extracts examples from strings
# 4. DocTest Finder -- extracts test cases from objects
# 5. DocTest Runner -- runs test cases
# 6. Test Functions -- convenient wrappers for testing
# 7. Tester Class -- for backwards compatibility
# 8. Unittest Support
# 9. Debugging Support
# 10. Example Usage
######################################################################
## 1. Utility Functions
######################################################################
def _extract_future_flags(globs):
"""
Return the compiler-flags associated with the future features that
have been imported into the given namespace (globs).
"""
flags = 0
for fname in __future__.all_feature_names:
feature = globs.get(fname, None)
if feature is getattr(__future__, fname):
flags |= feature.compiler_flag
return flags
def _normalize_module(module, depth=2):
    """
    Return the module specified by `module`.  In particular:
      - If `module` is a module, then return module.
      - If `module` is a string, then import and return the
        module with that name.
      - If `module` is None, then return the calling module.
        The calling module is assumed to be the module of
        the stack frame at the given depth in the call stack.

    Raises TypeError for any other argument type.
    """
    if inspect.ismodule(module):
        return module
    elif isinstance(module, (str, unicode)):
        # `unicode` is Python 2-only; this module predates Python 3.
        return __import__(module, globals(), locals(), ["*"])
    elif module is None:
        # Walk `depth` frames up the stack to find the caller's module.
        return sys.modules[sys._getframe(depth).f_globals['__name__']]
    else:
        raise TypeError("Expected a module, string, or None")
def _load_testfile(filename, package, module_relative):
if module_relative:
package = _normalize_module(package, 3)
filename = _module_relative_path(package, filename)
if hasattr(package, '__loader__'):
if hasattr(package.__loader__, 'get_data'):
file_contents = package.__loader__.get_data(filename)
# get_data() opens files as 'rb', so one must do the equivalent
# conversion as universal newlines would do.
return file_contents.replace(os.linesep, '\n'), filename
return open(filename).read(), filename
def _indent(s, indent=4):
"""
Add the given number of space characters to the beginning every
non-blank line in `s`, and return the result.
"""
# This regexp matches the start of non-blank lines:
return re.sub('(?m)^(?!$)', indent*' ', s)
def _exception_traceback(exc_info):
"""
Return a string containing a traceback message for the given
exc_info tuple (as returned by sys.exc_info()).
"""
# Get a traceback message.
excout = StringIO()
exc_type, exc_val, exc_tb = exc_info
traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
return excout.getvalue()
# Override some StringIO methods.
class _SpoofOut(StringIO):
def getvalue(self):
result = StringIO.getvalue(self)
# If anything at all was written, make sure there's a trailing
# newline. There's no way for the expected output to indicate
# that a trailing newline is missing.
if result and not result.endswith("\n"):
result += "\n"
# Prevent softspace from screwing up the next test case, in
# case they used print with a trailing comma in an example.
if hasattr(self, "softspace"):
del self.softspace
return result
def truncate(self, size=None):
StringIO.truncate(self, size)
if hasattr(self, "softspace"):
del self.softspace
# Worst-case linear-time ellipsis matching.
def _ellipsis_match(want, got):
    """
    Report whether `got` matches `want`, treating each `...` in `want`
    as a wildcard for any run of characters.

    Essentially the only subtle case:
    >>> _ellipsis_match('aa...aa', 'aaa')
    False
    """
    if ELLIPSIS_MARKER not in want:
        return want == got

    # The literal fragments between the ellipses.
    pieces = want.split(ELLIPSIS_MARKER)
    assert len(pieces) >= 2

    # The window of `got` still available for matching.
    lo, hi = 0, len(got)

    # A non-empty first piece must match `got` exactly at the start.
    head = pieces[0]
    if head:
        if not got.startswith(head):
            return False
        lo = len(head)
        del pieces[0]

    # A non-empty last piece must match `got` exactly at the end.
    tail = pieces[-1]
    if tail:
        if not got.endswith(tail):
            return False
        hi -= len(tail)
        del pieces[-1]

    if lo > hi:
        # The exact end match required more characters than remain, as in
        # _ellipsis_match('aa...aa', 'aaa').
        return False

    # For the rest, greedily take the leftmost non-overlapping occurrence
    # of each remaining piece; if that fails, no assignment of the
    # ellipses can work.  (Empty pieces - from consecutive or
    # leading/trailing ellipses - are fine: find('') succeeds without
    # advancing the position.)
    for piece in pieces:
        lo = got.find(piece, lo, hi)
        if lo < 0:
            return False
        lo += len(piece)

    return True
def _comment_line(line):
"Return a commented form of the given line"
line = line.rstrip()
if line:
return '# '+line
else:
return '#'
class _OutputRedirectingPdb(pdb.Pdb):
    """
    A specialized version of the python debugger that redirects stdout
    to a given stream when interacting with the user.  Stdout is *not*
    redirected when traced code is executed.
    """
    def __init__(self, out):
        # Stream that receives the debugger's own prompts and output.
        self.__out = out
        # Becomes True the first time set_trace() is entered; lets
        # set_continue() know whether disabling tracing is safe.
        self.__debugger_used = False
        pdb.Pdb.__init__(self, stdout=out)

    def set_trace(self, frame=None):
        self.__debugger_used = True
        if frame is None:
            # Default to the caller's frame, mirroring pdb.set_trace().
            frame = sys._getframe().f_back
        pdb.Pdb.set_trace(self, frame)

    def set_continue(self):
        # Calling set_continue unconditionally would break unit test
        # coverage reporting, as Bdb.set_continue calls sys.settrace(None).
        if self.__debugger_used:
            pdb.Pdb.set_continue(self)

    def trace_dispatch(self, *args):
        # Redirect stdout to the given stream.
        save_stdout = sys.stdout
        sys.stdout = self.__out
        # Call Pdb's trace dispatch method.
        try:
            return pdb.Pdb.trace_dispatch(self, *args)
        finally:
            # Always restore stdout, even if dispatch raises.
            sys.stdout = save_stdout
# [XX] Normalize with respect to os.path.pardir?
def _module_relative_path(module, path):
if not inspect.ismodule(module):
raise TypeError, 'Expected a module: %r' % module
if path.startswith('/'):
raise ValueError, 'Module-relative files may not have absolute paths'
# Find the base directory for the path.
if hasattr(module, '__file__'):
# A normal module/package
basedir = os.path.split(module.__file__)[0]
elif module.__name__ == '__main__':
# An interactive session.
if len(sys.argv)>0 and sys.argv[0] != '':
basedir = os.path.split(sys.argv[0])[0]
else:
basedir = os.curdir
else:
# A module w/o __file__ (this includes builtins)
raise ValueError("Can't resolve paths relative to the module " +
module + " (it has no __file__)")
# Combine the base directory and the path.
return os.path.join(basedir, *(path.split('/')))
######################################################################
## 2. Example & DocTest
######################################################################
## - An "example" is a <source, want> pair, where "source" is a
## fragment of source code, and "want" is the expected output for
## "source." The Example class also includes information about
## where the example was extracted from.
##
## - A "doctest" is a collection of examples, typically extracted from
## a string (such as an object's docstring). The DocTest class also
## includes information about where the string was extracted from.
class Example:
    """
    One doctest example: a piece of Python source plus the output it is
    expected to produce.  Attributes:

      - source: A single Python statement; always ends with a newline
        (the constructor appends one when missing).

      - want: The expected output of running `source` (stdout text, or a
        traceback for an expected exception).  Ends with a newline unless
        it is the empty string; the constructor appends one when missing.

      - exc_msg: The exception message the example is expected to
        produce, or `None` when no exception is expected.  Compared
        against the output of `traceback.format_exception_only()`; ends
        with a newline unless it is `None` (the constructor appends one
        when missing).

      - lineno: Zero-based line number, within the enclosing DocTest
        string, at which this example begins.

      - indent: The example's indentation in the DocTest string, i.e.
        the number of space characters before its first prompt.

      - options: Dict mapping option flags to True/False, overriding the
        DocTestRunner's defaults for this example only.  Flags absent
        from the dict keep their default value.  Empty by default.
    """
    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
                 options=None):
        # Normalize the text fields so each ends with a newline (except
        # an empty `want` and a `None` exc_msg).
        if not source.endswith('\n'):
            source = source + '\n'
        if want and not want.endswith('\n'):
            want = want + '\n'
        if exc_msg is not None and not exc_msg.endswith('\n'):
            exc_msg = exc_msg + '\n'
        # Store properties.
        self.source = source
        self.want = want
        self.exc_msg = exc_msg
        self.lineno = lineno
        self.indent = indent
        self.options = {} if options is None else options
class DocTest:
    """
    A collection of doctest examples that should be run in a single
    namespace.  Each `DocTest` defines the following attributes:

      - examples: the list of examples.

      - globs: The namespace (aka globals) that the examples should
        be run in.

      - name: A name identifying the DocTest (typically, the name of
        the object whose docstring this DocTest was extracted from).

      - filename: The name of the file that this DocTest was extracted
        from, or `None` if the filename is unknown.

      - lineno: The line number within filename where this DocTest
        begins, or `None` if the line number is unavailable.  This
        line number is zero-based, with respect to the beginning of
        the file.

      - docstring: The string that the examples were extracted from,
        or `None` if the string is unavailable.
    """
    def __init__(self, examples, globs, name, filename, lineno, docstring):
        """
        Create a new DocTest containing the given examples.  The
        DocTest's globals are initialized with a copy of `globs`.
        """
        # `basestring` is Python 2-only; this module predates Python 3.
        assert not isinstance(examples, basestring), \
               "DocTest no longer accepts str; use DocTestParser instead"
        self.examples = examples
        self.docstring = docstring
        # Copy so the examples cannot mutate the caller's namespace dict.
        self.globs = globs.copy()
        self.name = name
        self.filename = filename
        self.lineno = lineno

    def __repr__(self):
        if len(self.examples) == 0:
            examples = 'no examples'
        elif len(self.examples) == 1:
            examples = '1 example'
        else:
            examples = '%d examples' % len(self.examples)
        return ('<DocTest %s from %s:%s (%s)>' %
                (self.name, self.filename, self.lineno, examples))

    # This lets us sort tests by name:
    def __cmp__(self, other):
        # NOTE: __cmp__ and the cmp() builtin are Python 2-only; Python 3
        # ignores this method entirely.
        if not isinstance(other, DocTest):
            return -1
        return cmp((self.name, self.filename, self.lineno, id(self)),
                   (other.name, other.filename, other.lineno, id(other)))
######################################################################
## 3. DocTestParser
######################################################################
class DocTestParser:
    """
    A class used to parse strings containing doctest examples.
    """
    # This regular expression is used to find doctest examples in a
    # string.  It defines three groups: `source` is the source code
    # (including leading indentation and prompts); `indent` is the
    # indentation of the first (PS1) line of the source code; and
    # `want` is the expected output (including leading indentation).
    _EXAMPLE_RE = re.compile(r'''
        # Source consists of a PS1 line followed by zero or more PS2 lines.
        (?P<source>
            (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
            (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
        \n?
        # Want consists of any non-blank lines that do not start with PS1.
        (?P<want> (?:(?![ ]*$)    # Not a blank line
                     (?![ ]*>>>)  # Not a line starting with PS1
                     .*$\n?       # But any other line
                  )*)
        ''', re.MULTILINE | re.VERBOSE)

    # A regular expression for handling `want` strings that contain
    # expected exceptions.  It divides `want` into three pieces:
    #    - the traceback header line (`hdr`)
    #    - the traceback stack (`stack`)
    #    - the exception message (`msg`), as generated by
    #      traceback.format_exception_only()
    # `msg` may have multiple lines.  We assume/require that the
    # exception message is the first non-indented line starting with a word
    # character following the traceback header line.
    _EXCEPTION_RE = re.compile(r"""
        # Grab the traceback header.  Different versions of Python have
        # said different things on the first traceback line.
        ^(?P<hdr> Traceback\ \(
            (?: most\ recent\ call\ last
            |   innermost\ last
            ) \) :
        )
        \s* $                # toss trailing whitespace on the header.
        (?P<stack> .*?)      # don't blink: absorb stuff until...
        ^ (?P<msg> \w+ .*)   #     a line *starts* with alphanum.
        """, re.VERBOSE | re.MULTILINE | re.DOTALL)

    # A callable returning a true value iff its argument is a blank line
    # or contains a single comment.
    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match

    def parse(self, string, name='<string>'):
        """
        Divide the given string into examples and intervening text,
        and return them as a list of alternating Examples and strings.
        Line numbers for the Examples are 0-based.  The optional
        argument `name` is a name identifying this string, and is only
        used for error messages.
        """
        string = string.expandtabs()
        # If all lines begin with the same indentation, then strip it.
        min_indent = self._min_indent(string)
        if min_indent > 0:
            string = '\n'.join([l[min_indent:] for l in string.split('\n')])

        output = []
        # charno/lineno track our position in `string` as character and
        # line offsets respectively.
        charno, lineno = 0, 0
        # Find all doctest examples in the string:
        for m in self._EXAMPLE_RE.finditer(string):
            # Add the pre-example text to `output`.
            output.append(string[charno:m.start()])
            # Update lineno (lines before this example)
            lineno += string.count('\n', charno, m.start())
            # Extract info from the regexp match.
            (source, options, want, exc_msg) = \
                     self._parse_example(m, name, lineno)
            # Create an Example, and add it to the list.
            if not self._IS_BLANK_OR_COMMENT(source):
                output.append( Example(source, want, exc_msg,
                                    lineno=lineno,
                                    indent=min_indent+len(m.group('indent')),
                                    options=options) )
            # Update lineno (lines inside this example)
            lineno += string.count('\n', m.start(), m.end())
            # Update charno.
            charno = m.end()
        # Add any remaining post-example text to `output`.
        output.append(string[charno:])
        return output

    def get_doctest(self, string, globs, name, filename, lineno):
        """
        Extract all doctest examples from the given string, and
        collect them into a `DocTest` object.

        `globs`, `name`, `filename`, and `lineno` are attributes for
        the new `DocTest` object.  See the documentation for `DocTest`
        for more information.
        """
        return DocTest(self.get_examples(string, name), globs,
                       name, filename, lineno, string)

    def get_examples(self, string, name='<string>'):
        """
        Extract all doctest examples from the given string, and return
        them as a list of `Example` objects.  Line numbers are
        0-based, because it's most common in doctests that nothing
        interesting appears on the same line as opening triple-quote,
        and so the first interesting line is called \"line 1\" then.

        The optional argument `name` is a name identifying this
        string, and is only used for error messages.
        """
        # parse() interleaves Examples with plain-text strings; keep
        # only the Examples.
        return [x for x in self.parse(string, name)
                if isinstance(x, Example)]

    def _parse_example(self, m, name, lineno):
        """
        Given a regular expression match from `_EXAMPLE_RE` (`m`),
        return a pair `(source, want)`, where `source` is the matched
        example's source code (with prompts and indentation stripped);
        and `want` is the example's expected output (with indentation
        stripped).

        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.
        """
        # Get the example's indentation level.
        indent = len(m.group('indent'))

        # Divide source into lines; check that they're properly
        # indented; and then strip their indentation & prompts.
        source_lines = m.group('source').split('\n')
        self._check_prompt_blank(source_lines, indent, name, lineno)
        self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
        # indent+4 strips the indentation plus the 4-character prompt
        # ('>>> ' or '... ').
        source = '\n'.join([sl[indent+4:] for sl in source_lines])

        # Divide want into lines; check that it's properly indented; and
        # then strip the indentation.  Spaces before the last newline should
        # be preserved, so plain rstrip() isn't good enough.
        want = m.group('want')
        want_lines = want.split('\n')
        if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
            del want_lines[-1]  # forget final newline & spaces after it
        self._check_prefix(want_lines, ' '*indent, name,
                           lineno + len(source_lines))
        want = '\n'.join([wl[indent:] for wl in want_lines])

        # If `want` contains a traceback message, then extract it.
        m = self._EXCEPTION_RE.match(want)
        if m:
            exc_msg = m.group('msg')
        else:
            exc_msg = None

        # Extract options from the source.
        options = self._find_options(source, name, lineno)

        return source, options, want, exc_msg

    # This regular expression looks for option directives in the
    # source code of an example.  Option directives are comments
    # starting with "doctest:".  Warning: this may give false
    # positives for string-literals that contain the string
    # "#doctest:".  Eliminating these false positives would require
    # actually parsing the string; but we limit them by ignoring any
    # line containing "#doctest:" that is *followed* by a quote mark.
    _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
                                      re.MULTILINE)

    def _find_options(self, source, name, lineno):
        """
        Return a dictionary containing option overrides extracted from
        option directives in the given source string.

        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.
        """
        options = {}
        # (note: with the current regexp, this will match at most once:)
        for m in self._OPTION_DIRECTIVE_RE.finditer(source):
            # Directives may be comma- or space-separated, e.g.
            # '+ELLIPSIS, -SKIP'.
            option_strings = m.group(1).replace(',', ' ').split()
            for option in option_strings:
                if (option[0] not in '+-' or
                    option[1:] not in OPTIONFLAGS_BY_NAME):
                    raise ValueError('line %r of the doctest for %s '
                                     'has an invalid option: %r' %
                                     (lineno+1, name, option))
                flag = OPTIONFLAGS_BY_NAME[option[1:]]
                options[flag] = (option[0] == '+')
        if options and self._IS_BLANK_OR_COMMENT(source):
            raise ValueError('line %r of the doctest for %s has an option '
                             'directive on a line with no example: %r' %
                             (lineno, name, source))
        return options

    # This regular expression finds the indentation of every non-blank
    # line in a string.
    _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)

    def _min_indent(self, s):
        "Return the minimum indentation of any non-blank line in `s`"
        indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
        if len(indents) > 0:
            return min(indents)
        else:
            return 0

    def _check_prompt_blank(self, lines, indent, name, lineno):
        """
        Given the lines of a source string (including prompts and
        leading indentation), check to make sure that every prompt is
        followed by a space character.  If any line is not followed by
        a space character, then raise ValueError.
        """
        for i, line in enumerate(lines):
            # line[indent:indent+3] is the 3-character prompt ('>>>' or
            # '...'); the character after it must be a blank.
            if len(line) >= indent+4 and line[indent+3] != ' ':
                raise ValueError('line %r of the docstring for %s '
                                 'lacks blank after %s: %r' %
                                 (lineno+i+1, name,
                                  line[indent:indent+3], line))

    def _check_prefix(self, lines, prefix, name, lineno):
        """
        Check that every line in the given list starts with the given
        prefix; if any line does not, then raise a ValueError.
        """
        for i, line in enumerate(lines):
            if line and not line.startswith(prefix):
                raise ValueError('line %r of the docstring for %s has '
                                 'inconsistent leading whitespace: %r' %
                                 (lineno+i+1, name, line))
######################################################################
## 4. DocTest Finder
######################################################################
class DocTestFinder:
    """
    A class used to extract the DocTests that are relevant to a given
    object, from its docstring and the docstrings of its contained
    objects. Doctests can currently be extracted from the following
    object types: modules, functions, classes, methods, staticmethods,
    classmethods, and properties.
    """
    def __init__(self, verbose=False, parser=DocTestParser(),
                 recurse=True, exclude_empty=True):
        """
        Create a new doctest finder.
        The optional argument `parser` specifies a class or
        function that should be used to create new DocTest objects (or
        objects that implement the same interface as DocTest).  The
        signature for this factory function should match the signature
        of the DocTest constructor.
        If the optional argument `recurse` is false, then `find` will
        only examine the given object, and not any contained objects.
        If the optional argument `exclude_empty` is false, then `find`
        will include tests for objects with empty docstrings.
        """
        # NOTE: the default `parser` instance is evaluated once and shared
        # by every finder constructed without an explicit parser; this is
        # safe because DocTestParser keeps no per-call state.
        self._parser = parser
        self._verbose = verbose
        self._recurse = recurse
        self._exclude_empty = exclude_empty
    def find(self, obj, name=None, module=None, globs=None, extraglobs=None):
        """
        Return a list of the DocTests that are defined by the given
        object's docstring, or by any of its contained objects'
        docstrings.
        The optional parameter `module` is the module that contains
        the given object.  If the module is not specified or is None, then
        the test finder will attempt to automatically determine the
        correct module.  The object's module is used:
            - As a default namespace, if `globs` is not specified.
            - To prevent the DocTestFinder from extracting DocTests
              from objects that are imported from other modules.
            - To find the name of the file containing the object.
            - To help find the line number of the object within its
              file.
        Contained objects whose module does not match `module` are ignored.
        If `module` is False, no attempt to find the module will be made.
        This is obscure, of use mostly in tests:  if `module` is False, or
        is None but cannot be found automatically, then all objects are
        considered to belong to the (non-existent) module, so all contained
        objects will (recursively) be searched for doctests.
        The globals for each DocTest is formed by combining `globs`
        and `extraglobs` (bindings in `extraglobs` override bindings
        in `globs`).  A new copy of the globals dictionary is created
        for each DocTest.  If `globs` is not specified, then it
        defaults to the module's `__dict__`, if specified, or {}
        otherwise.  If `extraglobs` is not specified, then it defaults
        to {}.
        """
        # If name was not specified, then extract it from the object.
        if name is None:
            name = getattr(obj, '__name__', None)
            if name is None:
                raise ValueError("DocTestFinder.find: name must be given "
                        "when obj.__name__ doesn't exist: %r" %
                                 (type(obj),))
        # Find the module that contains the given object (if obj is
        # a module, then module=obj.).  Note: this may fail, in which
        # case module will be None.
        if module is False:
            module = None
        elif module is None:
            module = inspect.getmodule(obj)
        # Read the module's source code.  This is used by
        # DocTestFinder._find_lineno to find the line number for a
        # given object's docstring.
        try:
            file = inspect.getsourcefile(obj) or inspect.getfile(obj)
            source_lines = linecache.getlines(file)
            if not source_lines:
                source_lines = None
        except TypeError:
            # Built-ins and C extensions have no retrievable source.
            source_lines = None
        # Initialize globals, and merge in extraglobs.
        if globs is None:
            if module is None:
                globs = {}
            else:
                globs = module.__dict__.copy()
        else:
            globs = globs.copy()
        if extraglobs is not None:
            globs.update(extraglobs)
        # Recursively explore `obj`, extracting DocTests.
        tests = []
        self._find(tests, obj, name, module, source_lines, globs, {})
        # Sort the tests by alpha order of names, for consistency in
        # verbose-mode output.  This was a feature of doctest in Pythons
        # <= 2.3 that got lost by accident in 2.4.  It was repaired in
        # 2.4.4 and 2.5.
        tests.sort()
        return tests
    def _from_module(self, module, object):
        """
        Return true if the given object is defined in the given
        module.
        """
        if module is None:
            return True
        elif inspect.isfunction(object):
            # A function defined in `module` shares that module's
            # globals dict, so identity comparison suffices.
            return module.__dict__ is object.func_globals
        elif inspect.isclass(object):
            # XXX: Jython transition 2.5
            # Java classes appear as Python classes to inspect, but they
            # have no __module__ http://jython.org/bugs/1758279
            # org.python.modules uses Java classes to masq
            if not hasattr(object, '__module__'):
                return False
            return module.__name__ == object.__module__
        elif inspect.getmodule(object) is not None:
            return module is inspect.getmodule(object)
        elif hasattr(object, '__module__'):
            return module.__name__ == object.__module__
        elif isinstance(object, property):
            return True # [XX] no way to be sure.
        else:
            raise ValueError("object must be a class or function")
    def _find(self, tests, obj, name, module, source_lines, globs, seen):
        """
        Find tests for the given object and any contained objects, and
        add them to `tests`.
        """
        if self._verbose:
            print 'Finding tests in %s' % name
        # If we've already processed this object, then ignore it.
        # (`seen` maps id(obj) -> 1; it guards against duplicates and
        # reference cycles during the recursive walk.)
        if id(obj) in seen:
            return
        seen[id(obj)] = 1
        # Find a test for this object, and add it to the list of tests.
        test = self._get_test(obj, name, module, globs, source_lines)
        if test is not None:
            tests.append(test)
        # Look for tests in a module's contained objects.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                valname = '%s.%s' % (name, valname)
                # Recurse to functions & classes.
                if ((inspect.isfunction(val) or inspect.isclass(val)) and
                    self._from_module(module, val)):
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
        # Look for tests in a module's __test__ dictionary.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in getattr(obj, '__test__', {}).items():
                if not isinstance(valname, basestring):
                    raise ValueError("DocTestFinder.find: __test__ keys "
                                     "must be strings: %r" %
                                     (type(valname),))
                if not (inspect.isfunction(val) or inspect.isclass(val) or
                        inspect.ismethod(val) or inspect.ismodule(val) or
                        isinstance(val, basestring)):
                    raise ValueError("DocTestFinder.find: __test__ values "
                                     "must be strings, functions, methods, "
                                     "classes, or modules: %r" %
                                     (type(val),))
                valname = '%s.__test__.%s' % (name, valname)
                self._find(tests, val, valname, module, source_lines,
                           globs, seen)
        # Look for tests in a class's contained objects.
        if inspect.isclass(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Special handling for staticmethod/classmethod.
                # (Unwrap the descriptors so the checks below see the
                # underlying function.)
                if isinstance(val, staticmethod):
                    val = getattr(obj, valname)
                if isinstance(val, classmethod):
                    val = getattr(obj, valname).im_func
                # Recurse to methods, properties, and nested classes.
                if ((inspect.isfunction(val) or inspect.isclass(val) or
                      isinstance(val, property)) and
                      self._from_module(module, val)):
                    valname = '%s.%s' % (name, valname)
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
    def _get_test(self, obj, name, module, globs, source_lines):
        """
        Return a DocTest for the given object, if it defines a docstring;
        otherwise, return None.
        """
        # Extract the object's docstring.  If it doesn't have one,
        # then return None (no test for this object).
        # (A bare string is treated as being its own docstring.)
        if isinstance(obj, basestring):
            docstring = obj
        else:
            try:
                if obj.__doc__ is None:
                    docstring = ''
                else:
                    docstring = obj.__doc__
                    if not isinstance(docstring, basestring):
                        docstring = str(docstring)
            except (TypeError, AttributeError):
                docstring = ''
        # Find the docstring's location in the file.
        lineno = self._find_lineno(obj, source_lines)
        # Don't bother if the docstring is empty.
        if self._exclude_empty and not docstring:
            return None
        # Return a DocTest for this object.
        if module is None:
            filename = None
        else:
            filename = getattr(module, '__file__', module.__name__)
            # Map compiled-file names back to their source file.
            if filename[-4:] in (".pyc", ".pyo"):
                filename = filename[:-1]
            elif filename.endswith('$py.class'):
                filename = '%s.py' % filename[:-9]
        return self._parser.get_doctest(docstring, globs, name,
                                        filename, lineno)
    def _find_lineno(self, obj, source_lines):
        """
        Return a line number of the given object's docstring.  Note:
        this method assumes that the object has a docstring.
        """
        lineno = None
        # Find the line number for modules.
        if inspect.ismodule(obj):
            lineno = 0
        # Find the line number for classes.
        # Note: this could be fooled if a class is defined multiple
        # times in a single file.
        if inspect.isclass(obj):
            if source_lines is None:
                return None
            pat = re.compile(r'^\s*class\s*%s\b' %
                             getattr(obj, '__name__', '-'))
            for i, line in enumerate(source_lines):
                if pat.match(line):
                    lineno = i
                    break
        # Find the line number for functions & methods.
        # (Successively unwrap until we reach a code object, which
        # carries the first line number directly.)
        if inspect.ismethod(obj): obj = obj.im_func
        if inspect.isfunction(obj): obj = obj.func_code
        if inspect.istraceback(obj): obj = obj.tb_frame
        if inspect.isframe(obj): obj = obj.f_code
        if inspect.iscode(obj):
            lineno = getattr(obj, 'co_firstlineno', None)-1
        # Find the line number where the docstring starts.  Assume
        # that it's the first line that begins with a quote mark.
        # Note: this could be fooled by a multiline function
        # signature, where a continuation line begins with a quote
        # mark.
        if lineno is not None:
            if source_lines is None:
                return lineno+1
            pat = re.compile('(^|.*:)\s*\w*("|\')')
            for lineno in range(lineno, len(source_lines)):
                if pat.match(source_lines[lineno]):
                    return lineno
        # We couldn't find the line number.
        return None
######################################################################
## 5. DocTest Runner
######################################################################
class DocTestRunner:
    """
    A class used to run DocTest test cases, and accumulate statistics.
    The `run` method is used to process a single DocTest case.  It
    returns a tuple `(f, t)`, where `t` is the number of test cases
    tried, and `f` is the number of test cases that failed.
    >>> tests = DocTestFinder().find(_TestClass)
    >>> runner = DocTestRunner(verbose=False)
    >>> tests.sort(key = lambda test: test.name)
    >>> for test in tests:
    ...     print test.name, '->', runner.run(test)
    _TestClass -> (0, 2)
    _TestClass.__init__ -> (0, 2)
    _TestClass.get -> (0, 2)
    _TestClass.square -> (0, 1)
    The `summarize` method prints a summary of all the test cases that
    have been run by the runner, and returns an aggregated `(f, t)`
    tuple:
    >>> runner.summarize(verbose=1)
    4 items passed all tests:
       2 tests in _TestClass
       2 tests in _TestClass.__init__
       2 tests in _TestClass.get
       1 tests in _TestClass.square
    7 tests in 4 items.
    7 passed and 0 failed.
    Test passed.
    (0, 7)
    The aggregated number of tried examples and failed examples is
    also available via the `tries` and `failures` attributes:
    >>> runner.tries
    7
    >>> runner.failures
    0
    The comparison between expected outputs and actual outputs is done
    by an `OutputChecker`.  This comparison may be customized with a
    number of option flags; see the documentation for `testmod` for
    more information.  If the option flags are insufficient, then the
    comparison may also be customized by passing a subclass of
    `OutputChecker` to the constructor.
    The test runner's display output can be controlled in two ways.
    First, an output function (`out) can be passed to
    `TestRunner.run`; this function will be called with strings that
    should be displayed.  It defaults to `sys.stdout.write`.  If
    capturing the output is not sufficient, then the display output
    can be also customized by subclassing DocTestRunner, and
    overriding the methods `report_start`, `report_success`,
    `report_unexpected_exception`, and `report_failure`.
    """
    # This divider string is used to separate failure messages, and to
    # separate sections of the summary.
    DIVIDER = "*" * 70
    def __init__(self, checker=None, verbose=None, optionflags=0):
        """
        Create a new test runner.
        Optional keyword arg `checker` is the `OutputChecker` that
        should be used to compare the expected outputs and actual
        outputs of doctest examples.
        Optional keyword arg 'verbose' prints lots of stuff if true,
        only failures if false; by default, it's true iff '-v' is in
        sys.argv.
        Optional argument `optionflags` can be used to control how the
        test runner compares expected output to actual output, and how
        it displays failures.  See the documentation for `testmod` for
        more information.
        """
        self._checker = checker or OutputChecker()
        if verbose is None:
            verbose = '-v' in sys.argv
        self._verbose = verbose
        self.optionflags = optionflags
        self.original_optionflags = optionflags
        # Keep track of the examples we've run.
        # (tries/failures are aggregate counts; _name2ft maps each
        # test name to its own (failures, tries) pair.)
        self.tries = 0
        self.failures = 0
        self._name2ft = {}
        # Create a fake output target for capturing doctest output.
        self._fakeout = _SpoofOut()
    #/////////////////////////////////////////////////////////////////
    # Reporting methods
    #/////////////////////////////////////////////////////////////////
    def report_start(self, out, test, example):
        """
        Report that the test runner is about to process the given
        example.  (Only displays a message if verbose=True)
        """
        if self._verbose:
            if example.want:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting:\n' + _indent(example.want))
            else:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting nothing\n')
    def report_success(self, out, test, example, got):
        """
        Report that the given example ran successfully.  (Only
        displays a message if verbose=True)
        """
        if self._verbose:
            out("ok\n")
    def report_failure(self, out, test, example, got):
        """
        Report that the given example failed.
        """
        out(self._failure_header(test, example) +
            self._checker.output_difference(example, got, self.optionflags))
    def report_unexpected_exception(self, out, test, example, exc_info):
        """
        Report that the given example raised an unexpected exception.
        """
        out(self._failure_header(test, example) +
            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
    def _failure_header(self, test, example):
        """Return a banner naming the file, line, and test of a failure."""
        out = [self.DIVIDER]
        if test.filename:
            if test.lineno is not None and example.lineno is not None:
                lineno = test.lineno + example.lineno + 1
            else:
                lineno = '?'
            out.append('File "%s", line %s, in %s' %
                       (test.filename, lineno, test.name))
        else:
            out.append('Line %s, in %s' % (example.lineno+1, test.name))
        out.append('Failed example:')
        source = example.source
        out.append(_indent(source))
        return '\n'.join(out)
    #/////////////////////////////////////////////////////////////////
    # DocTest Running
    #/////////////////////////////////////////////////////////////////
    def __run(self, test, compileflags, out):
        """
        Run the examples in `test`.  Write the outcome of each example
        with one of the `DocTestRunner.report_*` methods, using the
        writer function `out`.  `compileflags` is the set of compiler
        flags that should be used to execute examples.  Return a tuple
        `(f, t)`, where `t` is the number of examples tried, and `f`
        is the number of examples that failed.  The examples are run
        in the namespace `test.globs`.
        """
        # NOTE: self.debugger is installed by run() before this method
        # is invoked; __run must not be called directly.
        # Keep track of the number of failures and tries.
        failures = tries = 0
        # Save the option flags (since option directives can be used
        # to modify them).
        original_optionflags = self.optionflags
        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
        check = self._checker.check_output
        # Process each example.
        for examplenum, example in enumerate(test.examples):
            # If REPORT_ONLY_FIRST_FAILURE is set, then supress
            # reporting after the first failure.
            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
                     failures > 0)
            # Merge in the example's options.
            self.optionflags = original_optionflags
            if example.options:
                for (optionflag, val) in example.options.items():
                    if val:
                        self.optionflags |= optionflag
                    else:
                        self.optionflags &= ~optionflag
            # If 'SKIP' is set, then skip this example.
            if self.optionflags & SKIP:
                continue
            # Record that we started this example.
            tries += 1
            if not quiet:
                self.report_start(out, test, example)
            # Use a special filename for compile(), so we can retrieve
            # the source code during interactive debugging (see
            # __patched_linecache_getlines).
            filename = '<doctest %s[%d]>' % (test.name, examplenum)
            # Run the example in the given context (globs), and record
            # any exception that gets raised.  (But don't intercept
            # keyboard interrupts.)
            try:
                # Don't blink!  This is where the user's code gets run.
                exec compile(example.source, filename, "single",
                             compileflags, 1) in test.globs
                self.debugger.set_continue() # ==== Example Finished ====
                exception = None
            except KeyboardInterrupt:
                raise
            except:
                exception = sys.exc_info()
                self.debugger.set_continue() # ==== Example Finished ====
            got = self._fakeout.getvalue()  # the actual output
            self._fakeout.truncate(0)
            outcome = FAILURE   # guilty until proved innocent or insane
            # If the example executed without raising any exceptions,
            # verify its output.
            if exception is None:
                if check(example.want, got, self.optionflags):
                    outcome = SUCCESS
            # The example raised an exception: check if it was expected.
            else:
                exc_info = sys.exc_info()
                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
                if not quiet:
                    got += _exception_traceback(exc_info)
                # If `example.exc_msg` is None, then we weren't expecting
                # an exception.
                if example.exc_msg is None:
                    outcome = BOOM
                # We expected an exception: see whether it matches.
                elif check(example.exc_msg, exc_msg, self.optionflags):
                    outcome = SUCCESS
                # Another chance if they didn't care about the detail.
                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
                    m1 = re.match(r'[^:]*:', example.exc_msg)
                    m2 = re.match(r'[^:]*:', exc_msg)
                    if m1 and m2 and check(m1.group(0), m2.group(0),
                                           self.optionflags):
                        outcome = SUCCESS
            # Report the outcome.
            if outcome is SUCCESS:
                if not quiet:
                    self.report_success(out, test, example, got)
            elif outcome is FAILURE:
                if not quiet:
                    self.report_failure(out, test, example, got)
                failures += 1
            elif outcome is BOOM:
                if not quiet:
                    self.report_unexpected_exception(out, test, example,
                                                     exc_info)
                failures += 1
            else:
                assert False, ("unknown outcome", outcome)
        # Restore the option flags (in case they were modified)
        self.optionflags = original_optionflags
        # Record and return the number of failures and tries.
        self.__record_outcome(test, failures, tries)
        return failures, tries
    def __record_outcome(self, test, f, t):
        """
        Record the fact that the given DocTest (`test`) generated `f`
        failures out of `t` tried examples.
        """
        f2, t2 = self._name2ft.get(test.name, (0,0))
        self._name2ft[test.name] = (f+f2, t+t2)
        self.failures += f
        self.tries += t
    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
                                         r'(?P<name>[\w\.]+)'
                                         r'\[(?P<examplenum>\d+)\]>$')
    def __patched_linecache_getlines(self, filename, module_globals=None):
        """Return the source lines of the current example when `filename`
        names a doctest pseudo-file; otherwise defer to the saved
        linecache.getlines (installed/removed by run())."""
        m = self.__LINECACHE_FILENAME_RE.match(filename)
        if m and m.group('name') == self.test.name:
            example = self.test.examples[int(m.group('examplenum'))]
            return example.source.splitlines(True)
        else:
            return self.save_linecache_getlines(filename, module_globals)
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        """
        Run the examples in `test`, and display the results using the
        writer function `out`.
        The examples are run in the namespace `test.globs`.  If
        `clear_globs` is true (the default), then this namespace will
        be cleared after the test runs, to help with garbage
        collection.  If you would like to examine the namespace after
        the test completes, then use `clear_globs=False`.
        `compileflags` gives the set of flags that should be used by
        the Python compiler when running the examples.  If not
        specified, then it will default to the set of future-import
        flags that apply to `globs`.
        The output of each example is checked using
        `DocTestRunner.check_output`, and the results are formatted by
        the `DocTestRunner.report_*` methods.
        """
        self.test = test
        if compileflags is None:
            compileflags = _extract_future_flags(test.globs)
        save_stdout = sys.stdout
        if out is None:
            out = save_stdout.write
        sys.stdout = self._fakeout
        # Patch pdb.set_trace to restore sys.stdout during interactive
        # debugging (so it's not still redirected to self._fakeout).
        # Note that the interactive output will go to *our*
        # save_stdout, even if that's not the real sys.stdout; this
        # allows us to write test cases for the set_trace behavior.
        save_set_trace = pdb.set_trace
        self.debugger = _OutputRedirectingPdb(save_stdout)
        self.debugger.reset()
        pdb.set_trace = self.debugger.set_trace
        # Patch linecache.getlines, so we can see the example's source
        # when we're inside the debugger.
        self.save_linecache_getlines = linecache.getlines
        linecache.getlines = self.__patched_linecache_getlines
        try:
            return self.__run(test, compileflags, out)
        finally:
            # Always undo the global monkey-patches, even on failure.
            sys.stdout = save_stdout
            pdb.set_trace = save_set_trace
            linecache.getlines = self.save_linecache_getlines
            if clear_globs:
                test.globs.clear()
    #/////////////////////////////////////////////////////////////////
    # Summarization
    #/////////////////////////////////////////////////////////////////
    def summarize(self, verbose=None):
        """
        Print a summary of all the test cases that have been run by
        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
        the total number of failed examples, and `t` is the total
        number of tried examples.
        The optional `verbose` argument controls how detailed the
        summary is.  If the verbosity is not specified, then the
        DocTestRunner's verbosity is used.
        """
        if verbose is None:
            verbose = self._verbose
        notests = []
        passed = []
        failed = []
        totalt = totalf = 0
        for x in self._name2ft.items():
            name, (f, t) = x
            assert f <= t
            totalt += t
            totalf += f
            if t == 0:
                notests.append(name)
            elif f == 0:
                passed.append( (name, t) )
            else:
                failed.append(x)
        if verbose:
            if notests:
                print len(notests), "items had no tests:"
                notests.sort()
                for thing in notests:
                    print "   ", thing
            if passed:
                print len(passed), "items passed all tests:"
                passed.sort()
                for thing, count in passed:
                    print " %3d tests in %s" % (count, thing)
        if failed:
            print self.DIVIDER
            print len(failed), "items had failures:"
            failed.sort()
            for thing, (f, t) in failed:
                print " %3d of %3d in %s" % (f, t, thing)
        if verbose:
            print totalt, "tests in", len(self._name2ft), "items."
            print totalt - totalf, "passed and", totalf, "failed."
        if totalf:
            print "***Test Failed***", totalf, "failures."
        elif verbose:
            print "Test passed."
        return totalf, totalt
    #/////////////////////////////////////////////////////////////////
    # Backward compatibility cruft to maintain doctest.master.
    #/////////////////////////////////////////////////////////////////
    def merge(self, other):
        """Fold another runner's per-test (failures, tries) counts into
        this one's, summing counts for names present in both."""
        d = self._name2ft
        for name, (f, t) in other._name2ft.items():
            if name in d:
                print "*** DocTestRunner.merge: '" + name + "' in both" \
                    " testers; summing outcomes."
                f2, t2 = d[name]
                f = f + f2
                t = t + t2
            d[name] = f, t
class OutputChecker:
    """
    A class used to check the whether the actual output from a doctest
    example matches the expected output.  `OutputChecker` defines two
    methods: `check_output`, which compares a given pair of outputs,
    and returns true if they match; and `output_difference`, which
    returns a string describing the differences between two outputs.
    """
    def check_output(self, want, got, optionflags):
        """
        Return True iff the actual output from an example (`got`)
        matches the expected output (`want`).  These strings are
        always considered to match if they are identical; but
        depending on what option flags the test runner is using,
        several non-exact match types are also possible.  See the
        documentation for `TestRunner` for more information about
        option flags.
        """
        # Handle the common case first, for efficiency:
        # if they're string-identical, always return true.
        if got == want:
            return True
        # The values True and False replaced 1 and 0 as the return
        # value for boolean comparisons in Python 2.3.
        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
            if (got,want) == ("True\n", "1\n"):
                return True
            if (got,want) == ("False\n", "0\n"):
                return True
        # <BLANKLINE> can be used as a special sequence to signify a
        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            # Replace <BLANKLINE> in want with a blank line.
            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
                          '', want)
            # If a line in got contains only spaces, then remove the
            # spaces.
            got = re.sub('(?m)^\s*?$', '', got)
            if got == want:
                return True
        # This flag causes doctest to ignore any differences in the
        # contents of whitespace strings.  Note that this can be used
        # in conjunction with the ELLIPSIS flag.
        if optionflags & NORMALIZE_WHITESPACE:
            got = ' '.join(got.split())
            want = ' '.join(want.split())
            if got == want:
                return True
        # The ELLIPSIS flag says to let the sequence "..." in `want`
        # match any substring in `got`.
        if optionflags & ELLIPSIS:
            if _ellipsis_match(want, got):
                return True
        # We didn't find any match; return false.
        return False
    # Should we do a fancy diff?
    def _do_a_fancy_diff(self, want, got, optionflags):
        """Return true if a diff-style failure report should be used."""
        # Not unless they asked for a fancy diff.
        if not optionflags & (REPORT_UDIFF |
                              REPORT_CDIFF |
                              REPORT_NDIFF):
            return False
        # If expected output uses ellipsis, a meaningful fancy diff is
        # too hard ... or maybe not.  In two real-life failures Tim saw,
        # a diff was a major help anyway, so this is commented out.
        # [todo] _ellipsis_match() knows which pieces do and don't match,
        # and could be the basis for a kick-ass diff in this case.
        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
        ##    return False
        # ndiff does intraline difference marking, so can be useful even
        # for 1-line differences.
        if optionflags & REPORT_NDIFF:
            return True
        # The other diff types need at least a few lines to be helpful.
        return want.count('\n') > 2 and got.count('\n') > 2
    def output_difference(self, example, got, optionflags):
        """
        Return a string describing the differences between the
        expected output for a given example (`example`) and the actual
        output (`got`).  `optionflags` is the set of option flags used
        to compare `want` and `got`.
        """
        want = example.want
        # If <BLANKLINE>s are being used, then replace blank lines
        # with <BLANKLINE> in the actual output string.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
        # Check if we should use diff.
        if self._do_a_fancy_diff(want, got, optionflags):
            # Split want & got into lines.
            want_lines = want.splitlines(True)  # True == keep line ends
            got_lines = got.splitlines(True)
            # Use difflib to find their differences.
            if optionflags & REPORT_UDIFF:
                diff = difflib.unified_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'unified diff with -expected +actual'
            elif optionflags & REPORT_CDIFF:
                diff = difflib.context_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'context diff with expected followed by actual'
            elif optionflags & REPORT_NDIFF:
                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
                diff = list(engine.compare(want_lines, got_lines))
                kind = 'ndiff with -expected +actual'
            else:
                assert 0, 'Bad diff option'
            # Remove trailing whitespace on diff output.
            diff = [line.rstrip() + '\n' for line in diff]
            return 'Differences (%s):\n' % kind + _indent(''.join(diff))
        # If we're not using diff, then simply list the expected
        # output followed by the actual output.
        if want and got:
            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
        elif want:
            return 'Expected:\n%sGot nothing\n' % _indent(want)
        elif got:
            return 'Expected nothing\nGot:\n%s' % _indent(got)
        else:
            return 'Expected nothing\nGot nothing\n'
class DocTestFailure(Exception):
    """Raised (in debugging mode) when a DocTest example fails.

    Instance attributes:
      - test: the DocTest object being run
      - example: the Example object that failed
      - got: the actual output
    """
    def __init__(self, test, example, got):
        self.test, self.example, self.got = test, example, got
    def __str__(self):
        return str(self.test)
class UnexpectedException(Exception):
    """Raised when a DocTest example hits an exception it did not expect.

    Instance attributes:
      - test: the DocTest object being run
      - example: the Example object that failed
      - exc_info: the exception info
    """
    def __init__(self, test, example, exc_info):
        self.test, self.example, self.exc_info = test, example, exc_info
    def __str__(self):
        return str(self.test)
class DebugRunner(DocTestRunner):
    r"""Run doc tests but raise an exception as soon as there is a failure.
    If an unexpected exception occurs, an UnexpectedException is raised.
    It contains the test, the example, and the original exception:
      >>> runner = DebugRunner(verbose=False)
      >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
      ...                                    {}, 'foo', 'foo.py', 0)
      >>> try:
      ...     runner.run(test)
      ... except UnexpectedException, failure:
      ...     pass
      >>> failure.test is test
      True
      >>> failure.example.want
      '42\n'
      >>> exc_info = failure.exc_info
      >>> raise exc_info[0], exc_info[1], exc_info[2]
      Traceback (most recent call last):
      ...
      KeyError
    We wrap the original exception to give the calling application
    access to the test and example information.
    If the output doesn't match, then a DocTestFailure is raised:
      >>> test = DocTestParser().get_doctest('''
      ...      >>> x = 1
      ...      >>> x
      ...      2
      ...      ''', {}, 'foo', 'foo.py', 0)
      >>> try:
      ...    runner.run(test)
      ... except DocTestFailure, failure:
      ...    pass
    DocTestFailure objects provide access to the test:
      >>> failure.test is test
      True
    As well as to the example:
      >>> failure.example.want
      '2\n'
    and the actual output:
      >>> failure.got
      '1\n'
    If a failure or error occurs, the globals are left intact:
      >>> if '__builtins__' in test.globs:
      ...     del test.globs['__builtins__']
      >>> test.globs
      {'x': 1}
      >>> test = DocTestParser().get_doctest('''
      ...      >>> x = 2
      ...      >>> raise KeyError
      ...      ''', {}, 'foo', 'foo.py', 0)
      >>> runner.run(test)
      Traceback (most recent call last):
      ...
      UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>
      >>> if '__builtins__' in test.globs:
      ...     del test.globs['__builtins__']
      >>> test.globs
      {'x': 2}
    But the globals are cleared if there is no error:
      >>> test = DocTestParser().get_doctest('''
      ...      >>> x = 2
      ...      ''', {}, 'foo', 'foo.py', 0)
      >>> runner.run(test)
      (0, 1)
      >>> test.globs
      {}
    """
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        # Run via the base class but tell it NOT to clear the globals;
        # that way they survive for post-mortem inspection when a
        # DocTestFailure/UnexpectedException propagates out of
        # DocTestRunner.run.  Only clear them here on a clean return.
        r = DocTestRunner.run(self, test, compileflags, out, False)
        if clear_globs:
            test.globs.clear()
        return r
    def report_unexpected_exception(self, out, test, example, exc_info):
        # Abort the run immediately, wrapping the original exception.
        raise UnexpectedException(test, example, exc_info)
    def report_failure(self, out, test, example, got):
        # Abort the run immediately on the first output mismatch.
        raise DocTestFailure(test, example, got)
######################################################################
## 6. Test Functions
######################################################################
# These should be backwards compatible.
# For backward compatibility, a global instance of a DocTestRunner
# class, updated by testmod.
# (Kept at module level so repeated testmod() calls can merge their
# results, and so doctest.master.summarize() can be called afterwards.)
master = None
def testmod(m=None, name=None, globs=None, verbose=None,
            report=True, optionflags=0, extraglobs=None,
            raise_on_error=False, exclude_empty=False):
    """m=None, name=None, globs=None, verbose=None, report=True,
       optionflags=0, extraglobs=None, raise_on_error=False,
       exclude_empty=False
    Test examples in docstrings in functions and classes reachable
    from module m (or the current module if m is not supplied), starting
    with m.__doc__.
    Also test examples reachable from dict m.__test__ if it exists and is
    not None. m.__test__ maps names to functions, classes and strings;
    function and class docstrings are tested even if the name is private;
    strings are tested directly, as if they were docstrings.
    Return (#failures, #tests).
    See doctest.__doc__ for an overview.
    Optional keyword arg "name" gives the name of the module; by default
    use m.__name__.
    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use m.__dict__. A copy of this
    dict is actually used for each docstring, so that each docstring's
    examples start with a clean slate.
    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples. By
    default, no extra globals are used. This is new in 2.4.
    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.
    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end. In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).
    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0. This is new in 2.3. Possible values (see the
    docs for details):
        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        SKIP
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE
    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure. This allows failures to be
    post-mortem debugged.
    Advanced tomfoolery: testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master. Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary. Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master
    # If no module was given, then use __main__.
    if m is None:
        # DWA - m will still be None if this wasn't invoked from the command
        # line, in which case the following TypeError is about as good an error
        # as we should expect
        m = sys.modules.get('__main__')
    # Check that we were actually given a module.
    if not inspect.ismodule(m):
        raise TypeError("testmod: module required; %r" % (m,))
    # If no name was given, then use the module's name.
    if name is None:
        name = m.__name__
    # Find, parse, and run all tests in the given module.
    finder = DocTestFinder(exclude_empty=exclude_empty)
    if raise_on_error:
        # DebugRunner aborts at the first failure or unexpected exception
        # so the problem can be post-mortem debugged.
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
        runner.run(test)
    if report:
        runner.summarize()
    # Merge this run's results into the module-global `master` runner so
    # doctest.master.summarize() can report across multiple calls.
    if master is None:
        master = runner
    else:
        master.merge(runner)
    return runner.failures, runner.tries
def testfile(filename, module_relative=True, name=None, package=None,
             globs=None, verbose=None, report=True, optionflags=0,
             extraglobs=None, raise_on_error=False, parser=DocTestParser(),
             encoding=None):
    """
    Test examples in the given file. Return (#failures, #tests).
    Optional keyword arg "module_relative" specifies how filenames
    should be interpreted:
      - If "module_relative" is True (the default), then "filename"
        specifies a module-relative path. By default, this path is
        relative to the calling module's directory; but if the
        "package" argument is specified, then it is relative to that
        package. To ensure os-independence, "filename" should use
        "/" characters to separate path segments, and should not
        be an absolute path (i.e., it may not begin with "/").
      - If "module_relative" is False, then "filename" specifies an
        os-specific path. The path may be absolute or relative (to
        the current working directory).
    Optional keyword arg "name" gives the name of the test; by default
    use the file's basename.
    Optional keyword argument "package" is a Python package or the
    name of a Python package whose directory should be used as the
    base directory for a module relative filename. If no package is
    specified, then the calling module's directory is used as the base
    directory for module relative filenames. It is an error to
    specify "package" if "module_relative" is False.
    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use {}. A copy of this dict
    is actually used for each docstring, so that each docstring's
    examples start with a clean slate.
    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples. By
    default, no extra globals are used.
    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.
    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end. In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).
    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0. Possible values (see the docs for details):
        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        SKIP
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE
    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure. This allows failures to be
    post-mortem debugged.
    Optional keyword arg "parser" specifies a DocTestParser (or
    subclass) that should be used to extract tests from the files.
    Optional keyword arg "encoding" specifies an encoding that should
    be used to convert the file to unicode.
    Advanced tomfoolery: testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master. Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary. Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master
    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")
    # Relativize the path and read the file's text.
    text, filename = _load_testfile(filename, package, module_relative)
    # If no name was given, then use the file's name.
    if name is None:
        name = os.path.basename(filename)
    # Assemble the globals.
    if globs is None:
        globs = {}
    else:
        globs = globs.copy()
    if extraglobs is not None:
        globs.update(extraglobs)
    if raise_on_error:
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    # Decode the raw bytes if the caller told us the file's encoding.
    if encoding is not None:
        text = text.decode(encoding)
    # Convert the text to a single test, and run it.
    test = parser.get_doctest(text, globs, name, filename, 0)
    runner.run(test)
    if report:
        runner.summarize()
    # Merge results into the module-global `master` runner (see testmod).
    if master is None:
        master = runner
    else:
        master.merge(runner)
    return runner.failures, runner.tries
def run_docstring_examples(f, globs, verbose=False, name="NoName",
                           compileflags=None, optionflags=0):
    """
    Test examples in the given object's docstring (`f`), using `globs`
    as globals. Optional argument `name` is used in failure messages.
    If the optional argument `verbose` is true, then generate output
    even if there are no failures.
    `compileflags` gives the set of flags that should be used by the
    Python compiler when running the examples. If not specified, then
    it will default to the set of future-import flags that apply to
    `globs`.
    Optional keyword arg `optionflags` specifies options for the
    testing and output. See the documentation for `testmod` for more
    information.

    Note: results are only reported through the runner's output; this
    function returns None and does not update doctest.master.
    """
    # Find, parse, and run all tests in the given module.
    # recurse=False: only f's own docstring is examined, not contained
    # objects.
    finder = DocTestFinder(verbose=verbose, recurse=False)
    runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    for test in finder.find(f, name, globs=globs):
        runner.run(test, compileflags=compileflags)
######################################################################
## 7. Tester
######################################################################
# This is provided only for backwards compatibility.  It's not
# actually used in any way.
class Tester:
    """Deprecated pre-2.4 testing interface, retained only for backward
    compatibility.  Use DocTestFinder/DocTestRunner directly instead."""
    def __init__(self, mod=None, globs=None, verbose=None, optionflags=0):
        warnings.warn("class Tester is deprecated; "
                      "use class doctest.DocTestRunner instead",
                      DeprecationWarning, stacklevel=2)
        if mod is None and globs is None:
            raise TypeError("Tester.__init__: must specify mod or globs")
        if mod is not None and not inspect.ismodule(mod):
            raise TypeError("Tester.__init__: mod must be a module; %r" %
                            (mod,))
        if globs is None:
            globs = mod.__dict__
        self.globs = globs
        self.verbose = verbose
        self.optionflags = optionflags
        self.testfinder = DocTestFinder()
        self.testrunner = DocTestRunner(verbose=verbose,
                                        optionflags=optionflags)
    def runstring(self, s, name):
        # Parse and run the doctest examples contained in the string `s`.
        # Returns (#failures, #tries).
        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
        if self.verbose:
            print "Running string", name
        (f,t) = self.testrunner.run(test)
        if self.verbose:
            print f, "of", t, "examples failed in string", name
        return (f,t)
    def rundoc(self, object, name=None, module=None):
        # Run every doctest reachable from `object`, accumulating counts.
        f = t = 0
        tests = self.testfinder.find(object, name, module=module,
                                     globs=self.globs)
        for test in tests:
            (f2, t2) = self.testrunner.run(test)
            (f,t) = (f+f2, t+t2)
        return (f,t)
    def rundict(self, d, name, module=None):
        # Wrap dict `d` in a synthetic module so the finder can scan its
        # entries.  NOTE: relies on the deprecated `new` module (Python 2).
        import new
        m = new.module(name)
        m.__dict__.update(d)
        if module is None:
            module = False
        return self.rundoc(m, name, module)
    def run__test__(self, d, name):
        # Run the tests in a __test__-style dict by attaching it to a
        # synthetic module.
        import new
        m = new.module(name)
        m.__test__ = d
        return self.rundoc(m, name)
    def summarize(self, verbose=None):
        return self.testrunner.summarize(verbose)
    def merge(self, other):
        self.testrunner.merge(other.testrunner)
######################################################################
## 8. Unittest Support
######################################################################
# Default reporting flags applied by DocTestCase.runTest() whenever a
# test's own optionflags carry no reporting flags of their own.
_unittest_reportflags = 0
def set_unittest_reportflags(flags):
    """Sets the unittest option flags.
    The old flag is returned so that a runner could restore the old
    value if it wished to:
    >>> import doctest
    >>> old = doctest._unittest_reportflags
    >>> doctest.set_unittest_reportflags(REPORT_NDIFF |
    ...                                  REPORT_ONLY_FIRST_FAILURE) == old
    True
    >>> doctest._unittest_reportflags == (REPORT_NDIFF |
    ...                                   REPORT_ONLY_FIRST_FAILURE)
    True
    Only reporting flags can be set:
    >>> doctest.set_unittest_reportflags(ELLIPSIS)
    Traceback (most recent call last):
    ...
    ValueError: ('Only reporting flags allowed', 8)
    >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF |
    ...                                   REPORT_ONLY_FIRST_FAILURE)
    True
    """
    global _unittest_reportflags
    # Reject any flag outside the reporting-flag mask.
    if (flags & REPORTING_FLAGS) != flags:
        raise ValueError("Only reporting flags allowed", flags)
    old = _unittest_reportflags
    _unittest_reportflags = flags
    return old
class DocTestCase(unittest.TestCase):
    """A unittest TestCase that runs a single DocTest object."""
    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
                 checker=None):
        unittest.TestCase.__init__(self)
        self._dt_optionflags = optionflags
        self._dt_checker = checker
        self._dt_test = test
        self._dt_setUp = setUp
        self._dt_tearDown = tearDown
    def setUp(self):
        # Give the user-supplied setUp hook access to the DocTest (and in
        # particular its globs) before the examples run.
        test = self._dt_test
        if self._dt_setUp is not None:
            self._dt_setUp(test)
    def tearDown(self):
        test = self._dt_test
        if self._dt_tearDown is not None:
            self._dt_tearDown(test)
        # Break reference cycles / free the test's globals after each run.
        test.globs.clear()
    def runTest(self):
        test = self._dt_test
        old = sys.stdout
        new = StringIO()
        optionflags = self._dt_optionflags
        if not (optionflags & REPORTING_FLAGS):
            # The option flags don't include any reporting flags,
            # so add the default reporting flags
            optionflags |= _unittest_reportflags
        runner = DocTestRunner(optionflags=optionflags,
                               checker=self._dt_checker, verbose=False)
        try:
            runner.DIVIDER = "-"*70
            # Failure reports are captured into `new` via out=new.write;
            # restoring sys.stdout in the finally is defensive.
            failures, tries = runner.run(
                test, out=new.write, clear_globs=False)
        finally:
            sys.stdout = old
        if failures:
            raise self.failureException(self.format_failure(new.getvalue()))
    def format_failure(self, err):
        # Build the unittest-style failure header for this doctest.
        test = self._dt_test
        if test.lineno is None:
            lineno = 'unknown line number'
        else:
            lineno = '%s' % test.lineno
        lname = '.'.join(test.name.split('.')[-1:])
        return ('Failed doctest test for %s\n'
                ' File "%s", line %s, in %s\n\n%s'
                % (test.name, test.filename, lineno, lname, err)
                )
    def debug(self):
        r"""Run the test case without results and without catching exceptions
        The unit test framework includes a debug method on test cases
        and test suites to support post-mortem debugging. The test code
        is run in such a way that errors are not caught. This way a
        caller can catch the errors and initiate post-mortem debugging.
        The DocTestCase provides a debug method that raises
        UnexpectedException errors if there is an unexpected
        exception:
        >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
        ...                 {}, 'foo', 'foo.py', 0)
        >>> case = DocTestCase(test)
        >>> try:
        ...     case.debug()
        ... except UnexpectedException, failure:
        ...     pass
        The UnexpectedException contains the test, the example, and
        the original exception:
        >>> failure.test is test
        True
        >>> failure.example.want
        '42\n'
        >>> exc_info = failure.exc_info
        >>> raise exc_info[0], exc_info[1], exc_info[2]
        Traceback (most recent call last):
        ...
        KeyError
        If the output doesn't match, then a DocTestFailure is raised:
        >>> test = DocTestParser().get_doctest('''
        ...      >>> x = 1
        ...      >>> x
        ...      2
        ...      ''', {}, 'foo', 'foo.py', 0)
        >>> case = DocTestCase(test)
        >>> try:
        ...    case.debug()
        ... except DocTestFailure, failure:
        ...    pass
        DocTestFailure objects provide access to the test:
        >>> failure.test is test
        True
        As well as to the example:
        >>> failure.example.want
        '2\n'
        and the actual output:
        >>> failure.got
        '1\n'
        """
        self.setUp()
        # DebugRunner raises instead of recording, so errors propagate to
        # the caller for post-mortem debugging.
        runner = DebugRunner(optionflags=self._dt_optionflags,
                             checker=self._dt_checker, verbose=False)
        runner.run(self._dt_test)
        self.tearDown()
    def id(self):
        return self._dt_test.name
    def __repr__(self):
        name = self._dt_test.name.split('.')
        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
    __str__ = __repr__
    def shortDescription(self):
        return "Doctest: " + self._dt_test.name
def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
                 **options):
    """
    Convert doctest tests for a module to a unittest test suite.
    This converts each documentation string in a module that
    contains doctest tests to a unittest test case. If any of the
    tests in a doc string fail, then the test case fails. An exception
    is raised showing the name of the file containing the test and a
    (sometimes approximate) line number.
    The `module` argument provides the module to be tested. The argument
    can be either a module or a module name.
    If no argument is given, the calling module is used.
    A number of options may be provided as keyword arguments:
    setUp
      A set-up function. This is called before running the
      tests in each file. The setUp function will be passed a DocTest
      object. The setUp function can access the test globals as the
      globs attribute of the test passed.
    tearDown
      A tear-down function. This is called after running the
      tests in each file. The tearDown function will be passed a DocTest
      object. The tearDown function can access the test globals as the
      globs attribute of the test passed.
    globs
      A dictionary containing initial global variables for the tests.
    optionflags
       A set of doctest option flags expressed as an integer.
    """
    if test_finder is None:
        test_finder = DocTestFinder()
    module = _normalize_module(module)
    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
    if globs is None:
        globs = module.__dict__
    if not tests:
        # Why do we want to do this? Because it reveals a bug that might
        # otherwise be hidden.
        raise ValueError(module, "has no tests")
    tests.sort()
    suite = unittest.TestSuite()
    for test in tests:
        if len(test.examples) == 0:
            continue
        if not test.filename:
            filename = module.__file__
            # Point failure reports at the .py source rather than the
            # compiled artifact (.pyc/.pyo, or Jython's $py.class).
            if filename[-4:] in (".pyc", ".pyo"):
                filename = filename[:-1]
            elif filename.endswith('$py.class'):
                filename = '%s.py' % filename[:-9]
            test.filename = filename
        suite.addTest(DocTestCase(test, **options))
    return suite
class DocFileCase(DocTestCase):
    """A DocTestCase whose test was loaded from a standalone text file."""

    def id(self):
        # Flatten the dotted test name into a single underscore-joined token.
        return '_'.join(self._dt_test.name.split('.'))

    def __repr__(self):
        return self._dt_test.filename

    __str__ = __repr__

    def format_failure(self, err):
        template = 'Failed doctest test for %s\n File "%s", line 0\n\n%s'
        return template % (self._dt_test.name, self._dt_test.filename, err)
def DocFileTest(path, module_relative=True, package=None,
                globs=None, parser=DocTestParser(),
                encoding=None, **options):
    """Build a DocFileCase for the doctest file at *path*."""
    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")
    # Work on a private copy of the caller's globals (or a fresh dict).
    globs = {} if globs is None else globs.copy()
    # Locate and read the file (relative to package/caller if requested).
    doc, path = _load_testfile(path, package, module_relative)
    globs.setdefault("__file__", path)
    # The test is named after the file.
    name = os.path.basename(path)
    # If an encoding is specified, use it to convert the file to unicode.
    if encoding is not None:
        doc = doc.decode(encoding)
    # Convert the text to a test, and wrap it in a DocFileCase.
    test = parser.get_doctest(doc, globs, name, path, 0)
    return DocFileCase(test, **options)
def DocFileSuite(*paths, **kw):
    """A unittest suite for one or more doctest files.
    The path to each doctest file is given as a string; the
    interpretation of that string depends on the keyword argument
    "module_relative".
    A number of options may be provided as keyword arguments:
    module_relative
      If "module_relative" is True, then the given file paths are
      interpreted as os-independent module-relative paths. By
      default, these paths are relative to the calling module's
      directory; but if the "package" argument is specified, then
      they are relative to that package. To ensure os-independence,
      "filename" should use "/" characters to separate path
      segments, and may not be an absolute path (i.e., it may not
      begin with "/").
      If "module_relative" is False, then the given file paths are
      interpreted as os-specific paths. These paths may be absolute
      or relative (to the current working directory).
    package
      A Python package or the name of a Python package whose directory
      should be used as the base directory for module relative paths.
      If "package" is not specified, then the calling module's
      directory is used as the base directory for module relative
      filenames. It is an error to specify "package" if
      "module_relative" is False.
    setUp
      A set-up function. This is called before running the
      tests in each file. The setUp function will be passed a DocTest
      object. The setUp function can access the test globals as the
      globs attribute of the test passed.
    tearDown
      A tear-down function. This is called after running the
      tests in each file. The tearDown function will be passed a DocTest
      object. The tearDown function can access the test globals as the
      globs attribute of the test passed.
    globs
      A dictionary containing initial global variables for the tests.
    optionflags
      A set of doctest option flags expressed as an integer.
    parser
      A DocTestParser (or subclass) that should be used to extract
      tests from the files.
    encoding
      An encoding that will be used to convert the files to unicode.
    """
    suite = unittest.TestSuite()
    # We do this here so that _normalize_module is called at the right
    # level. If it were called in DocFileTest, then this function
    # would be the caller and we might guess the package incorrectly.
    if kw.get('module_relative', True):
        kw['package'] = _normalize_module(kw.get('package'))
    for path in paths:
        suite.addTest(DocFileTest(path, **kw))
    return suite
######################################################################
## 9. Debugging Support
######################################################################
def script_from_examples(s):
    r"""Extract script from text with examples.
    Converts text with examples to a Python script. Example input is
    converted to regular code. Example output and all other words
    are converted to comments:
    >>> text = '''
    ...       Here are examples of simple math.
    ...
    ...           Python has super accurate integer addition
    ...
    ...           >>> 2 + 2
    ...           5
    ...
    ...           And very friendly error messages:
    ...
    ...           >>> 1/0
    ...           To Infinity
    ...           And
    ...           Beyond
    ...
    ...           You can use logic if you want:
    ...
    ...           >>> if 0:
    ...           ...    blah
    ...           ...    blah
    ...           ...
    ...
    ...           Ho hum
    ...           '''
    >>> print script_from_examples(text)
    # Here are examples of simple math.
    #
    #     Python has super accurate integer addition
    #
    2 + 2
    # Expected:
    ## 5
    #
    #     And very friendly error messages:
    #
    1/0
    # Expected:
    ## To Infinity
    ## And
    ## Beyond
    #
    #     You can use logic if you want:
    #
    if 0:
       blah
       blah
    #
    #     Ho hum
    <BLANKLINE>
    """
    output = []
    for piece in DocTestParser().parse(s):
        if isinstance(piece, Example):
            # Add the example's source code (strip trailing NL)
            output.append(piece.source[:-1])
            # Add the expected output:
            want = piece.want
            if want:
                output.append('# Expected:')
                output += ['## '+l for l in want.split('\n')[:-1]]
        else:
            # Add non-example text.
            output += [_comment_line(l)
                       for l in piece.split('\n')[:-1]]
    # Trim junk on both ends.
    while output and output[-1] == '#':
        output.pop()
    while output and output[0] == '#':
        output.pop(0)
    # Combine the output, and return it.
    # Add a courtesy newline to prevent exec from choking (see bug #1172785)
    return '\n'.join(output) + '\n'
def testsource(module, name):
    """Extract the test sources from a doctest docstring as a script.

    `module` is the module (or dotted module name) containing the test;
    `name` is the dotted name, within that module, of the object whose
    docstring holds the tests.
    """
    module = _normalize_module(module)
    # Locate the single DocTest whose dotted name matches `name`.
    matches = [t for t in DocTestFinder().find(module) if t.name == name]
    if not matches:
        raise ValueError(name, "not found in tests")
    return script_from_examples(matches[0].docstring)
def debug_src(src, pm=False, globs=None):
    """Debug a single doctest docstring, given as the string `src`."""
    # Convert the examples to a plain script, then run it under pdb.
    testsrc = script_from_examples(src)
    debug_script(testsrc, pm, globs)
def debug_script(src, pm=False, globs=None):
    "Debug a test script. `src` is the script, as a string."
    import pdb
    # Note that tempfile.NameTemporaryFile() cannot be used. As the
    # docs say, a file so created cannot be opened by name a second time
    # on modern Windows boxes, and execfile() needs to open it.
    # NOTE(review): tempfile.mktemp() is race-prone (the name can be taken
    # between creation and open); acceptable here only because this is an
    # interactive debugging aid -- confirm before reusing elsewhere.
    srcfilename = tempfile.mktemp(".py", "doctestdebug")
    f = open(srcfilename, 'w')
    f.write(src)
    f.close()
    try:
        # Run against a copy of the supplied globals so the caller's dict
        # is never mutated.
        if globs:
            globs = globs.copy()
        else:
            globs = {}
        if pm:
            # Post-mortem mode: run to the first exception, then drop into
            # pdb at the point of failure.
            try:
                execfile(srcfilename, globs, globs)
            except:
                print sys.exc_info()[1]
                pdb.post_mortem(sys.exc_info()[2])
        else:
            # Note that %r is vital here. '%s' instead can, e.g., cause
            # backslashes to get treated as metacharacters on Windows.
            pdb.run("execfile(%r)" % srcfilename, globs, globs)
    finally:
        os.remove(srcfilename)
def debug(module, name, pm=False):
    """Debug a single doctest docstring.

    `module` is the module (or dotted module name) containing the test;
    `name` is the name, within that module, of the object whose docstring
    holds the tests.  Set `pm` for post-mortem debugging.
    """
    module = _normalize_module(module)
    script = testsource(module, name)
    debug_script(script, pm, module.__dict__)
######################################################################
## 10. Example Usage
######################################################################
# NOTE: the docstrings below are themselves doctests, executed by this
# module's self-test (via __test__); their text must not be altered.
class _TestClass:
    """
    A pointless class, for sanity-checking of docstring testing.
    Methods:
        square()
        get()
    >>> _TestClass(13).get() + _TestClass(-12).get()
    1
    >>> hex(_TestClass(13).square().get())
    '0xa9'
    """
    def __init__(self, val):
        """val -> _TestClass object with associated value val.
        >>> t = _TestClass(123)
        >>> print t.get()
        123
        """
        self.val = val
    def square(self):
        """square() -> square TestClass's associated value
        >>> _TestClass(13).square().get()
        169
        """
        self.val = self.val ** 2
        return self
    def get(self):
        """get() -> return TestClass's associated value.
        >>> x = _TestClass(-42)
        >>> print x.get()
        -42
        """
        return self.val
__test__ = {"_TestClass": _TestClass,
"string": r"""
Example of a string object, searched as-is.
>>> x = 1; y = 2
>>> x + y, x * y
(3, 2)
""",
"bool-int equivalence": r"""
In 2.2, boolean expressions displayed
0 or 1. By default, we still accept
them. This can be disabled by passing
DONT_ACCEPT_TRUE_FOR_1 to the new
optionflags argument.
>>> 4 == 4
1
>>> 4 == 4
True
>>> 4 > 4
0
>>> 4 > 4
False
""",
"blank lines": r"""
Blank lines can be marked with <BLANKLINE>:
>>> print 'foo\n\nbar\n'
foo
<BLANKLINE>
bar
<BLANKLINE>
""",
"ellipsis": r"""
If the ellipsis flag is used, then '...' can be used to
elide substrings in the desired output:
>>> print range(1000) #doctest: +ELLIPSIS
[0, 1, 2, ..., 999]
""",
"whitespace normalization": r"""
If the whitespace normalization flag is used, then
differences in whitespace are ignored.
>>> print range(30) #doctest: +NORMALIZE_WHITESPACE
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29]
""",
}
def _test():
    """Run this module's own doctests through the unittest machinery."""
    runner = unittest.TextTestRunner()
    runner.run(DocTestSuite())
if __name__ == "__main__":
    _test()
|
chenyujie/hybrid-murano
|
refs/heads/hybrid-master
|
murano/dsl/serializer.py
|
2
|
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import types
import murano.dsl.helpers as helpers
import murano.dsl.murano_method as murano_method
import murano.dsl.murano_object as murano_object
class ObjRef(object):
    # Pass-1 placeholder meaning "reference to an object serialized (or
    # owned) elsewhere"; pass 2 replaces it with the object id or drops it.
    def __init__(self, obj):
        self.ref_obj = obj
def serialize_object(obj):
    # Recursively convert *obj* into plain data: sequences/sets become
    # lists (strings are treated as scalars), mappings become dicts,
    # MuranoObjects are expanded via the two-pass serializer, and anything
    # else is returned unchanged.
    if isinstance(obj, (collections.Sequence, collections.Set)) and not \
            isinstance(obj, types.StringTypes):
        return [serialize_object(t) for t in obj]
    elif isinstance(obj, collections.Mapping):
        result = {}
        for key, value in obj.iteritems():
            result[key] = serialize_object(value)
        return result
    elif isinstance(obj, murano_object.MuranoObject):
        # [0] is the serialized tree; the emitted-id set is discarded here.
        return _serialize_object(obj, None)[0]
    return obj
def _serialize_object(root_object, designer_attributes=None):
    """Two-pass serialization of *root_object*.

    Returns (tree, emitted_ids): pass 1 expands the object graph into a
    dict/list tree while recording which object ids were emitted inline;
    pass 2 rewrites ObjRef placeholders into ids (or drops them).
    """
    emitted_ids = set()
    tree = _pass1_serialize(
        root_object, None, emitted_ids, designer_attributes)
    _pass2_serialize(tree, emitted_ids)
    return tree, emitted_ids
def serialize_model(root_object, executor):
    """Serialize an object model into the {Objects, ObjectsCopy, Attributes}
    document consumed by the environment model."""
    if root_object is None:
        # Nothing to serialize: emit an empty model.
        return {
            'Objects': None,
            'ObjectsCopy': None,
            'Attributes': []
        }
    # Main tree carries designer attributes; the copy is serialized
    # without them.
    tree, serialized_objects = _serialize_object(
        root_object, executor.object_store.designer_attributes)
    tree_copy, _ = _serialize_object(root_object, None)
    attributes = executor.attribute_store.serialize(serialized_objects)
    return {
        'Objects': tree,
        'ObjectsCopy': tree_copy,
        'Attributes': attributes
    }
def _cmp_objects(obj1, obj2):
if obj1 is None and obj2 is None:
return True
if obj1 is None or obj2 is None:
return False
return obj1.object_id == obj2.object_id
def _serialize_available_action(obj):
    # Build the {action_id: {'name', 'enabled'}} map for every method with
    # Action usage reachable from obj's type.  Own methods take precedence
    # over identically-keyed parent actions (merge order below).
    def _serialize(obj_type):
        actions = {}
        for name, method in obj_type.methods.iteritems():
            if method.usage == murano_method.MethodUsages.Action:
                # Action ids are namespaced by the owning object's id.
                action_id = '{0}_{1}'.format(obj.object_id, name)
                actions[action_id] = {
                    'name': name,
                    'enabled': True
                }
        for parent in obj_type.parents:
            parent_actions = _serialize(parent)
            actions = helpers.merge_dicts(parent_actions, actions)
        return actions
    return _serialize(obj.type)
def _merge_actions(dict1, dict2):
    """Merge two action maps, discarding actions that are present in
    *dict1* but no longer offered by *dict2*."""
    merged = helpers.merge_dicts(dict1, dict2)
    stale = [action_id for action_id in dict1 if action_id not in dict2]
    for action_id in stale:
        del merged[action_id]
    return merged
def _pass1_serialize(value, parent, serialized_objects,
                     designer_attributes_getter):
    # Pass 1: expand the object graph into plain dicts/lists.  An object is
    # expanded inline only when visited via its owner and not yet emitted;
    # any other occurrence becomes an ObjRef placeholder for pass 2.
    if isinstance(value, (types.StringTypes, types.IntType, types.FloatType,
                          types.BooleanType, types.NoneType)):
        # Scalars pass through untouched.
        return value
    elif isinstance(value, murano_object.MuranoObject):
        if not _cmp_objects(value.owner, parent) \
                or value.object_id in serialized_objects:
            return ObjRef(value)
        else:
            result = value.to_dictionary()
            if designer_attributes_getter is not None:
                result['?'].update(designer_attributes_getter(value.object_id))
                # deserialize and merge list of actions
                actions = _serialize_available_action(value)
                result['?']['_actions'] = _merge_actions(
                    result['?'].get('_actions', {}), actions)
            # Mark as emitted before recursing so self-references become
            # ObjRefs instead of infinite recursion.
            serialized_objects.add(value.object_id)
            return _pass1_serialize(
                result, value, serialized_objects, designer_attributes_getter)
    elif isinstance(value, types.DictionaryType):
        result = {}
        for d_key, d_value in value.iteritems():
            # Keys are coerced to str for the output document.
            result_key = str(d_key)
            result[result_key] = _pass1_serialize(
                d_value, parent, serialized_objects,
                designer_attributes_getter)
        return result
    elif isinstance(value, types.ListType):
        return [_pass1_serialize(t, parent, serialized_objects,
                                 designer_attributes_getter) for t in value]
    elif isinstance(value, types.TupleType):
        # Tuples are serialized as lists.
        return _pass1_serialize(
            list(value), parent, serialized_objects,
            designer_attributes_getter)
    else:
        raise ValueError()
def _pass2_serialize(value, serialized_objects):
    # Pass 2: walk the tree in place, replacing every ObjRef whose target
    # was emitted in pass 1 with its object id.  Dangling refs become None
    # in dicts and are removed from lists.
    if isinstance(value, types.DictionaryType):
        for d_key, d_value in value.iteritems():
            if isinstance(d_value, ObjRef):
                if d_value.ref_obj.object_id in serialized_objects:
                    value[d_key] = d_value.ref_obj.object_id
                else:
                    value[d_key] = None
            else:
                _pass2_serialize(d_value, serialized_objects)
    elif isinstance(value, types.ListType):
        # Manual index loop because items may be popped during iteration.
        index = 0
        while index < len(value):
            item = value[index]
            if isinstance(item, ObjRef):
                if item.ref_obj.object_id in serialized_objects:
                    value[index] = item.ref_obj.object_id
                else:
                    # Drop the dangling reference and re-examine this slot.
                    value.pop(index)
                    index -= 1
            else:
                _pass2_serialize(item, serialized_objects)
            index += 1
|
aperigault/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/f5_utils.py
|
124
|
#
# Copyright 2016 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Legacy
try:
import bigsuds
bigsuds_found = True
except ImportError:
bigsuds_found = False
from ansible.module_utils.basic import env_fallback
def f5_argument_spec():
    """Return the argument spec shared by the legacy (bigsuds-based) F5
    modules.  Each connection option falls back to its F5_* environment
    variable when not supplied in the task."""
    return {
        'server': {
            'type': 'str',
            'required': True,
            'fallback': (env_fallback, ['F5_SERVER']),
        },
        'user': {
            'type': 'str',
            'required': True,
            'fallback': (env_fallback, ['F5_USER']),
        },
        'password': {
            'type': 'str',
            'aliases': ['pass', 'pwd'],
            'required': True,
            'no_log': True,
            'fallback': (env_fallback, ['F5_PASSWORD']),
        },
        'validate_certs': {
            'default': 'yes',
            'type': 'bool',
            'fallback': (env_fallback, ['F5_VALIDATE_CERTS']),
        },
        'server_port': {
            'type': 'int',
            'default': 443,
            'fallback': (env_fallback, ['F5_SERVER_PORT']),
        },
        'state': {
            'type': 'str',
            'default': 'present',
            'choices': ['present', 'absent'],
        },
        'partition': {
            'type': 'str',
            'default': 'Common',
            'fallback': (env_fallback, ['F5_PARTITION']),
        },
    }
def f5_parse_arguments(module):
    """Validate legacy bigsuds prerequisites and unpack connection params.

    Calls module.fail_json() when the bigsuds library is missing, or when
    certificate validation is requested on a Python whose ssl module lacks
    SSLContext (bigsuds cannot verify certificates there).

    Returns the tuple (server, user, password, state, partition,
    validate_certs, server_port) taken from module.params.
    """
    if not bigsuds_found:
        module.fail_json(msg="the python bigsuds module is required")
    if module.params['validate_certs']:
        import ssl
        if not hasattr(ssl, 'SSLContext'):
            # Fixed message: the original implicit string concatenation
            # produced "...2.7.9.Either update..." (missing space) and
            # ended with a stray apostrophe.
            module.fail_json(
                msg="bigsuds does not support verifying certificates with python < 2.7.9. "
                    "Either update python or set validate_certs=False on the task")
    return (
        module.params['server'],
        module.params['user'],
        module.params['password'],
        module.params['state'],
        module.params['partition'],
        module.params['validate_certs'],
        module.params['server_port']
    )
def bigip_api(bigip, user, password, validate_certs, port=443):
    # Build a bigsuds BIGIP connection, adapting to the installed bigsuds
    # version: the `verify` keyword appeared in 1.0.3 and `port` in 1.0.4.
    # NOTE(review): the version checks are plain string comparisons, which
    # only order correctly while version components stay single-digit
    # (e.g. '1.0.10' < '1.0.4') -- confirm before relying on them.
    try:
        if bigsuds.__version__ >= '1.0.4':
            api = bigsuds.BIGIP(hostname=bigip, username=user, password=password, verify=validate_certs, port=port)
        elif bigsuds.__version__ == '1.0.3':
            # 1.0.3 supports `verify` but not a custom port.
            api = bigsuds.BIGIP(hostname=bigip, username=user, password=password, verify=validate_certs)
        else:
            api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
    except TypeError:
        # bigsuds < 1.0.3, no verify param
        if validate_certs:
            # Note: verified we have SSLContext when we parsed params
            api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
        else:
            import ssl
            if hasattr(ssl, 'SSLContext'):
                # Really, you should never do this. It disables certificate
                # verification *globally*. But since older bigip libraries
                # don't give us a way to toggle verification we need to
                # disable it at the global level.
                # From https://www.python.org/dev/peps/pep-0476/#id29
                ssl._create_default_https_context = ssl._create_unverified_context
            api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
    return api
# Fully Qualified name (with the partition)
def fq_name(partition, name):
    """Return *name* fully qualified as '/<partition>/<name>'.

    Names that are None or already start with '/' are returned unchanged.
    """
    if name is None or name.startswith('/'):
        return name
    return '/%s/%s' % (partition, name)
# Fully Qualified name (with partition) for a list
def fq_list_names(partition, list_names):
    """Fully qualify every name in *list_names* with *partition*.

    Returns None when list_names is None.  Fix: return a real list rather
    than the bare ``map`` result — under Python 3 ``map`` is a one-shot
    iterator, so callers that index, take len(), or iterate twice would
    silently misbehave.
    """
    if list_names is None:
        return None
    return [fq_name(partition, name) for name in list_names]
def to_commands(module, commands):
    """Normalize *commands* into command/prompt/answer dicts.

    Delegates the heavy lifting to ComplexList, with 'command' acting as
    the key field of each entry.
    """
    command_spec = dict(
        command=dict(key=True),
        prompt=dict(),
        answer=dict(),
    )
    return ComplexList(command_spec, module)(commands)
def run_commands(module, commands, check_rc=True):
    """Run each command over the module's connection and collect stdout.

    Fails the module on a non-zero return code when check_rc is True;
    otherwise the command's stdout is appended to the result list as text.
    """
    results = []
    for command in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(command))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), rc=rc)
        results.append(to_text(out, errors='surrogate_then_replace'))
    return results
# New style
from abc import ABCMeta, abstractproperty
from collections import defaultdict
try:
from f5.bigip import ManagementRoot as BigIpMgmt
from f5.bigip.contexts import TransactionContextManager as BigIpTxContext
from f5.bigiq import ManagementRoot as BigIqMgmt
from f5.iworkflow import ManagementRoot as iWorkflowMgmt
from icontrol.exceptions import iControlUnexpectedHTTPError
HAS_F5SDK = True
except ImportError:
HAS_F5SDK = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems, with_metaclass
from ansible.module_utils.network.common.utils import to_list, ComplexList
from ansible.module_utils.connection import exec_command
from ansible.module_utils._text import to_text
# Common argument spec shared by the SDK-based ("new style") F5 modules.
# NOTE(review): this intentionally mirrors the dict produced by
# f5_argument_spec() for the legacy modules — keep the two in sync.
# Every option can also be supplied via the matching F5_* environment
# variable through env_fallback.
F5_COMMON_ARGS = dict(
    server=dict(
        type='str',
        required=True,
        fallback=(env_fallback, ['F5_SERVER'])
    ),
    user=dict(
        type='str',
        required=True,
        fallback=(env_fallback, ['F5_USER'])
    ),
    password=dict(
        type='str',
        aliases=['pass', 'pwd'],
        required=True,
        no_log=True,
        fallback=(env_fallback, ['F5_PASSWORD'])
    ),
    validate_certs=dict(
        default='yes',
        type='bool',
        fallback=(env_fallback, ['F5_VALIDATE_CERTS'])
    ),
    server_port=dict(
        type='int',
        default=443,
        fallback=(env_fallback, ['F5_SERVER_PORT'])
    ),
    state=dict(
        type='str',
        default='present',
        choices=['present', 'absent']
    ),
    partition=dict(
        type='str',
        default='Common',
        fallback=(env_fallback, ['F5_PARTITION'])
    )
)
class AnsibleF5Client(object):
    """Couples an AnsibleModule with an F5 SDK connection.

    Merges the module's own argument_spec with F5_COMMON_ARGS, constructs
    the AnsibleModule, and — unless the module runs over the 'cli'
    transport — opens a REST ManagementRoot connection to the product
    named by f5_product_name ('bigip', 'iworkflow' or 'bigiq').
    """

    def __init__(self, argument_spec=None, supports_check_mode=False,
                 mutually_exclusive=None, required_together=None,
                 required_if=None, required_one_of=None, add_file_common_args=False,
                 f5_product_name='bigip', sans_state=False, sans_partition=False):
        self.f5_product_name = f5_product_name

        # Module-specific options take precedence over the common F5 ones.
        merged_arg_spec = dict()
        merged_arg_spec.update(F5_COMMON_ARGS)
        if argument_spec:
            merged_arg_spec.update(argument_spec)
        # Some modules have no notion of 'state' or 'partition'; drop them
        # on request so they are not advertised as accepted options.
        if sans_state:
            del merged_arg_spec['state']
        if sans_partition:
            del merged_arg_spec['partition']
        self.arg_spec = merged_arg_spec

        mutually_exclusive_params = []
        if mutually_exclusive:
            mutually_exclusive_params += mutually_exclusive

        required_together_params = []
        if required_together:
            required_together_params += required_together

        self.module = AnsibleModule(
            argument_spec=merged_arg_spec,
            supports_check_mode=supports_check_mode,
            mutually_exclusive=mutually_exclusive_params,
            required_together=required_together_params,
            required_if=required_if,
            required_one_of=required_one_of,
            add_file_common_args=add_file_common_args
        )
        self.check_mode = self.module.check_mode
        self._connect_params = self._get_connect_params()

        # CLI-transport modules talk over the network connection plugin
        # instead of REST, so no ManagementRoot is opened for them.
        if 'transport' not in self.module.params or self.module.params['transport'] != 'cli':
            try:
                self.api = self._get_mgmt_root(
                    f5_product_name, **self._connect_params
                )
            except iControlUnexpectedHTTPError as exc:
                self.fail(str(exc))

    def fail(self, msg):
        """Abort the module run, reporting *msg* to Ansible."""
        self.module.fail_json(msg=msg)

    def _get_connect_params(self):
        """Collect the connection-related module params into a single dict."""
        params = dict(
            user=self.module.params['user'],
            password=self.module.params['password'],
            server=self.module.params['server'],
            server_port=self.module.params['server_port'],
            validate_certs=self.module.params['validate_certs']
        )
        return params

    def _get_mgmt_root(self, type, **kwargs):
        """Open the SDK ManagementRoot matching *type*.

        Returns None for an unrecognized *type* (no else branch); callers
        are expected to pass 'bigip', 'iworkflow' or 'bigiq'.
        NOTE(review): validate_certs is collected but not forwarded here —
        verify whether the SDK honors it elsewhere.
        """
        if type == 'bigip':
            return BigIpMgmt(
                kwargs['server'],
                kwargs['user'],
                kwargs['password'],
                port=kwargs['server_port'],
                token='tmos'
            )
        elif type == 'iworkflow':
            return iWorkflowMgmt(
                kwargs['server'],
                kwargs['user'],
                kwargs['password'],
                port=kwargs['server_port'],
                token='local'
            )
        elif type == 'bigiq':
            return BigIqMgmt(
                kwargs['server'],
                kwargs['user'],
                kwargs['password'],
                port=kwargs['server_port'],
                auth_provider='local'
            )

    def reconnect(self):
        """Attempts to reconnect to a device

        The existing token from a ManagementRoot can become invalid if you,
        for example, upgrade the device (such as is done in the *_software
        module.

        This method can be used to reconnect to a remote device without
        having to re-instantiate the ArgumentSpec and AnsibleF5Client classes
        it will use the same values that were initially provided to those
        classes

        :return:
        :raises iControlUnexpectedHTTPError
        """
        self.api = self._get_mgmt_root(
            self.f5_product_name, **self._connect_params
        )
class AnsibleF5Parameters(object):
    """Base class for module parameter adapters.

    Values live in a defaultdict, so any attribute that was never set reads
    as None via __getattr__.  update() routes incoming parameters through an
    optional subclass-provided ``api_map`` (renaming awkward API keys) and
    through any @property setters defined on the subclass.
    """

    def __init__(self, params=None):
        # Missing keys read back as None rather than raising.
        self._values = defaultdict(lambda: None)
        self._values['__warnings'] = []
        if params:
            self.update(params=params)

    def update(self, params=None):
        """Load *params* into self, honoring api_map renames and setters."""
        if params:
            for k, v in iteritems(params):
                if self.api_map is not None and k in self.api_map:
                    dict_to_use = self.api_map
                    map_key = self.api_map[k]
                else:
                    dict_to_use = self._values
                    map_key = k

                # Handle weird API parameters like `dns.proxy.__iter__` by
                # using a map provided by the module developer
                class_attr = getattr(type(self), map_key, None)
                if isinstance(class_attr, property):
                    # There is a mapped value for the api_map key
                    if class_attr.fset is None:
                        # If the mapped value does not have an associated setter
                        self._values[map_key] = v
                    else:
                        # The mapped value has a setter
                        setattr(self, map_key, v)
                else:
                    # If the mapped value is not a @property
                    self._values[map_key] = v

    def __getattr__(self, item):
        # Ensures that properties that weren't defined, and therefore stashed
        # in the `_values` dict, will be retrievable.
        return self._values[item]

    @property
    def partition(self):
        # Default to the 'Common' partition; strip surrounding slashes
        # so '/Foo/' and 'Foo' compare equal.
        if self._values['partition'] is None:
            return 'Common'
        return self._values['partition'].strip('/')

    @partition.setter
    def partition(self, value):
        self._values['partition'] = value

    def _filter_params(self, params):
        """Return *params* without the entries whose value is None."""
        return dict((k, v) for k, v in iteritems(params) if v is not None)
class F5ModuleError(Exception):
    """Base exception raised by the F5 modules for expected failures."""
    pass
|
sbc100/native_client
|
refs/heads/master
|
src/trusted/validator_ragel/proof_tools_test.py
|
8
|
#!/usr/bin/python
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import unittest
import proof_tools
import spec
class ProofToolsTest(unittest.TestCase):
    """Unit tests for the proof_tools helpers used by validator proofs."""

    def testOpsMerge(self):
        """MergeOperands concatenates disasms and unifies register
        restrictions; merging conflicting input registers must assert."""
        op1 = proof_tools.Operands(disasms=('0x0(%r13)',), input_rr='%r13',
                                   output_rr=None)
        op2 = proof_tools.Operands(disasms=('0x0(%r14)',), input_rr='%r14',
                                   output_rr=None)
        op3 = proof_tools.Operands(disasms=('%r12',),
                                   input_rr=None, output_rr='%r12')
        self.assertEquals(
            proof_tools.Operands(disasms=('0x0(%r13)', '%r12'),
                                 input_rr='%r13', output_rr='%r12'),
            proof_tools.MergeOperands(op1, op3))
        self.assertEquals(
            proof_tools.Operands(disasms=('0x0(%r14)', '%r12'),
                                 input_rr='%r14', output_rr='%r12'),
            proof_tools.MergeOperands(op2, op3))
        # op1 and op2 claim different input registers, so merging must fail.
        try:
            proof_tools.MergeOperands(op1, op2)
        except AssertionError:
            pass
        else:
            self.fail('Should have thrown exception as restrictions conflict')

    def testOpsProd(self):
        """OpsProd yields the cartesian product of the operand sets."""
        ops1 = set(
            [proof_tools.Operands(disasms=('a',)),
             proof_tools.Operands(disasms=('b',))])
        ops2 = set(
            [proof_tools.Operands(disasms=('1',)),
             proof_tools.Operands(disasms=('2',))])
        ops3 = set(
            [proof_tools.Operands(disasms=('i',)),
             proof_tools.Operands(disasms=('ii',))])
        self.assertEquals(
            set([proof_tools.Operands(disasms=('a', '1', 'i')),
                 proof_tools.Operands(disasms=('a', '1', 'ii')),
                 proof_tools.Operands(disasms=('a', '2', 'i')),
                 proof_tools.Operands(disasms=('a', '2', 'ii')),
                 proof_tools.Operands(disasms=('b', '1', 'i')),
                 proof_tools.Operands(disasms=('b', '1', 'ii')),
                 proof_tools.Operands(disasms=('b', '2', 'i')),
                 proof_tools.Operands(disasms=('b', '2', 'ii'))]),
            proof_tools.OpsProd(ops1, ops2, ops3))

    def testMemoryOperandsTemplate32(self):
        """32-bit templates: no register restrictions are produced."""
        mem = proof_tools.MemoryOperandsTemplate(
            disp='0x0', base='%ebx', index='%eax', scale=2, bitness=32)
        self.assertEquals(
            [proof_tools.Operands(disasms=('(%ebx)',)),
             proof_tools.Operands(disasms=('0x0',)),
             proof_tools.Operands(disasms=('(%ebx,%eax,2)',)),
             proof_tools.Operands(disasms=('0x0(,%eax,2)',)),
             proof_tools.Operands(disasms=('0x0(%ebx)',)),
             proof_tools.Operands(disasms=('0x0(%ebx,%eax,2)',))],
            mem)

    def testMemoryOperandsTemplate64(self):
        """64-bit templates: indexed forms carry an input_rr restriction."""
        mem = proof_tools.MemoryOperandsTemplate(
            disp='0x0', base='%rsp', index='%r8', scale=2, bitness=64)
        self.assertEquals(
            [proof_tools.Operands(disasms=('(%rsp)',)),
             proof_tools.Operands(disasms=('0x0(%rsp)',)),
             proof_tools.Operands(disasms=('(%rsp,%r8,2)',), input_rr='%r8'),
             proof_tools.Operands(disasms=('0x0(%rsp,%r8,2)',), input_rr='%r8')],
            mem)

    def testAllMemoryOps32(self):
        """Every generated 32-bit operand parses as memory and covers the
        expected base/index register sets (no restrictions attached)."""
        mems = proof_tools.AllMemoryOperands(bitness=32)
        indexes = set()
        bases = set()
        for mem in mems:
            self.assertTrue(mem.input_rr is None, mem)
            self.assertTrue(mem.output_rr is None, mem)
            self.assertEquals(len(mem.disasms), 1)
            m = re.match(spec.MemoryRE() + r'$', mem.disasms[0])
            self.assertTrue(m is not None, msg=mem.disasms[0])
            self.assertTrue(m.group('memory_segment') is None, msg=mem.disasms[0])
            base = m.group('memory_base')
            index = m.group('memory_index')
            if base is not None:
                bases.add(base)
            if index is not None:
                indexes.add(index)
        self.assertEquals(
            set(['%ebp', '%eax', '%edi', '%ebx', '%esi', '%ecx', '%edx', '%eiz']),
            indexes)
        self.assertEquals(
            set(['%ebp', '%eax', '%edi', '%ebx', '%esi', '%ecx', '%edx', '%esp']),
            bases)

    def testAllMemoryOps64(self):
        """Every generated 64-bit operand parses as memory; any real index
        register must be recorded as the operand's input_rr."""
        mems = proof_tools.AllMemoryOperands(bitness=64)
        indexes = set()
        bases = set()
        for mem in mems:
            self.assertTrue(mem.output_rr is None, mem)
            self.assertEquals(len(mem.disasms), 1)
            m = re.match(spec.MemoryRE() + r'$', mem.disasms[0])
            self.assertTrue(m is not None, msg=mem.disasms[0])
            self.assertTrue(m.group('memory_segment') is None, msg=mem.disasms[0])
            base = m.group('memory_base')
            index = m.group('memory_index')
            if base is not None:
                bases.add(base)
            # %riz is a pseudo index and carries no restriction.
            if index is not None and index != '%riz':
                indexes.add(index)
                self.assertEquals(mem.input_rr, index)
        self.assertEquals(
            set(['%rax', '%rbx', '%rcx', '%rdx', '%rsi', '%rdi',
                 '%r8', '%r9', '%r10', '%r11', '%r12', '%r13', '%r14', '%r15']),
            indexes)
        self.assertEquals(
            set(['%rsp', '%r15', '%rbp', '%rip']),
            bases)
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
DirectXMan12/nova-hacking
|
refs/heads/feature_novnc_krb
|
nova/objectstore/__init__.py
|
29
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nova.objectstore` -- S3-type object store
=====================================================
.. automodule:: nova.objectstore
:platform: Unix
:synopsis: Currently a trivial file-based system, getting extended w/ swift.
"""
|
TheTimmy/spack
|
refs/heads/develop
|
var/spack/repos/builtin/packages/r-curl/package.py
|
2
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RCurl(RPackage):
    """The curl() and curl_download() functions provide highly configurable
    drop-in replacements for base url() and download.file() with better
    performance, support for encryption (https, ftps), gzip compression,
    authentication, and other libcurl goodies. The core of the package
    implements a framework for performing fully customized requests where data
    can be processed either in memory, on disk, or streaming via the callback
    or connection interfaces. Some knowledge of libcurl is recommended; for a
    more-user-friendly web client see the 'httr' package which builds on this
    package with http specific tools and logic."""

    homepage = "https://github.com/jeroenooms/curl"
    url = "https://cran.r-project.org/src/contrib/curl_2.3.tar.gz"

    # (version, md5 checksum of the CRAN source tarball) — do not edit checksums.
    version('2.3', '7250ee8caed98ba76906ab4d32da60f8')
    version('1.0', '93d34926d6071e1fba7e728b482f0dd9')
    version('0.9.7', 'a101f7de948cb828fef571c730f39217')

    # Requires R >= 3.0.0 and the system libcurl library.
    depends_on('r@3.0.0:')
    depends_on('curl')
|
jmatthed/clamav-devel
|
refs/heads/master
|
libclamav/c++/llvm/utils/DSAclean.py
|
147
|
#! /usr/bin/python
#changelog:
#10/13/2005b: replaced the # in tmp(.#*)* with alphanumeric and _, this will then remove
#nodes such as %tmp.1.i and %tmp._i.3
#10/13/2005: exntended to remove variables of the form %tmp(.#)* rather than just
#%tmp.#, i.e. it now will remove %tmp.12.3.15 etc, additionally fixed a spelling error in
#the comments
#10/12/2005: now it only removes nodes and edges for which the label is %tmp.# rather
#than removing all lines for which the lable CONTAINS %tmp.#
import re
import sys
if len(sys.argv) < 3:
    print('usage is: ./DSAclean <dot_file_to_be_cleaned> <out_put_file>')
    sys.exit(1)

# Open the graph to clean and the destination file.  Renamed from
# input/output, which shadowed the Python builtins.
in_file = open(sys.argv[1], 'r')
out_file = open(sys.argv[2], 'w')

# Compile the %tmp label pattern once, instead of recompiling it on every
# line as the original did.  The pattern itself is unchanged (now a raw
# string so the \s escapes are explicit).
TMP_LABEL = re.compile(r'label(\s*)=(\s*)"\s%tmp(.\w*)*(\s*)"')

# Stream one line at a time so arbitrarily large graphs need not fit in memory.
buffer = in_file.readline()
while buffer != '':
    if TMP_LABEL.search(buffer):
        # %tmp node: skip next line, write neither this line nor the next
        buffer = in_file.readline()
    else:
        # this isn't a tmp Node, we can write it
        out_file.write(buffer)
    # prepare for the next iteration
    buffer = in_file.readline()
in_file.close()
out_file.close()
|
tuxfux-hlp-notes/python-batches
|
refs/heads/master
|
archieves/Batch-63/14-files/myenv/lib/python2.7/site-packages/wheel/pkginfo.py
|
565
|
"""Tools for reading and writing PKG-INFO / METADATA without caring
about the encoding."""
from email.parser import Parser
# Feature-detect Python 2 vs 3: the `unicode` builtin only exists on 2.
try:
    unicode
    _PY3 = False
except NameError:
    _PY3 = True

if not _PY3:
    from email.generator import Generator

    def read_pkg_info_bytes(bytestr):
        # Python 2: header bytes can be fed to the parser directly.
        return Parser().parsestr(bytestr)

    def read_pkg_info(path):
        """Parse the PKG-INFO / METADATA file at *path* into a Message."""
        with open(path, "r") as headers:
            message = Parser().parse(headers)
        return message

    def write_pkg_info(path, message):
        """Serialize *message* to *path*; maxheaderlen=0 disables header
        wrapping so long values round-trip unchanged."""
        with open(path, 'w') as metadata:
            Generator(metadata, maxheaderlen=0).flatten(message)
else:
    from email.generator import BytesGenerator

    def read_pkg_info_bytes(bytestr):
        # surrogateescape preserves undecodable bytes through the str
        # round-trip instead of raising on non-ASCII metadata.
        headers = bytestr.decode(encoding="ascii", errors="surrogateescape")
        message = Parser().parsestr(headers)
        return message

    def read_pkg_info(path):
        """Parse the PKG-INFO / METADATA file at *path* into a Message."""
        with open(path, "r",
                  encoding="ascii",
                  errors="surrogateescape") as headers:
            message = Parser().parse(headers)
        return message

    def write_pkg_info(path, message):
        """Serialize *message* to *path* as bytes, without header wrapping."""
        with open(path, "wb") as out:
            BytesGenerator(out, maxheaderlen=0).flatten(message)
|
stvstnfrd/edx-platform
|
refs/heads/master
|
import_shims/studio/third_party_auth/tests/specs/test_google.py
|
4
|
"""Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import

# Emit a DeprecationWarning pointing importers of the old dotted path at the
# new location, then re-export everything from the new module.
warn_deprecated_import('third_party_auth.tests.specs.test_google', 'common.djangoapps.third_party_auth.tests.specs.test_google')

from common.djangoapps.third_party_auth.tests.specs.test_google import *
|
adamjmcgrath/glancydesign
|
refs/heads/master
|
src/django-nonrel/django/db/backends/postgresql/client.py
|
638
|
import os
import sys
from django.db.backends import BaseDatabaseClient
class DatabaseClient(BaseDatabaseClient):
    """Launches the psql command-line client for the configured database."""

    executable_name = 'psql'

    def runshell(self):
        """Replace (or, on Windows, shell out from) this process with psql,
        passing user/host/port/database from the connection settings."""
        conn = self.connection.settings_dict
        args = [self.executable_name]
        if conn['USER']:
            args.extend(["-U", conn['USER']])
        if conn['HOST']:
            args.extend(["-h", conn['HOST']])
        if conn['PORT']:
            args.extend(["-p", str(conn['PORT'])])
        args.append(conn['NAME'])
        if os.name == 'nt':
            # Windows has no execvp; run through the shell and propagate
            # psql's exit status.
            sys.exit(os.system(" ".join(args)))
        else:
            os.execvp(self.executable_name, args)
|
nuuuboo/odoo
|
refs/heads/8.0
|
openerp/addons/test_access_rights/tests/__init__.py
|
404
|
import test_ir_rules
|
GuillaumeGomez/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/XMLHttpRequest/resources/status.py
|
405
|
def main(request, response):
    """Echo back a configurable HTTP status line, headers and body.

    Query parameters: code (status code, default 200), text (status text,
    default "OMG"), content (response body), type (Content-Type header).
    The handler also reports the request method in X-Request-Method.
    """
    status_code = int(request.GET.first("code", 200))
    status_text = request.GET.first("text", "OMG")
    body = request.GET.first("content", "")
    content_type = request.GET.first("type", "")
    headers = [
        ("Content-Type", content_type),
        ("X-Request-Method", request.method),
    ]
    return (status_code, status_text), headers, body
|
wd5/jangr
|
refs/heads/master
|
django/db/backends/oracle/creation.py
|
153
|
import sys, time
from django.db.backends.creation import BaseDatabaseCreation
# All generated test artifacts (database name, user, tablespaces) share
# this prefix.
TEST_DATABASE_PREFIX = 'test_'
# Fixed password assigned to the auto-created Oracle test user.
PASSWORD = 'Im_a_lumberjack'
class DatabaseCreation(BaseDatabaseCreation):
    """Oracle-specific test database creation/destruction.

    Unlike other backends, Oracle "databases" are modeled as a dedicated
    test user plus a pair of tablespaces; the production NAME is reused
    while USER/PASSWORD are swapped for test credentials.
    """

    # This dictionary maps Field objects to their associated Oracle column
    # types, as strings. Column-type strings can contain format strings; they'll
    # be interpolated against the values of Field.__dict__ before being output.
    # If a column type is set to None, it won't be included in the output.
    #
    # Any format strings starting with "qn_" are quoted before being used in the
    # output (the "qn_" prefix is stripped before the lookup is performed.
    data_types = {
        'AutoField': 'NUMBER(11)',
        'BooleanField': 'NUMBER(1) CHECK (%(qn_column)s IN (0,1))',
        'CharField': 'NVARCHAR2(%(max_length)s)',
        'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
        'DateField': 'DATE',
        'DateTimeField': 'TIMESTAMP',
        'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
        'FileField': 'NVARCHAR2(%(max_length)s)',
        'FilePathField': 'NVARCHAR2(%(max_length)s)',
        'FloatField': 'DOUBLE PRECISION',
        'IntegerField': 'NUMBER(11)',
        'BigIntegerField': 'NUMBER(19)',
        'IPAddressField': 'VARCHAR2(15)',
        'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))',
        'OneToOneField': 'NUMBER(11)',
        'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
        'PositiveSmallIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',
        'SlugField': 'NVARCHAR2(%(max_length)s)',
        'SmallIntegerField': 'NUMBER(11)',
        'TextField': 'NCLOB',
        'TimeField': 'TIMESTAMP',
        'URLField': 'VARCHAR2(%(max_length)s)',
    }

    def __init__(self, connection):
        # 'remember' stashes the production USER/PASSWORD so they can be
        # restored when the test database is destroyed.
        self.remember = {}
        super(DatabaseCreation, self).__init__(connection)

    def _create_test_db(self, verbosity=1, autoclobber=False):
        # Create the test tablespaces and test user, prompting before
        # clobbering pre-existing ones unless autoclobber is set.
        TEST_NAME = self._test_database_name()
        TEST_USER = self._test_database_user()
        TEST_PASSWD = self._test_database_passwd()
        TEST_TBLSPACE = self._test_database_tblspace()
        TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()

        parameters = {
            'dbname': TEST_NAME,
            'user': TEST_USER,
            'password': TEST_PASSWD,
            'tblspace': TEST_TBLSPACE,
            'tblspace_temp': TEST_TBLSPACE_TMP,
        }

        self.remember['user'] = self.connection.settings_dict['USER']
        self.remember['passwd'] = self.connection.settings_dict['PASSWORD']

        cursor = self.connection.cursor()
        if self._test_database_create():
            try:
                self._execute_test_db_creation(cursor, parameters, verbosity)
            except Exception, e:
                sys.stderr.write("Got an error creating the test database: %s\n" % e)
                if not autoclobber:
                    confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME)
                # Note: when autoclobber is True, 'confirm' is never read
                # thanks to short-circuit evaluation.
                if autoclobber or confirm == 'yes':
                    try:
                        if verbosity >= 1:
                            print "Destroying old test database '%s'..." % self.connection.alias
                        self._execute_test_db_destruction(cursor, parameters, verbosity)
                        self._execute_test_db_creation(cursor, parameters, verbosity)
                    except Exception, e:
                        sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                        sys.exit(2)
                else:
                    print "Tests cancelled."
                    sys.exit(1)

        if self._test_user_create():
            if verbosity >= 1:
                print "Creating test user..."
            try:
                self._create_test_user(cursor, parameters, verbosity)
            except Exception, e:
                sys.stderr.write("Got an error creating the test user: %s\n" % e)
                if not autoclobber:
                    confirm = raw_input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER)
                if autoclobber or confirm == 'yes':
                    try:
                        if verbosity >= 1:
                            print "Destroying old test user..."
                        self._destroy_test_user(cursor, parameters, verbosity)
                        if verbosity >= 1:
                            print "Creating test user..."
                        self._create_test_user(cursor, parameters, verbosity)
                    except Exception, e:
                        sys.stderr.write("Got an error recreating the test user: %s\n" % e)
                        sys.exit(2)
                else:
                    print "Tests cancelled."
                    sys.exit(1)

        # Swap in the test credentials; TEST_USER is remembered so that
        # teardown can tell which user to drop.
        self.connection.settings_dict['TEST_USER'] = self.connection.settings_dict["USER"] = TEST_USER
        self.connection.settings_dict["PASSWORD"] = TEST_PASSWD

        return self.connection.settings_dict['NAME']

    def _destroy_test_db(self, test_database_name, verbosity=1):
        """
        Destroy a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        TEST_NAME = self._test_database_name()
        TEST_USER = self._test_database_user()
        TEST_PASSWD = self._test_database_passwd()
        TEST_TBLSPACE = self._test_database_tblspace()
        TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()

        # Restore the production credentials saved in _create_test_db().
        self.connection.settings_dict["USER"] = self.remember['user']
        self.connection.settings_dict["PASSWORD"] = self.remember['passwd']

        parameters = {
            'dbname': TEST_NAME,
            'user': TEST_USER,
            'password': TEST_PASSWD,
            'tblspace': TEST_TBLSPACE,
            'tblspace_temp': TEST_TBLSPACE_TMP,
        }

        cursor = self.connection.cursor()
        time.sleep(1) # To avoid "database is being accessed by other users" errors.
        if self._test_user_create():
            if verbosity >= 1:
                print 'Destroying test user...'
            self._destroy_test_user(cursor, parameters, verbosity)
        if self._test_database_create():
            if verbosity >= 1:
                print 'Destroying test database tables...'
            self._execute_test_db_destruction(cursor, parameters, verbosity)
        self.connection.close()

    def _execute_test_db_creation(self, cursor, parameters, verbosity):
        # Creates the data and temporary tablespaces backing the test db.
        if verbosity >= 2:
            print "_create_test_db(): dbname = %s" % parameters['dbname']
        statements = [
            """CREATE TABLESPACE %(tblspace)s
            DATAFILE '%(tblspace)s.dbf' SIZE 20M
            REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 200M
            """,
            """CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
            TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M
            REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 100M
            """,
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)

    def _create_test_user(self, cursor, parameters, verbosity):
        # Creates the test user bound to the test tablespaces.
        if verbosity >= 2:
            print "_create_test_user(): username = %s" % parameters['user']
        statements = [
            """CREATE USER %(user)s
            IDENTIFIED BY %(password)s
            DEFAULT TABLESPACE %(tblspace)s
            TEMPORARY TABLESPACE %(tblspace_temp)s
            """,
            """GRANT CONNECT, RESOURCE TO %(user)s""",
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)

    def _execute_test_db_destruction(self, cursor, parameters, verbosity):
        # Drops both tablespaces, including their datafiles.
        if verbosity >= 2:
            print "_execute_test_db_destruction(): dbname=%s" % parameters['dbname']
        statements = [
            'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
            'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)

    def _destroy_test_user(self, cursor, parameters, verbosity):
        if verbosity >= 2:
            print "_destroy_test_user(): user=%s" % parameters['user']
            print "Be patient.  This can take some time..."
        statements = [
            'DROP USER %(user)s CASCADE',
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)

    def _execute_statements(self, cursor, statements, parameters, verbosity):
        # Interpolates each statement template with 'parameters' and runs it,
        # reporting (and re-raising) any failure.
        for template in statements:
            stmt = template % parameters
            if verbosity >= 2:
                print stmt
            try:
                cursor.execute(stmt)
            except Exception, err:
                sys.stderr.write("Failed (%s)\n" % (err))
                raise

    def _test_database_name(self):
        # TEST_NAME setting wins; otherwise prefix the production NAME.
        name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
        try:
            if self.connection.settings_dict['TEST_NAME']:
                name = self.connection.settings_dict['TEST_NAME']
        except AttributeError:
            pass
        return name

    def _test_database_create(self):
        return self.connection.settings_dict.get('TEST_CREATE', True)

    def _test_user_create(self):
        return self.connection.settings_dict.get('TEST_USER_CREATE', True)

    def _test_database_user(self):
        name = TEST_DATABASE_PREFIX + self.connection.settings_dict['USER']
        try:
            if self.connection.settings_dict['TEST_USER']:
                name = self.connection.settings_dict['TEST_USER']
        except KeyError:
            pass
        return name

    def _test_database_passwd(self):
        name = PASSWORD
        try:
            if self.connection.settings_dict['TEST_PASSWD']:
                name = self.connection.settings_dict['TEST_PASSWD']
        except KeyError:
            pass
        return name

    def _test_database_tblspace(self):
        name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
        try:
            if self.connection.settings_dict['TEST_TBLSPACE']:
                name = self.connection.settings_dict['TEST_TBLSPACE']
        except KeyError:
            pass
        return name

    def _test_database_tblspace_tmp(self):
        name = TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME'] + '_temp'
        try:
            if self.connection.settings_dict['TEST_TBLSPACE_TMP']:
                name = self.connection.settings_dict['TEST_TBLSPACE_TMP']
        except KeyError:
            pass
        return name

    def _get_test_db_name(self):
        """
        We need to return the 'production' DB name to get the test DB creation
        machinery to work. This isn't a great deal in this case because DB
        names as handled by Django haven't real counterparts in Oracle.
        """
        return self.connection.settings_dict['NAME']

    def test_db_signature(self):
        # Identifies a test database so runners can share/skip recreation.
        settings_dict = self.connection.settings_dict
        return (
            settings_dict['HOST'],
            settings_dict['PORT'],
            settings_dict['ENGINE'],
            settings_dict['NAME'],
            self._test_database_user(),
        )
|
gimoh/ansible-modules-core
|
refs/heads/devel
|
files/acl.py
|
38
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: acl
version_added: "1.4"
short_description: Sets and retrieves file ACL information.
description:
- Sets and retrieves file ACL information.
options:
name:
required: true
default: null
description:
- The full path of the file or object.
aliases: ['path']
state:
required: false
default: query
choices: [ 'query', 'present', 'absent' ]
description:
- defines whether the ACL should be present or not. The C(query) state gets the current acl without changing it, for use in 'register' operations.
follow:
required: false
default: yes
choices: [ 'yes', 'no' ]
description:
- whether to follow symlinks on the path if a symlink is encountered.
default:
version_added: "1.5"
required: false
default: no
choices: [ 'yes', 'no' ]
description:
- if the target is a directory, setting this to yes will make it the default acl for entities created inside the directory. It causes an error if name is a file.
entity:
version_added: "1.5"
required: false
description:
- actual user or group that the ACL applies to when matching entity types user or group are selected.
etype:
version_added: "1.5"
required: false
default: null
choices: [ 'user', 'group', 'mask', 'other' ]
description:
- the entity type of the ACL to apply, see setfacl documentation for more info.
permissions:
version_added: "1.5"
required: false
default: null
description:
- Permissions to apply/remove can be any combination of r, w and x (read, write and execute respectively)
entry:
required: false
default: null
description:
      - DEPRECATED. The acl to set or remove. This must always be quoted in the form of '<etype>:<qualifier>:<perms>'. The qualifier may be empty for some types, but the type and perms are always required. '-' can be used as a placeholder when you do not care about permissions. This is now superseded by entity, type and permissions fields.
author: Brian Coca
notes:
- The "acl" module requires that acls are enabled on the target filesystem and that the setfacl and getfacl binaries are installed.
'''
EXAMPLES = '''
# Grant user Joe read access to a file
- acl: name=/etc/foo.conf entity=joe etype=user permissions="r" state=present
# Removes the acl for Joe on a specific file
- acl: name=/etc/foo.conf entity=joe etype=user state=absent
# Sets default acl for joe on foo.d
- acl: name=/etc/foo.d entity=joe etype=user permissions=rw default=yes state=present
# Same as previous but using entry shorthand
- acl: name=/etc/foo.d entry="default:user:joe:rw-" state=present
# Obtain the acl for a specific file
- acl: name=/etc/foo.conf
register: acl_info
'''
RETURN = '''
acl:
description: Current acl on provided path (after changes, if any)
returned: success
type: list
sample: [ "user::rwx", "group::rwx", "other::rwx" ]
'''
def normalize_permissions(p):
    """Canonicalize a permission string into fixed positional 'rwx' form.

    Each of r/w/x lands in its own slot; unknown characters are ignored.
    A capital 'X' only claims the execute slot when a plain 'x' has not
    already been granted (plain 'x' is the more permissive of the two).
    """
    slots = {'r': 0, 'w': 1, 'x': 2}
    perms = ['-', '-', '-']
    for char in p:
        if char in slots:
            perms[slots[char]] = char
        elif char == 'X' and perms[2] != 'x':
            perms[2] = 'X'
    return ''.join(perms)
def split_entry(entry):
    """Split an acl entry string into [default, etype, entity, permissions].

    Accepts '[d[efault]:]<etype>:<entity>:<perms>'.  The fields are reversed
    so a missing leading 'default' marker can be padded with False.  The
    etype prefix is normalized to user/group/mask/other (None if unknown)
    and the permissions are normalized positionally.

    Raises ValueError for a malformed entry.  Fix: the original printed a
    "wtf??" debug line to stdout before re-raising — stray stdout corrupts
    the JSON protocol Ansible modules must speak, so the print is gone and
    a descriptive ValueError is raised instead.
    """
    a = entry.split(':')
    a.reverse()
    if len(a) == 3:
        # No leading default marker: pad so the 4-way unpack below works.
        a.append(False)
    if len(a) != 4:
        raise ValueError("invalid acl entry: %s => %s" % (entry, a))
    p, e, t, d = a

    if d:
        d = True

    if t.startswith("u"):
        t = "user"
    elif t.startswith("g"):
        t = "group"
    elif t.startswith("m"):
        t = "mask"
    elif t.startswith("o"):
        t = "other"
    else:
        t = None

    p = normalize_permissions(p)

    return [d, t, e, p]
def get_acls(module, path, follow):
    """Return the current acl entries for *path* via getfacl.

    With follow=False, '-h' makes getfacl operate on a symlink itself
    rather than its target.
    """
    cmd = [module.get_bin_path('getfacl', True)]
    if not follow:
        cmd.append('-h')
    # prevents absolute path warnings and removes headers
    cmd.extend(['--omit-header', '--absolute-names', path])
    return _run_acl(module, cmd)
def set_acl(module, path, entry, follow, default):
    """Apply (add or modify) a single ACL *entry* on *path* via setfacl -m."""
    setfacl = module.get_bin_path('setfacl', True)
    cmd = [setfacl]
    if not follow:
        cmd.append('-h')
    if default:
        # Operate on the default ACL instead of the access ACL.
        cmd.append('-d')
    # The quoting is kept inside the argument because _run_acl joins the
    # list into a single shell command line.
    cmd += ['-m "%s"' % entry, path]
    return _run_acl(module, cmd)
def rm_acl(module, path, entry, follow, default):
    """Remove a single ACL *entry* from *path* via setfacl -x."""
    setfacl = module.get_bin_path('setfacl', True)
    cmd = [setfacl]
    if not follow:
        cmd.append('-h')
    if default:
        # Remove the default ACL wholesale.
        cmd.append('-k')
    # setfacl -x takes the entry without the trailing permission section.
    stripped = entry[:entry.rfind(':')]
    cmd += ['-x "%s"' % stripped, path]
    # Removal is best-effort: don't fail hard on a non-zero return code.
    return _run_acl(module, cmd, False)
def _run_acl(module,cmd,check_rc=True):
try:
(rc, out, err) = module.run_command(' '.join(cmd), check_rc=check_rc)
except Exception, e:
module.fail_json(msg=e.strerror)
# trim last line as it is always empty
ret = out.splitlines()
return ret[0:len(ret)-1]
def main():
    """Ansible module entry point: query, add or remove POSIX ACL entries.

    States:
      query   -- only return the current ACL of the path (the default).
      present -- ensure the given entry (or etype/entity/permissions)
                 exists with the requested permissions.
      absent  -- ensure the matching entry is removed.
    """
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True, aliases=['path'], type='str'),
            # BUGFIX: this previously read ``etype='str'`` (a typo for
            # ``type='str'``), so the 'entry' option carried no declared
            # type at all.
            entry = dict(required=False, type='str'),
            entity = dict(required=False, type='str', default=''),
            etype = dict(required=False, choices=['other', 'user', 'group', 'mask'], type='str'),
            permissions = dict(required=False, type='str'),
            state = dict(required=False, default='query', choices=[ 'query', 'present', 'absent' ], type='str'),
            follow = dict(required=False, type='bool', default=True),
            default= dict(required=False, type='bool', default=False),
        ),
        supports_check_mode=True,
    )
    path = os.path.expanduser(module.params.get('name'))
    entry = module.params.get('entry')
    entity = module.params.get('entity')
    etype = module.params.get('etype')
    permissions = module.params.get('permissions')
    state = module.params.get('state')
    follow = module.params.get('follow')
    default = module.params.get('default')
    if permissions:
        permissions = normalize_permissions(permissions)
    if not os.path.exists(path):
        module.fail_json(msg="path not found or not accessible!")
    if state in ['present', 'absent']:
        if not entry and not etype:
            module.fail_json(msg="%s requires either etype and permissions or just entry be set" % state)
    if entry:
        # 'entry' is a shorthand that is mutually exclusive with the
        # individual entity/etype/permissions options.
        if etype or entity or permissions:
            module.fail_json(msg="entry and another incompatible field (entity, etype or permissions) are also set")
        if entry.count(":") not in [2, 3]:
            module.fail_json(msg="Invalid entry: '%s', it requires 3 or 4 sections divided by ':'" % entry)
        default, etype, entity, permissions = split_entry(entry)
    changed = False
    msg = ""
    currentacls = get_acls(module, path, follow)
    if (state == 'present'):
        # Look for an existing entry with the same default flag, type and
        # (for user/group) entity; a permission mismatch or no match at
        # all means a change is required.
        matched = False
        for oldentry in currentacls:
            if oldentry.count(":") == 0:
                continue
            old_default, old_type, old_entity, old_permissions = split_entry(oldentry)
            if old_default == default:
                if old_type == etype:
                    if etype in ['user', 'group']:
                        if old_entity == entity:
                            matched = True
                            if not old_permissions == permissions:
                                changed = True
                            break
                    else:
                        matched = True
                        if not old_permissions == permissions:
                            changed = True
                        break
        if not matched:
            changed = True
        if changed and not module.check_mode:
            set_acl(module, path, ':'.join([etype, str(entity), permissions]), follow, default)
        msg = "%s is present" % ':'.join([etype, str(entity), permissions])
    elif state == 'absent':
        for oldentry in currentacls:
            if oldentry.count(":") == 0:
                continue
            old_default, old_type, old_entity, old_permissions = split_entry(oldentry)
            if old_default == default:
                if old_type == etype:
                    if etype in ['user', 'group']:
                        if old_entity == entity:
                            changed = True
                            break
                    else:
                        changed = True
                        break
        if changed and not module.check_mode:
            rm_acl(module, path, ':'.join([etype, entity, '---']), follow, default)
        msg = "%s is absent" % ':'.join([etype, entity, '---'])
    else:
        msg = "current acl"
    if changed:
        # Re-read so the returned 'acl' reflects the post-change state.
        currentacls = get_acls(module, path, follow)
    module.exit_json(changed=changed, msg=msg, acl=currentacls)
# import module snippets
from ansible.module_utils.basic import *
main()
|
sadmansk/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/webdriver/tests/set_timeouts/__init__.py
|
12133432
| |
Shrhawk/edx-platform
|
refs/heads/master
|
lms/djangoapps/psychometrics/__init__.py
|
12133432
| |
thomasgilgenast/spqr-nonrel
|
refs/heads/master
|
django/views/static.py
|
151
|
"""
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import os
import posixpath
import re
import urllib
from django.http import Http404, HttpResponse, HttpResponseRedirect, HttpResponseNotModified
from django.template import loader, Template, Context, TemplateDoesNotExist
from django.utils.http import http_date, parse_http_date
def serve(request, path, document_root=None, show_indexes=False):
    """
    Serve static files below a given point in the directory structure.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root' : '/path/to/my/files/'})

    in your URLconf. You must provide the ``document_root`` param. You may
    also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
    of the directory.  This index view will use the template hardcoded below,
    but if you'd like to override it, you can create a template called
    ``static/directory_index.html``.
    """
    # Normalize the URL path and rebuild it component by component,
    # dropping anything (drive letters, '.', '..') that could be used to
    # escape document_root.
    path = posixpath.normpath(urllib.unquote(path))
    path = path.lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        newpath = os.path.join(newpath, part).replace('\\', '/')
    if newpath and path != newpath:
        # The path was sanitized; redirect so the client sees the
        # canonical URL.
        return HttpResponseRedirect(newpath)
    fullpath = os.path.join(document_root, newpath)
    if os.path.isdir(fullpath):
        if show_indexes:
            return directory_index(newpath, fullpath)
        raise Http404("Directory indexes are not allowed here.")
    if not os.path.exists(fullpath):
        raise Http404('"%s" does not exist' % fullpath)
    # Respect the If-Modified-Since header.
    statobj = os.stat(fullpath)
    mimetype, encoding = mimetypes.guess_type(fullpath)
    mimetype = mimetype or 'application/octet-stream'
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
                              statobj.st_mtime, statobj.st_size):
        return HttpResponseNotModified(mimetype=mimetype)
    # NOTE(review): reads the whole file into memory, which is acceptable
    # only for the development use this view is documented for.
    response = HttpResponse(open(fullpath, 'rb').read(), mimetype=mimetype)
    response["Last-Modified"] = http_date(statobj.st_mtime)
    response["Content-Length"] = statobj.st_size
    if encoding:
        response["Content-Encoding"] = encoding
    return response
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<meta http-equiv="Content-Language" content="en-us" />
<meta name="robots" content="NONE,NOARCHIVE" />
<title>Index of {{ directory }}</title>
</head>
<body>
<h1>Index of {{ directory }}</h1>
<ul>
{% ifnotequal directory "/" %}
<li><a href="../">../</a></li>
{% endifnotequal %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
def directory_index(path, fullpath):
    """Render a basic HTML listing of the files inside *fullpath*."""
    try:
        template = loader.select_template(['static/directory_index.html',
            'static/directory_index'])
    except TemplateDoesNotExist:
        # Fall back to the hardcoded default template.
        template = Template(DEFAULT_DIRECTORY_INDEX_TEMPLATE, name='Default directory index template')
    entries = []
    for name in os.listdir(fullpath):
        if name.startswith('.'):
            # Hidden files are excluded from the listing.
            continue
        if os.path.isdir(os.path.join(fullpath, name)):
            name += '/'
        entries.append(name)
    context = Context({
        'directory' : path + '/',
        'file_list' : entries,
    })
    return HttpResponse(template.render(context))
def was_modified_since(header=None, mtime=0, size=0):
    """
    Was something modified since the user last downloaded it?

    header
        This is the value of the If-Modified-Since header.  If this is None,
        I'll just return True.

    mtime
        This is the modification time of the item we're talking about.

    size
        This is the size of the item we're talking about.
    """
    if header is None:
        # No conditional header at all: treat as modified.
        return True
    try:
        matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
                           re.IGNORECASE)
        header_mtime = parse_http_date(matches.group(1))
        header_len = matches.group(3)
        if header_len and int(header_len) != size:
            # Size changed since the client's copy.
            return True
        if mtime > header_mtime:
            # Modified after the client's copy was fetched.
            return True
    except (AttributeError, ValueError, OverflowError):
        # Unparseable header (no regex match, bad date, bad length):
        # behave as if the resource was modified.
        return True
    return False
|
owais/django-setman
|
refs/heads/master
|
testproject/core/tests/test_forms.py
|
1
|
from django import forms
from django.test import TestCase
from setman.forms import SettingsForm
from setman.utils import AVAILABLE_SETTINGS
from testproject.core.choices import ROLE_CHOICES
from testproject.core.validators import abc_validator, xyz_validator
__all__ = ('TestForms', )
# Maps each setting name to the form field class SettingsForm is expected
# to build for it.  CHOICE_* settings not listed here are expected to
# become plain ChoiceFields (see TestForms.test_field).
SETTINGS_FIELDS = {
    'BOOLEAN_SETTING': forms.BooleanField,
    'CHOICE_SETTING': forms.ChoiceField,
    'DECIMAL_SETTING': forms.DecimalField,
    'FLOAT_SETTING': forms.FloatField,
    'INT_SETTING': forms.IntegerField,
    'IP_SETTING': forms.IPAddressField,
    'STRING_SETTING': forms.RegexField,
    'VALIDATOR_SETTING': forms.CharField,
}
class TestForms(TestCase):
    """Checks that ``SettingsForm`` builds the expected form fields
    (type, label, choices and validation constraints) for every
    available setting.
    """
    def setUp(self):
        # A fresh unbound form per test; all field construction happens
        # in the form's __init__.
        self.form = SettingsForm()
    def test_choice(self):
        # Plain choice setting: the raw values double as display labels.
        field = self.form.fields['CHOICE_SETTING']
        self.assertEqual(
            field.choices,
            [('apple', 'apple'), ('grape', 'grape'), ('peach', 'peach'),
             ('pear', 'pear'), ('waterlemon', 'waterlemon')]
        )
        self.assertEqual(field.label, 'Choice')
        # Choices with explicit human-readable labels.
        field = self.form.fields['CHOICE_SETTING_WITH_LABELS']
        self.assertEqual(
            field.choices,
            [('apple', 'Apple'), ('grape', 'Grape'), ('peach', 'Peach'),
             ('pear', 'Pear'), ('waterlemon', 'Waterlemon')]
        )
        self.assertEqual(field.label, 'Choice with labels')
        # Grouped choices (optgroup-style nesting).
        field = self.form.fields['CHOICE_SETTING_WITH_GROUPS']
        self.assertEqual(
            field.choices,
            [
                ('Male', (
                    ('Henry', 'Henry'),
                    ('John', 'John'),
                    ('Peter', 'Peter'),
                )),
                ('Female', (
                    ('Henrietta', 'Henrietta'),
                    ('Johanna', 'Johanna'),
                    ('Kate', 'Kate')
                )),
            ]
        )
        self.assertEqual(field.label, 'Choice with groups')
        # Grouped choices with labels.
        field = self.form.fields['CHOICE_SETTING_WITH_LABELS_AND_GROUPS']
        self.assertEqual(
            field.choices,
            [
                ('Fruits', (
                    ('apple', 'Apple'),
                    ('grape', 'Grape'),
                    ('peach', 'Peach'),
                    ('pear', 'Pear')
                )),
                ('Vegetables', (
                    ('carrot', 'Carrot'),
                    ('cucumber', 'Cucumber'),
                    ('potato', 'Potato'),
                    ('tomato', 'Tomato'),
                )),
            ]
        )
        self.assertEqual(field.label, 'Choice with labels and groups')
        # Choices sourced from project-internal declarations.
        field = self.form.fields['CHOICE_SETTING_WITH_INTERNAL_CHOICES']
        self.assertEqual(field.choices, list(ROLE_CHOICES))
        self.assertEqual(field.label, 'Choice with internal choices')
        field = \
            self.form.fields['CHOICE_SETTING_WITH_INTERNAL_MODEL_CHOICES_1']
        self.assertEqual(field.choices, list(ROLE_CHOICES))
        self.assertEqual(field.label, 'Choice with internal model choices')
        field = \
            self.form.fields['CHOICE_SETTING_WITH_INTERNAL_MODEL_CHOICES_2']
        self.assertEqual(field.choices, list(ROLE_CHOICES))
        self.assertEqual(field.label, 'Choice with internal model choices')
    def test_decimal(self):
        field = self.form.fields['DECIMAL_SETTING']
        self.assertEqual(field.decimal_places, 2)
        self.assertEqual(field.max_digits, 4)
        self.assertEqual(field.max_value, 10)
        self.assertEqual(field.min_value, 0)
        self.assertEqual(field.label, 'Decimal')
    def test_float(self):
        # Float setting declares no bounds.
        field = self.form.fields['FLOAT_SETTING']
        self.assertIsNone(field.max_value)
        self.assertIsNone(field.min_value)
        self.assertEqual(field.label, 'Float')
    def test_integer(self):
        field = self.form.fields['INT_SETTING']
        self.assertEqual(field.max_value, 32)
        self.assertEqual(field.min_value, 16)
        self.assertEqual(field.label, 'Int')
    def test_field(self):
        # Every available setting must yield exactly one form field of
        # the expected class.
        form = self.form
        self.assertEqual(len(form.fields), len(AVAILABLE_SETTINGS))
        for name, field in form.fields.items():
            # NOTE(review): a non-CHOICE_ name missing from SETTINGS_FIELDS
            # would raise KeyError below — presumably the fixture covers
            # all such settings; verify against the test configuration.
            if not name in SETTINGS_FIELDS and name.startswith('CHOICE_'):
                result = forms.ChoiceField
            else:
                result = SETTINGS_FIELDS[name]
            self.assertIsInstance(field, result)
    def test_regex(self):
        field = self.form.fields['STRING_SETTING']
        self.assertIsNone(field.max_length)
        self.assertIsNone(field.min_length)
        self.assertIsNotNone(field.regex)
        self.assertEqual(field.label, 'String')
    def test_validators(self):
        # Custom validators declared for the setting are attached as-is.
        field = self.form.fields['VALIDATOR_SETTING']
        self.assertEqual(field.validators, [abc_validator, xyz_validator])
        self.assertEqual(field.label, 'Validator')
|
TwinkleChawla/nova
|
refs/heads/master
|
nova/tests/unit/objects/test_instance_action.py
|
52
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import traceback
import mock
from oslo_utils import timeutils
import six
from nova import db
from nova.objects import instance_action
from nova import test
from nova.tests.unit.objects import test_objects
# Reference timestamp shared by all fixtures; microseconds are dropped so
# values survive a database round-trip without changing.
NOW = timeutils.utcnow().replace(microsecond=0)
# Canned DB row for an instance action, as the db API would return it.
fake_action = {
    'created_at': NOW,
    'deleted_at': None,
    'updated_at': None,
    'deleted': False,
    'id': 123,
    'action': 'fake-action',
    'instance_uuid': 'fake-uuid',
    'request_id': 'fake-request',
    'user_id': 'fake-user',
    'project_id': 'fake-project',
    'start_time': NOW,
    'finish_time': None,
    'message': 'foo',
}
# Canned DB row for an instance action event belonging to fake_action.
fake_event = {
    'created_at': NOW,
    'deleted_at': None,
    'updated_at': None,
    'deleted': False,
    'id': 123,
    'event': 'fake-event',
    'action_id': 123,
    'start_time': NOW,
    'finish_time': None,
    'result': 'fake-result',
    'traceback': 'fake-tb',
}
class _TestInstanceActionObject(object):
    """Shared tests for InstanceAction.

    Each test stubs the matching db API call and verifies both the packed
    values sent to it and the object hydrated from its return value.
    Mixed into local and remote backend test cases below.
    """
    @mock.patch.object(db, 'action_get_by_request_id')
    def test_get_by_request_id(self, mock_get):
        context = self.context
        mock_get.return_value = fake_action
        action = instance_action.InstanceAction.get_by_request_id(
            context, 'fake-uuid', 'fake-request')
        self.compare_obj(action, fake_action)
        mock_get.assert_called_once_with(context,
                                         'fake-uuid', 'fake-request')
    def test_pack_action_start(self):
        values = instance_action.InstanceAction.pack_action_start(
            self.context, 'fake-uuid', 'fake-action')
        self.assertEqual(values['request_id'], self.context.request_id)
        self.assertEqual(values['user_id'], self.context.user_id)
        self.assertEqual(values['project_id'], self.context.project_id)
        self.assertEqual(values['instance_uuid'], 'fake-uuid')
        self.assertEqual(values['action'], 'fake-action')
        # start_time comes back tz-aware; strip tzinfo to compare against
        # the naive context timestamp.
        self.assertEqual(values['start_time'].replace(tzinfo=None),
                         self.context.timestamp)
    def test_pack_action_finish(self):
        # Freeze "now" so finish_time is deterministic.
        timeutils.set_time_override(override_time=NOW)
        values = instance_action.InstanceAction.pack_action_finish(
            self.context, 'fake-uuid')
        self.assertEqual(values['request_id'], self.context.request_id)
        self.assertEqual(values['instance_uuid'], 'fake-uuid')
        self.assertEqual(values['finish_time'].replace(tzinfo=None), NOW)
    @mock.patch.object(db, 'action_start')
    def test_action_start(self, mock_start):
        test_class = instance_action.InstanceAction
        expected_packed_values = test_class.pack_action_start(
            self.context, 'fake-uuid', 'fake-action')
        mock_start.return_value = fake_action
        action = instance_action.InstanceAction.action_start(
            self.context, 'fake-uuid', 'fake-action', want_result=True)
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_values)
        self.compare_obj(action, fake_action)
    @mock.patch.object(db, 'action_start')
    def test_action_start_no_result(self, mock_start):
        test_class = instance_action.InstanceAction
        expected_packed_values = test_class.pack_action_start(
            self.context, 'fake-uuid', 'fake-action')
        mock_start.return_value = fake_action
        action = instance_action.InstanceAction.action_start(
            self.context, 'fake-uuid', 'fake-action', want_result=False)
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_values)
        # want_result=False suppresses object construction entirely.
        self.assertIsNone(action)
    @mock.patch.object(db, 'action_finish')
    def test_action_finish(self, mock_finish):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceAction
        expected_packed_values = test_class.pack_action_finish(
            self.context, 'fake-uuid')
        mock_finish.return_value = fake_action
        action = instance_action.InstanceAction.action_finish(
            self.context, 'fake-uuid', want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(action, fake_action)
    @mock.patch.object(db, 'action_finish')
    def test_action_finish_no_result(self, mock_finish):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceAction
        expected_packed_values = test_class.pack_action_finish(
            self.context, 'fake-uuid')
        mock_finish.return_value = fake_action
        action = instance_action.InstanceAction.action_finish(
            self.context, 'fake-uuid', want_result=False)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.assertIsNone(action)
    @mock.patch.object(db, 'action_finish')
    @mock.patch.object(db, 'action_start')
    def test_finish(self, mock_start, mock_finish):
        # End-to-end: start an action, then finish() it, asserting the
        # exact payloads handed to both db calls.
        timeutils.set_time_override(override_time=NOW)
        expected_packed_action_start = {
            'request_id': self.context.request_id,
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            'instance_uuid': 'fake-uuid',
            'action': 'fake-action',
            'start_time': self.context.timestamp,
        }
        expected_packed_action_finish = {
            'request_id': self.context.request_id,
            'instance_uuid': 'fake-uuid',
            'finish_time': NOW,
        }
        mock_start.return_value = fake_action
        mock_finish.return_value = fake_action
        action = instance_action.InstanceAction.action_start(
            self.context, 'fake-uuid', 'fake-action')
        action.finish()
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_action_start)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_action_finish)
        self.compare_obj(action, fake_action)
    @mock.patch.object(db, 'actions_get')
    def test_get_list(self, mock_get):
        fake_actions = [dict(fake_action, id=1234),
                        dict(fake_action, id=5678)]
        mock_get.return_value = fake_actions
        obj_list = instance_action.InstanceActionList.get_by_instance_uuid(
            self.context, 'fake-uuid')
        for index, action in enumerate(obj_list):
            self.compare_obj(action, fake_actions[index])
        mock_get.assert_called_once_with(self.context, 'fake-uuid')
# Run the shared InstanceAction tests against the local (in-process)
# object backend.
class TestInstanceActionObject(test_objects._LocalTest,
                               _TestInstanceActionObject):
    pass
# Run the same tests over the remote (RPC-serialized) object backend.
class TestRemoteInstanceActionObject(test_objects._RemoteTest,
                                     _TestInstanceActionObject):
    pass
class _TestInstanceActionEventObject(object):
    """Shared tests for InstanceActionEvent.

    Each test stubs the matching db API call and verifies the packed
    payloads and hydrated objects, including the traceback-serialization
    behavior of event_finish_with_failure().  Mixed into the local and
    remote backend test cases below.
    """
    @mock.patch.object(db, 'action_event_get_by_id')
    def test_get_by_id(self, mock_get):
        mock_get.return_value = fake_event
        event = instance_action.InstanceActionEvent.get_by_id(
            self.context, 'fake-action-id', 'fake-event-id')
        self.compare_obj(event, fake_event)
        mock_get.assert_called_once_with(self.context,
                                         'fake-action-id', 'fake-event-id')
    @mock.patch.object(db, 'action_event_start')
    def test_event_start(self, mock_start):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_start(
            self.context, 'fake-uuid', 'fake-event')
        mock_start.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_start(
            self.context, 'fake-uuid', 'fake-event', want_result=True)
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_values)
        self.compare_obj(event, fake_event)
    @mock.patch.object(db, 'action_event_start')
    def test_event_start_no_result(self, mock_start):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_start(
            self.context, 'fake-uuid', 'fake-event')
        mock_start.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_start(
            self.context, 'fake-uuid', 'fake-event', want_result=False)
        mock_start.assert_called_once_with(self.context,
                                           expected_packed_values)
        # want_result=False suppresses object construction entirely.
        self.assertIsNone(event)
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish(self, mock_finish):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event')
        # finish_time is stamped by the call under test; mirror it here.
        expected_packed_values['finish_time'] = timeutils.utcnow()
        mock_finish.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_finish(
            self.context, 'fake-uuid', 'fake-event', want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_no_result(self, mock_finish):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event')
        expected_packed_values['finish_time'] = timeutils.utcnow()
        mock_finish.return_value = fake_event
        event = instance_action.InstanceActionEvent.event_finish(
            self.context, 'fake-uuid', 'fake-event', want_result=False)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.assertIsNone(event)
    @mock.patch.object(traceback, 'format_tb')
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure(self, mock_finish, mock_tb):
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
        expected_packed_values['finish_time'] = timeutils.utcnow()
        mock_finish.return_value = fake_event
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb',
            want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)
    @mock.patch.object(traceback, 'format_tb')
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure_legacy(self, mock_finish, mock_tb):
        # Tests that exc_tb is serialized when it's not a string type.
        mock_tb.return_value = 'fake-tb'
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
        expected_packed_values['finish_time'] = timeutils.utcnow()
        mock_finish.return_value = fake_event
        fake_tb = mock.sentinel.fake_tb
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', exc_val='val',
            exc_tb=fake_tb, want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)
        # The non-string traceback must have been run through format_tb.
        mock_tb.assert_called_once_with(fake_tb)
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure_legacy_unicode(self, mock_finish):
        # Tests that traceback.format_tb is not called when exc_tb is unicode.
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val',
            six.text_type('fake-tb'))
        expected_packed_values['finish_time'] = timeutils.utcnow()
        mock_finish.return_value = fake_event
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', exc_val='val',
            exc_tb=six.text_type('fake-tb'), want_result=True)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.compare_obj(event, fake_event)
    @mock.patch.object(traceback, 'format_tb')
    @mock.patch.object(db, 'action_event_finish')
    def test_event_finish_with_failure_no_result(self, mock_finish, mock_tb):
        # Tests that traceback.format_tb is not called when exc_tb is a str
        # and want_result is False, so no event should come back.
        mock_tb.return_value = 'fake-tb'
        timeutils.set_time_override(override_time=NOW)
        test_class = instance_action.InstanceActionEvent
        expected_packed_values = test_class.pack_action_event_finish(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb')
        expected_packed_values['finish_time'] = timeutils.utcnow()
        mock_finish.return_value = fake_event
        event = test_class.event_finish_with_failure(
            self.context, 'fake-uuid', 'fake-event', 'val', 'fake-tb',
            want_result=False)
        mock_finish.assert_called_once_with(self.context,
                                            expected_packed_values)
        self.assertIsNone(event)
        self.assertFalse(mock_tb.called)
    @mock.patch.object(db, 'action_events_get')
    def test_get_by_action(self, mock_get):
        fake_events = [dict(fake_event, id=1234),
                       dict(fake_event, id=5678)]
        mock_get.return_value = fake_events
        obj_list = instance_action.InstanceActionEventList.get_by_action(
            self.context, 'fake-action-id')
        for index, event in enumerate(obj_list):
            self.compare_obj(event, fake_events[index])
        mock_get.assert_called_once_with(self.context, 'fake-action-id')
    @mock.patch('nova.objects.instance_action.InstanceActionEvent.'
                'pack_action_event_finish')
    @mock.patch('traceback.format_tb')
    def test_event_finish_with_failure_serialized(self, mock_format,
                                                  mock_pack):
        # exc_val must be stringified and exc_tb formatted before packing.
        mock_format.return_value = 'traceback'
        mock_pack.side_effect = test.TestingException
        self.assertRaises(
            test.TestingException,
            instance_action.InstanceActionEvent.event_finish_with_failure,
            self.context, 'fake-uuid', 'fake-event',
            exc_val=mock.sentinel.exc_val,
            exc_tb=mock.sentinel.exc_tb)
        mock_pack.assert_called_once_with(self.context, 'fake-uuid',
                                          'fake-event',
                                          exc_val=str(mock.sentinel.exc_val),
                                          exc_tb='traceback')
        mock_format.assert_called_once_with(mock.sentinel.exc_tb)
# Run the shared InstanceActionEvent tests against the local backend.
class TestInstanceActionEventObject(test_objects._LocalTest,
                                    _TestInstanceActionEventObject):
    pass
# Run the same tests over the remote (RPC-serialized) backend.
class TestRemoteInstanceActionEventObject(test_objects._RemoteTest,
                                          _TestInstanceActionEventObject):
    pass
|
antoinecarme/pyaf
|
refs/heads/master
|
tests/artificial/transf_Quantization/trend_LinearTrend/cycle_7/ar_12/test_artificial_128_Quantization_LinearTrend_7_12_100.py
|
1
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Generated benchmark case: 128 daily points with a linear trend,
# cycle length 7, AR order 12, a Quantization transform, no noise
# (sigma=0.0) and 100 exogenous series.
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 7, transform = "Quantization", sigma = 0.0, exog_count = 100, ar_order = 12);
|
sajeeshcs/nested_quota_final
|
refs/heads/master
|
nova/tests/unit/cells/test_cells_filters.py
|
30
|
# Copyright (c) 2012-2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for cells scheduler filters.
"""
from nova.cells import filters
from nova import context
from nova.db.sqlalchemy import models
from nova import test
from nova.tests.unit.cells import fakes
class FiltersTestCase(test.NoDBTestCase):
    """Makes sure the proper filters are in the directory."""

    def test_all_filters(self):
        # Collect the class names of every discovered filter and check
        # that the target-cell filter is among them.
        discovered = [cls.__name__ for cls in filters.all_filters()]
        self.assertIn("TargetCellFilter", discovered)
class _FilterTestClass(test.NoDBTestCase):
    """Base class for testing individual filter plugins."""
    # Dotted path of the filter class under test; set by subclasses.
    filter_cls_name = None
    def setUp(self):
        super(_FilterTestClass, self).setUp()
        fakes.init(self)
        # All messages are routed from the API cell's message runner.
        self.msg_runner = fakes.get_message_runner('api-cell')
        self.scheduler = self.msg_runner.scheduler
        self.my_cell_state = self.msg_runner.state_manager.get_my_state()
        # Instantiate only the single filter class named by the subclass.
        self.filter_handler = filters.CellFilterHandler()
        filter_classes = self.filter_handler.get_matching_classes(
            [self.filter_cls_name])
        self.filters = [cls() for cls in filter_classes]
        self.context = context.RequestContext('fake', 'fake',
                                              is_admin=True)
    def _filter_cells(self, cells, filter_properties):
        # Convenience wrapper: run *cells* through the configured filters.
        return self.filter_handler.get_filtered_objects(self.filters,
                                                        cells,
                                                        filter_properties)
class ImagePropertiesFilter(_FilterTestClass):
    """Tests for the image-properties cell filter: matching an image's
    hypervisor_version_requires property against cell capabilities.
    """
    filter_cls_name = \
        'nova.cells.filters.image_properties.ImagePropertiesFilter'
    def setUp(self):
        super(ImagePropertiesFilter, self).setUp()
        # Three cells with empty capabilities by default.
        self.cell1 = models.Cell()
        self.cell2 = models.Cell()
        self.cell3 = models.Cell()
        self.cells = [self.cell1, self.cell2, self.cell3]
        for cell in self.cells:
            cell.capabilities = {}
        self.filter_props = {'context': self.context, 'request_spec': {}}
    def test_missing_image_properties(self):
        # No image properties in the request: no cell is filtered out.
        self.assertEqual(self.cells,
                         self._filter_cells(self.cells, self.filter_props))
    def test_missing_hypervisor_version_requires(self):
        # Image present but without a version requirement: no filtering.
        self.filter_props['request_spec'] = {'image': {'properties': {}}}
        for cell in self.cells:
            cell.capabilities = {"prominent_hypervisor_version": set([u"6.2"])}
        self.assertEqual(self.cells,
                         self._filter_cells(self.cells, self.filter_props))
    def test_missing_hypervisor_version_in_cells(self):
        # A cell that advertises no version is not filtered out.
        image = {'properties': {'hypervisor_version_requires': '>6.2.1'}}
        self.filter_props['request_spec'] = {'image': image}
        self.cell1.capabilities = {"prominent_hypervisor_version": set([])}
        self.assertEqual(self.cells,
                         self._filter_cells(self.cells, self.filter_props))
    def test_cells_matching_hypervisor_version(self):
        # Only cells whose advertised version satisfies the requirement
        # ('>6.0, <=6.3') remain.
        image = {'properties': {'hypervisor_version_requires': '>6.0, <=6.3'}}
        self.filter_props['request_spec'] = {'image': image}
        self.cell1.capabilities = {"prominent_hypervisor_version":
                                   set([u"6.2"])}
        self.cell2.capabilities = {"prominent_hypervisor_version":
                                   set([u"6.3"])}
        self.cell3.capabilities = {"prominent_hypervisor_version":
                                   set([u"6.0"])}
        self.assertEqual([self.cell1, self.cell2],
                         self._filter_cells(self.cells, self.filter_props))
        # assert again to verify filter doesn't mutate state
        # LP bug #1325705
        self.assertEqual([self.cell1, self.cell2],
                         self._filter_cells(self.cells, self.filter_props))
class TestTargetCellFilter(_FilterTestClass):
    """Tests for the target_cell scheduler-hint filter."""
    filter_cls_name = 'nova.cells.filters.target_cell.TargetCellFilter'
    def test_missing_scheduler_hints(self):
        cells = [1, 2, 3]
        # No filtering
        filter_props = {'context': self.context}
        self.assertEqual(cells, self._filter_cells(cells, filter_props))
    def test_no_target_cell_hint(self):
        cells = [1, 2, 3]
        filter_props = {'scheduler_hints': {},
                        'context': self.context}
        # No filtering
        self.assertEqual(cells, self._filter_cells(cells, filter_props))
    def test_target_cell_specified_me(self):
        # Hint targets the current cell: filter narrows to just this cell.
        cells = [1, 2, 3]
        target_cell = 'fake!cell!path'
        current_cell = 'fake!cell!path'
        filter_props = {'scheduler_hints': {'target_cell': target_cell},
                        'routing_path': current_cell,
                        'scheduler': self.scheduler,
                        'context': self.context}
        # Only myself in the list.
        self.assertEqual([self.my_cell_state],
                         self._filter_cells(cells, filter_props))
    def test_target_cell_specified_me_but_not_admin(self):
        # The target_cell hint is honored only for admin contexts.
        ctxt = context.RequestContext('fake', 'fake')
        cells = [1, 2, 3]
        target_cell = 'fake!cell!path'
        current_cell = 'fake!cell!path'
        filter_props = {'scheduler_hints': {'target_cell': target_cell},
                        'routing_path': current_cell,
                        'scheduler': self.scheduler,
                        'context': ctxt}
        # No filtering, because not an admin.
        self.assertEqual(cells, self._filter_cells(cells, filter_props))
    def test_target_cell_specified_not_me(self):
        info = {}
        def _fake_build_instances(ctxt, cell, sched_kwargs):
            # Capture the re-scheduled build so we can assert on it below.
            info['ctxt'] = ctxt
            info['cell'] = cell
            info['sched_kwargs'] = sched_kwargs
        self.stubs.Set(self.msg_runner, 'build_instances',
                       _fake_build_instances)
        cells = [1, 2, 3]
        target_cell = 'fake!cell!path'
        current_cell = 'not!the!same'
        filter_props = {'scheduler_hints': {'target_cell': target_cell},
                        'routing_path': current_cell,
                        'scheduler': self.scheduler,
                        'context': self.context,
                        'host_sched_kwargs': 'meow'}
        # None is returned to bypass further scheduling.
        self.assertIsNone(self._filter_cells(cells, filter_props))
        # The filter should have re-scheduled to the child cell itself.
        expected_info = {'ctxt': self.context,
                         'cell': 'fake!cell!path',
                         'sched_kwargs': 'meow'}
        self.assertEqual(expected_info, info)
|
nickster5001/ctracker
|
refs/heads/master
|
flask/lib/python3.4/site-packages/sqlparse/functions.py
|
127
|
'''
Created on 17/05/2012
@author: piranna
Several utility functions to extract info from the SQL sentences
'''
from sqlparse.filters import ColumnsSelect, Limit
from sqlparse.pipeline import Pipeline
from sqlparse.tokens import Keyword, Whitespace
def getlimit(stream):
    """Extract the LIMIT of a tokenized SQL statement.

    Returns the limit as an int when it parses as one, otherwise the
    raw value produced by the Limit filter.
    """
    pipeline = Pipeline()
    pipeline.append(Limit())
    limit = pipeline(stream)
    try:
        limit = int(limit)
    except ValueError:
        # Non-numeric limit (e.g. missing): hand back the raw result.
        pass
    return limit
def getcolumns(stream):
    """Return the columns selected by a SELECT query."""
    extractor = Pipeline()
    extractor.append(ColumnsSelect())
    return extractor(stream)
class IsType(object):
    """Callable that tests whether a statement is of a given type.

    The first non-whitespace token decides: the statement matches when
    that token is a keyword whose value equals the configured type.
    """
    def __init__(self, type):
        # Keyword value (e.g. 'SELECT') this functor matches against.
        self.type = type
    def __call__(self, stream):
        for token_type, value in stream:
            if token_type in Whitespace:
                continue
            # First significant token: check keyword kind and value.
            return token_type in Keyword and value == self.type
|
chbrown/pybtex
|
refs/heads/master
|
pybtex/textutils.py
|
4
|
# Copyright (c) 2006, 2007, 2008, 2009, 2010, 2011, 2012 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import re
# Characters that end a sentence or abbreviation.
terminators = '.?!'
# Pre-compiled patterns used by the helpers in this module.
# NOTE(review): dash_re appears unused in this module's visible code.
dash_re = re.compile(r'-')
whitespace_re = re.compile(r'\s+')
def capfirst(s):
    """Capitalize the first character of *s*, leaving the rest untouched."""
    if not s:
        return s
    return s[0].upper() + s[1:]
def is_terminated(s):
    """Return True if *s* is non-empty and ends with a terminating character."""
    if not s:
        return False
    return s[-1] in terminators
def add_period(s):
    """Append a period to *s* unless it already ends with a terminator.

    Empty strings are returned unchanged.
    """
    if not s or is_terminated(s):
        return s
    return s + '.'
def abbreviate(s):
"""Abbreviate some text.
Examples:
abbreviate('Some words') -> "S. w."
abbreviate('First-Second') -> "F.-S."
"""
def parts(s):
start = 0
length = 0
for letter in s:
length += 1
if not letter.isalpha():
yield s[start:length], letter
start += length
length = 0
yield s[start:length], ""
def abbr(part):
if part[0]:
if is_terminated(part[1]):
return part[0][0].upper() + part[1]
else:
return part[0][0].upper() + '.'
else:
return part[1]
return ''.join(abbr(part) for part in parts(s))
def normalize_whitespace(string):
    """Collapse each run of whitespace characters into a single space.

    Leading and trailing whitespace is removed entirely, e.g.
    ' Abc \r\n\tdef.' becomes 'Abc def.'.
    """
    stripped = string.strip()
    return whitespace_re.sub(' ', stripped)
|
leppa/home-assistant
|
refs/heads/dev
|
tests/components/demo/test_water_heater.py
|
3
|
"""The tests for the demo water_heater component."""
import unittest
import pytest
import voluptuous as vol
from homeassistant.components import water_heater
from homeassistant.setup import setup_component
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from tests.common import get_test_home_assistant
from tests.components.water_heater import common
# Entity ids of the demo water heaters the tests below operate on.
ENTITY_WATER_HEATER = "water_heater.demo_water_heater"
ENTITY_WATER_HEATER_CELSIUS = "water_heater.demo_water_heater_celsius"
class TestDemowater_heater(unittest.TestCase):
    """Tests for the demo water_heater platform."""
    def setUp(self):  # pylint: disable=invalid-name
        """Spin up a test Home Assistant with the demo platform loaded."""
        self.hass = get_test_home_assistant()
        self.hass.config.units = IMPERIAL_SYSTEM
        assert setup_component(
            self.hass, water_heater.DOMAIN, {"water_heater": {"platform": "demo"}}
        )
    def tearDown(self):  # pylint: disable=invalid-name
        """Shut the test Home Assistant instance back down."""
        self.hass.stop()
    def test_setup_params(self):
        """The demo entity starts out with the expected attributes."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("temperature") == 119
        assert state.attributes.get("away_mode") == "off"
        assert state.attributes.get("operation_mode") == "eco"
    def test_default_setup_params(self):
        """The demo entity exposes its default temperature range."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("min_temp") == 110
        assert state.attributes.get("max_temp") == 140
    def test_set_only_target_temp_bad_attr(self):
        """A None target temperature is rejected and changes nothing."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("temperature") == 119
        with pytest.raises(vol.Invalid):
            common.set_temperature(self.hass, None, ENTITY_WATER_HEATER)
        self.hass.block_till_done()
        assert state.attributes.get("temperature") == 119
    def test_set_only_target_temp(self):
        """A valid target temperature is applied to the entity."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("temperature") == 119
        common.set_temperature(self.hass, 110, ENTITY_WATER_HEATER)
        self.hass.block_till_done()
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("temperature") == 110
    def test_set_operation_bad_attr_and_state(self):
        """A None operation mode is rejected; mode and state stay put."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("operation_mode") == "eco"
        assert state.state == "eco"
        with pytest.raises(vol.Invalid):
            common.set_operation_mode(self.hass, None, ENTITY_WATER_HEATER)
        self.hass.block_till_done()
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("operation_mode") == "eco"
        assert state.state == "eco"
    def test_set_operation(self):
        """A valid operation mode updates both attribute and state."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("operation_mode") == "eco"
        assert state.state == "eco"
        common.set_operation_mode(self.hass, "electric", ENTITY_WATER_HEATER)
        self.hass.block_till_done()
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("operation_mode") == "electric"
        assert state.state == "electric"
    def test_set_away_mode_bad_attr(self):
        """A None away mode is rejected and leaves the mode untouched."""
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("away_mode") == "off"
        with pytest.raises(vol.Invalid):
            common.set_away_mode(self.hass, None, ENTITY_WATER_HEATER)
        self.hass.block_till_done()
        assert state.attributes.get("away_mode") == "off"
    def test_set_away_mode_on(self):
        """Turning away mode on is reflected in the entity attributes."""
        common.set_away_mode(self.hass, True, ENTITY_WATER_HEATER)
        self.hass.block_till_done()
        state = self.hass.states.get(ENTITY_WATER_HEATER)
        assert state.attributes.get("away_mode") == "on"
    def test_set_away_mode_off(self):
        """Turning away mode off is reflected in the entity attributes."""
        common.set_away_mode(self.hass, False, ENTITY_WATER_HEATER_CELSIUS)
        self.hass.block_till_done()
        state = self.hass.states.get(ENTITY_WATER_HEATER_CELSIUS)
        assert state.attributes.get("away_mode") == "off"
    def test_set_only_target_temp_with_convert(self):
        """Set the target temperature on the Celsius demo entity."""
        state = self.hass.states.get(ENTITY_WATER_HEATER_CELSIUS)
        assert state.attributes.get("temperature") == 113
        common.set_temperature(self.hass, 114, ENTITY_WATER_HEATER_CELSIUS)
        self.hass.block_till_done()
        state = self.hass.states.get(ENTITY_WATER_HEATER_CELSIUS)
        assert state.attributes.get("temperature") == 114
|
ajoaoff/django
|
refs/heads/master
|
tests/migrations2/test_migrations_2_first/__init__.py
|
12133432
| |
leonardoo/django-pipeline
|
refs/heads/master
|
tests/models.py
|
12133432
| |
SoundMoose/SoundMoose
|
refs/heads/master
|
server/project/__init__.py
|
12133432
| |
ikaee/bfr-attendant
|
refs/heads/master
|
facerecognitionlibrary/jni-build/jni/include/tensorflow/tensorboard/backend/__init__.py
|
12133432
| |
gennad/Django-nonrel-stub-for-Google-App-Engine
|
refs/heads/master
|
django/contrib/sessions/backends/cache.py
|
268
|
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.cache import cache
class SessionStore(SessionBase):
    """
    A cache-based session store.

    Session data lives solely in the configured Django cache backend, so
    sessions silently disappear whenever the cache evicts or loses entries.
    """
    def __init__(self, session_key=None):
        # Bind the default cache backend for this store instance.
        self._cache = cache
        super(SessionStore, self).__init__(session_key)
    def load(self):
        """Return the session dict from the cache, or start a fresh session."""
        session_data = self._cache.get(self.session_key)
        if session_data is not None:
            return session_data
        # Cache miss (expired, evicted or unknown key): allocate a new
        # session key and present an empty session.
        self.create()
        return {}
    def create(self):
        """Allocate a new, unused session key and persist an empty session."""
        # Because a cache can fail silently (e.g. memcache), we don't know if
        # we are failing to create a new session because of a key collision or
        # because the cache is missing. So we try for a (large) number of times
        # and then raise an exception. That's the risk you shoulder if using
        # cache backing.
        for _ in xrange(10000):
            self.session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key collision (or indistinguishable cache failure): retry.
                continue
            self.modified = True
            return
        raise RuntimeError("Unable to create a new session key.")
    def save(self, must_create=False):
        """Write the session to the cache.

        When ``must_create`` is True, use cache.add so an already-existing
        key raises CreateError instead of being overwritten.
        """
        if must_create:
            func = self._cache.add
        else:
            func = self._cache.set
        result = func(self.session_key, self._get_session(no_load=must_create),
                self.get_expiry_age())
        if must_create and not result:
            raise CreateError
    def exists(self, session_key):
        """Return True if the given key currently exists in the cache."""
        # Membership test instead of the deprecated dict-style has_key().
        return session_key in self._cache
    def delete(self, session_key=None):
        """Remove a session from the cache.

        With no argument, delete this store's own session if it has a key.
        """
        if session_key is None:
            if self._session_key is None:
                return
            session_key = self._session_key
        self._cache.delete(session_key)
|
forrestv/myhdl
|
refs/heads/master
|
myhdl/test/core/test_Cosimulation.py
|
2
|
# This file is part of the myhdl library, a Python package for using
# Python as a Hardware Description Language.
#
# Copyright (C) 2003-2008 Jan Decaluwe
#
# The myhdl library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" Run unit tests for Cosimulation """
import sys
import os
import errno
import unittest
from unittest import TestCase
import random
from random import randrange
random.seed(1) # random, but deterministic
MAXLINE = 4096  # maximum number of bytes per os.read() on the cosim pipes
from myhdl import Signal
from myhdl._Cosimulation import Cosimulation, CosimulationError, _error
# Command line used to spawn this very file as the cosimulation child.
exe = "python test_Cosimulation.py CosimulationTest"
# Signals declared in the FROM direction (simulator -> myhdl).
fromSignames = ['a', 'bb', 'ccc']
fromSizes = [1, 11, 63]
fromVals = [0x2, 0x43, 0x24]
fromSigs = dict((name, Signal(val))
                for name, val in zip(fromSignames, fromVals))
# Signals declared in the TO direction (myhdl -> simulator).
toSignames = ['d', 'ee', 'fff', 'g']
toSizes = [32, 12, 3, 6]
toSigs = dict((name, Signal(0)) for name in toSignames)
toVals = [0x3, 0x45, 0x14, 0x12]
# Values containing X/Z/U characters, i.e. unknown logic levels.
toXVals = ["X00", "FZ3", "34XZ", "56U"]
# Combined name -> Signal map handed to Cosimulation as **kwargs.
allSigs = dict(fromSigs)
allSigs.update(toSigs)
class CosimulationTest(TestCase):
    """Tests for the Cosimulation pipe handshake.
    Each test* method spawns this very file in a child process (see the
    module-level ``exe`` command line); the matching cosim* method runs
    in that child and plays the simulator side of the protocol over the
    MYHDL_TO_PIPE / MYHDL_FROM_PIPE file descriptors.
    """
    def testWrongExe(self):
        # A nonsense command must surface as OSError or SimulationEnd.
        try:
            Cosimulation("bla -x 45")
        except CosimulationError, e:
            self.assert_(e.kind in(_error.OSError, _error.SimulationEnd))
        else:
            self.fail()
    def testNotUnique(self):
        # Opening a second Cosimulation while one is alive must fail.
        cosim1 = Cosimulation(exe + ".cosimNotUnique", **allSigs)
        try:
            Cosimulation(exe + ".cosimNotUnique", **allSigs)
        except CosimulationError, e:
            self.assertEqual(e.kind, _error.MultipleCosim)
        else:
            self.fail()
    def cosimNotUnique(self):
        # Child side of testNotUnique: minimal valid handshake.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        os.write(wt, "TO 00 a 1")
        os.read(rf, MAXLINE)
        os.write(wt, "FROM 00 d 1")
        os.read(rf, MAXLINE)
        os.write(wt, "START")
        os.read(rf, MAXLINE)
    def testFromSignals(self):
        # FROM declarations must be reflected in _fromSignames/_fromSizes.
        cosim = Cosimulation(exe + ".cosimFromSignals", **allSigs)
        self.assertEqual(cosim._fromSignames, fromSignames)
        self.assertEqual(cosim._fromSizes, fromSizes)
    def cosimFromSignals(self):
        # Child side of testFromSignals: declare all FROM signals, one TO.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "FROM 00 "
        for s, w in zip(fromSignames, fromSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        os.write(wt, "TO 0000 a 1")
        os.read(rf, MAXLINE)
        os.write(wt, "START")
        os.read(rf, MAXLINE)
    def testToSignals(self):
        # TO-only declarations: the FROM bookkeeping lists stay empty.
        cosim = Cosimulation(exe + ".cosimToSignals", **toSigs)
        self.assertEqual(cosim._fromSignames, [])
        self.assertEqual(cosim._fromSizes, [])
        self.assertEqual(cosim._toSignames, toSignames)
        self.assertEqual(cosim._toSizes, toSizes)
    def cosimToSignals(self):
        # Child side of testToSignals.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "TO 00 "
        for s, w in zip(toSignames, toSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        os.write(wt, "FROM 0000")
        os.read(rf, MAXLINE)
        os.write(wt, "START")
        os.read(rf, MAXLINE)
    def testFromToSignals(self):
        # Declaring both directions populates all four bookkeeping lists.
        cosim = Cosimulation(exe + ".cosimFromToSignals", **allSigs)
        self.assertEqual(cosim._fromSignames, fromSignames)
        self.assertEqual(cosim._fromSizes, fromSizes)
        self.assertEqual(cosim._toSignames, toSignames)
        self.assertEqual(cosim._toSizes, toSizes)
    def cosimFromToSignals(self):
        # Child side of testFromToSignals.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "FROM 00 "
        for s, w in zip(fromSignames, fromSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        buf = "TO 00 "
        for s, w in zip(toSignames, toSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        os.write(wt, "START")
        os.read(rf, MAXLINE)
    def testTimeZero(self):
        # A declaration at a nonzero time ("TO 01 ...") must be rejected.
        # NOTE(review): there is no else clause, so a construction that
        # unexpectedly succeeds would pass silently.
        try:
            Cosimulation(exe + ".cosimTimeZero", **allSigs)
        except CosimulationError, e:
            self.assertEqual(e.kind, _error.TimeZero)
        except:
            self.fail()
    def cosimTimeZero(self):
        # Child side of testTimeZero: declare signals at time 01.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "TO 01 "
        for s, w in zip(fromSignames, fromSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
    def testNoComm(self):
        # Declaring no signals in either direction is an error.
        try:
            Cosimulation(exe + ".cosimNoComm", **allSigs)
        except CosimulationError, e:
            self.assertEqual(e.kind, _error.NoCommunication)
        else:
            self.fail()
    def cosimNoComm(self):
        # Child side of testNoComm: empty FROM and TO declarations.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        os.write(wt, "FROM 0000")
        os.read(rf, MAXLINE)
        os.write(wt, "TO 0000")
        os.read(rf, MAXLINE)
        os.write(wt, "START ")
        os.read(rf, MAXLINE)
    def testFromSignalsDupl(self):
        # Duplicate names in the FROM declaration must be rejected.
        try:
            Cosimulation(exe + ".cosimFromSignalsDupl", **allSigs)
        except CosimulationError, e:
            self.assertEqual(e.kind, _error.DuplicateSigNames)
        else:
            self.fail()
    def cosimFromSignalsDupl(self):
        # Child side of testFromSignalsDupl: repeat signal 'bb'.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "FROM 00 "
        for s, w in zip(fromSignames, fromSizes):
            buf += "%s %s " % (s, w)
        buf += "bb 5"
        os.write(wt, buf)
    def testToSignalsDupl(self):
        # Duplicate names in the TO declaration must be rejected.
        try:
            Cosimulation(exe + ".cosimToSignalsDupl", **allSigs)
        except CosimulationError, e:
            self.assertEqual(e.kind, _error.DuplicateSigNames)
        else:
            self.fail()
    def cosimToSignalsDupl(self):
        # Child side of testToSignalsDupl: repeat signal 'fff'.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "TO 00 "
        for s, w in zip(toSignames, toSizes):
            buf += "%s %s " % (s, w)
        buf += "fff 6"
        os.write(wt, buf)
    def testFromSignalVals(self):
        # _put() must transmit the current FROM signal values to the
        # child, which asserts on them (see cosimFromSignalVals).
        cosim = Cosimulation(exe + ".cosimFromSignalVals", **allSigs)
        os.read(cosim._rt, MAXLINE)
        cosim._hasChange = 1
        cosim._put(0)
    def cosimFromSignalVals(self):
        # Child side of testFromSignalVals: check the transmitted values.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "FROM 00 "
        for s, w in zip(fromSignames, fromSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        os.write(wt, "TO 0000 a 1")
        os.read(rf, MAXLINE)
        os.write(wt, "START")
        os.read(rf, MAXLINE)
        os.write(wt, "DUMMY")
        s = os.read(rf, MAXLINE)
        # Values arrive hex-encoded after the time field.
        vals = [long(e, 16) for e in s.split()[1:]]
        self.assertEqual(vals, fromVals)
    def testToSignalVals(self):
        # _get() applies values sent by the child to the TO signals;
        # values containing X/Z/U letters must read back as 0.
        cosim = Cosimulation(exe + ".cosimToSignalVals", **allSigs)
        for n in toSignames:
            self.assertEqual(toSigs[n].next, 0)
        cosim._get()
        for n, v in zip(toSignames, toVals):
            self.assertEqual(toSigs[n].next, v)
        os.write(cosim._wf, "DUMMY")
        cosim._getMode = 1
        cosim._get()
        for n in toSignames:
            self.assertEqual(toSigs[n].next, 0)
    def cosimToSignalVals(self):
        # Child side of testToSignalVals: send clean values, then X/Z/U ones.
        wt = int(os.environ['MYHDL_TO_PIPE'])
        rf = int(os.environ['MYHDL_FROM_PIPE'])
        buf = "FROM 00 "
        for s, w in zip(fromSignames, fromSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        buf = "TO 00 "
        for s, w in zip(toSignames, toSizes):
            buf += "%s %s " % (s, w)
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        os.write(wt, "START")
        os.read(rf, MAXLINE)
        buf = "0 "
        for s, v in zip(toSignames, toVals):
            buf += s
            buf += " "
            buf += hex(v)[2:]
            buf += " "
        os.write(wt, buf)
        os.read(rf, MAXLINE)
        buf = "0 "
        for s, v in zip(toSignames, toXVals):
            buf += s
            buf += " "
            buf += v
            buf += " "
        os.write(wt, buf)
def suite():
    """Collect every test* method of CosimulationTest into a suite."""
    loader = unittest.TestLoader()
    loader.testMethodPrefix = 'test'
    return loader.loadTestsFromTestCase(CosimulationTest)
# Direct invocation runs the test* methods; the cosim* methods run only in
# the child processes the tests spawn.
if __name__ == "__main__":
    unittest.main()
|
fyfcauc/android_external_chromium-org
|
refs/heads/du44
|
chrome/common/extensions/docs/server2/mock_file_system_test.py
|
24
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from copy import deepcopy
from file_system import FileNotFoundError, StatInfo
from mock_file_system import MockFileSystem
from test_file_system import TestFileSystem
import unittest
# In-memory file tree backing every test below: dict values are
# directories, string values are file contents.
_TEST_DATA = {
  '404.html': '404.html contents',
  'apps': {
    'a11y.html': 'a11y.html contents',
    'about_apps.html': 'about_apps.html contents',
    'fakedir': {
      'file.html': 'file.html contents'
    }
  },
  'extensions': {
    'activeTab.html': 'activeTab.html contents',
    'alarms.html': 'alarms.html contents'
  }
}
def _Get(fn):
'''Returns a function which calls Future.Get on the result of |fn|.
'''
return lambda *args: fn(*args).Get()
class MockFileSystemTest(unittest.TestCase):
  """Tests for MockFileSystem's operation counting and Update() stat bumps."""
  def testCheckAndReset(self):
    # CheckAndReset() verifies the read/stat counters accumulated since the
    # previous call and then resets them.  It appears to return a
    # (success, message) pair — hence the '*' unpacking into assertTrue.
    fs = MockFileSystem(TestFileSystem(deepcopy(_TEST_DATA)))
    self.assertTrue(*fs.CheckAndReset())
    # Expecting counts without having performed any operations must fail.
    self.assertFalse(*fs.CheckAndReset(read_count=1))
    self.assertFalse(*fs.CheckAndReset(stat_count=1))
    # A read counts when issued; its resolution counts when Get() runs.
    future = fs.ReadSingle('apps/')
    self.assertTrue(*fs.CheckAndReset(read_count=1))
    future.Get()
    self.assertTrue(*fs.CheckAndReset(read_resolve_count=1))
    self.assertFalse(*fs.CheckAndReset(read_count=1))
    self.assertTrue(*fs.CheckAndReset())
    future = fs.ReadSingle('apps/')
    self.assertFalse(*fs.CheckAndReset(read_count=2))
    future.Get()
    self.assertFalse(*fs.CheckAndReset(read_resolve_count=2))
    fs.ReadSingle('extensions/').Get()
    fs.ReadSingle('extensions/').Get()
    self.assertTrue(*fs.CheckAndReset(read_count=2, read_resolve_count=2))
    self.assertFalse(*fs.CheckAndReset(read_count=2, read_resolve_count=2))
    self.assertTrue(*fs.CheckAndReset())
    fs.ReadSingle('404.html').Get()
    self.assertTrue(*fs.CheckAndReset(read_count=1, read_resolve_count=1))
    # Failed reads still count: the read is issued, its resolution raises.
    future = fs.Read(['notfound.html', 'apps/'])
    self.assertTrue(*fs.CheckAndReset(read_count=1))
    self.assertRaises(FileNotFoundError, future.Get)
    self.assertTrue(*fs.CheckAndReset(read_resolve_count=1))
    fs.Stat('404.html')
    fs.Stat('404.html')
    fs.Stat('apps/')
    # A failed CheckAndReset still resets the counters (the second call
    # with the same wrong expectation fails too, and the final bare call
    # then passes).
    self.assertFalse(*fs.CheckAndReset(stat_count=42))
    self.assertFalse(*fs.CheckAndReset(stat_count=42))
    self.assertTrue(*fs.CheckAndReset())
    fs.ReadSingle('404.html').Get()
    fs.Stat('404.html')
    fs.Stat('apps/')
    self.assertTrue(
        *fs.CheckAndReset(read_count=1, read_resolve_count=1, stat_count=2))
    self.assertTrue(*fs.CheckAndReset())
  def testUpdates(self):
    # Update() bumps the version of the changed entries and of every
    # ancestor directory, while sibling entries keep their old versions.
    fs = MockFileSystem(TestFileSystem(deepcopy(_TEST_DATA)))
    self.assertEqual(StatInfo('0', child_versions={
      '404.html': '0',
      'apps/': '0',
      'extensions/': '0'
    }), fs.Stat('/'))
    self.assertEqual(StatInfo('0'), fs.Stat('404.html'))
    self.assertEqual(StatInfo('0', child_versions={
      'a11y.html': '0',
      'about_apps.html': '0',
      'fakedir/': '0',
    }), fs.Stat('apps/'))
    self.assertEqual('404.html contents', fs.ReadSingle('404.html').Get())
    fs.Update({
      '404.html': 'New version!'
    })
    # Only 404.html and the root advanced to version 1.
    self.assertEqual(StatInfo('1', child_versions={
      '404.html': '1',
      'apps/': '0',
      'extensions/': '0'
    }), fs.Stat('/'))
    self.assertEqual(StatInfo('1'), fs.Stat('404.html'))
    self.assertEqual(StatInfo('0', child_versions={
      'a11y.html': '0',
      'about_apps.html': '0',
      'fakedir/': '0',
    }), fs.Stat('apps/'))
    self.assertEqual('New version!', fs.ReadSingle('404.html').Get())
    fs.Update({
      '404.html': 'Newer version!',
      'apps': {
        'fakedir': {
          'file.html': 'yo'
        }
      }
    })
    # Updated entries and their ancestors move to version 2; untouched
    # siblings (a11y.html, extensions/) stay at their previous versions.
    self.assertEqual(StatInfo('2', child_versions={
      '404.html': '2',
      'apps/': '2',
      'extensions/': '0'
    }), fs.Stat('/'))
    self.assertEqual(StatInfo('2'), fs.Stat('404.html'))
    self.assertEqual(StatInfo('2', child_versions={
      'a11y.html': '0',
      'about_apps.html': '0',
      'fakedir/': '2',
    }), fs.Stat('apps/'))
    self.assertEqual(StatInfo('0'), fs.Stat('apps/a11y.html'))
    self.assertEqual(StatInfo('2', child_versions={
      'file.html': '2'
    }), fs.Stat('apps/fakedir/'))
    self.assertEqual(StatInfo('2'), fs.Stat('apps/fakedir/file.html'))
    self.assertEqual(StatInfo('0', child_versions={
      'activeTab.html': '0',
      'alarms.html': '0'
    }), fs.Stat('extensions/'))
    self.assertEqual('Newer version!', fs.ReadSingle('404.html').Get())
    self.assertEqual('yo', fs.ReadSingle('apps/fakedir/file.html').Get())
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
|
nicobustillos/odoo
|
refs/heads/8.0
|
addons/warning/warning.py
|
73
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp.tools.translate import _
# Selection values shared by every *_warn field defined in this module.
WARNING_MESSAGE = [
    ('no-message','No Message'),
    ('warning','Warning'),
    ('block','Blocking Message')
]
# Shared help text for the *_warn selection fields.
WARNING_HELP = _('Selecting the "Warning" option will notify user with the message, Selecting "Blocking Message" will throw an exception with the message and block the flow. The Message has to be written in the next field.')
class res_partner(osv.osv):
    # Extends partners with per-document warning configuration: for each
    # document type a selection (no-message / warning / block) plus the
    # message text shown when the warning fires.
    _inherit = 'res.partner'
    _columns = {
        'sale_warn' : fields.selection(WARNING_MESSAGE, 'Sales Order', help=WARNING_HELP, required=True),
        'sale_warn_msg' : fields.text('Message for Sales Order'),
        'purchase_warn' : fields.selection(WARNING_MESSAGE, 'Purchase Order', help=WARNING_HELP, required=True),
        'purchase_warn_msg' : fields.text('Message for Purchase Order'),
        'picking_warn' : fields.selection(WARNING_MESSAGE, 'Stock Picking', help=WARNING_HELP, required=True),
        'picking_warn_msg' : fields.text('Message for Stock Picking'),
        'invoice_warn' : fields.selection(WARNING_MESSAGE, 'Invoice', help=WARNING_HELP, required=True),
        'invoice_warn_msg' : fields.text('Message for Invoice'),
    }
    _defaults = {
        'sale_warn' : 'no-message',
        'purchase_warn' : 'no-message',
        'picking_warn' : 'no-message',
        'invoice_warn' : 'no-message',
    }
class sale_order(osv.osv):
    _inherit = 'sale.order'
    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        """Prepend the partner's sales warning to the standard partner
        onchange result; a 'block' warning also clears partner_id."""
        if not part:
            # No partner selected: reset the dependent fields.
            return {'value': {'partner_invoice_id': False,
                              'partner_shipping_id': False,
                              'payment_term': False}}
        title = False
        message = False
        warning = {}
        partner_model = self.pool.get('res.partner')
        partner = partner_model.browse(cr, uid, part, context=context)
        if partner.sale_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.sale_warn_msg
            warning = {'title': title, 'message': message}
            if partner.sale_warn == 'block':
                # Blocking warning: refuse the partner outright.
                return {'value': {'partner_id': False}, 'warning': warning}
        result = super(sale_order, self).onchange_partner_id(cr, uid, ids, part, context=context)
        upstream = result.get('warning', False)
        if upstream:
            # Merge our warning (if any) with the parent implementation's.
            warning['title'] = title and title + ' & ' + upstream['title'] or upstream['title']
            warning['message'] = message and message + ' ' + upstream['message'] or upstream['message']
        return {'value': result.get('value', {}), 'warning': warning}
class purchase_order(osv.osv):
    _inherit = 'purchase.order'
    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        """Prepend the partner's purchase warning to the standard partner
        onchange result; a 'block' warning also clears partner_id."""
        if not part:
            # No supplier chosen: reset the dependent address field.
            return {'value': {'partner_address_id': False}}
        title = False
        message = False
        warning = {}
        partner_model = self.pool.get('res.partner')
        partner = partner_model.browse(cr, uid, part, context=context)
        if partner.purchase_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.purchase_warn_msg
            warning = {'title': title, 'message': message}
            if partner.purchase_warn == 'block':
                # Blocking warning: refuse the supplier outright.
                return {'value': {'partner_id': False}, 'warning': warning}
        result = super(purchase_order, self).onchange_partner_id(cr, uid, ids, part, context=context)
        upstream = result.get('warning', False)
        if upstream:
            # Merge our warning (if any) with the parent implementation's.
            warning['title'] = title and title + ' & ' + upstream['title'] or upstream['title']
            warning['message'] = message and message + ' ' + upstream['message'] or upstream['message']
        return {'value': result.get('value', {}), 'warning': warning}
class account_invoice(osv.osv):
    _inherit = 'account.invoice'
    def onchange_partner_id(self, cr, uid, ids, type, partner_id,
            date_invoice=False, payment_term=False,
            partner_bank_id=False, company_id=False,
            context=None):
        """Prepend the partner's invoice warning to the standard onchange.
        A 'block' warning clears partner_id instead of delegating to super().
        """
        if not partner_id:
            # No partner selected: reset the dependent fields.
            return {'value': {
            'account_id': False,
            'payment_term': False,
            }
        }
        warning = {}
        title = False
        message = False
        partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
        if partner.invoice_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.invoice_warn_msg
            warning = {
                'title': title,
                'message': message
            }
            if partner.invoice_warn == 'block':
                # Blocking warning: refuse the partner outright.
                return {'value': {'partner_id': False}, 'warning': warning}
        result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
            date_invoice=date_invoice, payment_term=payment_term,
            partner_bank_id=partner_bank_id, company_id=company_id, context=context)
        if result.get('warning',False):
            # Merge our warning with the one from the parent class.
            warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value',{}), 'warning':warning}
class stock_picking(osv.osv):
    _inherit = 'stock.picking'
    def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
        """Inject the partner's picking warning into the onchange result.

        A 'block' warning clears partner_id; otherwise the warning is merged
        with whatever the parent implementation returns.
        """
        if not partner_id:
            return {}
        partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
        warning = {}
        title = False
        message = False
        if partner.picking_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.picking_warn_msg
            warning = {
                'title': title,
                'message': message
            }
            if partner.picking_warn == 'block':
                # Blocking warning: refuse the partner outright.
                return {'value': {'partner_id': False}, 'warning': warning}
        # BUG FIX: the super() call previously named the non-existent class
        # 'stock_picking_in', which raised a NameError whenever reached.
        result = super(stock_picking, self).onchange_partner_in(cr, uid, ids, partner_id, context)
        if result.get('warning',False):
            # Merge our warning with the one from the parent class.
            warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value',{}), 'warning':warning}
class product_product(osv.osv):
    # NOTE(review): despite its name, this class extends product.template,
    # adding per-product warnings for sale and purchase order lines.
    _inherit = 'product.template'
    _columns = {
        'sale_line_warn' : fields.selection(WARNING_MESSAGE,'Sales Order Line', help=WARNING_HELP, required=True),
        'sale_line_warn_msg' : fields.text('Message for Sales Order Line'),
        'purchase_line_warn' : fields.selection(WARNING_MESSAGE,'Purchase Order Line', help=WARNING_HELP, required=True),
        'purchase_line_warn_msg' : fields.text('Message for Purchase Order Line'),
    }
    _defaults = {
        'sale_line_warn' : 'no-message',
        'purchase_line_warn' : 'no-message',
    }
class sale_order_line(osv.osv):
    _inherit = 'sale.order.line'
    def product_id_change_with_wh(self, cr, uid, ids, pricelist, product, qty=0,
            uom=False, qty_uos=0, uos=False, name='', partner_id=False,
            lang=False, update_tax=True, date_order=False, packaging=False,
            fiscal_position=False, flag=False, warehouse_id=False, context=None):
        """Wrap the standard product onchange with the product's sales
        warning.  A 'block' warning clears product_id instead of delegating.
        """
        warning = {}
        if not product:
            # No product selected: reset line values and domains.
            return {'value': {'th_weight' : 0, 'product_packaging': False,
                'product_uos_qty': qty}, 'domain': {'product_uom': [],
                'product_uos': []}}
        product_obj = self.pool.get('product.product')
        product_info = product_obj.browse(cr, uid, product)
        title = False
        message = False
        if product_info.sale_line_warn != 'no-message':
            title = _("Warning for %s") % product_info.name
            message = product_info.sale_line_warn_msg
            warning['title'] = title
            warning['message'] = message
            if product_info.sale_line_warn == 'block':
                # Blocking warning: refuse the product outright.
                return {'value': {'product_id': False}, 'warning': warning}
        result = super(sale_order_line, self).product_id_change_with_wh( cr, uid, ids, pricelist, product, qty,
            uom, qty_uos, uos, name, partner_id,
            lang, update_tax, date_order, packaging, fiscal_position, flag, warehouse_id=warehouse_id, context=context)
        if result.get('warning',False):
            # Merge our warning with the one from the parent class.
            warning['title'] = title and title +' & '+result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message +'\n\n'+result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value',{}), 'warning':warning}
class purchase_order_line(osv.osv):
    _inherit = 'purchase.order.line'
    def onchange_product_id(self,cr, uid, ids, pricelist, product, qty, uom,
            partner_id, date_order=False, fiscal_position_id=False, date_planned=False,
            name=False, price_unit=False, state='draft', notes=False, context=None):
        """Wrap the standard product onchange with the product's purchase
        warning.  A 'block' warning clears product_id instead of delegating.
        """
        warning = {}
        if not product:
            # No product selected: reset line values to sensible defaults.
            return {'value': {'price_unit': price_unit or 0.0, 'name': name or '', 'notes': notes or '', 'product_uom' : uom or False}, 'domain':{'product_uom':[]}}
        product_obj = self.pool.get('product.product')
        product_info = product_obj.browse(cr, uid, product)
        title = False
        message = False
        if product_info.purchase_line_warn != 'no-message':
            title = _("Warning for %s") % product_info.name
            message = product_info.purchase_line_warn_msg
            warning['title'] = title
            warning['message'] = message
            if product_info.purchase_line_warn == 'block':
                # Blocking warning: refuse the product outright.
                return {'value': {'product_id': False}, 'warning': warning}
        result = super(purchase_order_line, self).onchange_product_id(cr, uid, ids, pricelist, product, qty, uom,
            partner_id, date_order=date_order, fiscal_position_id=fiscal_position_id, date_planned=date_planned, name=name, price_unit=price_unit, state=state, context=context)
        if result.get('warning',False):
            # Merge our warning with the one from the parent class.
            warning['title'] = title and title +' & '+result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message +'\n\n'+result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value',{}), 'warning':warning}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
deathglitch/metarigging
|
refs/heads/master
|
python/model/ui_tools/mirror_mesh_ui.py
|
1
|
import pymel.core as pm
import PySide2.QtCore as qtcore
import PySide2.QtGui as qtgui
from PySide2 import QtWidgets
import qtutils.qt_utils as MetaWindow
import model.mesh
class MirrorMeshToolUI(MetaWindow.MetaWindow):
    """PySide2 dialog for mirroring blend-shape meshes in Maya.

    Offers a name field plus two buttons: one that mirrors a blend-shape
    target against a selected base mesh, and one (currently not connected to
    any handler) intended for mirroring plain meshes.
    """

    VERSION = 1.0

    def __init__(self):
        MetaWindow.MetaWindow.__init__(self, MirrorMeshToolUI.__name__)
        self.setWindowTitle("Mirror Blend Shape Mesh {0}".format(MirrorMeshToolUI.VERSION))
        QtWidgets.QToolTip.setFont(qtgui.QFont('SansSerif', 10))

        main_layout = QtWidgets.QVBoxLayout()
        main_layout.setContentsMargins(5, 5, 5, 5)
        main_layout.setSpacing(5)
        main_layout.setAlignment(qtcore.Qt.AlignTop)
        self.setLayout(main_layout)

        # description
        description_frame = QtWidgets.QHBoxLayout()
        main_layout.addLayout(description_frame)
        description_label = QtWidgets.QLabel('Tool to mirror blend shape meshes')
        description_frame.addWidget(description_label)

        # mirror blend shape mesh text edit
        mirror_name_frame = QtWidgets.QHBoxLayout()
        main_layout.addLayout(mirror_name_frame)
        self.mirror_name_lineEdit = QtWidgets.QLineEdit()
        mirror_name_frame.addWidget(self.mirror_name_lineEdit)

        # mirror blend shape mesh button
        mirror_frame = QtWidgets.QHBoxLayout()
        main_layout.addLayout(mirror_frame)
        self.mirror_button = QtWidgets.QPushButton('Mirror Blend Shape Mesh')
        # Typo fix: "to me mirrored" -> "to be mirrored".
        self.mirror_button.setToolTip('Mirror a blend shape mesh. Select a neutral pose mesh (Base Mesh).\n'
                                      + 'Then select the mesh to be mirrored.')
        mirror_frame.addWidget(self.mirror_button)

        # mirror non-blendshape mesh
        # NOTE(review): this button is never connected to a slot -- confirm
        # whether a handler is still to be implemented.
        mirror_non_frame = QtWidgets.QHBoxLayout()
        main_layout.addLayout(mirror_non_frame)
        mirror_non_button = QtWidgets.QPushButton('Mirror Mesh')
        mirror_non_button.setToolTip('Mirror a non blend shape mesh.')
        mirror_non_frame.addWidget(mirror_non_button)

        # --------------------------------------------------------------------------
        # Signals
        # --------------------------------------------------------------------------
        self.mirror_button.clicked.connect(self.mirror_blend_shape)

    # --------------------------------------------------------------------------
    # SLOTS
    # --------------------------------------------------------------------------
    def mirror_blend_shape(self):
        """Mirror the second selected mesh against the first (base) mesh.

        Expects the neutral/base mesh selected first and the mesh to mirror
        second; both must be poly meshes with identical topology.
        """
        meshes = pm.selected()
        if len(meshes) < 2:
            pm.confirmDialog(title='Selection Error.', message='Please select two meshes.')
            return
        for x in range(2):  # range() works on Python 2 and 3; xrange did not
            # Bug fix: the original condition was
            #   not isinstance(...) and isinstance(meshes[x].getShape(), ...)
            # which, due to precedence, only rejected non-transforms that DO
            # carry a mesh shape, and called getShape() on arbitrary nodes.
            # Short-circuiting inside the parentheses now also avoids calling
            # getShape() on anything that is not a Transform.
            if not (isinstance(meshes[x], pm.nt.Transform) and isinstance(meshes[x].getShape(), pm.nt.Mesh)):
                pm.confirmDialog(title='No deformable objects selected',
                                 message='Please select two objects of type poly meshes.')
                return
        if len(meshes) > 2:
            # Typo fix: "frist" -> "first".
            pm.warning('More than two meshes were selected. Using the first two selected.')
        if not pm.polyEvaluate(meshes[0], v=True) == pm.polyEvaluate(meshes[1], v=True):
            pm.confirmDialog(title='No Matching Deformable Objects',
                             message='Please select two objects that have the same topology and vertex order.')
            return
        mesh = model.mesh.Mesh(meshes[1])
        name = (self.mirror_name_lineEdit.text()).strip()
        if not name:  # empty string is falsy; the extra == '' test was redundant
            name = 'mirrored_mesh'
        mesh.mirror(opposite_pose=name, base_mesh=meshes[0])
        return
def show_ui():
    """Instantiate the mirror-mesh dialog and display it."""
    MirrorMeshToolUI().show()
|
HuaweiSwitch/ansible
|
refs/heads/devel
|
lib/ansible/cli/adhoc.py
|
4
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
########################################################
import os
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
from ansible.plugins import get_all_plugin_loaders
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
########################################################
class AdHocCLI(CLI):
    ''' is an extra-simple tool/framework/API for doing 'remote things'.
        this command allows you to define and run a single task 'playbook' against a set of hosts
    '''

    def parse(self):
        ''' create an options parser for bin/ansible '''
        self.parser = CLI.base_parser(
            usage='%prog <host-pattern> [options]',
            runas_opts=True,
            inventory_opts=True,
            async_opts=True,
            output_opts=True,
            connect_opts=True,
            check_opts=True,
            runtask_opts=True,
            vault_opts=True,
            fork_opts=True,
            module_opts=True,
            desc="Define and run a single task 'playbook' against a set of hosts",
            epilog="Some modules do not make sense in Ad-Hoc (include, meta, etc)",
        )

        # options unique to ansible ad-hoc
        self.parser.add_option('-a', '--args', dest='module_args',
                               help="module arguments", default=C.DEFAULT_MODULE_ARGS)
        self.parser.add_option('-m', '--module-name', dest='module_name',
                               help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
                               default=C.DEFAULT_MODULE_NAME)

        super(AdHocCLI, self).parse()

        # exactly one positional argument (the host pattern) is expected
        if len(self.args) < 1:
            raise AnsibleOptionsError("Missing target hosts")
        elif len(self.args) > 1:
            raise AnsibleOptionsError("Extraneous options or arguments")

        display.verbosity = self.options.verbosity
        self.validate_conflicts(runas_opts=True, vault_opts=True, fork_opts=True)

    def _play_ds(self, pattern, async_val, poll):
        """Build the in-memory play datastructure for the single ad-hoc task.

        :arg pattern: host pattern the play targets
        :arg async_val: value for the task's 'async' keyword (seconds)
        :arg poll: value for the task's 'poll' keyword
        """
        check_raw = self.options.module_name in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
        # Bug fix: 'async' is a reserved keyword from Python 3.7 on, so it can
        # no longer be used as a parameter name or a dict() keyword argument.
        # The task keyword the playbook engine expects is still spelled
        # 'async', hence the dict literal below.
        return dict(
            name="Ansible Ad-Hoc",
            hosts=pattern,
            gather_facts='no',
            tasks=[{
                'action': {
                    'module': self.options.module_name,
                    'args': parse_kv(self.options.module_args, check_raw=check_raw),
                },
                'async': async_val,
                'poll': poll,
            }])

    def run(self):
        ''' create and execute the single task playbook '''

        super(AdHocCLI, self).run()

        # only thing left should be host pattern
        pattern = to_text(self.args[0], errors='surrogate_or_strict')

        sshpass = None
        becomepass = None

        self.normalize_become_options()
        (sshpass, becomepass) = self.ask_passwords()
        passwords = {'conn_pass': sshpass, 'become_pass': becomepass}

        loader, inventory, variable_manager = self._play_prereqs(self.options)

        no_hosts = False
        if len(inventory.list_hosts()) == 0:
            # Empty inventory
            display.warning("provided hosts list is empty, only localhost is available")
            no_hosts = True

        inventory.subset(self.options.subset)

        hosts = inventory.list_hosts(pattern)
        if len(hosts) == 0:
            if no_hosts is False and self.options.subset:
                # Invalid limit
                raise AnsibleError("Specified --limit does not match any hosts")
            else:
                display.warning("No hosts matched, nothing to do")

        if self.options.listhosts:
            display.display('  hosts (%d):' % len(hosts))
            for host in hosts:
                display.display('    %s' % host)
            return 0

        if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
            err = "No argument passed to %s module" % self.options.module_name
            if pattern.endswith(".yml"):
                err = err + ' (did you mean to run ansible-playbook?)'
            raise AnsibleOptionsError(err)

        # Avoid modules that don't work with ad-hoc
        if self.options.module_name in ('include', 'include_role'):
            raise AnsibleOptionsError("'%s' is not a valid action for ad-hoc commands" % self.options.module_name)

        # dynamically load any plugins from the playbook directory
        for name, obj in get_all_plugin_loaders():
            if obj.subdir:
                plugin_path = os.path.join('.', obj.subdir)
                if os.path.isdir(plugin_path):
                    obj.add_directory(plugin_path)

        play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval)
        play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)

        # pick the stdout callback: explicit override, one-line, or minimal
        if self.callback:
            cb = self.callback
        elif self.options.one_line:
            cb = 'oneline'
        else:
            cb = 'minimal'

        run_tree = False
        if self.options.tree:
            C.DEFAULT_CALLBACK_WHITELIST.append('tree')
            C.TREE_DIR = self.options.tree
            run_tree = True

        # now create a task queue manager to execute the play
        self._tqm = None
        try:
            self._tqm = TaskQueueManager(
                inventory=inventory,
                variable_manager=variable_manager,
                loader=loader,
                options=self.options,
                passwords=passwords,
                stdout_callback=cb,
                run_additional_callbacks=C.DEFAULT_LOAD_CALLBACK_PLUGINS,
                run_tree=run_tree,
            )

            result = self._tqm.run(play)
        finally:
            if self._tqm:
                self._tqm.cleanup()
            if loader:
                loader.cleanup_all_tmp_files()

        return result
|
chouseknecht/ansible
|
refs/heads/devel
|
lib/ansible/plugins/lookup/aws_ssm.py
|
18
|
# (c) 2016, Bill Wang <ozbillwang(at)gmail.com>
# (c) 2017, Marat Bakeev <hawara(at)gmail.com>
# (c) 2018, Michael De La Rue <siblemitcom.mddlr(at)spamgourmet.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
lookup: aws_ssm
author:
- Bill Wang <ozbillwang(at)gmail.com>
- Marat Bakeev <hawara(at)gmail.com>
- Michael De La Rue <siblemitcom.mddlr@spamgourmet.com>
version_added: 2.5
requirements:
- boto3
- botocore
short_description: Get the value for a SSM parameter or all parameters under a path.
description:
- Get the value for an Amazon Simple Systems Manager parameter or a hierarchy of parameters.
The first argument you pass the lookup can either be a parameter name or a hierarchy of
parameters. Hierarchies start with a forward slash and end with the parameter name. Up to
5 layers may be specified.
- If looking up an explicitly listed parameter by name which does not exist then the lookup will
return a None value which will be interpreted by Jinja2 as an empty string. You can use the
```default``` filter to give a default value in this case but must set the second parameter to
true (see examples below)
- When looking up a path for parameters under it a dictionary will be returned for each path.
If there is no parameter under that path then the return will be successful but the
dictionary will be empty.
- If the lookup fails due to lack of permissions or due to an AWS client error then the aws_ssm
will generate an error, normally crashing the current ansible task. This is normally the right
thing since ignoring a value that IAM isn't giving access to could cause bigger problems and
wrong behaviour or loss of data. If you want to continue in this case then you will have to set
up two ansible tasks, one which sets a variable and ignores failures one which uses the value
of that variable with a default. See the examples below.
options:
decrypt:
description: A boolean to indicate whether to decrypt the parameter.
default: true
type: boolean
bypath:
description: A boolean to indicate whether the parameter is provided as a hierarchy.
default: false
type: boolean
recursive:
description: A boolean to indicate whether to retrieve all parameters within a hierarchy.
default: false
type: boolean
shortnames:
description: Indicates whether to return the name only without path if using a parameter hierarchy.
default: false
type: boolean
'''
EXAMPLES = '''
# lookup sample:
- name: lookup ssm parameter store in the current region
debug: msg="{{ lookup('aws_ssm', 'Hello' ) }}"
- name: lookup ssm parameter store in nominated region
debug: msg="{{ lookup('aws_ssm', 'Hello', region='us-east-2' ) }}"
- name: lookup ssm parameter store without decrypted
debug: msg="{{ lookup('aws_ssm', 'Hello', decrypt=False ) }}"
- name: lookup ssm parameter store in nominated aws profile
debug: msg="{{ lookup('aws_ssm', 'Hello', aws_profile='myprofile' ) }}"
- name: lookup ssm parameter store using explicit aws credentials
debug: msg="{{ lookup('aws_ssm', 'Hello', aws_access_key=my_aws_access_key, aws_secret_key=my_aws_secret_key, aws_security_token=my_security_token ) }}"
- name: lookup ssm parameter store with all options.
debug: msg="{{ lookup('aws_ssm', 'Hello', decrypt=false, region='us-east-2', aws_profile='myprofile') }}"
- name: lookup a key which doesn't exist, returns ""
debug: msg="{{ lookup('aws_ssm', 'NoKey') }}"
- name: lookup a key which doesn't exist, returning a default ('root')
debug: msg="{{ lookup('aws_ssm', 'AdminID') | default('root', true) }}"
- name: lookup a key which doesn't exist failing to store it in a fact
set_fact:
temp_secret: "{{ lookup('aws_ssm', '/NoAccess/hiddensecret') }}"
ignore_errors: true
- name: show fact default to "access failed" if we don't have access
debug: msg="{{ 'the secret was:' ~ temp_secret | default('could not access secret') }}"
- name: return a dictionary of ssm parameters from a hierarchy path
debug: msg="{{ lookup('aws_ssm', '/PATH/to/params', region='ap-southeast-2', bypath=true, recursive=true ) }}"
- name: return a dictionary of ssm parameters from a hierarchy path with shortened names (param instead of /PATH/to/param)
debug: msg="{{ lookup('aws_ssm', '/PATH/to/params', region='ap-southeast-2', shortnames=true, bypath=true, recursive=true ) }}"
- name: Iterate over a parameter hierarchy (one iteration per parameter)
debug: msg='Key contains {{ item.key }} , with value {{ item.value }}'
loop: '{{ lookup("aws_ssm", "/demo/", region="ap-southeast-2", bypath=True) | dict2items }}'
- name: Iterate over multiple paths as dictionaries (one iteration per path)
debug: msg='Path contains {{ item }}'
loop: '{{ lookup("aws_ssm", "/demo/", "/demo1/", bypath=True)}}'
'''
from ansible.module_utils._text import to_native
from ansible.module_utils.ec2 import HAS_BOTO3, boto3_tag_list_to_ansible_dict
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.utils.display import Display
try:
from botocore.exceptions import ClientError
import botocore
import boto3
except ImportError:
pass # will be captured by imported HAS_BOTO3
display = Display()
def _boto3_conn(region, credentials):
    """Return a boto3 SSM client for *region*.

    Tries the full credential set first; if that fails and a profile name was
    supplied, retries with the profile alone. Pops 'boto_profile' out of
    *credentials* as a side effect, like the original.
    """
    boto_profile = credentials.pop('boto_profile', None)

    profile_errors = (botocore.exceptions.ProfileNotFound,
                      botocore.exceptions.PartialCredentialsError)
    try:
        return boto3.session.Session(profile_name=boto_profile).client('ssm', region, **credentials)
    except profile_errors:
        if not boto_profile:
            raise AnsibleError("Insufficient credentials found.")
        # FIXME: we should probably do better passing on of the error information
        try:
            return boto3.session.Session(profile_name=boto_profile).client('ssm', region)
        except profile_errors:
            raise AnsibleError("Insufficient credentials found.")
class LookupModule(LookupBase):
    """Lookup plugin entry point for the aws_ssm lookup."""

    def run(self, terms, variables=None, boto_profile=None, aws_profile=None,
            aws_secret_key=None, aws_access_key=None, aws_security_token=None, region=None,
            bypath=False, shortnames=False, recursive=False, decrypt=True):
        '''
            :arg terms: a list of lookups to run.
                e.g. ['parameter_name', 'parameter_name_too' ]
            :kwarg variables: ansible variables active at the time of the lookup
            :kwarg aws_secret_key: identity of the AWS key to use
            :kwarg aws_access_key: AWS secret key (matching identity)
            :kwarg aws_security_token: AWS session key if using STS
            :kwarg decrypt: Set to True to get decrypted parameters
            :kwarg region: AWS region in which to do the lookup
            :kwarg bypath: Set to True to do a lookup of variables under a path
            :kwarg recursive: Set to True to recurse below the path (requires bypath=True)
            :returns: A list of parameter values or a list of dictionaries if bypath=True.
            :raises AnsibleError: on missing boto3/botocore or on any AWS client error.
        '''

        if not HAS_BOTO3:
            raise AnsibleError('botocore and boto3 are required for aws_ssm lookup.')

        ret = []
        response = {}
        ssm_dict = {}

        # An explicit aws_profile wins over an inherited boto_profile.
        credentials = {}
        if aws_profile:
            credentials['boto_profile'] = aws_profile
        else:
            credentials['boto_profile'] = boto_profile
        credentials['aws_secret_access_key'] = aws_secret_key
        credentials['aws_access_key_id'] = aws_access_key
        credentials['aws_session_token'] = aws_security_token

        client = _boto3_conn(region, credentials)

        ssm_dict['WithDecryption'] = decrypt

        # Lookup by path: one result dictionary per term.
        if bypath:
            ssm_dict['Recursive'] = recursive
            for term in terms:
                ssm_dict["Path"] = term
                display.vvv("AWS_ssm path lookup term: %s in region: %s" % (term, region))
                try:
                    response = client.get_parameters_by_path(**ssm_dict)
                except ClientError as e:
                    raise AnsibleError("SSM lookup exception: {0}".format(to_native(e)))
                paramlist = list()
                paramlist.extend(response['Parameters'])

                # Manual pagination, since boto doesn't support it yet for get_parameters_by_path
                while 'NextToken' in response:
                    response = client.get_parameters_by_path(NextToken=response['NextToken'], **ssm_dict)
                    paramlist.extend(response['Parameters'])

                # shorten parameter names. yes, this will return duplicate names with different values.
                if shortnames:
                    for x in paramlist:
                        x['Name'] = x['Name'][x['Name'].rfind('/') + 1:]

                display.vvvv("AWS_ssm path lookup returned: %s" % str(paramlist))
                if len(paramlist):
                    ret.append(boto3_tag_list_to_ansible_dict(paramlist,
                                                              tag_name_key_name="Name",
                                                              tag_value_key_name="Value"))
                else:
                    # Empty path is a successful lookup with an empty dict.
                    ret.append({})

        # Lookup by parameter name - always returns a list with one or no entry.
        else:
            display.vvv("AWS_ssm name lookup term: %s" % terms)
            ssm_dict["Names"] = terms
            try:
                response = client.get_parameters(**ssm_dict)
            except ClientError as e:
                raise AnsibleError("SSM lookup exception: {0}".format(to_native(e)))
            params = boto3_tag_list_to_ansible_dict(response['Parameters'], tag_name_key_name="Name",
                                                    tag_value_key_name="Value")
            for i in terms:
                # Bug fix: get_parameters returns versioned lookups ('name:2')
                # under their bare name, so index with the stripped name.  The
                # original checked the stripped name but indexed with the raw
                # term, raising KeyError for any versioned lookup.
                term_name = i.split(':', 1)[0]
                if term_name in params:
                    ret.append(params[term_name])
                elif i in response['InvalidParameters']:
                    # A named parameter that does not exist yields None.
                    ret.append(None)
                else:
                    raise AnsibleError("Ansible internal error: aws_ssm lookup failed to understand boto3 return value: {0}".format(str(response)))
            return ret

        display.vvvv("AWS_ssm path lookup returning: %s " % str(ret))
        return ret
|
courtiol/choosiness
|
refs/heads/master
|
Visualization/VisualizationWithPygame/CVisualizationWithPygameBaseClass.py
|
1
|
__author__ = 'robert'
import pygame
import CIndividual
from Visualization.CVisualizationBaseClass import CVisualizationBaseClass
"""
In this module all visualization using pyGame are stored. You find here also the base class which deals with standard
user interactions and the initialization of pygame. Since in pyGame it is only possible to have one window open in the
same time we use a static parameter screen which circumvents the problems that several visualizations are used in the
same time.
"""
# ---------------------------------------Visualization with pygame--------------------------
class CVisualizationWithPygameBaseClass(CVisualizationBaseClass):
    """Base class for all pygame visualizations.

    Handles pygame initialization, the shared display surface and the
    standard user interactions (quit, pause, save, debug output).
    """

    # There is only one window possible in pygame. Therefore all instances of this class share the same instance of
    # screen.
    screen = None

    def __init__(self, simulation, width_of_window, height_of_window):
        CVisualizationBaseClass.__init__(self, simulation)
        self.width_of_window = width_of_window
        self.height_of_window = height_of_window

    # overwrite
    def init_display(self):
        """Create the shared window, set its caption and initialize pygame."""
        CVisualizationWithPygameBaseClass.screen = pygame.display.set_mode((self.width_of_window,
                                                                            self.height_of_window))
        pygame.display.set_caption("Simulation with "+str(self.simulation.population.current_number_of_females) +
                                   ' females in red and '+str(self.simulation.population.current_number_of_males) +
                                   ' males in blue')
        pygame.init()

    def init_screen(self):
        """(Re)create the shared display surface without touching the caption."""
        CVisualizationWithPygameBaseClass.screen = pygame.display.set_mode((self.width_of_window,
                                                                            self.height_of_window))

    def do_interaction_with_user(self):
        """
        Encapsulates all possible user interactions. (Hot keys, click events)
        :return:
        """
        for event in pygame.event.get():
            self.handle_user_event(event)

    def handle_user_event(self, event):
        """
        Deal with user events - like pressed keyboard buttons & mouse
        :param event: event which needs to be dealt with
        :return:-
        """
        # deal with standard events like quitting, pausing the simulation
        if event.type == pygame.QUIT:
            self.simulation.quit_simulation()
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_p:
                self.simulation.pause_simulation()
            elif event.key == pygame.K_c:
                self._show_couples_array()
            elif event.key == pygame.K_s:
                self.simulation.save()
            elif event.key == pygame.K_i:
                print(str(self.simulation.settings.step_counter))

    def print_text_on_screen(self, text, pos_x, pos_y, size=30, colour=(0, 0, 0)):
        """Render *text* centred at (pos_x, pos_y) on the shared screen."""
        large_text = pygame.font.Font('freesansbold.ttf', size)
        text_surface = large_text.render(text, True, colour)
        text_rect = text_surface.get_rect()
        text_rect.center = (pos_x, pos_y)
        CVisualizationWithPygameBaseClass.screen.blit(text_surface, text_rect)

    def _show_couples_array(self):
        """Dump the current couples array to the terminal (debug helper)."""
        print("couples with length " + str(len(self.simulation.population.couples)) + "\n")
        for couple in self.simulation.population.couples:
            print(couple)

    def help(self):
        """
        Shows information about possible user interactions
        """
        text = "p - pause\n"
        text += "right arrow - next visualization\n"
        text += "left arrow - prior visualization\n"
        text += "s - save\n"  # bug fix: missing newline ran "save" into the next entry
        text += "i - current time step\n"
        text += "left click on circle - information about individual in terminal"
        return text
|
danieljaouen/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/profitbricks/profitbricks_nic.py
|
36
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
# Module documentation rendered by ansible-doc.  Fixed the copy-pasted
# description (it talked about volume snapshots, not NICs) and the
# "THe" typo in subscription_password.
DOCUMENTATION = '''
---
module: profitbricks_nic
short_description: Create or Remove a NIC.
description:
     - This module allows you to create or remove a NIC. This module has a dependency on profitbricks >= 1.0.0
version_added: "2.0"
options:
  datacenter:
    description:
      - The datacenter in which to operate.
    required: true
  server:
    description:
      - The server name or ID.
    required: true
  name:
    description:
      - The name or ID of the NIC. This is only required on deletes, but not on create.
    required: true
  lan:
    description:
      - The LAN to place the NIC on. You can pass a LAN that doesn't exist and it will be created. Required on create.
    required: true
  subscription_user:
    description:
      - The ProfitBricks username. Overrides the PB_SUBSCRIPTION_ID environment variable.
    required: false
  subscription_password:
    description:
      - The ProfitBricks password. Overrides the PB_PASSWORD environment variable.
    required: false
  wait:
    description:
      - wait for the operation to complete before returning
    required: false
    default: "yes"
    type: bool
  wait_timeout:
    description:
      - how long before wait gives up, in seconds
    default: 600
  state:
    description:
      - Indicate desired state of the resource
    required: false
    default: 'present'
    choices: ["present", "absent"]
requirements: [ "profitbricks" ]
author: Matt Baldwin (baldwin@stackpointcloud.com)
'''
EXAMPLES = '''
# Create a NIC
- profitbricks_nic:
datacenter: Tardis One
server: node002
lan: 2
wait_timeout: 500
state: present
# Remove a NIC
- profitbricks_nic:
datacenter: Tardis One
server: node002
name: 7341c2454f
wait_timeout: 500
state: absent
'''
import re
import uuid
import time
HAS_PB_SDK = True
try:
from profitbricks.client import ProfitBricksService, NIC
except ImportError:
HAS_PB_SDK = False
from ansible.module_utils.basic import AnsibleModule
# Matches the 8-4-4-4-12 grouped layout of a UUID (case-insensitive).  Used to
# decide whether a user-supplied datacenter/server/NIC identifier is already a
# UUID or a human-readable name that must be resolved via the API.  Note \w is
# broader than hex digits, so some non-UUID strings would also match.
uuid_match = re.compile(
    r'[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}', re.I)
def _wait_for_completion(profitbricks, promise, wait_timeout, msg):
if not promise:
return
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
time.sleep(5)
operation_result = profitbricks.get_request(
request_id=promise['requestId'],
status=True)
if operation_result['metadata']['status'] == "DONE":
return
elif operation_result['metadata']['status'] == "FAILED":
raise Exception(
'Request failed to complete ' + msg + ' "' + str(
promise['requestId']) + '" to complete.')
raise Exception(
'Timed out waiting for async operation ' + msg + ' "' + str(
promise['requestId']
) + '" to complete.')
def create_nic(module, profitbricks):
    """
    Creates a NIC.

    module : AnsibleModule object
    profitbricks: authenticated profitbricks object.

    Returns:
        The API response for the created NIC; fails the module on error.
    """
    params = module.params
    datacenter = params.get('datacenter')
    server = params.get('server')
    lan = params.get('lan')
    name = params.get('name')
    wait = params.get('wait')
    wait_timeout = params.get('wait_timeout')

    # Resolve a datacenter name to its UUID if necessary.
    if not uuid_match.match(datacenter):
        for candidate in profitbricks.list_datacenters()['items']:
            details = profitbricks.get_datacenter(candidate['id'])
            if details['properties']['name'] == datacenter:
                datacenter = candidate['id']
                break

    # Resolve a server name to its UUID if necessary.
    if not uuid_match.match(server):
        for candidate in profitbricks.list_servers(datacenter)['items']:
            if candidate['properties']['name'] == server:
                server = candidate['id']
                break

    try:
        nic_spec = NIC(name=name, lan=lan)
        nic_response = profitbricks.create_nic(datacenter, server, nic_spec)

        if wait:
            _wait_for_completion(profitbricks, nic_response,
                                 wait_timeout, "create_nic")

        return nic_response

    except Exception as e:
        module.fail_json(msg="failed to create the NIC: %s" % str(e))
def delete_nic(module, profitbricks):
    """
    Removes a NIC

    module : AnsibleModule object
    profitbricks: authenticated profitbricks object.

    Returns:
        The API response on success; False if the server or NIC name could
        not be resolved.
    """
    params = module.params
    datacenter = params.get('datacenter')
    server = params.get('server')
    name = params.get('name')

    # Resolve a datacenter name to its UUID if necessary.
    if not uuid_match.match(datacenter):
        for candidate in profitbricks.list_datacenters()['items']:
            details = profitbricks.get_datacenter(candidate['id'])
            if details['properties']['name'] == datacenter:
                datacenter = candidate['id']
                break

    # Resolve a server name to its UUID; bail out when no such server exists.
    if not uuid_match.match(server):
        for candidate in profitbricks.list_servers(datacenter)['items']:
            if candidate['properties']['name'] == server:
                server = candidate['id']
                break
        else:
            return False

    # Resolve a NIC name to its UUID; bail out when no such NIC exists.
    if not uuid_match.match(name):
        for candidate in profitbricks.list_nics(datacenter, server)['items']:
            if candidate['properties']['name'] == name:
                name = candidate['id']
                break
        else:
            return False

    try:
        return profitbricks.delete_nic(datacenter, server, name)
    except Exception as e:
        module.fail_json(msg="failed to remove the NIC: %s" % str(e))
def main():
    """Module entry point: dispatch to create or delete based on 'state'."""
    module = AnsibleModule(
        argument_spec=dict(
            datacenter=dict(),
            server=dict(),
            name=dict(default=str(uuid.uuid4()).replace('-', '')[:10]),
            lan=dict(),
            subscription_user=dict(),
            subscription_password=dict(no_log=True),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(type='int', default=600),
            state=dict(default='present'),
        )
    )

    if not HAS_PB_SDK:
        module.fail_json(msg='profitbricks required for this module')

    # These parameters are mandatory even though the argument spec leaves
    # them optional; validated in the same order as before.
    for required_param in ('subscription_user', 'subscription_password',
                           'datacenter', 'server'):
        if not module.params.get(required_param):
            module.fail_json(msg='%s parameter is required' % required_param)

    profitbricks = ProfitBricksService(
        username=module.params.get('subscription_user'),
        password=module.params.get('subscription_password'))

    state = module.params.get('state')

    if state == 'absent':
        if not module.params.get('name'):
            module.fail_json(msg='name parameter is required')

        try:
            module.exit_json(changed=delete_nic(module, profitbricks))
        except Exception as e:
            module.fail_json(msg='failed to set nic state: %s' % str(e))

    elif state == 'present':
        if not module.params.get('lan'):
            module.fail_json(msg='lan parameter is required')

        try:
            module.exit_json(nics=create_nic(module, profitbricks))
        except Exception as e:
            module.fail_json(msg='failed to set nic state: %s' % str(e))


if __name__ == '__main__':
    main()
|
nkgilley/home-assistant
|
refs/heads/dev
|
tests/components/honeywell/test_climate.py
|
18
|
"""The test the Honeywell thermostat module."""
import unittest
from unittest import mock
import pytest
import requests.exceptions
import somecomfort
import voluptuous as vol
from homeassistant.components.climate.const import (
ATTR_FAN_MODE,
ATTR_FAN_MODES,
ATTR_HVAC_MODES,
)
import homeassistant.components.honeywell.climate as honeywell
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
pytestmark = pytest.mark.skip("Need to be fixed!")
class TestHoneywell(unittest.TestCase):
"""A test class for Honeywell themostats."""
@mock.patch("somecomfort.SomeComfort")
@mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
def test_setup_us(self, mock_ht, mock_sc):
"""Test for the US setup."""
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "us",
}
bad_pass_config = {CONF_USERNAME: "user", honeywell.CONF_REGION: "us"}
bad_region_config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "un",
}
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(None)
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA({})
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(bad_pass_config)
with pytest.raises(vol.Invalid):
honeywell.PLATFORM_SCHEMA(bad_region_config)
hass = mock.MagicMock()
add_entities = mock.MagicMock()
locations = [mock.MagicMock(), mock.MagicMock()]
devices_1 = [mock.MagicMock()]
devices_2 = [mock.MagicMock(), mock.MagicMock]
mock_sc.return_value.locations_by_id.values.return_value = locations
locations[0].devices_by_id.values.return_value = devices_1
locations[1].devices_by_id.values.return_value = devices_2
result = honeywell.setup_platform(hass, config, add_entities)
assert result
assert mock_sc.call_count == 1
assert mock_sc.call_args == mock.call("user", "pass")
mock_ht.assert_has_calls(
[
mock.call(mock_sc.return_value, devices_1[0], 18, 28, "user", "pass"),
mock.call(mock_sc.return_value, devices_2[0], 18, 28, "user", "pass"),
mock.call(mock_sc.return_value, devices_2[1], 18, 28, "user", "pass"),
]
)
@mock.patch("somecomfort.SomeComfort")
def test_setup_us_failures(self, mock_sc):
"""Test the US setup."""
hass = mock.MagicMock()
add_entities = mock.MagicMock()
config = {
CONF_USERNAME: "user",
CONF_PASSWORD: "pass",
honeywell.CONF_REGION: "us",
}
mock_sc.side_effect = somecomfort.AuthError
result = honeywell.setup_platform(hass, config, add_entities)
assert not result
assert not add_entities.called
mock_sc.side_effect = somecomfort.SomeComfortError
result = honeywell.setup_platform(hass, config, add_entities)
assert not result
assert not add_entities.called
    @mock.patch("somecomfort.SomeComfort")
    @mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
    def _test_us_filtered_devices(self, mock_ht, mock_sc, loc=None, dev=None):
        """Test for US filtered thermostats."""
        # Shared helper for the filtering tests below: builds three fake
        # locations (loc1 with two devices, loc2/loc3 with one each), runs
        # setup_platform with the given location/thermostat filter, and returns
        # the HoneywellUSThermostat constructor call list plus the mocked client.
        config = {
            CONF_USERNAME: "user",
            CONF_PASSWORD: "pass",
            honeywell.CONF_REGION: "us",
            "location": loc,
            "thermostat": dev,
        }
        # mock.sentinel objects give each location/device a unique, comparable
        # identity without needing real somecomfort objects.
        locations = {
            1: mock.MagicMock(
                locationid=mock.sentinel.loc1,
                devices_by_id={
                    11: mock.MagicMock(deviceid=mock.sentinel.loc1dev1),
                    12: mock.MagicMock(deviceid=mock.sentinel.loc1dev2),
                },
            ),
            2: mock.MagicMock(
                locationid=mock.sentinel.loc2,
                devices_by_id={21: mock.MagicMock(deviceid=mock.sentinel.loc2dev1)},
            ),
            3: mock.MagicMock(
                locationid=mock.sentinel.loc3,
                devices_by_id={31: mock.MagicMock(deviceid=mock.sentinel.loc3dev1)},
            ),
        }
        mock_sc.return_value = mock.MagicMock(locations_by_id=locations)
        hass = mock.MagicMock()
        add_entities = mock.MagicMock()
        assert honeywell.setup_platform(hass, config, add_entities) is True

        return mock_ht.call_args_list, mock_sc
def test_us_filtered_thermostat_1(self):
"""Test for US filtered thermostats."""
result, client = self._test_us_filtered_devices(dev=mock.sentinel.loc1dev1)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc1dev1] == devices
def test_us_filtered_thermostat_2(self):
"""Test for US filtered location."""
result, client = self._test_us_filtered_devices(dev=mock.sentinel.loc2dev1)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc2dev1] == devices
def test_us_filtered_location_1(self):
"""Test for US filtered locations."""
result, client = self._test_us_filtered_devices(loc=mock.sentinel.loc1)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc1dev1, mock.sentinel.loc1dev2] == devices
def test_us_filtered_location_2(self):
"""Test for US filtered locations."""
result, client = self._test_us_filtered_devices(loc=mock.sentinel.loc2)
devices = [x[0][1].deviceid for x in result]
assert [mock.sentinel.loc2dev1] == devices
    @mock.patch("evohomeclient.EvohomeClient")
    @mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
    def test_eu_setup_full_config(self, mock_round, mock_evo):
        """Test the EU setup with complete configuration."""
        config = {
            CONF_USERNAME: "user",
            CONF_PASSWORD: "pass",
            honeywell.CONF_REGION: "eu",
        }
        mock_evo.return_value.temperatures.return_value = [{"id": "foo"}, {"id": "bar"}]
        hass = mock.MagicMock()
        add_entities = mock.MagicMock()
        assert honeywell.setup_platform(hass, config, add_entities)
        assert mock_evo.call_count == 1
        assert mock_evo.call_args == mock.call("user", "pass")
        assert mock_evo.return_value.temperatures.call_count == 1
        assert mock_evo.return_value.temperatures.call_args == mock.call(
            force_refresh=True
        )
        # One RoundThermostat per reported zone; only the first is flagged as
        # master (True).  20.0 is presumably the away-temperature default
        # applied here -- TODO confirm against PLATFORM_SCHEMA.
        mock_round.assert_has_calls(
            [
                mock.call(mock_evo.return_value, "foo", True, 20.0),
                mock.call(mock_evo.return_value, "bar", False, 20.0),
            ]
        )
        assert 2 == add_entities.call_count

    @mock.patch("evohomeclient.EvohomeClient")
    @mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
    def test_eu_setup_partial_config(self, mock_round, mock_evo):
        """Test the EU setup with partial configuration."""
        config = {
            CONF_USERNAME: "user",
            CONF_PASSWORD: "pass",
            honeywell.CONF_REGION: "eu",
        }
        mock_evo.return_value.temperatures.return_value = [{"id": "foo"}, {"id": "bar"}]
        hass = mock.MagicMock()
        add_entities = mock.MagicMock()
        assert honeywell.setup_platform(hass, config, add_entities)
        # NOTE(review): the constructor receives 16 here although the config
        # looks identical to the full-config test above (which expects 20.0)
        # -- confirm which default away temperature applies in each path.
        mock_round.assert_has_calls(
            [
                mock.call(mock_evo.return_value, "foo", True, 16),
                mock.call(mock_evo.return_value, "bar", False, 16),
            ]
        )

    @mock.patch("evohomeclient.EvohomeClient")
    @mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
    def test_eu_setup_bad_temp(self, mock_round, mock_evo):
        """Test the EU setup with invalid temperature."""
        # NOTE(review): this config contains no temperature value at all, yet
        # the schema is expected to reject it -- presumably PLATFORM_SCHEMA
        # requires additional keys; confirm which field is meant to be
        # invalid here.
        config = {
            CONF_USERNAME: "user",
            CONF_PASSWORD: "pass",
            honeywell.CONF_REGION: "eu",
        }
        with pytest.raises(vol.Invalid):
            honeywell.PLATFORM_SCHEMA(config)

    @mock.patch("evohomeclient.EvohomeClient")
    @mock.patch("homeassistant.components.honeywell.climate.HoneywellUSThermostat")
    def test_eu_setup_error(self, mock_round, mock_evo):
        """Test the EU setup with errors."""
        config = {
            CONF_USERNAME: "user",
            CONF_PASSWORD: "pass",
            honeywell.CONF_REGION: "eu",
        }
        # A failing temperature query must abort the platform setup.
        mock_evo.return_value.temperatures.side_effect = (
            requests.exceptions.RequestException
        )
        add_entities = mock.MagicMock()
        hass = mock.MagicMock()
        assert not honeywell.setup_platform(hass, config, add_entities)
class TestHoneywellRound(unittest.TestCase):
    """A test class for Honeywell Round thermostats."""

    def setup_method(self, method):
        """Set up two RoundThermostat entities backed by a mocked device."""

        def fake_temperatures(force_refresh=None):
            """Create fake temperatures."""
            temps = [
                {
                    "id": "1",
                    "temp": 20,
                    "setpoint": 21,
                    "thermostat": "main",
                    "name": "House",
                },
                {
                    "id": "2",
                    "temp": 21,
                    "setpoint": 22,
                    # Hot-water zone: reports no "name" key.
                    "thermostat": "DOMESTIC_HOT_WATER",
                },
            ]
            return temps

        self.device = mock.MagicMock()
        self.device.temperatures.side_effect = fake_temperatures
        # round1 is the master zone (away temperature 16); round2 is the
        # hot-water zone (away temperature 17).
        self.round1 = honeywell.RoundThermostat(self.device, "1", True, 16)
        self.round1.update()
        self.round2 = honeywell.RoundThermostat(self.device, "2", False, 17)
        self.round2.update()

    def test_attributes(self):
        """Test the attributes."""
        assert "House" == self.round1.name
        assert TEMP_CELSIUS == self.round1.temperature_unit
        assert 20 == self.round1.current_temperature
        assert 21 == self.round1.target_temperature
        assert not self.round1.is_away_mode_on

        # The hot-water zone reports no target temperature.
        assert "Hot Water" == self.round2.name
        assert TEMP_CELSIUS == self.round2.temperature_unit
        assert 21 == self.round2.current_temperature
        assert self.round2.target_temperature is None
        assert not self.round2.is_away_mode_on

    def test_away_mode(self):
        """Test setting the away mode."""
        assert not self.round1.is_away_mode_on
        self.round1.turn_away_mode_on()
        assert self.round1.is_away_mode_on
        # Away mode applies the configured away temperature (16).
        assert self.device.set_temperature.call_count == 1
        assert self.device.set_temperature.call_args == mock.call("House", 16)

        self.device.set_temperature.reset_mock()
        self.round1.turn_away_mode_off()
        assert not self.round1.is_away_mode_on
        assert self.device.cancel_temp_override.call_count == 1
        assert self.device.cancel_temp_override.call_args == mock.call("House")

    def test_set_temperature(self):
        """Test setting the temperature."""
        self.round1.set_temperature(temperature=25)
        assert self.device.set_temperature.call_count == 1
        assert self.device.set_temperature.call_args == mock.call("House", 25)

    def test_set_hvac_mode(self) -> None:
        """Test setting the system operation."""
        self.round1.set_hvac_mode("cool")
        assert "cool" == self.round1.current_operation
        assert "cool" == self.device.system_mode

        self.round1.set_hvac_mode("heat")
        assert "heat" == self.round1.current_operation
        assert "heat" == self.device.system_mode
class TestHoneywellUS(unittest.TestCase):
    """A test class for Honeywell US thermostats."""

    def setup_method(self, method):
        """Build a mocked somecomfort client/device and the entity under test."""
        self.client = mock.MagicMock()
        self.device = mock.MagicMock()
        self.cool_away_temp = 18
        self.heat_away_temp = 28
        self.honeywell = honeywell.HoneywellUSThermostat(
            self.client,
            self.device,
            self.cool_away_temp,
            self.heat_away_temp,
            "user",
            "password",
        )

        # Baseline device state shared by every test below.
        self.device.fan_running = True
        self.device.name = "test"
        self.device.temperature_unit = "F"
        self.device.current_temperature = 72
        self.device.setpoint_cool = 78
        self.device.setpoint_heat = 65
        self.device.system_mode = "heat"
        self.device.fan_mode = "auto"

    def test_properties(self):
        """Test the properties."""
        assert self.honeywell.is_fan_on
        assert "test" == self.honeywell.name
        assert 72 == self.honeywell.current_temperature

    def test_unit_of_measurement(self):
        """Test the unit of measurement."""
        assert TEMP_FAHRENHEIT == self.honeywell.temperature_unit
        self.device.temperature_unit = "C"
        assert TEMP_CELSIUS == self.honeywell.temperature_unit

    def test_target_temp(self):
        """Test that the target temperature follows the active system mode."""
        assert 65 == self.honeywell.target_temperature
        self.device.system_mode = "cool"
        assert 78 == self.honeywell.target_temperature

    def test_set_temp(self):
        """Test setting the temperature."""
        self.honeywell.set_temperature(temperature=70)
        assert 70 == self.device.setpoint_heat
        assert 70 == self.honeywell.target_temperature

        self.device.system_mode = "cool"
        assert 78 == self.honeywell.target_temperature
        self.honeywell.set_temperature(temperature=74)
        assert 74 == self.device.setpoint_cool
        assert 74 == self.honeywell.target_temperature

    def test_set_hvac_mode(self) -> None:
        """Test setting the operation mode."""
        self.honeywell.set_hvac_mode("cool")
        assert "cool" == self.device.system_mode

        self.honeywell.set_hvac_mode("heat")
        assert "heat" == self.device.system_mode

    def test_set_temp_fail(self):
        """Test if setting the temperature fails."""
        # NOTE(review): a MagicMock with side_effect only raises when it is
        # *called*; plain attribute assignment would not trigger it -- confirm
        # this actually exercises the intended failure path.
        self.device.setpoint_heat = mock.MagicMock(
            side_effect=somecomfort.SomeComfortError
        )
        self.honeywell.set_temperature(temperature=123)

    def test_attributes(self):
        """Test the attributes."""
        expected = {
            honeywell.ATTR_FAN: "running",
            ATTR_FAN_MODE: "auto",
            ATTR_FAN_MODES: somecomfort.FAN_MODES,
            ATTR_HVAC_MODES: somecomfort.SYSTEM_MODES,
        }
        assert expected == self.honeywell.device_state_attributes
        # Use the shared key constant (this was the string literal "fan")
        # so the test cannot drift from honeywell.ATTR_FAN.
        expected[honeywell.ATTR_FAN] = "idle"
        self.device.fan_running = False
        assert expected == self.honeywell.device_state_attributes

    def test_with_no_fan(self):
        """Test the attributes when the device reports no fan."""
        self.device.fan_running = False
        self.device.fan_mode = None
        expected = {
            honeywell.ATTR_FAN: "idle",
            ATTR_FAN_MODE: None,
            ATTR_FAN_MODES: somecomfort.FAN_MODES,
            ATTR_HVAC_MODES: somecomfort.SYSTEM_MODES,
        }
        assert expected == self.honeywell.device_state_attributes

    def test_heat_away_mode(self):
        """Test setting the heat away mode."""
        self.honeywell.set_hvac_mode("heat")
        assert not self.honeywell.is_away_mode_on
        self.honeywell.turn_away_mode_on()
        assert self.honeywell.is_away_mode_on
        assert self.device.setpoint_heat == self.heat_away_temp
        assert self.device.hold_heat is True

        self.honeywell.turn_away_mode_off()
        assert not self.honeywell.is_away_mode_on
        assert self.device.hold_heat is False

    @mock.patch("somecomfort.SomeComfort")
    def test_retry(self, test_somecomfort):
        """Test that a retried connection keeps the existing device object."""
        old_device = self.honeywell._device
        self.honeywell._retry()
        assert self.honeywell._device == old_device
|
pwz3n0/buck
|
refs/heads/master
|
programs/buck_project.py
|
9
|
from __future__ import print_function
import os
import subprocess
import tempfile
import textwrap
import shutil
import sys
from tracing import Tracing
def get_file_contents_if_exists(path, default=None):
    """Read *path* and return its stripped text, or *default* when the file
    is absent or effectively empty."""
    with Tracing('BuckProject.get_file_contents_if_it_exists', args={'path': path}):
        if not os.path.exists(path):
            return default
        with open(path) as handle:
            text = handle.read().strip()
        return text if text else default
def write_contents_to_file(path, contents):
    """Overwrite *path* with the string form of *contents*."""
    with Tracing('BuckProject.write_contents_to_file', args={'path': path}), \
            open(path, 'w') as out:
        out.write(str(contents))
class BuckProject:
    """Filesystem layout and persisted state for a single Buck project root."""

    def __init__(self, root):
        self.root = root
        # buck-out holds build products; tmp/ and log/ are created eagerly.
        self._buck_out = os.path.join(root, "buck-out")
        buck_out_tmp = os.path.join(self._buck_out, "tmp")
        if not os.path.exists(buck_out_tmp):
            os.makedirs(buck_out_tmp)
        self._buck_out_log = os.path.join(self._buck_out, "log")
        if not os.path.exists(self._buck_out_log):
            os.makedirs(self._buck_out_log)
        self.tmp_dir = tempfile.mkdtemp(prefix="buck_run.", dir=buck_out_tmp)

        # Only created if buckd is used.
        self.buckd_tmp_dir = None
        self.buckd_dir = os.path.join(root, ".buckd")
        self.autobuild_pid_file = os.path.join(self.buckd_dir, "autobuild.pid")
        self.buckd_run_count_file = (os.path.join(
            self.buckd_dir, "buckd.runcount"))
        self.buckd_version_file = os.path.join(self.buckd_dir, "buckd.version")

        # A .nobuckcheck marker file suppresses the buck self-update notice.
        self.has_no_buck_check = (os.path.exists(os.path.join(
            self.root, ".nobuckcheck")))

        if self.has_no_buck_check:
            print(textwrap.dedent("""\
            :::
            ::: '.nobuckcheck' file is present. Not updating buck.
            :::"""), file=sys.stderr)

        # .buckversion holds colon-separated fields; None when absent.
        buck_version_path = os.path.join(self.root, ".buckversion")
        buck_version = get_file_contents_if_exists(buck_version_path)
        self.buck_version = buck_version.split(':') if buck_version else None

        buck_javaargs_path = os.path.join(self.root, ".buckjavaargs")
        self.buck_javaargs = get_file_contents_if_exists(buck_javaargs_path)

    def get_buckd_run_count(self):
        """Return the persisted buckd run count, or -1 if never recorded."""
        return int(get_file_contents_if_exists(self.buckd_run_count_file, -1))

    def get_buckd_socket_path(self):
        """Path of the unix socket the buckd daemon listens on."""
        return os.path.join(self.buckd_dir, 'sock')

    def get_running_buckd_version(self):
        """Version string of the running buckd, or None if not recorded."""
        return get_file_contents_if_exists(self.buckd_version_file)

    def get_autobuild_pid(self):
        """Contents of the autobuild pid file, or None if absent."""
        return get_file_contents_if_exists(self.autobuild_pid_file)

    def get_buck_out_log_dir(self):
        return self._buck_out_log

    def update_buckd_run_count(self, new_run_count):
        write_contents_to_file(self.buckd_run_count_file, new_run_count)

    def clean_up_buckd(self):
        """Delete the entire .buckd state directory if present."""
        with Tracing('BuckProject.clean_up_buckd'):
            if os.path.exists(self.buckd_dir):
                shutil.rmtree(self.buckd_dir)

    def create_buckd_tmp_dir(self):
        """Create (once) and return the buckd-specific temp directory."""
        if self.buckd_tmp_dir is not None:
            return self.buckd_tmp_dir
        tmp_dir_parent = os.path.join(self.buckd_dir, "tmp")
        if not os.path.exists(tmp_dir_parent):
            os.makedirs(tmp_dir_parent)
        self.buckd_tmp_dir = tempfile.mkdtemp(prefix="buck_run.",
                                              dir=tmp_dir_parent)
        return self.buckd_tmp_dir

    def save_buckd_version(self, version):
        write_contents_to_file(self.buckd_version_file, version)

    @staticmethod
    def from_current_dir():
        """Walk upward from the cwd to a directory containing .buckconfig."""
        with Tracing('BuckProject.from_current_dir'):
            current_dir = os.getcwd()
            if '--version' in sys.argv or '-V' in sys.argv:
                # Version queries do not need a real project root.
                return BuckProject(current_dir)
            # NOTE(review): on Windows, os.path.dirname("C:\\") is "C:\\",
            # which never equals os.sep -- confirm this loop terminates on
            # drive roots.
            while current_dir != os.sep:
                if os.path.exists(os.path.join(current_dir, ".buckconfig")):
                    return BuckProject(current_dir)
                current_dir = os.path.dirname(current_dir)
            raise NoBuckConfigFoundException()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Remove the per-run temp directory created in __init__.
        with Tracing('BuckProject.__exit__'):
            if os.path.exists(self.tmp_dir):
                shutil.rmtree(self.tmp_dir)
class NoBuckConfigFoundException(Exception):
    """Raised when no .buckconfig can be located at or above the cwd."""

    def __init__(self):
        message = textwrap.dedent("""\
            This does not appear to be the root of a Buck project. Please 'cd'
            to the root of your project before running buck. If this really is
            the root of your project, run
            'touch .buckconfig'
            and then re-run your buck command.""")
        super(NoBuckConfigFoundException, self).__init__(message)
|
y12uc231/edx-platform
|
refs/heads/master
|
common/lib/xmodule/xmodule/foldit_module.py
|
56
|
import logging
from lxml import etree
from pkg_resources import resource_string
from xmodule.editing_module import EditingDescriptor
from xmodule.x_module import XModule
from xmodule.xml_module import XmlDescriptor
from xblock.fields import Scope, Integer, String
from .fields import Date
from .util.duedate import get_extended_due_date
log = logging.getLogger(__name__)
class FolditFields(object):
    """Scoped field definitions shared by FolditModule and FolditDescriptor."""

    # default to what Spring_7012x uses
    required_level_half_credit = Integer(default=3, scope=Scope.settings)
    required_sublevel_half_credit = Integer(default=5, scope=Scope.settings)
    required_level = Integer(default=4, scope=Scope.settings)
    required_sublevel = Integer(default=5, scope=Scope.settings)
    due = Date(help="Date that this problem is due by", scope=Scope.settings)
    # Per-student override; user_state scope so an instructor-set value only
    # affects that student.
    extended_due = Date(
        help="Date that this problem is due by for a particular student. This "
        "can be set by an instructor, and will override the global due "
        "date if it is set to a date that is later than the global due "
        "date.",
        default=None,
        scope=Scope.user_state,
    )
    # Stored as "true"/"false" strings (XML attribute values), not booleans;
    # consumers compare with .lower() == "true".
    show_basic_score = String(scope=Scope.settings, default='false')
    show_leaderboard = String(scope=Scope.settings, default='false')
class FolditModule(FolditFields, XModule):
    """XModule that awards credit based on a student's Foldit puzzle progress.

    Completion data lives in the external ``foldit`` django app; this module
    only reads it, so all foldit imports happen lazily inside methods.
    Grading is 0 / 0.5 / 1 depending on which level thresholds were reached
    before the due date.
    """

    css = {'scss': [resource_string(__name__, 'css/foldit/leaderboard.scss')]}

    def __init__(self, *args, **kwargs):
        """
        Example:
         <foldit show_basic_score="true"
            required_level="4"
            required_sublevel="3"
            required_level_half_credit="2"
            required_sublevel_half_credit="3"
            show_leaderboard="false"/>
        """
        super(FolditModule, self).__init__(*args, **kwargs)
        # Per-student due date (instructor extensions override the global one).
        self.due_time = get_extended_due_date(self)

    def is_complete(self):
        """
        Did the user get to the required level before the due date?
        """
        # We normally don't want django dependencies in xmodule. foldit is
        # special. Import this late to avoid errors with things not yet being
        # initialized.
        from foldit.models import PuzzleComplete

        complete = PuzzleComplete.is_level_complete(
            self.system.anonymous_student_id,
            self.required_level,
            self.required_sublevel,
            self.due_time)
        return complete

    def is_half_complete(self):
        """
        Did the user reach the required level for half credit?

        Ideally this would be more flexible than just 0, 0.5, or 1 credit. On
        the other hand, the xml attributes for specifying more specific
        cut-offs and partial grades can get more confusing.
        """
        from foldit.models import PuzzleComplete

        complete = PuzzleComplete.is_level_complete(
            self.system.anonymous_student_id,
            self.required_level_half_credit,
            self.required_sublevel_half_credit,
            self.due_time)
        return complete

    def completed_puzzles(self):
        """
        Return a list of puzzles that this user has completed, as an array of
        dicts:

        [ {'set': int,
           'subset': int,
           'created': datetime} ]

        The list is sorted by set, then subset.
        """
        from foldit.models import PuzzleComplete

        return sorted(
            PuzzleComplete.completed_puzzles(self.system.anonymous_student_id),
            key=lambda d: (d['set'], d['subset']))

    def puzzle_leaders(self, n=10, courses=None):
        """
        Return a list of n (user, score) pairs corresponding to the top
        scores, in descending order of score.
        """
        from foldit.models import Score

        if courses is None:
            courses = [self.location.course_key]

        # Pass the requested count through to the query.  This was previously
        # hard-coded to 10, which silently ignored the ``n`` argument.
        leaders = [(leader['username'], leader['score'])
                   for leader in Score.get_tops_n(n, course_list=courses)]
        leaders.sort(key=lambda x: -x[1])

        return leaders

    def get_html(self):
        """
        Render the html for the module.
        """
        goal_level = '{0}-{1}'.format(
            self.required_level,
            self.required_sublevel)

        # show_* fields are "true"/"false" strings from XML attributes.
        showbasic = (self.show_basic_score.lower() == "true")
        showleader = (self.show_leaderboard.lower() == "true")

        context = {
            'due': self.due,
            'success': self.is_complete(),
            'goal_level': goal_level,
            'completed': self.completed_puzzles(),
            'top_scores': self.puzzle_leaders(),
            'show_basic': showbasic,
            'show_leader': showleader,
            'folditbasic': self.get_basicpuzzles_html(),
            'folditchallenge': self.get_challenge_html()
        }
        return self.system.render_template('foldit.html', context)

    def get_basicpuzzles_html(self):
        """
        Render html for the basic puzzle section.
        """
        goal_level = '{0}-{1}'.format(
            self.required_level,
            self.required_sublevel)

        context = {
            'due': self.due,
            'success': self.is_complete(),
            'goal_level': goal_level,
            'completed': self.completed_puzzles(),
        }
        return self.system.render_template('folditbasic.html', context)

    def get_challenge_html(self):
        """
        Render html for the challenge section (i.e., the leaderboard).
        """
        context = {
            'top_scores': self.puzzle_leaders()}

        return self.system.render_template('folditchallenge.html', context)

    def get_score(self):
        """
        0 if required_level_half_credit - required_sublevel_half_credit not
        reached.
        0.5 if required_level_half_credit and required_sublevel_half_credit
        reached.
        1 if required_level and required_sublevel reached.
        """
        if self.is_complete():
            score = 1
        elif self.is_half_complete():
            score = 0.5
        else:
            score = 0
        return {'score': score,
                'total': self.max_score()}

    def max_score(self):
        """Maximum achievable raw score for this module."""
        return 1
class FolditDescriptor(FolditFields, XmlDescriptor, EditingDescriptor):
    """
    Module for adding Foldit problems to courses
    """

    mako_template = "widgets/html-edit.html"
    module_class = FolditModule
    filename_extension = "xml"

    has_score = True

    js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]}
    js_module_name = "HTMLEditingDescriptor"

    # The grade changes without any student interaction with the edx website,
    # so always need to actually check.
    always_recalculate_grades = True

    @classmethod
    def definition_from_xml(cls, xml_object, system):
        # All configuration lives in XML attributes (handled via the
        # FolditFields declarations), so there is no definition payload and
        # no children to extract.
        return {}, []

    def definition_to_xml(self, resource_fs):
        # Serialize to a bare <foldit/> element.  NOTE(review): field
        # attributes are presumably added by the XmlDescriptor machinery --
        # confirm.
        xml_object = etree.Element('foldit')
        return xml_object
|
rismalrv/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/management/commands/tests/test_reindex_library.py
|
43
|
""" Tests for library reindex command """
import ddt
from django.core.management import call_command, CommandError
import mock
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from common.test.utils import nostderr
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
from opaque_keys import InvalidKeyError
from contentstore.management.commands.reindex_library import Command as ReindexCommand
from contentstore.courseware_index import SearchIndexingError
@ddt.ddt
class TestReindexLibrary(ModuleStoreTestCase):
    """ Tests for library reindex command """

    def setUp(self):
        """ Setup method - create libraries and courses """
        super(TestReindexLibrary, self).setUp()
        self.store = modulestore()
        self.first_lib = LibraryFactory.create(
            org="test", library="lib1", display_name="run1", default_store=ModuleStoreEnum.Type.split
        )
        self.second_lib = LibraryFactory.create(
            org="test", library="lib2", display_name="run2", default_store=ModuleStoreEnum.Type.split
        )

        # Two courses exist alongside the libraries so we can verify that
        # course keys are rejected by the library-only command.
        self.first_course = CourseFactory.create(
            org="test", course="course1", display_name="run1", default_store=ModuleStoreEnum.Type.split
        )
        self.second_course = CourseFactory.create(
            org="test", course="course2", display_name="run1", default_store=ModuleStoreEnum.Type.split
        )

    # Patch targets inside the management command module (patch where the
    # names are *used*, not where they are defined).
    REINDEX_PATH_LOCATION = 'contentstore.management.commands.reindex_library.LibrarySearchIndexer.do_library_reindex'
    MODULESTORE_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.modulestore'
    YESNO_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.query_yes_no'

    def _get_lib_key(self, library):
        """ Gets the library key as it is passed to the indexer """
        return library.location.library_key

    def _build_calls(self, *libraries):
        """ Builds a list of mock.call instances representing calls to the reindexing method """
        return [mock.call(self.store, self._get_lib_key(lib)) for lib in libraries]

    def test_given_no_arguments_raises_command_error(self):
        """ Test that raises CommandError for incorrect arguments """
        with self.assertRaises(SystemExit), nostderr():
            with self.assertRaisesRegexp(CommandError, ".* requires one or more arguments .*"):
                call_command('reindex_library')

    @ddt.data('qwerty', 'invalid_key', 'xblock-v1:qwe+rty')
    def test_given_invalid_lib_key_raises_not_found(self, invalid_key):
        """ Test that raises InvalidKeyError for invalid keys """
        with self.assertRaises(InvalidKeyError):
            call_command('reindex_library', invalid_key)

    def test_given_course_key_raises_command_error(self):
        """ Test that raises CommandError if course key is passed """
        with self.assertRaises(SystemExit), nostderr():
            with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
                call_command('reindex_library', unicode(self.first_course.id))

        with self.assertRaises(SystemExit), nostderr():
            with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
                call_command('reindex_library', unicode(self.second_course.id))

        # Mixing a course key with a valid library key must still fail.
        with self.assertRaises(SystemExit), nostderr():
            with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
                call_command(
                    'reindex_library',
                    unicode(self.second_course.id),
                    unicode(self._get_lib_key(self.first_lib))
                )

    def test_given_id_list_indexes_libraries(self):
        """ Test that reindexes libraries when given single library key or a list of library keys """
        with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
                mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
            call_command('reindex_library', unicode(self._get_lib_key(self.first_lib)))
            self.assertEqual(patched_index.mock_calls, self._build_calls(self.first_lib))

            patched_index.reset_mock()
            call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
            self.assertEqual(patched_index.mock_calls, self._build_calls(self.second_lib))

            patched_index.reset_mock()
            call_command(
                'reindex_library',
                unicode(self._get_lib_key(self.first_lib)),
                unicode(self._get_lib_key(self.second_lib))
            )
            expected_calls = self._build_calls(self.first_lib, self.second_lib)
            self.assertEqual(patched_index.mock_calls, expected_calls)

    def test_given_all_key_prompts_and_reindexes_all_libraries(self):
        """ Test that reindexes all libraries when --all key is given and confirmed """
        with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no:
            patched_yes_no.return_value = True
            with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
                    mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
                call_command('reindex_library', all=True)

            patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no')
            # Order is not guaranteed when reindexing everything, hence
            # assertItemsEqual rather than assertEqual.
            expected_calls = self._build_calls(self.first_lib, self.second_lib)
            self.assertItemsEqual(patched_index.mock_calls, expected_calls)

    def test_given_all_key_prompts_and_reindexes_all_libraries_cancelled(self):
        """ Test that does not reindex anything when --all key is given and cancelled """
        with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no:
            patched_yes_no.return_value = False
            with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
                    mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
                call_command('reindex_library', all=True)

            patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no')
            patched_index.assert_not_called()

    def test_fail_fast_if_reindex_fails(self):
        """ Test that fails on first reindexing exception """
        with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index:
            patched_index.side_effect = SearchIndexingError("message", [])
            with self.assertRaises(SearchIndexingError):
                call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
|
migueldvb/george
|
refs/heads/master
|
george/__init__.py
|
3
|
# -*- coding: utf-8 -*-

__version__ = "0.2.1"

try:
    # __GEORGE_SETUP__ is presumably injected (e.g. by setup.py) so this
    # module can be imported for its version before the compiled extensions
    # exist -- TODO confirm against setup.py.
    __GEORGE_SETUP__
except NameError:
    __GEORGE_SETUP__ = False

if not __GEORGE_SETUP__:
    # Normal import: expose the public API.
    __all__ = ["kernels", "GP", "BasicSolver", "HODLRSolver"]

    from . import kernels
    from .gp import GP
    from .basic import BasicSolver
    from .hodlr import HODLRSolver
|
rutsky/letsencrypt
|
refs/heads/master
|
letsencrypt/tests/renewer_test.py
|
2
|
"""Tests for letsencrypt.renewer."""
import datetime
import os
import tempfile
import shutil
import unittest
import configobj
import mock
import pytz
from letsencrypt import configuration
from letsencrypt import errors
from letsencrypt.storage import ALL_FOUR
from letsencrypt.tests import test_util
CERT = test_util.load_cert('cert.pem')
def unlink_all(rc_object):
    """Remove every one of the four files referenced by this RenewableCert."""
    for kind in ALL_FOUR:
        target = getattr(rc_object, kind)
        os.unlink(target)
def fill_with_sample_data(rc_object):
    """Write placeholder contents into each of the four RenewableCert files."""
    for kind in ALL_FOUR:
        target = getattr(rc_object, kind)
        with open(target, "w") as handle:
            handle.write(kind)
class RenewableCertTests(unittest.TestCase):
# pylint: disable=too-many-public-methods
"""Tests for letsencrypt.renewer.*."""
    def setUp(self):
        """Create an isolated config tree and a RenewableCert for example.org."""
        from letsencrypt import storage

        self.tempdir = tempfile.mkdtemp()
        self.cli_config = configuration.RenewerConfiguration(
            namespace=mock.MagicMock(config_dir=self.tempdir))

        # TODO: maybe provide RenewerConfiguration.make_dirs?
        os.makedirs(os.path.join(self.tempdir, "live", "example.org"))
        os.makedirs(os.path.join(self.tempdir, "archive", "example.org"))
        os.makedirs(os.path.join(self.tempdir, "configs"))

        # Each of the four items lives under live/example.org/<kind>.pem.
        config = configobj.ConfigObj()
        for kind in ALL_FOUR:
            config[kind] = os.path.join(self.tempdir, "live", "example.org",
                                        kind + ".pem")
        config.filename = os.path.join(self.tempdir, "configs",
                                       "example.org.conf")
        self.defaults = configobj.ConfigObj()
        self.test_rc = storage.RenewableCert(
            config, self.defaults, self.cli_config)

    def tearDown(self):
        """Remove the temporary config tree."""
        shutil.rmtree(self.tempdir)

    def test_initialization(self):
        """The lineage name and all four item paths derive from the config."""
        self.assertEqual(self.test_rc.lineagename, "example.org")
        for kind in ALL_FOUR:
            self.assertEqual(
                getattr(self.test_rc, kind), os.path.join(
                    self.tempdir, "live", "example.org", kind + ".pem"))

    def test_renewal_bad_config(self):
        """Test that the RenewableCert constructor will complain if
        the renewal configuration file doesn't end in ".conf" or if it
        isn't a ConfigObj."""
        from letsencrypt import storage
        defaults = configobj.ConfigObj()
        config = configobj.ConfigObj()
        # These files don't exist and aren't created here; the point of the test
        # is to confirm that the constructor rejects them outright because of
        # the configfile's name.
        for kind in ALL_FOUR:
            config["cert"] = "nonexistent_" + kind + ".pem"
        config.filename = "nonexistent_sillyfile"
        self.assertRaises(
            errors.CertStorageError, storage.RenewableCert, config, defaults)
        # A non-ConfigObj argument must raise TypeError.
        self.assertRaises(TypeError, storage.RenewableCert, "fun", defaults)

    def test_renewal_incomplete_config(self):
        """Test that the RenewableCert constructor will complain if
        the renewal configuration file is missing a required file element."""
        from letsencrypt import storage
        defaults = configobj.ConfigObj()
        config = configobj.ConfigObj()
        config["cert"] = "imaginary_cert.pem"
        # Here the required privkey is missing.
        config["chain"] = "imaginary_chain.pem"
        config["fullchain"] = "imaginary_fullchain.pem"
        config.filename = "imaginary_config.conf"
        self.assertRaises(
            errors.CertStorageError, storage.RenewableCert, config, defaults)
    def test_consistent(self):  # pylint: disable=too-many-statements
        """consistent() must reject every malformed symlink layout."""
        oldcert = self.test_rc.cert
        self.test_rc.cert = "relative/path"
        # Absolute path for item requirement
        self.assertFalse(self.test_rc.consistent())
        self.test_rc.cert = oldcert

        # Items must exist requirement
        self.assertFalse(self.test_rc.consistent())

        # Items must be symlinks requirements
        fill_with_sample_data(self.test_rc)
        self.assertFalse(self.test_rc.consistent())
        unlink_all(self.test_rc)

        # Items must point to desired place if they are relative
        for kind in ALL_FOUR:
            os.symlink(os.path.join("..", kind + "17.pem"),
                       getattr(self.test_rc, kind))
        self.assertFalse(self.test_rc.consistent())
        unlink_all(self.test_rc)

        # Items must point to desired place if they are absolute
        for kind in ALL_FOUR:
            os.symlink(os.path.join(self.tempdir, kind + "17.pem"),
                       getattr(self.test_rc, kind))
        self.assertFalse(self.test_rc.consistent())
        unlink_all(self.test_rc)

        # Items must point to things that exist
        for kind in ALL_FOUR:
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    kind + "17.pem"),
                       getattr(self.test_rc, kind))
        self.assertFalse(self.test_rc.consistent())

        # This version should work
        fill_with_sample_data(self.test_rc)
        self.assertTrue(self.test_rc.consistent())

        # Items must point to things that follow the naming convention
        # ("fullchain_17.pem" has an illegal underscore).
        os.unlink(self.test_rc.fullchain)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "fullchain_17.pem"), self.test_rc.fullchain)
        with open(self.test_rc.fullchain, "w") as f:
            f.write("wrongly-named fullchain")
        self.assertFalse(self.test_rc.consistent())

    def test_current_target(self):
        """current_target() resolves both relative and absolute symlinks."""
        # Relative path logic
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert17.pem"), self.test_rc.cert)
        with open(self.test_rc.cert, "w") as f:
            f.write("cert")
        self.assertTrue(os.path.samefile(self.test_rc.current_target("cert"),
                                         os.path.join(self.tempdir, "archive",
                                                      "example.org",
                                                      "cert17.pem")))

        # Absolute path logic
        os.unlink(self.test_rc.cert)
        os.symlink(os.path.join(self.tempdir, "archive", "example.org",
                                "cert17.pem"), self.test_rc.cert)
        with open(self.test_rc.cert, "w") as f:
            f.write("cert")
        self.assertTrue(os.path.samefile(self.test_rc.current_target("cert"),
                                         os.path.join(self.tempdir, "archive",
                                                      "example.org",
                                                      "cert17.pem")))

    def test_current_version(self):
        """current_version() reports the version of the currently linked item."""
        for ver in (1, 5, 10, 20):
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "cert{0}.pem".format(ver)),
                       self.test_rc.cert)
            with open(self.test_rc.cert, "w") as f:
                f.write("cert")
            os.unlink(self.test_rc.cert)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "cert10.pem"), self.test_rc.cert)
        self.assertEqual(self.test_rc.current_version("cert"), 10)
def test_no_current_version(self):
self.assertEqual(self.test_rc.current_version("cert"), None)
def test_latest_and_next_versions(self):
for ver in xrange(1, 6):
for kind in ALL_FOUR:
where = getattr(self.test_rc, kind)
if os.path.islink(where):
os.unlink(where)
os.symlink(os.path.join("..", "..", "archive", "example.org",
"{0}{1}.pem".format(kind, ver)), where)
with open(where, "w") as f:
f.write(kind)
self.assertEqual(self.test_rc.latest_common_version(), 5)
self.assertEqual(self.test_rc.next_free_version(), 6)
# Having one kind of file of a later version doesn't change the
# result
os.unlink(self.test_rc.privkey)
os.symlink(os.path.join("..", "..", "archive", "example.org",
"privkey7.pem"), self.test_rc.privkey)
with open(self.test_rc.privkey, "w") as f:
f.write("privkey")
self.assertEqual(self.test_rc.latest_common_version(), 5)
# ... although it does change the next free version
self.assertEqual(self.test_rc.next_free_version(), 8)
# Nor does having three out of four change the result
os.unlink(self.test_rc.cert)
os.symlink(os.path.join("..", "..", "archive", "example.org",
"cert7.pem"), self.test_rc.cert)
with open(self.test_rc.cert, "w") as f:
f.write("cert")
os.unlink(self.test_rc.fullchain)
os.symlink(os.path.join("..", "..", "archive", "example.org",
"fullchain7.pem"), self.test_rc.fullchain)
with open(self.test_rc.fullchain, "w") as f:
f.write("fullchain")
self.assertEqual(self.test_rc.latest_common_version(), 5)
# If we have everything from a much later version, it does change
# the result
ver = 17
for kind in ALL_FOUR:
where = getattr(self.test_rc, kind)
if os.path.islink(where):
os.unlink(where)
os.symlink(os.path.join("..", "..", "archive", "example.org",
"{0}{1}.pem".format(kind, ver)), where)
with open(where, "w") as f:
f.write(kind)
self.assertEqual(self.test_rc.latest_common_version(), 17)
self.assertEqual(self.test_rc.next_free_version(), 18)
def test_update_link_to(self):
    """update_link_to() repoints a single kind's symlink to a chosen version."""
    for ver in xrange(1, 6):
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            if os.path.islink(where):
                os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}{1}.pem".format(kind, ver)), where)
            with open(where, "w") as f:
                f.write(kind)
            self.assertEqual(ver, self.test_rc.current_version(kind))
    self.test_rc.update_link_to("cert", 3)
    self.test_rc.update_link_to("privkey", 2)
    self.assertEqual(3, self.test_rc.current_version("cert"))
    self.assertEqual(2, self.test_rc.current_version("privkey"))
    self.assertEqual(5, self.test_rc.current_version("chain"))
    self.assertEqual(5, self.test_rc.current_version("fullchain"))
    # Currently we are allowed to update to a version that doesn't exist
    self.test_rc.update_link_to("chain", 3000)
    # However, current_version doesn't allow querying the resulting
    # version (because it's a broken link).
    self.assertEqual(os.path.basename(os.readlink(self.test_rc.chain)),
                     "chain3000.pem")
def test_version(self):
    """version() maps a kind plus number to the matching archive filename."""
    os.symlink(os.path.join("..", "..", "archive", "example.org",
                            "cert12.pem"), self.test_rc.cert)
    with open(self.test_rc.cert, "w") as f:
        f.write("cert")
    # TODO: We should probably test that the directory is still the
    #       same, but it's tricky because we can get an absolute
    #       path out when we put a relative path in.
    self.assertEqual("cert8.pem",
                     os.path.basename(self.test_rc.version("cert", 8)))
def test_update_all_links_to(self):
    """update_all_links_to() moves every kind's symlink in lockstep."""
    for ver in xrange(1, 6):
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            if os.path.islink(where):
                os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}{1}.pem".format(kind, ver)), where)
            with open(where, "w") as f:
                f.write(kind)
            self.assertEqual(ver, self.test_rc.current_version(kind))
    self.assertEqual(self.test_rc.latest_common_version(), 5)
    for ver in xrange(1, 6):
        self.test_rc.update_all_links_to(ver)
        for kind in ALL_FOUR:
            self.assertEqual(ver, self.test_rc.current_version(kind))
        # moving the links must not change what exists in the archive
        self.assertEqual(self.test_rc.latest_common_version(), 5)
def test_has_pending_deployment(self):
    """has_pending_deployment() is True until links reach the newest version."""
    for ver in xrange(1, 6):
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            if os.path.islink(where):
                os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}{1}.pem".format(kind, ver)), where)
            with open(where, "w") as f:
                f.write(kind)
            self.assertEqual(ver, self.test_rc.current_version(kind))
    for ver in xrange(1, 6):
        self.test_rc.update_all_links_to(ver)
        for kind in ALL_FOUR:
            self.assertEqual(ver, self.test_rc.current_version(kind))
        if ver < 5:
            self.assertTrue(self.test_rc.has_pending_deployment())
        else:
            self.assertFalse(self.test_rc.has_pending_deployment())
def _test_notafterbefore(self, function, timestamp):
    """Shared helper: *function* must return the tz-aware UTC *timestamp*."""
    test_cert = test_util.load_vector("cert.pem")
    os.symlink(os.path.join("..", "..", "archive", "example.org",
                            "cert12.pem"), self.test_rc.cert)
    with open(self.test_rc.cert, "w") as f:
        f.write(test_cert)
    desired_time = datetime.datetime.utcfromtimestamp(timestamp)
    desired_time = desired_time.replace(tzinfo=pytz.UTC)
    # both the implicit (current version) and explicit version forms
    # must agree and must carry a zero UTC offset
    for result in (function(), function(12)):
        self.assertEqual(result, desired_time)
        self.assertEqual(result.utcoffset(), datetime.timedelta(0))
def test_notbefore(self):
    """notbefore() reads the certificate's notBefore as tz-aware UTC."""
    self._test_notafterbefore(self.test_rc.notbefore, 1418337285)
    # 2014-12-11 22:34:45+00:00 = Unix time 1418337285
def test_notafter(self):
    """notafter() reads the certificate's notAfter as tz-aware UTC."""
    self._test_notafterbefore(self.test_rc.notafter, 1418942085)
    # 2014-12-18 22:34:45+00:00 = Unix time 1418942085
@mock.patch("letsencrypt.storage.datetime")
def test_time_interval_judgments(self, mock_datetime):
    """Test should_autodeploy() and should_autorenew() on the basis
    of expiry time windows."""
    test_cert = test_util.load_vector("cert.pem")
    for kind in ALL_FOUR:
        where = getattr(self.test_rc, kind)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "{0}12.pem".format(kind)), where)
        with open(where, "w") as f:
            f.write(kind)
        os.unlink(where)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "{0}11.pem".format(kind)), where)
        with open(where, "w") as f:
            f.write(kind)
    self.test_rc.update_all_links_to(12)
    with open(self.test_rc.cert, "w") as f:
        f.write(test_cert)
    self.test_rc.update_all_links_to(11)
    with open(self.test_rc.cert, "w") as f:
        f.write(test_cert)
    # keep real timedelta arithmetic while datetime.datetime is mocked
    mock_datetime.timedelta = datetime.timedelta

    for (current_time, interval, result) in [
            # 2014-12-13 12:00:00+00:00 (about 5 days prior to expiry)
            # Times that should result in autorenewal/autodeployment
            (1418472000, "2 months", True), (1418472000, "1 week", True),
            # Times that should not
            (1418472000, "4 days", False), (1418472000, "2 days", False),
            # 2009-05-01 12:00:00+00:00 (about 5 years prior to expiry)
            # Times that should result in autorenewal/autodeployment
            (1241179200, "7 years", True),
            (1241179200, "11 years 2 months", True),
            # Times that should not
            (1241179200, "8 hours", False), (1241179200, "2 days", False),
            (1241179200, "40 days", False), (1241179200, "9 months", False),
            # 2015-01-01 (after expiry has already happened, so all
            # intervals should cause autorenewal/autodeployment)
            (1420070400, "0 seconds", True),
            (1420070400, "10 seconds", True),
            (1420070400, "10 minutes", True),
            (1420070400, "10 weeks", True), (1420070400, "10 months", True),
            (1420070400, "10 years", True), (1420070400, "99 months", True),
    ]:
        sometime = datetime.datetime.utcfromtimestamp(current_time)
        mock_datetime.datetime.utcnow.return_value = sometime
        self.test_rc.configuration["deploy_before_expiry"] = interval
        self.test_rc.configuration["renew_before_expiry"] = interval
        self.assertEqual(self.test_rc.should_autodeploy(), result)
        self.assertEqual(self.test_rc.should_autorenew(), result)
def test_should_autodeploy(self):
    """Test should_autodeploy() on the basis of reasons other than
    expiry time window."""
    # pylint: disable=too-many-statements
    # Autodeployment turned off
    self.test_rc.configuration["autodeploy"] = "0"
    self.assertFalse(self.test_rc.should_autodeploy())
    self.test_rc.configuration["autodeploy"] = "1"
    # No pending deployment
    for ver in xrange(1, 6):
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            if os.path.islink(where):
                os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}{1}.pem".format(kind, ver)), where)
            with open(where, "w") as f:
                f.write(kind)
    self.assertFalse(self.test_rc.should_autodeploy())
@mock.patch("letsencrypt.storage.RenewableCert.ocsp_revoked")
def test_should_autorenew(self, mock_ocsp):
    """Test should_autorenew on the basis of reasons other than
    expiry time window."""
    # pylint: disable=too-many-statements
    # Autorenewal turned off
    self.test_rc.configuration["autorenew"] = "0"
    self.assertFalse(self.test_rc.should_autorenew())
    self.test_rc.configuration["autorenew"] = "1"
    for kind in ALL_FOUR:
        where = getattr(self.test_rc, kind)
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                "{0}12.pem".format(kind)), where)
        with open(where, "w") as f:
            f.write(kind)
    # Mandatory renewal on the basis of OCSP revocation
    mock_ocsp.return_value = True
    self.assertTrue(self.test_rc.should_autorenew())
    mock_ocsp.return_value = False
def test_save_successor(self):
    """save_successor() appends a new version; an unchanged key becomes
    a symlink back to the version it was copied from."""
    for ver in xrange(1, 6):
        for kind in ALL_FOUR:
            where = getattr(self.test_rc, kind)
            if os.path.islink(where):
                os.unlink(where)
            os.symlink(os.path.join("..", "..", "archive", "example.org",
                                    "{0}{1}.pem".format(kind, ver)), where)
            with open(where, "w") as f:
                f.write(kind)
    self.test_rc.update_all_links_to(3)
    self.assertEqual(6, self.test_rc.save_successor(3, "new cert", None,
                                                    "new chain"))
    with open(self.test_rc.version("cert", 6)) as f:
        self.assertEqual(f.read(), "new cert")
    with open(self.test_rc.version("chain", 6)) as f:
        self.assertEqual(f.read(), "new chain")
    with open(self.test_rc.version("fullchain", 6)) as f:
        self.assertEqual(f.read(), "new cert" + "new chain")
    # version 6 of the key should be a link back to version 3
    self.assertFalse(os.path.islink(self.test_rc.version("privkey", 3)))
    self.assertTrue(os.path.islink(self.test_rc.version("privkey", 6)))
    # Let's try two more updates
    self.assertEqual(7, self.test_rc.save_successor(6, "again", None,
                                                    "newer chain"))
    self.assertEqual(8, self.test_rc.save_successor(7, "hello", None,
                                                    "other chain"))
    # All of the subsequent versions should link directly to the original
    # privkey.
    for i in (6, 7, 8):
        self.assertTrue(os.path.islink(self.test_rc.version("privkey", i)))
        self.assertEqual("privkey3.pem", os.path.basename(os.readlink(
            self.test_rc.version("privkey", i))))
    for kind in ALL_FOUR:
        self.assertEqual(self.test_rc.available_versions(kind), range(1, 9))
        self.assertEqual(self.test_rc.current_version(kind), 3)
    # Test updating from latest version rather than old version
    self.test_rc.update_all_links_to(8)
    self.assertEqual(9, self.test_rc.save_successor(8, "last", None,
                                                    "attempt"))
    for kind in ALL_FOUR:
        self.assertEqual(self.test_rc.available_versions(kind),
                         range(1, 10))
        self.assertEqual(self.test_rc.current_version(kind), 8)
    with open(self.test_rc.version("fullchain", 9)) as f:
        self.assertEqual(f.read(), "last" + "attempt")
    # Test updating when providing a new privkey.  The key should
    # be saved in a new file rather than creating a new symlink.
    self.assertEqual(10, self.test_rc.save_successor(9, "with", "a",
                                                     "key"))
    self.assertTrue(os.path.exists(self.test_rc.version("privkey", 10)))
    self.assertFalse(os.path.islink(self.test_rc.version("privkey", 10)))
def test_new_lineage(self):
    """Test for new_lineage() class method."""
    from letsencrypt import storage
    result = storage.RenewableCert.new_lineage(
        "the-lineage.com", "cert", "privkey", "chain", None,
        self.defaults, self.cli_config)
    # This consistency check tests most relevant properties about the
    # newly created cert lineage.
    self.assertTrue(result.consistent())
    self.assertTrue(os.path.exists(os.path.join(
        self.cli_config.renewal_configs_dir, "the-lineage.com.conf")))
    with open(result.fullchain) as f:
        self.assertEqual(f.read(), "cert" + "chain")
    # Let's do it again and make sure it makes a different lineage
    result = storage.RenewableCert.new_lineage(
        "the-lineage.com", "cert2", "privkey2", "chain2", None,
        self.defaults, self.cli_config)
    self.assertTrue(os.path.exists(os.path.join(
        self.cli_config.renewal_configs_dir, "the-lineage.com-0001.conf")))
    # Now trigger the detection of already existing files
    os.mkdir(os.path.join(
        self.cli_config.live_dir, "the-lineage.com-0002"))
    self.assertRaises(errors.CertStorageError,
                      storage.RenewableCert.new_lineage,
                      "the-lineage.com", "cert3", "privkey3", "chain3",
                      None, self.defaults, self.cli_config)
    os.mkdir(os.path.join(self.cli_config.archive_dir, "other-example.com"))
    self.assertRaises(errors.CertStorageError,
                      storage.RenewableCert.new_lineage,
                      "other-example.com", "cert4", "privkey4", "chain4",
                      None, self.defaults, self.cli_config)
    # Make sure it can accept renewal parameters
    params = {"stuff": "properties of stuff", "great": "awesome"}
    result = storage.RenewableCert.new_lineage(
        "the-lineage.com", "cert2", "privkey2", "chain2",
        params, self.defaults, self.cli_config)
    # TODO: Conceivably we could test that the renewal parameters actually
    #       got saved
def test_new_lineage_nonexistent_dirs(self):
    """Test that directories can be created if they don't exist."""
    from letsencrypt import storage
    # remove the directories set up by the fixture so new_lineage()
    # has to recreate the whole tree itself
    shutil.rmtree(self.cli_config.renewal_configs_dir)
    shutil.rmtree(self.cli_config.archive_dir)
    shutil.rmtree(self.cli_config.live_dir)

    storage.RenewableCert.new_lineage(
        "the-lineage.com", "cert2", "privkey2", "chain2",
        None, self.defaults, self.cli_config)
    self.assertTrue(os.path.exists(
        os.path.join(
            self.cli_config.renewal_configs_dir, "the-lineage.com.conf")))
    self.assertTrue(os.path.exists(os.path.join(
        self.cli_config.live_dir, "the-lineage.com", "privkey.pem")))
    self.assertTrue(os.path.exists(os.path.join(
        self.cli_config.archive_dir, "the-lineage.com", "privkey1.pem")))
@mock.patch("letsencrypt.storage.le_util.unique_lineage_name")
def test_invalid_config_filename(self, mock_uln):
    """new_lineage() rejects config filenames that do not end in .conf."""
    from letsencrypt import storage
    mock_uln.return_value = "this_does_not_end_with_dot_conf", "yikes"
    self.assertRaises(errors.CertStorageError,
                      storage.RenewableCert.new_lineage,
                      "example.com", "cert", "privkey", "chain",
                      None, self.defaults, self.cli_config)
def test_bad_kind(self):
    """Every accessor must raise CertStorageError for an unknown kind."""
    self.assertRaises(
        errors.CertStorageError, self.test_rc.current_target, "elephant")
    self.assertRaises(
        errors.CertStorageError, self.test_rc.current_version, "elephant")
    self.assertRaises(
        errors.CertStorageError, self.test_rc.version, "elephant", 17)
    self.assertRaises(
        errors.CertStorageError,
        self.test_rc.available_versions, "elephant")
    self.assertRaises(
        errors.CertStorageError,
        self.test_rc.newest_available_version, "elephant")
    self.assertRaises(
        errors.CertStorageError,
        self.test_rc.update_link_to, "elephant", 17)
def test_ocsp_revoked(self):
    """ocsp_revoked() is currently a stub that always reports False."""
    # XXX: This is currently hardcoded to False due to a lack of an
    #      OCSP server to test against.
    self.assertFalse(self.test_rc.ocsp_revoked())
def test_parse_time_interval(self):
    """parse_time_interval() converts English intervals to timedeltas (days)."""
    from letsencrypt import storage
    # XXX: I'm not sure if intervals related to years and months
    #      take account of the current date (if so, some of these
    #      may fail in the future, like in leap years or even in
    #      months of different lengths!)
    intended = {"": 0, "17 days": 17, "23": 23, "1 month": 31,
                "7 weeks": 49, "1 year 1 day": 366, "1 year-1 day": 364,
                "4 years": 1461}
    for time in intended:
        self.assertEqual(storage.parse_time_interval(time),
                         datetime.timedelta(intended[time]))
@mock.patch("letsencrypt.renewer.plugins_disco")
@mock.patch("letsencrypt.account.AccountFileStorage")
@mock.patch("letsencrypt.client.Client")
def test_renew(self, mock_c, mock_acc_storage, mock_pd):
    """renew() validates renewalparams before obtaining a new certificate."""
    from letsencrypt import renewer
    test_cert = test_util.load_vector("cert-san.pem")
    for kind in ALL_FOUR:
        os.symlink(os.path.join("..", "..", "archive", "example.org",
                                kind + "1.pem"),
                   getattr(self.test_rc, kind))
    fill_with_sample_data(self.test_rc)
    with open(self.test_rc.cert, "w") as f:
        f.write(test_cert)
    # Fails because renewalparams are missing
    self.assertFalse(renewer.renew(self.test_rc, 1))
    self.test_rc.configfile["renewalparams"] = {"some": "stuff"}
    # Fails because there's no authenticator specified
    self.assertFalse(renewer.renew(self.test_rc, 1))
    self.test_rc.configfile["renewalparams"]["rsa_key_size"] = "2048"
    self.test_rc.configfile["renewalparams"]["server"] = "acme.example.com"
    self.test_rc.configfile["renewalparams"]["authenticator"] = "fake"
    self.test_rc.configfile["renewalparams"]["dvsni_port"] = "4430"
    self.test_rc.configfile["renewalparams"]["account"] = "abcde"
    mock_auth = mock.MagicMock()
    mock_pd.PluginsRegistry.find_all.return_value = {"apache": mock_auth}
    # Fails because "fake" != "apache"
    self.assertFalse(renewer.renew(self.test_rc, 1))
    self.test_rc.configfile["renewalparams"]["authenticator"] = "apache"
    mock_client = mock.MagicMock()
    # pylint: disable=star-args
    mock_client.obtain_certificate.return_value = (
        mock.MagicMock(body=CERT), CERT, mock.Mock(pem="key"),
        mock.sentinel.csr)
    mock_c.return_value = mock_client
    self.assertEqual(2, renewer.renew(self.test_rc, 1))
    # TODO: We could also make several assertions about calls that should
    #       have been made to the mock functions here.
    mock_acc_storage().load.assert_called_once_with(account_id="abcde")
    mock_client.obtain_certificate.return_value = (
        mock.sentinel.certr, None, mock.sentinel.key, mock.sentinel.csr)
    # This should fail because the renewal itself appears to fail
    self.assertFalse(renewer.renew(self.test_rc, 1))
@mock.patch("letsencrypt.renewer.notify")
@mock.patch("letsencrypt.storage.RenewableCert")
@mock.patch("letsencrypt.renewer.renew")
def test_main(self, mock_renew, mock_rc, mock_notify):
    """Test for main() function."""
    from letsencrypt import renewer
    mock_rc_instance = mock.MagicMock()
    mock_rc_instance.should_autodeploy.return_value = True
    mock_rc_instance.should_autorenew.return_value = True
    mock_rc_instance.latest_common_version.return_value = 10
    mock_rc.return_value = mock_rc_instance
    with open(os.path.join(self.cli_config.renewal_configs_dir,
                           "README"), "w") as f:
        f.write("This is a README file to make sure that the renewer is")
        f.write("able to correctly ignore files that don't end in .conf.")
    with open(os.path.join(self.cli_config.renewal_configs_dir,
                           "example.org.conf"), "w") as f:
        # This isn't actually parsed in this test; we have a separate
        # test_initialization that tests the initialization, assuming
        # that configobj can correctly parse the config file.
        f.write("cert = cert.pem\nprivkey = privkey.pem\n")
        f.write("chain = chain.pem\nfullchain = fullchain.pem\n")
    with open(os.path.join(self.cli_config.renewal_configs_dir,
                           "example.com.conf"), "w") as f:
        f.write("cert = cert.pem\nprivkey = privkey.pem\n")
        f.write("chain = chain.pem\nfullchain = fullchain.pem\n")
    renewer.main(self.defaults, args=[
        '--config-dir', self.cli_config.config_dir])
    # two .conf files found; README ignored
    self.assertEqual(mock_rc.call_count, 2)
    self.assertEqual(mock_rc_instance.update_all_links_to.call_count, 2)
    self.assertEqual(mock_notify.notify.call_count, 4)
    self.assertEqual(mock_renew.call_count, 2)

    # If we have instances that don't need any work done, no work should
    # be done (call counts associated with processing deployments or
    # renewals should not increase).
    mock_happy_instance = mock.MagicMock()
    mock_happy_instance.should_autodeploy.return_value = False
    mock_happy_instance.should_autorenew.return_value = False
    mock_happy_instance.latest_common_version.return_value = 10
    mock_rc.return_value = mock_happy_instance
    renewer.main(self.defaults, args=[
        '--config-dir', self.cli_config.config_dir])
    self.assertEqual(mock_rc.call_count, 4)
    self.assertEqual(mock_happy_instance.update_all_links_to.call_count, 0)
    self.assertEqual(mock_notify.notify.call_count, 4)
    self.assertEqual(mock_renew.call_count, 2)
def test_bad_config_file(self):
    """main() must survive a renewal config that fails to initialize."""
    from letsencrypt import renewer
    with open(os.path.join(self.cli_config.renewal_configs_dir,
                           "bad.conf"), "w") as f:
        f.write("incomplete = configfile\n")
    renewer.main(self.defaults, args=[
        '--config-dir', self.cli_config.config_dir])
    # The errors.CertStorageError is caught inside and nothing happens.
# allow running this test module directly
if __name__ == "__main__":
    unittest.main()  # pragma: no cover
|
tmpgit/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyAugmentAssignmentInspection/differentOperations.py
|
74
|
# PY-2488
<weak_warning descr="Assignment can be replaced with augmented assignment">a = a ** 1</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x % 3</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x | 3</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x & 3</weak_warning>
<weak_warning descr="Assignment can be replaced with augmented assignment">x = x ^ 3</weak_warning>
|
sYnfo/samba-1
|
refs/heads/master
|
third_party/waf/wafadmin/Utils.py
|
12
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""
Utilities, the stable ones are the following:
* h_file: compute a unique value for a file (hash), it uses
the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
else, md5 (see the python docs)
For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
it is possible to use a hashing based on the path and the size (may give broken cache results)
The method h_file MUST raise an OSError if the file is a folder
import stat
def h_file(filename):
st = os.lstat(filename)
if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
m = Utils.md5()
m.update(str(st.st_mtime))
m.update(str(st.st_size))
m.update(filename)
return m.digest()
To replace the function in your project, use something like this:
import Utils
Utils.h_file = h_file
* h_list
* h_fun
* get_term_cols
* ordered_dict
"""
import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
# In python 3.0 we can get rid of all this
try: from UserDict import UserDict
except ImportError: from collections import UserDict
if sys.hexversion >= 0x2060000 or os.name == 'java':
import subprocess as pproc
else:
import pproc
import Logs
from Constants import *
# Python < 2.4 lacks collections.deque; fall back to a list-based shim
# providing only the popleft() used by this codebase.
try:
    from collections import deque
except ImportError:
    class deque(list):
        def popleft(self):
            # O(n), but adequate for the small queues used here
            return self.pop(0)

# cached platform test used throughout this module
is_win32 = sys.platform == 'win32'
try:
    # defaultdict in python 2.5
    from collections import defaultdict as DefaultDict
except ImportError:
    # minimal re-implementation for python 2.4
    class DefaultDict(dict):
        def __init__(self, default_factory):
            super(DefaultDict, self).__init__()
            self.default_factory = default_factory
        def __getitem__(self, key):
            try:
                return super(DefaultDict, self).__getitem__(key)
            except KeyError:
                # missing key: create, store and return a default value
                value = self.default_factory()
                self[key] = value
                return value
class WafError(Exception):
    """Base error for Waf; records the call stack at construction time.

    str() of a single-argument error prints the bare argument; with
    several arguments the whole tuple is printed.
    """
    def __init__(self, *args):
        self.args = args
        try:
            self.stack = traceback.extract_stack()
        except Exception:
            # stack capture is best-effort only; a bare 'except:' here
            # would also swallow KeyboardInterrupt/SystemExit
            pass
        Exception.__init__(self, *args)
    def __str__(self):
        # NOTE: a single falsy argument (e.g. '') falls through to the
        # tuple form — preserved historical behaviour
        return str(len(self.args) == 1 and self.args[0] or self.args)
class WscriptError(WafError):
    """Error raised while processing a wscript; tries to locate the
    offending file and line from the traceback when not given explicitly."""
    def __init__(self, message, wscript_file=None):
        if wscript_file:
            self.wscript_file = wscript_file
            self.wscript_line = None
        else:
            try:
                (self.wscript_file, self.wscript_line) = self.locate_error()
            except:
                # location is best-effort; never mask the original message
                (self.wscript_file, self.wscript_line) = (None, None)

        msg_file_line = ''
        if self.wscript_file:
            msg_file_line = "%s:" % self.wscript_file
            if self.wscript_line:
                msg_file_line += "%s:" % self.wscript_line
        err_message = "%s error: %s" % (msg_file_line, message)
        WafError.__init__(self, err_message)

    def locate_error(self):
        """Walk the stack (innermost first) for a frame from a wscript file."""
        stack = traceback.extract_stack()
        stack.reverse()
        for frame in stack:
            file_name = os.path.basename(frame[0])
            # WSCRIPT_FILE / WSCRIPT_BUILD_FILE come from Constants
            is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
            if is_wscript:
                return (frame[0], frame[1])
        return (None, None)
# progress-bar line template; win32 consoles need the cursor moved up first
indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'

try:
    # prefer the optional 'fnv' extension for fast file hashing
    from fnv import new as md5
    import Constants
    Constants.SIG_NIL = 'signofnv'

    def h_file(filename):
        """Hash a file with fnv; raises OSError if it is not a regular file."""
        m = md5()
        try:
            m.hfile(filename)
            x = m.digest()
            if x is None: raise OSError("not a file")
            return x
        except SystemError:
            raise OSError("not a file" + filename)

except ImportError:
    try:
        try:
            from hashlib import md5
        except ImportError:
            from md5 import md5

        def h_file(filename):
            """Hash the file contents with md5, reading 100kB at a time."""
            f = open(filename, 'rb')
            m = md5()
            # NOTE: 'filename' is reused as the read buffer below
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
    except ImportError:
        # portability fixes may be added elsewhere (although, md5 should be everywhere by now)
        md5 = None
class ordered_dict(UserDict):
    """Mapping that remembers the order in which keys were first inserted
    (kept in the 'allkeys' list)."""
    def __init__(self, dict = None):
        self.allkeys = []
        UserDict.__init__(self, dict)

    def __setitem__(self, key, item):
        # record the key only the first time it appears
        if key not in self.allkeys:
            self.allkeys.append(key)
        UserDict.__setitem__(self, key, item)

    def __delitem__(self, key):
        self.allkeys.remove(key)
        UserDict.__delitem__(self, key)
def exec_command(s, **kw):
    """Run a command with pproc.Popen and return its exit status.

    A 'log' keyword redirects both stdout and stderr to that stream.
    Returns -1 when the process cannot be started at all.
    """
    if 'log' in kw:
        kw['stdout'] = kw['stderr'] = kw.pop('log')
    kw['shell'] = isinstance(s, str)
    try:
        return pproc.Popen(s, **kw).wait()
    except OSError:
        return -1
if is_win32:
    # Windows variant: suppress the console window for very long command
    # lines and capture/relay output through Logs when not redirected.
    def exec_command(s, **kw):
        if 'log' in kw:
            kw['stdout'] = kw['stderr'] = kw['log']
            del(kw['log'])
        kw['shell'] = isinstance(s, str)

        if len(s) > 2000:
            startupinfo = pproc.STARTUPINFO()
            startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
            kw['startupinfo'] = startupinfo

        try:
            if 'stdout' not in kw:
                kw['stdout'] = pproc.PIPE
                kw['stderr'] = pproc.PIPE
                kw['universal_newlines'] = True
                proc = pproc.Popen(s, **kw)
                (stdout, stderr) = proc.communicate()
                Logs.info(stdout)
                if stderr:
                    Logs.error(stderr)
                return proc.returncode
            else:
                proc = pproc.Popen(s, **kw)
                return proc.wait()
        except OSError:
            return -1
listdir = os.listdir
if is_win32:
    def listdir_win32(s):
        """os.listdir wrapper raising ENOENT for a bare drive name like 'c:'."""
        if re.match('^[A-Za-z]:$', s):
            # os.path.isdir fails if s contains only the drive name... (x:)
            s += os.sep
        if not os.path.isdir(s):
            e = OSError()
            e.errno = errno.ENOENT
            raise e
        return os.listdir(s)
    listdir = listdir_win32
def waf_version(mini = 0x010000, maxi = 0x100000):
    "Halts if the waf version is wrong"
    # bounds may be ints (hex) or dotted strings such as '1.5.0';
    # 'x + 0' raises TypeError for strings, triggering the conversion
    ver = HEXVERSION
    try: min_val = mini + 0
    except TypeError: min_val = int(mini.replace('.', '0'), 16)
    if min_val > ver:
        Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
        sys.exit(1)
    try: max_val = maxi + 0
    except TypeError: max_val = int(maxi.replace('.', '0'), 16)
    if max_val < ver:
        Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
        sys.exit(1)
def python_24_guard():
    """Refuse to run on interpreters outside the supported 2.4-2.6 range."""
    too_old = sys.hexversion < 0x20400f0
    too_new = sys.hexversion >= 0x3000000
    if too_old or too_new:
        raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
def ex_stack():
    """Format the current exception; full traceback in verbose mode."""
    exc_type, exc_value, tb = sys.exc_info()
    if Logs.verbose > 1:
        exc_lines = traceback.format_exception(exc_type, exc_value, tb)
        return ''.join(exc_lines)
    return str(exc_value)
def to_list(sth):
    """Return *sth* split on whitespace when it is a string, else unchanged."""
    if not isinstance(sth, str):
        return sth
    return sth.split()
g_loaded_modules = {}
"index modules by absolute path"

g_module = None
"the main module is special"

def load_module(file_path, name=WSCRIPT_FILE):
    "this function requires an absolute path"
    try:
        # reuse a previously loaded wscript module
        return g_loaded_modules[file_path]
    except KeyError:
        pass

    module = imp.new_module(name)

    try:
        code = readf(file_path, m='rU')
    except (IOError, OSError):
        raise WscriptError('Could not read the file %r' % file_path)

    module.waf_hash_val = code

    dt = os.path.dirname(file_path)
    # make sibling modules importable while the wscript executes
    sys.path.insert(0, dt)
    try:
        exec(compile(code, file_path, 'exec'), module.__dict__)
    except Exception:
        exc_type, exc_value, tb = sys.exc_info()
        raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
    sys.path.remove(dt)

    g_loaded_modules[file_path] = module

    return module
def set_main_module(file_path):
    """Load the top-level wscript as the main module and remember it.

    Provides defaults for APPNAME/VERSION when the wscript does not
    define them.  (The former bare 'except:' clauses could also swallow
    KeyboardInterrupt/SystemExit; hasattr() is both safer and clearer.)
    """
    global g_module
    g_module = load_module(file_path, 'wscript_main')
    g_module.root_path = file_path

    if not hasattr(g_module, 'APPNAME'):
        g_module.APPNAME = 'noname'
    if not hasattr(g_module, 'VERSION'):
        g_module.VERSION = '1.0'

    # note: to register the module globally, use the following:
    # sys.modules['wscript_main'] = g_module
def to_hashtable(s):
    """Parse 'KEY=VALUE' lines into a dict (used for importing env files).

    Splits on the first '=' only, so values may themselves contain '='
    (the old unlimited split silently truncated such values and crashed
    with IndexError on lines without any '=').  Blank lines are skipped;
    a line without '=' maps the whole line to ''.
    """
    tbl = {}
    lst = s.split('\n')
    for line in lst:
        if not line: continue
        mems = line.split('=', 1)
        if len(mems) == 1:
            tbl[mems[0]] = ''
        else:
            tbl[mems[0]] = mems[1]
    return tbl
def get_term_cols():
    "console width"
    # conservative default when the terminal size cannot be queried
    return 80
try:
    import struct, fcntl, termios
except ImportError:
    pass
else:
    if Logs.got_tty:
        def myfun():
            # ask the terminal driver for the real window size
            dummy_lines, cols = struct.unpack("HHHH", \
                fcntl.ioctl(sys.stderr.fileno(), termios.TIOCGWINSZ, \
                    struct.pack("HHHH", 0, 0, 0, 0)))[:2]
            return cols
        # we actually try the function once to see if it is suitable
        try:
            myfun()
        except:
            pass
        else:
            get_term_cols = myfun
rot_idx = 0
rot_chr = ['\\', '|', '/', '-']
"the rotation character in the progress bar"

def split_path(path):
    """Split a POSIX path on '/'."""
    return path.split('/')

def split_path_cygwin(path):
    """Split a cygwin path; the '//server' prefix of UNC paths is kept intact."""
    if path.startswith('//'):
        ret = path.split('/')[2:]
        ret[0] = '/' + ret[0]
        return ret
    return path.split('/')
re_sp = re.compile('[/\\\\]')
def split_path_win32(path):
    """Split a Windows path on '/' or '\\'; keep the UNC host prefix intact."""
    parts = re_sp.split(path)
    if path.startswith('\\\\'):
        # UNC path: drop the two empty leading components and restore
        # the backslash on the host name
        parts = parts[2:]
        parts[0] = '\\' + parts[0]
    return parts
# pick the path splitter that matches the host platform
if sys.platform == 'cygwin':
    split_path = split_path_cygwin
elif is_win32:
    split_path = split_path_win32
def copy_attrs(orig, dest, names, only_if_set=False):
    """Copy the attributes listed in *names* from *orig* to *dest*.

    Missing attributes default to the empty tuple; with only_if_set,
    falsy values are skipped instead of copied.
    """
    for attr in to_list(names):
        value = getattr(orig, attr, ())
        if value or not only_if_set:
            setattr(dest, attr, value)
def def_attrs(cls, **kw):
    '''
    set attributes for class.
    @param cls [any class]: the class to update the given attributes in.
    @param kw [dictionary]: dictionary of attributes names and values.

    only attributes the class does not already have are added.
    '''
    for name, value in kw.items():
        if not hasattr(cls, name):
            setattr(cls, name, value)
def quote_define_name(path):
    """Turn *path* into an uppercase, C-preprocessor-safe identifier."""
    sanitized = re.compile("[^a-zA-Z0-9]").sub("_", path)
    return sanitized.upper()
def quote_whitespace(path):
    """Wrap *path* in double quotes when it contains an interior space."""
    if path.strip().find(' ') > 0:
        quoted = '"%s"' % path
    else:
        quoted = path
    return quoted.replace('""', '"')
def trimquotes(s):
    """Strip trailing whitespace and one pair of surrounding single quotes.

    Returns '' for empty/None input.  Whitespace-only input used to
    crash with IndexError after rstrip() emptied the string; it now
    returns '' as well.
    """
    if not s: return ''
    s = s.rstrip()
    if not s: return ''
    if s[0] == "'" and s[-1] == "'": return s[1:-1]
    return s
def h_list(lst):
    """Hash an arbitrary (repr-able) object via md5 of its str() form."""
    m = md5()
    # NOTE(review): str(lst) is a byte string under python 2; this module
    # targets py2 — under py3 md5.update would require bytes
    m.update(str(lst))
    return m.digest()
def h_fun(fun):
    """Return a source-based signature for *fun*, cached on fun.code.

    Falls back to the constant "nocode" when the source is unavailable.
    """
    try:
        return fun.code
    except AttributeError:
        pass
    try:
        sig = inspect.getsource(fun)
    except IOError:
        sig = "nocode"
    try:
        # cache for subsequent calls; some callables reject new attributes
        fun.code = sig
    except AttributeError:
        pass
    return sig
def pprint(col, str, label='', sep='\n'):
    "print messages in color"
    # 'col' is a color name understood by Logs.colors; note that the
    # parameter 'str' shadows the builtin of the same name
    sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
def check_dir(path):
    """Create the folder *path* (with parents) if it does not already exist.

    A concurrent mkdir by another process is tolerated; WafError is
    raised only when the directory is still missing after the attempt.
    (The original raised Errors.WafError, but no 'Errors' module is
    imported in this file — that path was a guaranteed NameError; the
    local WafError class is used instead.)
    """
    if not os.path.isdir(path):
        try:
            os.makedirs(path)
        except OSError:
            if not os.path.isdir(path):
                raise WafError('Cannot create the folder %r' % path)
def cmd_output(cmd, **kw):
    """Run *cmd* and return its standard output.

    silent=True suppresses stderr and the non-zero-exit exception;
    e=<dict> is accepted as an alias for the 'env' keyword.  Raises
    ValueError when the command cannot be started or (unless silent)
    exits with a non-zero status.
    """
    silent = False
    if 'silent' in kw:
        silent = kw['silent']
        del(kw['silent'])

    if 'e' in kw:
        tmp = kw['e']
        del(kw['e'])
        kw['env'] = tmp

    kw['shell'] = isinstance(cmd, str)
    kw['stdout'] = pproc.PIPE
    if silent:
        kw['stderr'] = pproc.PIPE

    try:
        p = pproc.Popen(cmd, **kw)
        output = p.communicate()[0]
    except OSError, e:
        raise ValueError(str(e))

    if p.returncode:
        if not silent:
            msg = "command execution failed: %s -> %r" % (cmd, str(output))
            raise ValueError(msg)
        output = ''
    return output
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr, params):
    "substitute ${PREFIX}/bin in /usr/local/bin"
    def _expand(match):
        # literal escapes: '\\' and '$$'
        if match.group(1):
            return '\\'
        if match.group(2):
            return '$'
        name = match.group(3)
        try:
            # environments may contain lists
            return params.get_flat(name)
        except AttributeError:
            return params[name]
    return reg_subst.sub(_expand, expr)
def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
    "infers the binary format from the unversioned_sys_platform name."
    if unversioned_sys_platform == 'darwin':
        return 'mac-o'
    if unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
        return 'pe'
    # TODO we assume all other operating systems are elf, which is not true.
    # we may set this to 'unknown' and have ccroot and other tools handle
    # the case "gracefully" (whatever that means).
    return 'elf'
def unversioned_sys_platform():
    """Return sys.platform with any trailing version digits removed.

    sys.platform values like 'freebsd7' depend on the interpreter's build
    host, so the trailing number is unreliable; special names such as
    'win32' or 'os2' are kept as-is.  Under Jython the real OS name is
    taken from the JVM instead.
    """
    plat = sys.platform
    if plat == 'java':
        # The real OS is hidden under the JVM.
        from java.lang import System
        plat = System.getProperty('os.name')
        # see http://lopica.sourceforge.net/os.html for a list of possible values
        jvm_names = {'Mac OS X': 'darwin', 'OS/2': 'os2', 'HP-UX': 'hpux',
                     'SunOS': 'sunos', 'Solaris': 'sunos'}
        if plat in jvm_names:
            return jvm_names[plat]
        if plat.startswith('Windows '):
            return 'win32'
        plat = plat.lower()
    if plat == 'win32' or (plat.endswith('os2') and plat != 'sunos2'):
        return plat
    return re.split(r'\d+$', plat)[0]
#@deprecated('use unversioned_sys_platform instead')
def detect_platform():
    """this function has been in the Utils module for some time.
    It's hard to guess what people have used it for.
    It seems its goal is to return an unversionned sys.platform, but it's not handling all platforms.
    For example, the version is not removed on freebsd and netbsd, amongst others.
    """
    plat = sys.platform

    # known POSIX (sys.platform may be e.g. linux2)
    for known in ('cygwin', 'linux', 'irix', 'sunos', 'hpux', 'aix', 'darwin', 'gnu'):
        if plat.find(known) >= 0:
            return known

    # unknown POSIX
    if os.name in ('posix', 'java', 'os2'):
        return os.name

    return plat
def load_tool(tool, tooldir=None):
    '''
    load_tool: import a Python module, optionally using several directories.
    @param tool [string]: name of tool to import.
    @param tooldir [list]: directories to look for the tool.
    @return: the loaded module.

    Warning: this function is not thread-safe: plays with sys.path,
    so must run in sequence.
    '''
    if not tooldir:
        tooldir = []
    else:
        assert isinstance(tooldir, list)
        sys.path = tooldir + sys.path
    try:
        return __import__(tool)
    finally:
        # always restore sys.path, even when the import fails
        for entry in tooldir:
            sys.path.remove(entry)
def readf(fname, m='r'):
    """Read and return the whole contents of *fname*, opened with mode *m*."""
    with open(fname, m) as handle:
        return handle.read()
def nada(*k, **kw):
    """No-op placeholder: accepts any arguments, does nothing, returns None."""
    return None
def diff_path(top, subdir):
    """Return the path of *subdir* relative to *top* (both absolute paths).

    Returns '' when the two paths have the same number of components.
    """
    top_parts = os.path.normpath(top).replace('\\', '/').split('/')
    sub_parts = os.path.normpath(subdir).replace('\\', '/').split('/')
    if len(top_parts) == len(sub_parts):
        return ''
    # Keep the trailing components of subdir that extend beyond top.
    tail = sub_parts[len(top_parts) - len(sub_parts):]
    return os.path.join(*tail)
class Context(object):
    """A base class for commands to be executed from Waf scripts.

    Provides a lazily-initialized ``curdir`` property and :meth:`recurse`,
    which runs wscript code from sub-folders.
    """
    def set_curdir(self, dir):
        # Backing attribute for the ``curdir`` property.
        self.curdir_ = dir
    def get_curdir(self):
        try:
            return self.curdir_
        except AttributeError:
            # First access: default to the process working directory.
            self.curdir_ = os.getcwd()
            return self.get_curdir()
    curdir = property(get_curdir, set_curdir)
    def recurse(self, dirs, name=''):
        """The function for calling scripts from folders.

        For each folder it first tries ``wscript_<name>`` (executed as a
        standalone script), and if that file does not exist it loads the
        folder's ``wscript`` module and calls its function ``name``.

        :param dirs: list of folders, or a string of space-separated paths
        :param name: function/file suffix; defaults to the caller's own
            function name (taken from the call stack)
        """
        if not name:
            # Default to the name of the calling function (e.g. 'configure').
            name = inspect.stack()[1][3]
        if isinstance(dirs, str):
            dirs = to_list(dirs)
        for x in dirs:
            if os.path.isabs(x):
                nexdir = x
            else:
                nexdir = os.path.join(self.curdir, x)
            base = os.path.join(nexdir, WSCRIPT_FILE)
            file_path = base + '_' + name
            try:
                # Prefer a dedicated 'wscript_<name>' file, read as text.
                txt = readf(file_path, m='rU')
            except (OSError, IOError):
                # Fall back to the 'wscript' module and call its function.
                try:
                    module = load_module(base)
                except OSError:
                    raise WscriptError('No such script %s' % base)
                try:
                    f = module.__dict__[name]
                except KeyError:
                    raise WscriptError('No function %s defined in %s' % (name, base))
                if getattr(self.__class__, 'pre_recurse', None):
                    self.pre_recurse(f, base, nexdir)
                # Temporarily switch curdir while the sub-script runs.
                old = self.curdir
                self.curdir = nexdir
                try:
                    f(self)
                finally:
                    self.curdir = old
                if getattr(self.__class__, 'post_recurse', None):
                    self.post_recurse(module, base, nexdir)
            else:
                # Execute the standalone script text with 'ctx' bound to self.
                dc = {'ctx': self}
                if getattr(self.__class__, 'pre_recurse', None):
                    # pre_recurse may supply a replacement execution namespace.
                    dc = self.pre_recurse(txt, file_path, nexdir)
                old = self.curdir
                self.curdir = nexdir
                try:
                    try:
                        exec(compile(txt, file_path, 'exec'), dc)
                    except Exception:
                        # Re-raise with the full traceback text for diagnostics.
                        exc_type, exc_value, tb = sys.exc_info()
                        raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
                finally:
                    self.curdir = old
                if getattr(self.__class__, 'post_recurse', None):
                    self.post_recurse(txt, file_path, nexdir)
if is_win32:
    # Patch shutil.copy2 on win32 so the destination reliably receives the
    # source's stat metadata (permissions/timestamps).
    old = shutil.copy2
    def copy2(src, dst):
        old(src, dst)
        # BUG FIX: the original called copystat(src, src), a no-op that never
        # applied the stat data to the destination; it must target dst.
        shutil.copystat(src, dst)
    setattr(shutil, 'copy2', copy2)
def zip_folder(dir, zip_file_name, prefix):
    """Deflate-compress the tree under *dir* into *zip_file_name*.

    :param prefix: folder name prepended to every entry inside the archive
        (the "app" directory); a trailing separator is added if missing.
    """
    import zipfile
    archive = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
    base = os.path.abspath(dir)
    if prefix and not prefix.endswith(os.sep):
        prefix += os.sep
    base_len = len(base)
    for root, _dirs, files in os.walk(base):
        for fname in files:
            # Archive path = prefix + path relative to base.
            arcname = prefix + root[base_len:] + os.sep + fname
            archive.write(root + os.sep + fname, arcname, zipfile.ZIP_DEFLATED)
    archive.close()
def get_elapsed_time(start):
    """Format the time elapsed since *start* as DdHhMmS.MSs.

    Leading zero-valued units are omitted (e.g. '3m12.345s', '1d0h2m0.001s').
    """
    delta = datetime.datetime.now() - start
    days = int(delta.days)
    hours, rem = divmod(delta.seconds, 3600)
    minutes, whole_seconds = divmod(rem, 60)
    seconds = whole_seconds + delta.microseconds / 1000000.0
    pieces = []
    if days:
        pieces.append('%dd' % days)
    if days or hours:
        pieces.append('%dh' % hours)
    if days or hours or minutes:
        pieces.append('%dm' % minutes)
    return '%s%.3fs' % (''.join(pieces), seconds)
if os.name == 'java':
    # For Jython (they should really fix the inconsistency): some Jython
    # versions implement gc.enable but raise NotImplementedError from
    # gc.disable; alias disable to enable so callers can toggle freely.
    try:
        gc.disable()
        gc.enable()
    except NotImplementedError:
        gc.disable = gc.enable
def run_once(fun):
    """Decorator that memoizes a single-argument function.

    Usage::

        @run_once
        def foo(k):
            return expensive(k)

    The cache dict is exposed on the wrapper as ``__cache__``.
    """
    memo = {}
    def wrap(k):
        if k not in memo:
            memo[k] = fun(k)
        return memo[k]
    wrap.__cache__ = memo
    return wrap
|
chromium/chromium
|
refs/heads/master
|
build/android/stacktrace/java_deobfuscate_test.py
|
7
|
#!/usr/bin/env vpython
#
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for java_deobfuscate."""
import argparse
import os
import subprocess
import sys
import tempfile
import unittest
# Set by command-line argument.
_JAVA_DEOBFUSCATE_PATH = None
# Each test line is exercised both bare and behind these logcat prefixes,
# which the deobfuscator must pass through unchanged.
LINE_PREFIXES = [
    '',
    # logcat -v threadtime
    '09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ',
    # logcat
    'W/GCM (15158): ',
    'W/GCM ( 158): ',
]
TEST_MAP = """\
this.was.Deobfuscated -> FOO:
int[] mFontFamily -> a
1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
never.Deobfuscated -> NOTFOO:
int[] mFontFamily -> a
1:3:void someMethod(int,android.os.Bundle):65:67 -> bar
"""
TEST_DATA = [
'',
'FOO',
'FOO.bar',
'Here is a FOO',
'Here is a class FOO',
'Here is a class FOO baz',
'Here is a "FOO" baz',
'Here is a type "FOO" baz',
'Here is a "FOO.bar" baz',
'SomeError: SomeFrameworkClass in isTestClass for FOO',
'Here is a FOO.bar',
'Here is a FOO.bar baz',
'END FOO#bar',
'new-instance 3810 (LSome/Framework/Class;) in LFOO;',
'FOO: Error message',
'Caused by: FOO: Error message',
'\tat FOO.bar(PG:1)',
'\t at\t FOO.bar\t (\t PG:\t 1\t )',
('Unable to start activity ComponentInfo{garbage.in/here.test}:'
' java.lang.NullPointerException: Attempt to invoke interface method'
' \'void FOO.bar(int,android.os.Bundle)\' on a null object reference'),
('Caused by: java.lang.NullPointerException: Attempt to read from field'
' \'int[] FOO.a\' on a null object reference'),
'java.lang.VerifyError: FOO',
('java.lang.NoSuchFieldError: No instance field a of type '
'Ljava/lang/Class; in class LFOO;'),
'NOTFOO: Object of type FOO was not destroyed...',
]
EXPECTED_OUTPUT = [
'',
'this.was.Deobfuscated',
'this.was.Deobfuscated.someMethod',
'Here is a FOO',
'Here is a class this.was.Deobfuscated',
'Here is a class FOO baz',
'Here is a "FOO" baz',
'Here is a type "this.was.Deobfuscated" baz',
'Here is a "this.was.Deobfuscated.someMethod" baz',
'SomeError: SomeFrameworkClass in isTestClass for this.was.Deobfuscated',
'Here is a this.was.Deobfuscated.someMethod',
'Here is a FOO.bar baz',
'END this.was.Deobfuscated#someMethod',
'new-instance 3810 (LSome/Framework/Class;) in Lthis/was/Deobfuscated;',
'this.was.Deobfuscated: Error message',
'Caused by: this.was.Deobfuscated: Error message',
'\tat this.was.Deobfuscated.someMethod(Deobfuscated.java:65)',
('\t at\t this.was.Deobfuscated.someMethod\t '
'(\t Deobfuscated.java:\t 65\t )'),
('Unable to start activity ComponentInfo{garbage.in/here.test}:'
' java.lang.NullPointerException: Attempt to invoke interface method'
' \'void this.was.Deobfuscated.someMethod(int,android.os.Bundle)\' on a'
' null object reference'),
('Caused by: java.lang.NullPointerException: Attempt to read from field'
' \'int[] this.was.Deobfuscated.mFontFamily\' on a null object reference'),
'java.lang.VerifyError: this.was.Deobfuscated',
('java.lang.NoSuchFieldError: No instance field mFontFamily of type '
'Ljava/lang/Class; in class Lthis/was/Deobfuscated;'),
'NOTFOO: Object of type this.was.Deobfuscated was not destroyed...',
]
TEST_DATA = [s + '\n' for s in TEST_DATA]
EXPECTED_OUTPUT = [s + '\n' for s in EXPECTED_OUTPUT]
class JavaDeobfuscateTest(unittest.TestCase):
    """Drives the java_deobfuscate binary over stdin/stdout and checks that
    obfuscated symbols are mapped per TEST_MAP while other text is untouched.
    """
    def __init__(self, *args, **kwargs):
        super(JavaDeobfuscateTest, self).__init__(*args, **kwargs)
        self._map_file = None
    def setUp(self):
        # Write the proguard mapping to a temp file the tool can read.
        # NOTE(review): writes a str to a binary-mode NamedTemporaryFile —
        # this presumes Python 2 semantics; would need encoding on Python 3.
        self._map_file = tempfile.NamedTemporaryFile()
        self._map_file.write(TEST_MAP)
        self._map_file.flush()
    def tearDown(self):
        if self._map_file:
            self._map_file.close()
    def _testImpl(self, input_lines=None, expected_output_lines=None,
                  prefix=''):
        """Pipe input_lines through the tool and compare line-by-line.

        When input/expected are omitted, the module-level TEST_DATA /
        EXPECTED_OUTPUT tables are used with *prefix* prepended to each line.
        """
        # Either both line lists are given or neither is.
        self.assertTrue(bool(input_lines) == bool(expected_output_lines))
        if not input_lines:
            input_lines = [prefix + x for x in TEST_DATA]
        if not expected_output_lines:
            expected_output_lines = [prefix + x for x in EXPECTED_OUTPUT]
        cmd = [_JAVA_DEOBFUSCATE_PATH, self._map_file.name]
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        proc_output, _ = proc.communicate(''.join(input_lines))
        actual_output_lines = proc_output.splitlines(True)
        for actual, expected in zip(actual_output_lines, expected_output_lines):
            # Accept either the raw or the method-deobfuscated form, since
            # 'bar' may or may not be rewritten depending on context.
            self.assertTrue(
                actual == expected or actual.replace('bar', 'someMethod') == expected,
                msg=''.join([
                    'Deobfuscation failed.\n',
                    '  actual:   %s' % actual,
                    '  expected: %s' % expected]))
    def testNoPrefix(self):
        self._testImpl(prefix='')
    def testThreadtimePrefix(self):
        self._testImpl(prefix='09-08 14:38:35.535 18029 18084 E qcom_sensors_hal: ')
    def testStandardPrefix(self):
        self._testImpl(prefix='W/GCM (15158): ')
    def testStandardPrefixWithPadding(self):
        self._testImpl(prefix='W/GCM ( 158): ')
    @unittest.skip('causes java_deobfuscate to hang, see crbug.com/876539')
    def testIndefiniteHang(self):
        # Test for crbug.com/876539.
        self._testImpl(
            input_lines=[
                'VFY: unable to resolve virtual method 2: LFOO;'
                + '.onDescendantInvalidated '
                + '(Landroid/view/View;Landroid/view/View;)V',
            ],
            expected_output_lines=[
                'VFY: unable to resolve virtual method 2: Lthis.was.Deobfuscated;'
                + '.onDescendantInvalidated '
                + '(Landroid/view/View;Landroid/view/View;)V',
            ])
if __name__ == '__main__':
    # --java-deobfuscate-path locates the binary under test; all remaining
    # argv is forwarded to unittest's own CLI.
    parser = argparse.ArgumentParser()
    parser.add_argument('--java-deobfuscate-path', type=os.path.realpath,
                        required=True)
    known_args, unittest_args = parser.parse_known_args()
    _JAVA_DEOBFUSCATE_PATH = known_args.java_deobfuscate_path
    # Re-insert the program name expected by unittest.main's argv.
    unittest_args = [sys.argv[0]] + unittest_args
    unittest.main(argv=unittest_args)
|
albertrdixon/CouchPotatoServer
|
refs/heads/master
|
libs/rsa/pem.py
|
216
|
# -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Functions that load and write PEM-encoded files.'''
import base64
from rsa._compat import b, is_bytes
def _markers(pem_marker):
    '''Return the (start, end) PEM marker byte strings for *pem_marker*.'''
    # Normalize a bytes marker to text before formatting.
    if is_bytes(pem_marker):
        pem_marker = pem_marker.decode('utf-8')
    begin = b('-----BEGIN %s-----' % pem_marker)
    end = b('-----END %s-----' % pem_marker)
    return (begin, end)
def load_pem(contents, pem_marker):
    '''Loads a PEM file.

    @param contents: the contents of the file to interpret
    @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
        when your file has '-----BEGIN RSA PRIVATE KEY-----' and
        '-----END RSA PRIVATE KEY-----' markers.

    @return the base64-decoded content between the start and end markers.

    @raise ValueError: when the content is invalid, for example when the start
        marker cannot be found.
    '''
    (pem_start, pem_end) = _markers(pem_marker)

    pem_lines = []
    in_pem_part = False

    for line in contents.splitlines():
        line = line.strip()

        # Skip empty lines
        if not line:
            continue

        # Handle start marker
        if line == pem_start:
            if in_pem_part:
                raise ValueError('Seen start marker "%s" twice' % pem_start)
            in_pem_part = True
            continue

        # Skip stuff before first marker
        if not in_pem_part:
            continue

        # Handle end marker
        if in_pem_part and line == pem_end:
            in_pem_part = False
            break

        # Skip "Name: value" header fields inside the PEM body
        if b(':') in line:
            continue

        pem_lines.append(line)

    # Do some sanity checks
    if not pem_lines:
        raise ValueError('No PEM start marker "%s" found' % pem_start)
    if in_pem_part:
        raise ValueError('No PEM end marker "%s" found' % pem_end)

    # Base64-decode the contents.
    # FIX: base64.decodestring was deprecated and removed in Python 3.9;
    # use decodebytes when available, falling back for old interpreters.
    pem = b('').join(pem_lines)
    decode = getattr(base64, 'decodebytes', None) or base64.decodestring
    return decode(pem)
def save_pem(contents, pem_marker):
    '''Saves a PEM file.

    @param contents: the contents to encode in PEM format
    @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
        when your file has '-----BEGIN RSA PRIVATE KEY-----' and
        '-----END RSA PRIVATE KEY-----' markers.

    @return the base64-encoded content between the start and end markers.
    '''
    (pem_start, pem_end) = _markers(pem_marker)

    # FIX: base64.encodestring was deprecated and removed in Python 3.9;
    # use encodebytes when available, falling back for old interpreters.
    encode = getattr(base64, 'encodebytes', None) or base64.encodestring
    b64 = encode(contents).replace(b('\n'), b(''))

    pem_lines = [pem_start]
    # Wrap the base64 payload at the conventional 64 characters per line.
    for block_start in range(0, len(b64), 64):
        block = b64[block_start:block_start + 64]
        pem_lines.append(block)
    pem_lines.append(pem_end)
    pem_lines.append(b(''))

    return b('\n').join(pem_lines)
|
alphagov/notifications-delivery
|
refs/heads/master
|
notifications_delivery/job/job_scheduler.py
|
1
|
from apscheduler.schedulers.background import BackgroundScheduler
from notifications_delivery.job.jobs import process_jobs
from notifications_delivery.processor.sqs_processor import process_notification_job
class JobScheduler(object):
    """Runs the job and notification-delivery pollers on a background
    scheduler at the intervals given in *config*.
    """

    def __init__(self, config):
        self.config = config
        self.scheduler = BackgroundScheduler()
        # One interval job per poller; max_instances=1 prevents overlap.
        for callback, interval_key in (
                (self.job_process, 'JOB_POLL_INTERVAL_SECONDS'),
                (self.notification_job_process, 'DELIVERY_POLL_INTERVAL_SECONDS')):
            self.scheduler.add_job(
                callback,
                'interval',
                seconds=config[interval_key],
                max_instances=1)

    def start(self):
        """Start the background polling loops."""
        self.scheduler.start()

    def shutdown(self):
        """Stop the scheduler, waiting for running jobs to finish."""
        self.scheduler.shutdown(wait=True)

    def job_process(self):
        # Poll for newly submitted jobs.
        process_jobs(self.config)

    def notification_job_process(self):
        # Poll the notification delivery queue.
        process_notification_job(self.config)
|
NL66278/OCB
|
refs/heads/8.0
|
openerp/addons/test_limits/__openerp__.py
|
435
|
# -*- coding: utf-8 -*-
# Addon manifest: registers the 'test-limits' test module with the framework.
{
    'name': 'test-limits',
    'version': '0.1',
    'category': 'Tests',
    'description': """A module with dummy methods.""",
    'author': 'OpenERP SA',
    'maintainer': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    # Only depends on the core 'base' module.
    'depends': ['base'],
    # Data files loaded at install time (access-control rules).
    'data': ['ir.model.access.csv'],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
andrejtokarcik/python-czechtile
|
refs/heads/master
|
czechtile/macros.py
|
1
|
# -*- coding: utf-8 -*-
import re
from sneakylang import parse, Macro, Document
from sneakylang.parser import ParserRollback
from . import nodes
class CzechtileMacro(Macro):
    """Common base class for Czechtile macros built on sneakylang's Macro."""

    def _macroCallWithoutRequiredQuotes(self, *args):
        # Re-join the whitespace-split arguments into one content string
        # (equivalent to the historical join-with-trailing-space + strip).
        return self.expand(' '.join(args))

    def parse_argument_string(self, argument_string):
        # By default the entire argument string is a single macro argument.
        self.arguments = [argument_string]
class CzechtileInlineMacro(CzechtileMacro):
    # Base for inline macros: subclasses set ``node`` to the node class to
    # emit; the content is recursively parsed inside that node.
    def expand_to_nodes(self, content):
        node = self.node()
        self.builder.append(node, move_actual = True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.builder.move_up()
class MacroWrappingParagraph(CzechtileMacro):
    # Base for container macros that wrap loose text children in paragraphs.
    def wrap_text_nodes(self, node):
        # Each TextNode child is replaced in place by the paragraph tree built
        # from it (the builder is pointed at the child before expansion).
        for child in node.children:
            if isinstance(child, nodes.TextNode):
                self.builder.set_actual_node(child)
                # Strip leading/trailing whitespace, then split into
                # blank-line-separated paragraphs.
                text = re.sub("^(\s)*", "", re.sub("(\s)*$", "", child.content))
                for para_content in text.split('\n\n'):
                    if para_content:
                        macro = Odstavec.argument_call(para_content, \
                            self.register, self.builder, self.state)
                        macro.expand()
class Book(MacroWrappingParagraph):
    # ((kniha ...)) -> book root node with its text wrapped in paragraphs.
    name = 'kniha'
    help = '((kniha text knihy))'
    def expand_to_nodes(self, content):
        node = nodes.Book()
        self.builder.append(node, move_actual = True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.wrap_text_nodes(node)
        self.builder.move_up()
class Article(MacroWrappingParagraph):
    # ((clanek ...)) -> article root node with its text wrapped in paragraphs.
    name = 'clanek'
    help = '((clanek text clanku))'
    def expand_to_nodes(self, content):
        node = nodes.Article()
        self.builder.append(node, move_actual = True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.wrap_text_nodes(node)
        self.builder.move_up()
class Sekce(Document):
    # ((sekce ...)) — section macro; behavior comes from sneakylang.Document.
    name = 'sekce'
    help = '((sekce text sekce))'
class Nadpis(CzechtileMacro):
    # ((nadpis level text)) -> heading node at the given integer level.
    name = 'nadpis'
    help = '((nadpis cislo_urovne text nadpisu))'
    def parse_argument_string(self, argument_string):
        args = argument_string.split()
        # First token must be an integer heading level; otherwise roll the
        # parser back so other rules can try this text.
        try:
            level = int(args[0])
        except ValueError, err:
            raise ParserRollback(err)
        # Remaining tokens are re-joined into the heading text.
        self.arguments = [level, ''.join([''.join([arg, ' ']) for arg in args[1:]])[:-1]]
    def expand_to_nodes(self, level, content):
        node = nodes.Nadpis()
        node.level = level
        self.builder.append(node, move_actual = True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        assert node == self.builder.actual_node
        self.builder.move_up()
class Odstavec(CzechtileMacro):
    # ((odstavec ...)) -> paragraph node.
    name = 'odstavec'
    help = '((odstavec text odstavce))'
    def expand_to_nodes(self, content):
        node = nodes.Odstavec()
        self.builder.append(node, move_actual=False)
        # When created inside a TextNode (see wrap_text_nodes), the paragraph
        # replaces that text node at the same tree position.
        if isinstance(node.parent, nodes.TextNode):
            self.builder.replace(node)
        self.builder.set_actual_node(node)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        assert node == self.builder.actual_node
        self.builder.move_up()
class NeformatovanyText(CzechtileMacro):
    # ((neformatovany-text ...)) -> preformatted block; content is emitted
    # verbatim as a single TextNode, not re-parsed.
    name = 'neformatovany-text'
    help = '((neformatovany-text nenaformatovany obsah textu))'
    def expand_to_nodes(self, content):
        node = nodes.NeformatovanyText()
        self.builder.append(node, move_actual=True)
        tn = nodes.TextNode()
        tn.content = content
        self.builder.append(tn, move_actual=False)
        self.builder.move_up()
class ZdrojovyKod(CzechtileMacro):
    """((zdrojovy-kod lang code)) -> source-code block tagged with a syntax
    name; the code body is emitted verbatim, not re-parsed."""
    name = 'zdrojovy-kod'
    help = '((zdrojovy-kod nazev-jazyka nenaformatovany obsah textu))'

    def parse_argument_string(self, argument_string):
        # Everything before the first space is the language name, the rest
        # (possibly empty) is the verbatim code.
        language_name, _sep, code = argument_string.partition(' ')
        self.arguments = [language_name, code]

    def expand_to_nodes(self, language_name, content):
        node = nodes.ZdrojovyKod()
        node.syntax_name = language_name
        self.builder.append(node, move_actual=True)
        text_node = nodes.TextNode()
        text_node.content = content
        self.builder.append(text_node, move_actual=False)
        self.builder.move_up()
class Zvyraznene(CzechtileInlineMacro):
    # ((zvyraznene ...)) -> emphasized-text node.
    name = 'zvyraznene'
    help = '((zvyraznene zesilneny text))'
    node = nodes.Zvyraznene
class FootNote(CzechtileInlineMacro):
    # ((poznamka ...)) -> footnote node.
    name = 'poznamka'
    help = '((poznamka text pod carou))'
    node = nodes.FootNote
class Silne(CzechtileInlineMacro):
    # ((silne ...)) -> strong-text node.
    name = 'silne'
    help = '((silne zesilneny text))'
    node = nodes.Silne
class Hyperlink(CzechtileMacro):
    """((odkaz url label)) -> hyperlink node; a bare URL becomes its own
    verbatim label, otherwise the label is parsed for nested markup."""
    name = 'odkaz'
    help = '((odkaz http://adresa/linku text linku))'

    def parse_argument_string(self, argument_string):
        # First whitespace-separated token is the target; the rest the label.
        tokens = argument_string.split()
        self.arguments = [tokens[0], ' '.join(tokens[1:])]

    def expand_to_nodes(self, link, content):
        node = nodes.Hyperlink()
        node.link = link
        self.builder.append(node, move_actual = True)
        if link == content:
            # Label equals the URL: emit it verbatim instead of re-parsing.
            text_node = nodes.TextNode()
            text_node.content = content
            self.builder.append(text_node, move_actual=False)
        else:
            parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.builder.move_up()
class TriTecky(CzechtileMacro):
    # ((tri_tecky)) -> ellipsis node.
    name = 'tri_tecky'
    help = '((tri_tecky))'
    def expand_to_nodes(self, *args):
        self.builder.append(nodes.TriTecky(), move_actual=False)
class Trademark(CzechtileMacro):
    # ((trademark)) -> trademark-symbol node.
    name = 'trademark'
    help = '((trademark))'
    def expand_to_nodes(self, *args):
        self.builder.append(nodes.Trademark(), move_actual=False)
class Copyright(CzechtileMacro):
    # ((copyright)) -> copyright-symbol node.
    name = 'copyright'
    help = '((copyright))'
    def expand_to_nodes(self, *args):
        self.builder.append(nodes.Copyright(), move_actual=False)
class RightsReserved(CzechtileMacro):
    # ((rights-reserved)) -> "all rights reserved" node.
    name = 'rights-reserved'
    help = '((rights-reserved))'
    def expand_to_nodes(self, *args):
        self.builder.append(nodes.RightsReserved(), move_actual=False)
class Pomlcka(CzechtileMacro):
    # ((pomlcka)) -> dash; the surrounding spaces decide whether it renders
    # as a dash (pomlcka) or a hyphen (spojovnik).
    name = 'pomlcka'
    help = '((pomlcka))'
    def expand_to_nodes(self, string):
        node = nodes.Pomlcka()
        signals = [0, 1]  # 0 = space before the dash, 1 = space after it
        spaces = []
        if string.startswith(u' '):
            spaces.append(signals[0])
        if string.endswith(u' '):
            spaces.append(signals[1])
        # No surrounding spaces at all -> treat as a hyphen/joiner.
        if spaces == []:
            node.spojovnik = True
        else:
            node.spojovnik = False
        # Each space next to the dash becomes a non-breaking space node.
        if signals[0] in spaces:
            self.builder.append(nodes.PevnaMedzera(), move_actual=False)
        self.builder.append(node, move_actual=False)
        if signals[1] in spaces:
            self.builder.append(nodes.PevnaMedzera(), move_actual=False)
class Uvozovky(CzechtileInlineMacro):
    # ((uvozovky ...)) -> quoted-text node.
    name = 'uvozovky'
    help = '((uvozovky text v uvozovkach))'
    node = nodes.Uvozovky
class List(CzechtileMacro):
    # ((seznam token obsah)) -> list node; ``token`` selects the bullet style.
    name = 'seznam'
    help = '((seznam token obsah))'
    def parse_argument_string(self, argument_string):
        # Split once: [token, remaining content].
        self.arguments = argument_string.split(' ', 1)
    def expand_to_nodes(self, token, content):
        node = nodes.List()
        node.token = token
        self.builder.append(node, move_actual=True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        assert self.builder.actual_node == node
        self.builder.move_up()
class ListItem(CzechtileMacro):
    # ((polozka ...)) -> list-item node; content is recursively parsed.
    name = 'polozka'
    help = '((polozka text))'
    def expand_to_nodes(self, content):
        node = nodes.ListItem()
        self.builder.append(node, move_actual=True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.builder.move_up()
class Preskrtnute(CzechtileInlineMacro):
    # ((preskrtnute ...)) -> strikethrough node.
    name = 'preskrtnute'
    help = '((preskrtnute preskrtnuty text))'
    node = nodes.Preskrtnute
class Obrazek(CzechtileMacro):
    # ((obrazek lokace)) -> image node pointing at the given source location.
    name = 'obrazek'
    help = '((obrazek lokace))'
    def expand_to_nodes(self, source):
        # An empty source aborts the macro so the parser can backtrack.
        if len(source.strip()) == 0:
            raise ParserRollback(u"Empty image source")
        node = nodes.Obrazek()
        node.source = source.strip()
        self.builder.append(node, move_actual=True)
        self.builder.move_up()
class HorniIndex(CzechtileInlineMacro):
    # ((horni-index ...)) -> superscript node.
    name = 'horni-index'
    help = '((horni-index text posazeny do horniho indexu))'
    node = nodes.HorniIndex
class DolniIndex(CzechtileInlineMacro):
    # ((dolni-index ...)) -> subscript node.
    name = 'dolni-index'
    help = '((dolni-index text posazeny do dolniho indexu))'
    node = nodes.DolniIndex
class NovyRadek(CzechtileMacro):
    # ((novy_radek)) -> line-break node.
    name = 'novy_radek'
    help = '((novy_radek))'
    def expand_to_nodes(self, *args):
        self.builder.append(nodes.NovyRadek(), move_actual=False)
class Tabulka(CzechtileMacro):
    # ((tabulka radky)) -> table node; content is parsed for row macros.
    name = 'tabulka'
    help = '((tabulka radky))'
    def expand_to_nodes(self, content):
        node = nodes.Tabulka()
        self.builder.append(node, move_actual=True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.builder.move_up()
class TabulkaRadek(CzechtileMacro):
    # ((radek stlpce)) -> table-row node; content is parsed for cell macros.
    name = 'radek'
    help = '((radek stlpce))'
    def expand_to_nodes(self, content):
        node = nodes.TabulkaRadek()
        self.builder.append(node, move_actual=True)
        parse(content, self.register_map, self.register, builder=self.builder, state=self.state)
        self.builder.move_up()
class TabulkaStlpec(CzechtileInlineMacro):
    # ((stlpec ...)) -> table-cell node.
    name = 'stlpec'
    help = '((stlpec text))'
    node = nodes.TabulkaStlpec
|
markfinger/recipe-crawler-for-beaney
|
refs/heads/master
|
requests/packages/urllib3/util/request.py
|
780
|
from __future__ import absolute_import
from base64 import b64encode
from ..packages.six import b
# Default value used when accept_encoding=True is requested.
ACCEPT_ENCODING = 'gzip,deflate'


def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}

    if accept_encoding:
        if isinstance(accept_encoding, list):
            accept_encoding = ','.join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            # Any other truthy value (e.g. True) selects the default.
            accept_encoding = ACCEPT_ENCODING
        headers['accept-encoding'] = accept_encoding

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if basic_auth:
        headers['authorization'] = 'Basic ' + \
            b64encode(b(basic_auth)).decode('utf-8')

    if proxy_basic_auth:
        headers['proxy-authorization'] = 'Basic ' + \
            b64encode(b(proxy_basic_auth)).decode('utf-8')

    if disable_cache:
        headers['cache-control'] = 'no-cache'

    return headers
|
asedunov/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyStringFormatInspection/PercentStringKeywordListArgument.py
|
29
|
"%(foo)s" % <warning descr="Format requires a mapping">[1, 2, 3]</warning>
|
Acehaidrey/incubator-airflow
|
refs/heads/master
|
airflow/kubernetes/kubernetes_helper_functions.py
|
7
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def _strip_unsafe_kubernetes_special_chars(string: str) -> str:
"""
Kubernetes only supports lowercase alphanumeric characters, "-" and "." in
the pod name.
However, there are special rules about how "-" and "." can be used so let's
only keep
alphanumeric chars see here for detail:
https://kubernetes.io/docs/concepts/overview/working-with-objects/names/
:param string: The requested Pod name
:return: Pod name stripped of any unsafe characters
"""
return ''.join(ch.lower() for ch in list(string) if ch.isalnum())
def create_pod_id(dag_id: str, task_id: str) -> str:
    """
    Generates the kubernetes safe pod_id. Note that this is
    NOT the full ID that will be launched to k8s. We will add a uuid
    to ensure uniqueness.

    :param dag_id: DAG ID
    :param task_id: Task ID
    :return: The non-unique pod_id for this task/DAG pairing
    """
    # Sanitize both components, then concatenate them.
    sanitized = [_strip_unsafe_kubernetes_special_chars(part)
                 for part in (dag_id, task_id)]
    return ''.join(sanitized)
|
louietsai/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/internet/test/test_udp.py
|
49
|
# Copyright (c) 2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for implementations of L{IReactorUDP}.
"""
__metaclass__ = type
from zope.interface.verify import verifyObject
from twisted.internet.test.reactormixins import ReactorBuilder
from twisted.internet.interfaces import IListeningPort
from twisted.internet.protocol import DatagramProtocol
class UDPServerTestsBuilder(ReactorBuilder):
    """
    Builder defining tests relating to L{IReactorUDP.listenUDP}.
    """
    def test_interface(self):
        """
        L{IReactorUDP.listenUDP} returns an object providing L{IListeningPort}.
        """
        reactor = self.buildReactor()
        # Port 0 lets the OS choose a free port; the protocol is a throwaway.
        port = reactor.listenUDP(0, DatagramProtocol())
        self.assertTrue(verifyObject(IListeningPort, port))
# Generate one concrete TestCase subclass per installed reactor.
globals().update(UDPServerTestsBuilder.makeTestCaseClasses())
|
sunlianqiang/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/test/test_pathlib.py
|
60
|
import collections
import io
import os
import errno
import pathlib
import pickle
import shutil
import socket
import stat
import sys
import tempfile
import unittest
from contextlib import contextmanager
from test import support
TESTFN = support.TESTFN
try:
import grp, pwd
except ImportError:
grp = pwd = None
class _BaseFlavourTest(object):
    # Mixin: subclasses provide ``flavour`` (a pathlib flavour object) and a
    # unittest.TestCase base supplying assertEqual.
    def _check_parse_parts(self, arg, expected):
        # Run parse_parts with '/' rewritten to the flavour's sep, and again
        # with altsep when the flavour defines one.
        f = self.flavour.parse_parts
        sep = self.flavour.sep
        altsep = self.flavour.altsep
        actual = f([x.replace('/', sep) for x in arg])
        self.assertEqual(actual, expected)
        if altsep:
            actual = f([x.replace('/', altsep) for x in arg])
            self.assertEqual(actual, expected)
    def test_parse_parts_common(self):
        # Parsing behavior shared by the POSIX and Windows flavours.
        check = self._check_parse_parts
        sep = self.flavour.sep
        # Unanchored parts
        check([], ('', '', []))
        check(['a'], ('', '', ['a']))
        check(['a/'], ('', '', ['a']))
        check(['a', 'b'], ('', '', ['a', 'b']))
        # Expansion
        check(['a/b'], ('', '', ['a', 'b']))
        check(['a/b/'], ('', '', ['a', 'b']))
        check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        # Collapsing and stripping excess slashes
        check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd']))
        # Eliminating standalone dots
        check(['.'], ('', '', []))
        check(['.', '.', 'b'], ('', '', ['b']))
        check(['a', '.', 'b'], ('', '', ['a', 'b']))
        check(['a', '.', '.'], ('', '', ['a']))
        # The first part is anchored
        check(['/a/b'], ('', sep, [sep, 'a', 'b']))
        check(['/a', 'b'], ('', sep, [sep, 'a', 'b']))
        check(['/a/', 'b'], ('', sep, [sep, 'a', 'b']))
        # Ignoring parts before an anchored part
        check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c']))
        check(['a', '/b', '/c'], ('', sep, [sep, 'c']))
class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase):
    # POSIX-specific parse_parts/splitroot behavior.
    flavour = pathlib._posix_flavour
    def test_parse_parts(self):
        check = self._check_parse_parts
        # Collapsing of excess leading slashes, except for the double-slash
        # special case.
        check(['//a', 'b'], ('', '//', ['//', 'a', 'b']))
        check(['///a', 'b'], ('', '/', ['/', 'a', 'b']))
        check(['////a', 'b'], ('', '/', ['/', 'a', 'b']))
        # Paths which look like NT paths aren't treated specially
        check(['c:a'], ('', '', ['c:a']))
        check(['c:\\a'], ('', '', ['c:\\a']))
        check(['\\a'], ('', '', ['\\a']))
    def test_splitroot(self):
        # splitroot returns (drive, root, rest); POSIX has no drive.
        f = self.flavour.splitroot
        self.assertEqual(f(''), ('', '', ''))
        self.assertEqual(f('a'), ('', '', 'a'))
        self.assertEqual(f('a/b'), ('', '', 'a/b'))
        self.assertEqual(f('a/b/'), ('', '', 'a/b/'))
        self.assertEqual(f('/a'), ('', '/', 'a'))
        self.assertEqual(f('/a/b'), ('', '/', 'a/b'))
        self.assertEqual(f('/a/b/'), ('', '/', 'a/b/'))
        # The root is collapsed when there are redundant slashes
        # except when there are exactly two leading slashes, which
        # is a special case in POSIX.
        self.assertEqual(f('//a'), ('', '//', 'a'))
        self.assertEqual(f('///a'), ('', '/', 'a'))
        self.assertEqual(f('///a/b'), ('', '/', 'a/b'))
        # Paths which look like NT paths aren't treated specially
        self.assertEqual(f('c:/a/b'), ('', '', 'c:/a/b'))
        self.assertEqual(f('\\/a/b'), ('', '', '\\/a/b'))
        self.assertEqual(f('\\a\\b'), ('', '', '\\a\\b'))
class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
    # Windows-specific parse_parts/splitroot behavior (drives, UNC shares,
    # extended \\?\ prefixes).
    flavour = pathlib._windows_flavour
    def test_parse_parts(self):
        check = self._check_parse_parts
        # First part is anchored
        check(['c:'], ('c:', '', ['c:']))
        check(['c:\\'], ('c:', '\\', ['c:\\']))
        check(['\\'], ('', '\\', ['\\']))
        check(['c:a'], ('c:', '', ['c:', 'a']))
        check(['c:\\a'], ('c:', '\\', ['c:\\', 'a']))
        check(['\\a'], ('', '\\', ['\\', 'a']))
        # UNC paths
        check(['\\\\a\\b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
        check(['\\\\a\\b\\'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
        check(['\\\\a\\b\\c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
        # Second part is anchored, so that the first part is ignored
        check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
        check(['a', 'Z:\\b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
        check(['a', '\\b', 'c'], ('', '\\', ['\\', 'b', 'c']))
        # UNC paths
        check(['a', '\\\\b\\c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
        # Collapsing and stripping excess slashes
        check(['a', 'Z:\\\\b\\\\c\\', 'd\\'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
        # UNC paths
        check(['a', '\\\\b\\c\\\\', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
        # Extended paths
        check(['\\\\?\\c:\\'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
        check(['\\\\?\\c:\\a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
        # Extended UNC paths (format is "\\?\UNC\server\share")
        check(['\\\\?\\UNC\\b\\c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
        check(['\\\\?\\UNC\\b\\c\\d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
    def test_splitroot(self):
        # splitroot returns (drive, root, rest).
        f = self.flavour.splitroot
        self.assertEqual(f(''), ('', '', ''))
        self.assertEqual(f('a'), ('', '', 'a'))
        self.assertEqual(f('a\\b'), ('', '', 'a\\b'))
        self.assertEqual(f('\\a'), ('', '\\', 'a'))
        self.assertEqual(f('\\a\\b'), ('', '\\', 'a\\b'))
        self.assertEqual(f('c:a\\b'), ('c:', '', 'a\\b'))
        self.assertEqual(f('c:\\a\\b'), ('c:', '\\', 'a\\b'))
        # Redundant slashes in the root are collapsed
        self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
        self.assertEqual(f('\\\\\\a/b'), ('', '\\', 'a/b'))
        self.assertEqual(f('c:\\\\a'), ('c:', '\\', 'a'))
        self.assertEqual(f('c:\\\\\\a/b'), ('c:', '\\', 'a/b'))
        # Valid UNC paths
        self.assertEqual(f('\\\\a\\b'), ('\\\\a\\b', '\\', ''))
        self.assertEqual(f('\\\\a\\b\\'), ('\\\\a\\b', '\\', ''))
        self.assertEqual(f('\\\\a\\b\\c\\d'), ('\\\\a\\b', '\\', 'c\\d'))
        # These are non-UNC paths (according to ntpath.py and test_ntpath)
        # However, command.com says such paths are invalid, so it's
        # difficult to know what the right semantics are
        self.assertEqual(f('\\\\\\a\\b'), ('', '\\', 'a\\b'))
        self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
#
# Tests for the pure classes
#
class _BasePurePathTest(object):
    """Tests shared by PurePosixPath and PureWindowsPath.

    Concrete subclasses must set ``cls`` to the pure path class under
    test; ``setUp`` then caches the class's flavour and separators.
    """

    # keys are canonical paths, values are list of tuples of arguments
    # supposed to produce equal paths
    equivalences = {
        'a/b': [
            ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'),
            ('a/b/',), ('a//b',), ('a//b//',),
            # empty components get removed
            ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''),
            ],
        '/b/c/d': [
            ('a', '/b/c', 'd'), ('a', '///b//c', 'd/'),
            ('/a', '/b/c', 'd'),
            # empty components get removed
            ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'),
            ],
        }

    def setUp(self):
        # Cache the flavour object and its separators for use by the tests.
        p = self.cls('a')
        self.flavour = p._flavour
        self.sep = self.flavour.sep
        self.altsep = self.flavour.altsep

    def test_constructor_common(self):
        """Paths can be built from strings and/or other path objects."""
        P = self.cls
        p = P('a')
        self.assertIsInstance(p, P)
        P('a', 'b', 'c')
        P('/a', 'b', 'c')
        P('a/b/c')
        P('/a/b/c')
        self.assertEqual(P(P('a')), P('a'))
        self.assertEqual(P(P('a'), 'b'), P('a/b'))
        self.assertEqual(P(P('a'), P('b')), P('a/b'))

    def _check_str_subclass(self, *args):
        # Issue #21127: it should be possible to construct a PurePath object
        # from an str subclass instance, and it then gets converted to
        # a pure str object.
        class StrSubclass(str):
            pass
        P = self.cls
        p = P(*(StrSubclass(x) for x in args))
        self.assertEqual(p, P(*args))
        for part in p.parts:
            self.assertIs(type(part), str)

    def test_str_subclass_common(self):
        self._check_str_subclass('')
        self._check_str_subclass('.')
        self._check_str_subclass('a')
        self._check_str_subclass('a/b.txt')
        self._check_str_subclass('/a/b.txt')

    def test_join_common(self):
        """joinpath() appends components; an anchored argument restarts."""
        P = self.cls
        p = P('a/b')
        pp = p.joinpath('c')
        self.assertEqual(pp, P('a/b/c'))
        self.assertIs(type(pp), type(p))
        pp = p.joinpath('c', 'd')
        self.assertEqual(pp, P('a/b/c/d'))
        pp = p.joinpath(P('c'))
        self.assertEqual(pp, P('a/b/c'))
        # An absolute argument replaces the accumulated path.
        pp = p.joinpath('/c')
        self.assertEqual(pp, P('/c'))

    def test_div_common(self):
        # Basically the same as joinpath()
        P = self.cls
        p = P('a/b')
        pp = p / 'c'
        self.assertEqual(pp, P('a/b/c'))
        self.assertIs(type(pp), type(p))
        pp = p / 'c/d'
        self.assertEqual(pp, P('a/b/c/d'))
        pp = p / 'c' / 'd'
        self.assertEqual(pp, P('a/b/c/d'))
        # str / path also works (reflected operator).
        pp = 'c' / p / 'd'
        self.assertEqual(pp, P('c/a/b/d'))
        pp = p / P('c')
        self.assertEqual(pp, P('a/b/c'))
        pp = p / '/c'
        self.assertEqual(pp, P('/c'))

    def _check_str(self, expected, args):
        # Build a path from *args* and compare str() against *expected*
        # after substituting the flavour's native separator.
        p = self.cls(*args)
        self.assertEqual(str(p), expected.replace('/', self.sep))

    def test_str_common(self):
        # Canonicalized paths roundtrip
        for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
            self._check_str(pathstr, (pathstr,))
        # Special case for the empty path
        self._check_str('.', ('',))
        # Other tests for str() are in test_equivalences()

    def test_as_posix_common(self):
        P = self.cls
        for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
            self.assertEqual(P(pathstr).as_posix(), pathstr)
        # Other tests for as_posix() are in test_equivalences()

    def test_as_bytes_common(self):
        """bytes(path) uses the filesystem encoding and native separator."""
        sep = os.fsencode(self.sep)
        P = self.cls
        self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b')

    def test_as_uri_common(self):
        """as_uri() is only defined for absolute paths."""
        P = self.cls
        with self.assertRaises(ValueError):
            P('a').as_uri()
        with self.assertRaises(ValueError):
            P().as_uri()

    def test_repr_common(self):
        """repr() shows a forward-slash path and round-trips through eval()."""
        for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
            p = self.cls(pathstr)
            clsname = p.__class__.__name__
            r = repr(p)
            # The repr() is in the form ClassName("forward-slashes path")
            self.assertTrue(r.startswith(clsname + '('), r)
            self.assertTrue(r.endswith(')'), r)
            inner = r[len(clsname) + 1 : -1]
            self.assertEqual(eval(inner), p.as_posix())
            # The repr() roundtrips
            q = eval(r, pathlib.__dict__)
            self.assertIs(q.__class__, p.__class__)
            self.assertEqual(q, p)
            self.assertEqual(repr(q), r)

    def test_eq_common(self):
        """Equality compares paths, never strings or arbitrary objects."""
        P = self.cls
        self.assertEqual(P('a/b'), P('a/b'))
        self.assertEqual(P('a/b'), P('a', 'b'))
        self.assertNotEqual(P('a/b'), P('a'))
        self.assertNotEqual(P('a/b'), P('/a/b'))
        self.assertNotEqual(P('a/b'), P())
        self.assertNotEqual(P('/a/b'), P('/'))
        self.assertNotEqual(P(), P('/'))
        self.assertNotEqual(P(), "")
        self.assertNotEqual(P(), {})
        self.assertNotEqual(P(), int)

    def test_match_common(self):
        """match() anchors relative patterns at the right; '' and '.' raise."""
        P = self.cls
        self.assertRaises(ValueError, P('a').match, '')
        self.assertRaises(ValueError, P('a').match, '.')
        # Simple relative pattern
        self.assertTrue(P('b.py').match('b.py'))
        self.assertTrue(P('a/b.py').match('b.py'))
        self.assertTrue(P('/a/b.py').match('b.py'))
        self.assertFalse(P('a.py').match('b.py'))
        self.assertFalse(P('b/py').match('b.py'))
        self.assertFalse(P('/a.py').match('b.py'))
        self.assertFalse(P('b.py/c').match('b.py'))
        # Wildcard relative pattern
        self.assertTrue(P('b.py').match('*.py'))
        self.assertTrue(P('a/b.py').match('*.py'))
        self.assertTrue(P('/a/b.py').match('*.py'))
        self.assertFalse(P('b.pyc').match('*.py'))
        self.assertFalse(P('b./py').match('*.py'))
        self.assertFalse(P('b.py/c').match('*.py'))
        # Multi-part relative pattern
        self.assertTrue(P('ab/c.py').match('a*/*.py'))
        self.assertTrue(P('/d/ab/c.py').match('a*/*.py'))
        self.assertFalse(P('a.py').match('a*/*.py'))
        self.assertFalse(P('/dab/c.py').match('a*/*.py'))
        self.assertFalse(P('ab/c.py/d').match('a*/*.py'))
        # Absolute pattern
        self.assertTrue(P('/b.py').match('/*.py'))
        self.assertFalse(P('b.py').match('/*.py'))
        self.assertFalse(P('a/b.py').match('/*.py'))
        self.assertFalse(P('/a/b.py').match('/*.py'))
        # Multi-part absolute pattern
        self.assertTrue(P('/a/b.py').match('/a/*.py'))
        self.assertFalse(P('/ab.py').match('/a/*.py'))
        self.assertFalse(P('/a/b/c.py').match('/a/*.py'))

    def test_ordering_common(self):
        # Ordering is tuple-alike
        def assertLess(a, b):
            self.assertLess(a, b)
            self.assertGreater(b, a)
        P = self.cls
        a = P('a')
        b = P('a/b')
        c = P('abc')
        d = P('b')
        assertLess(a, b)
        assertLess(a, c)
        assertLess(a, d)
        assertLess(b, c)
        assertLess(c, d)
        P = self.cls
        a = P('/a')
        b = P('/a/b')
        c = P('/abc')
        d = P('/b')
        assertLess(a, b)
        assertLess(a, c)
        assertLess(a, d)
        assertLess(b, c)
        assertLess(c, d)
        # Ordering against a non-path is a TypeError.
        with self.assertRaises(TypeError):
            P() < {}

    def test_parts_common(self):
        # `parts` returns a tuple
        sep = self.sep
        P = self.cls
        p = P('a/b')
        parts = p.parts
        self.assertEqual(parts, ('a', 'b'))
        # The object gets reused
        self.assertIs(parts, p.parts)
        # When the path is absolute, the anchor is a separate part
        p = P('/a/b')
        parts = p.parts
        self.assertEqual(parts, (sep, 'a', 'b'))

    def test_equivalences(self):
        """Each argument tuple in ``equivalences`` builds the same path."""
        for k, tuples in self.equivalences.items():
            canon = k.replace('/', self.sep)
            posix = k.replace(self.sep, '/')
            if canon != posix:
                # Also exercise the flavour-native spelling of each variant.
                tuples = tuples + [
                    tuple(part.replace('/', self.sep) for part in t)
                    for t in tuples
                ]
                tuples.append((posix, ))
            pcanon = self.cls(canon)
            for t in tuples:
                p = self.cls(*t)
                self.assertEqual(p, pcanon, "failed with args {}".format(t))
                self.assertEqual(hash(p), hash(pcanon))
                self.assertEqual(str(p), canon)
                self.assertEqual(p.as_posix(), posix)

    def test_parent_common(self):
        """.parent walks up one level and stops at the anchor (or '.')."""
        # Relative
        P = self.cls
        p = P('a/b/c')
        self.assertEqual(p.parent, P('a/b'))
        self.assertEqual(p.parent.parent, P('a'))
        self.assertEqual(p.parent.parent.parent, P())
        self.assertEqual(p.parent.parent.parent.parent, P())
        # Anchored
        p = P('/a/b/c')
        self.assertEqual(p.parent, P('/a/b'))
        self.assertEqual(p.parent.parent, P('/a'))
        self.assertEqual(p.parent.parent.parent, P('/'))
        self.assertEqual(p.parent.parent.parent.parent, P('/'))

    def test_parents_common(self):
        """.parents is an immutable, indexable sequence of ancestors."""
        # Relative
        P = self.cls
        p = P('a/b/c')
        par = p.parents
        self.assertEqual(len(par), 3)
        self.assertEqual(par[0], P('a/b'))
        self.assertEqual(par[1], P('a'))
        self.assertEqual(par[2], P('.'))
        self.assertEqual(list(par), [P('a/b'), P('a'), P('.')])
        with self.assertRaises(IndexError):
            par[-1]
        with self.assertRaises(IndexError):
            par[3]
        with self.assertRaises(TypeError):
            par[0] = p
        # Anchored
        p = P('/a/b/c')
        par = p.parents
        self.assertEqual(len(par), 3)
        self.assertEqual(par[0], P('/a/b'))
        self.assertEqual(par[1], P('/a'))
        self.assertEqual(par[2], P('/'))
        self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')])
        with self.assertRaises(IndexError):
            par[3]

    def test_drive_common(self):
        # POSIX has no drives; Windows-specific tests override this.
        P = self.cls
        self.assertEqual(P('a/b').drive, '')
        self.assertEqual(P('/a/b').drive, '')
        self.assertEqual(P('').drive, '')

    def test_root_common(self):
        P = self.cls
        sep = self.sep
        self.assertEqual(P('').root, '')
        self.assertEqual(P('a/b').root, '')
        self.assertEqual(P('/').root, sep)
        self.assertEqual(P('/a/b').root, sep)

    def test_anchor_common(self):
        P = self.cls
        sep = self.sep
        self.assertEqual(P('').anchor, '')
        self.assertEqual(P('a/b').anchor, '')
        self.assertEqual(P('/').anchor, sep)
        self.assertEqual(P('/a/b').anchor, sep)

    def test_name_common(self):
        """.name is the final component; the root itself has no name."""
        P = self.cls
        self.assertEqual(P('').name, '')
        self.assertEqual(P('.').name, '')
        self.assertEqual(P('/').name, '')
        self.assertEqual(P('a/b').name, 'b')
        self.assertEqual(P('/a/b').name, 'b')
        self.assertEqual(P('/a/b/.').name, 'b')
        self.assertEqual(P('a/b.py').name, 'b.py')
        self.assertEqual(P('/a/b.py').name, 'b.py')

    def test_suffix_common(self):
        """.suffix is the last extension; a leading dot alone is not one."""
        P = self.cls
        self.assertEqual(P('').suffix, '')
        self.assertEqual(P('.').suffix, '')
        self.assertEqual(P('..').suffix, '')
        self.assertEqual(P('/').suffix, '')
        self.assertEqual(P('a/b').suffix, '')
        self.assertEqual(P('/a/b').suffix, '')
        self.assertEqual(P('/a/b/.').suffix, '')
        self.assertEqual(P('a/b.py').suffix, '.py')
        self.assertEqual(P('/a/b.py').suffix, '.py')
        self.assertEqual(P('a/.hgrc').suffix, '')
        self.assertEqual(P('/a/.hgrc').suffix, '')
        self.assertEqual(P('a/.hg.rc').suffix, '.rc')
        self.assertEqual(P('/a/.hg.rc').suffix, '.rc')
        self.assertEqual(P('a/b.tar.gz').suffix, '.gz')
        self.assertEqual(P('/a/b.tar.gz').suffix, '.gz')
        self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '')
        self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '')

    def test_suffixes_common(self):
        """.suffixes lists every extension, outermost last."""
        P = self.cls
        self.assertEqual(P('').suffixes, [])
        self.assertEqual(P('.').suffixes, [])
        self.assertEqual(P('/').suffixes, [])
        self.assertEqual(P('a/b').suffixes, [])
        self.assertEqual(P('/a/b').suffixes, [])
        self.assertEqual(P('/a/b/.').suffixes, [])
        self.assertEqual(P('a/b.py').suffixes, ['.py'])
        self.assertEqual(P('/a/b.py').suffixes, ['.py'])
        self.assertEqual(P('a/.hgrc').suffixes, [])
        self.assertEqual(P('/a/.hgrc').suffixes, [])
        self.assertEqual(P('a/.hg.rc').suffixes, ['.rc'])
        self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc'])
        self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz'])
        self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz'])
        self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, [])
        self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, [])

    def test_stem_common(self):
        """.stem is the name with its last suffix removed."""
        P = self.cls
        self.assertEqual(P('').stem, '')
        self.assertEqual(P('.').stem, '')
        self.assertEqual(P('..').stem, '..')
        self.assertEqual(P('/').stem, '')
        self.assertEqual(P('a/b').stem, 'b')
        self.assertEqual(P('a/b.py').stem, 'b')
        self.assertEqual(P('a/.hgrc').stem, '.hgrc')
        self.assertEqual(P('a/.hg.rc').stem, '.hg')
        self.assertEqual(P('a/b.tar.gz').stem, 'b.tar')
        self.assertEqual(P('a/Some name. Ending with a dot.').stem,
                         'Some name. Ending with a dot.')

    def test_with_name_common(self):
        """with_name() replaces the final component; needs a valid name."""
        P = self.cls
        self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml'))
        self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml'))
        self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml'))
        self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml'))
        self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml'))
        self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml'))
        # Paths without a name component, or multi-part replacements, raise.
        self.assertRaises(ValueError, P('').with_name, 'd.xml')
        self.assertRaises(ValueError, P('.').with_name, 'd.xml')
        self.assertRaises(ValueError, P('/').with_name, 'd.xml')
        self.assertRaises(ValueError, P('a/b').with_name, '')
        self.assertRaises(ValueError, P('a/b').with_name, '/c')
        self.assertRaises(ValueError, P('a/b').with_name, 'c/')
        self.assertRaises(ValueError, P('a/b').with_name, 'c/d')

    def test_with_suffix_common(self):
        """with_suffix() swaps the last extension; '' strips it."""
        P = self.cls
        self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz'))
        self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz'))
        self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz'))
        self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz'))
        # Stripping suffix
        self.assertEqual(P('a/b.py').with_suffix(''), P('a/b'))
        self.assertEqual(P('/a/b').with_suffix(''), P('/a/b'))
        # Path doesn't have a "filename" component
        self.assertRaises(ValueError, P('').with_suffix, '.gz')
        self.assertRaises(ValueError, P('.').with_suffix, '.gz')
        self.assertRaises(ValueError, P('/').with_suffix, '.gz')
        # Invalid suffix
        self.assertRaises(ValueError, P('a/b').with_suffix, 'gz')
        self.assertRaises(ValueError, P('a/b').with_suffix, '/')
        self.assertRaises(ValueError, P('a/b').with_suffix, '.')
        self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz')
        self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d')
        self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d')
        self.assertRaises(ValueError, P('a/b').with_suffix, './.d')
        self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.')

    def test_relative_to_common(self):
        """relative_to() requires the argument to be a prefix of the path."""
        P = self.cls
        p = P('a/b')
        self.assertRaises(TypeError, p.relative_to)
        self.assertRaises(TypeError, p.relative_to, b'a')
        self.assertEqual(p.relative_to(P()), P('a/b'))
        self.assertEqual(p.relative_to(''), P('a/b'))
        self.assertEqual(p.relative_to(P('a')), P('b'))
        self.assertEqual(p.relative_to('a'), P('b'))
        self.assertEqual(p.relative_to('a/'), P('b'))
        self.assertEqual(p.relative_to(P('a/b')), P())
        self.assertEqual(p.relative_to('a/b'), P())
        # With several args
        self.assertEqual(p.relative_to('a', 'b'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('c'))
        self.assertRaises(ValueError, p.relative_to, P('a/b/c'))
        self.assertRaises(ValueError, p.relative_to, P('a/c'))
        self.assertRaises(ValueError, p.relative_to, P('/a'))
        p = P('/a/b')
        self.assertEqual(p.relative_to(P('/')), P('a/b'))
        self.assertEqual(p.relative_to('/'), P('a/b'))
        self.assertEqual(p.relative_to(P('/a')), P('b'))
        self.assertEqual(p.relative_to('/a'), P('b'))
        self.assertEqual(p.relative_to('/a/'), P('b'))
        self.assertEqual(p.relative_to(P('/a/b')), P())
        self.assertEqual(p.relative_to('/a/b'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('/c'))
        self.assertRaises(ValueError, p.relative_to, P('/a/b/c'))
        self.assertRaises(ValueError, p.relative_to, P('/a/c'))
        self.assertRaises(ValueError, p.relative_to, P())
        self.assertRaises(ValueError, p.relative_to, '')
        self.assertRaises(ValueError, p.relative_to, P('a'))

    def test_pickling_common(self):
        """Paths pickle and unpickle unchanged at every protocol."""
        P = self.cls
        p = P('/a/b')
        for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
            dumped = pickle.dumps(p, proto)
            pp = pickle.loads(dumped)
            self.assertIs(pp.__class__, p.__class__)
            self.assertEqual(pp, p)
            self.assertEqual(hash(pp), hash(p))
            self.assertEqual(str(pp), str(p))
class PurePosixPathTest(_BasePurePathTest, unittest.TestCase):
    """POSIX-specific behaviour of PurePosixPath, on top of the shared tests."""

    cls = pathlib.PurePosixPath

    def test_root(self):
        """One or three+ leading slashes collapse to '/'; exactly two stay '//'."""
        path = self.cls
        self.assertEqual(path('/a/b').root, '/')
        self.assertEqual(path('///a/b').root, '/')
        # POSIX special case for two leading slashes
        self.assertEqual(path('//a/b').root, '//')

    def test_eq(self):
        """Comparison is case-sensitive; '//a' differs from '/a' and '///a'."""
        path = self.cls
        self.assertNotEqual(path('a/b'), path('A/b'))
        self.assertEqual(path('/a'), path('///a'))
        self.assertNotEqual(path('/a'), path('//a'))

    def test_as_uri(self):
        """as_uri() percent-encodes unsafe characters in absolute paths."""
        path = self.cls
        self.assertEqual(path('/').as_uri(), 'file:///')
        self.assertEqual(path('/a/b.c').as_uri(), 'file:///a/b.c')
        self.assertEqual(path('/a/b%#c').as_uri(), 'file:///a/b%25%23c')

    def test_as_uri_non_ascii(self):
        """Non-ASCII characters are encoded with the filesystem encoding."""
        from urllib.parse import quote_from_bytes
        path = self.cls
        try:
            os.fsencode('\xe9')
        except UnicodeEncodeError:
            self.skipTest("\\xe9 cannot be encoded to the filesystem encoding")
        expected = 'file:///a/b' + quote_from_bytes(os.fsencode('\xe9'))
        self.assertEqual(path('/a/b\xe9').as_uri(), expected)

    def test_match(self):
        """Matching is case-sensitive on POSIX."""
        self.assertFalse(self.cls('A.py').match('a.PY'))

    def test_is_absolute(self):
        """A path is absolute iff it has a root (one or two leading slashes)."""
        path = self.cls
        for relative_args in ((), ('a',), ('a/b/',)):
            self.assertFalse(path(*relative_args).is_absolute())
        for absolute_arg in ('/', '/a', '/a/b/', '//a', '//a/b'):
            self.assertTrue(path(absolute_arg).is_absolute())

    def test_is_reserved(self):
        """No path is ever reserved on POSIX, even Windows device names."""
        path = self.cls
        for candidate in ('', '/', '/foo/bar', '/dev/con/PRN/NUL'):
            self.assertIs(False, path(candidate).is_reserved())

    def test_join(self):
        """joinpath keeps a '//' root; a rooted argument resets the path."""
        path = self.cls
        base = path('//a')
        self.assertEqual(base.joinpath('b'), path('//a/b'))
        self.assertEqual(path('/a').joinpath('//c'), path('//c'))
        self.assertEqual(path('//a').joinpath('/c'), path('/c'))

    def test_div(self):
        # Basically the same as joinpath()
        path = self.cls
        base = path('//a')
        self.assertEqual(base / 'b', path('//a/b'))
        self.assertEqual(path('/a') / '//c', path('//c'))
        self.assertEqual(path('//a') / '/c', path('/c'))
class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase):
cls = pathlib.PureWindowsPath
equivalences = _BasePurePathTest.equivalences.copy()
equivalences.update({
'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('/', 'c:', 'a') ],
'c:/a': [
('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'),
('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'),
],
'//a/b/': [ ('//a/b',) ],
'//a/b/c': [
('//a/b', 'c'), ('//a/b/', 'c'),
],
})
def test_str(self):
p = self.cls('a/b/c')
self.assertEqual(str(p), 'a\\b\\c')
p = self.cls('c:/a/b/c')
self.assertEqual(str(p), 'c:\\a\\b\\c')
p = self.cls('//a/b')
self.assertEqual(str(p), '\\\\a\\b\\')
p = self.cls('//a/b/c')
self.assertEqual(str(p), '\\\\a\\b\\c')
p = self.cls('//a/b/c/d')
self.assertEqual(str(p), '\\\\a\\b\\c\\d')
def test_str_subclass(self):
self._check_str_subclass('c:')
self._check_str_subclass('c:a')
self._check_str_subclass('c:a\\b.txt')
self._check_str_subclass('c:\\')
self._check_str_subclass('c:\\a')
self._check_str_subclass('c:\\a\\b.txt')
self._check_str_subclass('\\\\some\\share')
self._check_str_subclass('\\\\some\\share\\a')
self._check_str_subclass('\\\\some\\share\\a\\b.txt')
    def test_eq(self):
        """Equality: drive and anchoring matter, but case is ignored."""
        P = self.cls
        self.assertEqual(P('c:a/b'), P('c:a/b'))
        self.assertEqual(P('c:a/b'), P('c:', 'a', 'b'))
        self.assertNotEqual(P('c:a/b'), P('d:a/b'))
        self.assertNotEqual(P('c:a/b'), P('c:/a/b'))
        self.assertNotEqual(P('/a/b'), P('c:/a/b'))
        # Case-insensitivity
        self.assertEqual(P('a/B'), P('A/b'))
        self.assertEqual(P('C:a/B'), P('c:A/b'))
        self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b'))
    def test_as_uri(self):
        """as_uri() needs drive+root (or UNC); UNC shares map to file://host/share."""
        from urllib.parse import quote_from_bytes
        P = self.cls
        with self.assertRaises(ValueError):
            P('/a/b').as_uri()
        with self.assertRaises(ValueError):
            P('c:a/b').as_uri()
        self.assertEqual(P('c:/').as_uri(), 'file:///c:/')
        self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c')
        self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c')
        self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9')
        self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/')
        self.assertEqual(P('//some/share/a/b.c').as_uri(),
                         'file://some/share/a/b.c')
        self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(),
                         'file://some/share/a/b%25%23c%C3%A9')
    def test_match_common(self):
        """Windows matching honours drives and UNC anchors, and ignores case."""
        P = self.cls
        # Absolute patterns
        self.assertTrue(P('c:/b.py').match('/*.py'))
        self.assertTrue(P('c:/b.py').match('c:*.py'))
        self.assertTrue(P('c:/b.py').match('c:/*.py'))
        self.assertFalse(P('d:/b.py').match('c:/*.py'))  # wrong drive
        self.assertFalse(P('b.py').match('/*.py'))
        self.assertFalse(P('b.py').match('c:*.py'))
        self.assertFalse(P('b.py').match('c:/*.py'))
        self.assertFalse(P('c:b.py').match('/*.py'))
        self.assertFalse(P('c:b.py').match('c:/*.py'))
        self.assertFalse(P('/b.py').match('c:*.py'))
        self.assertFalse(P('/b.py').match('c:/*.py'))
        # UNC patterns
        self.assertTrue(P('//some/share/a.py').match('/*.py'))
        self.assertTrue(P('//some/share/a.py').match('//some/share/*.py'))
        self.assertFalse(P('//other/share/a.py').match('//some/share/*.py'))
        self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py'))
        # Case-insensitivity
        self.assertTrue(P('B.py').match('b.PY'))
        self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY'))
        self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY'))
    def test_ordering_common(self):
        """Paths equal-ignoring-case compare neither less nor greater."""
        # Case-insensitivity
        def assertOrderedEqual(a, b):
            self.assertLessEqual(a, b)
            self.assertGreaterEqual(b, a)
        P = self.cls
        p = P('c:A/b')
        q = P('C:a/B')
        assertOrderedEqual(p, q)
        self.assertFalse(p < q)
        self.assertFalse(p > q)
        p = P('//some/Share/A/b')
        q = P('//Some/SHARE/a/B')
        assertOrderedEqual(p, q)
        self.assertFalse(p < q)
        self.assertFalse(p > q)
def test_parts(self):
P = self.cls
p = P('c:a/b')
parts = p.parts
self.assertEqual(parts, ('c:', 'a', 'b'))
p = P('c:/a/b')
parts = p.parts
self.assertEqual(parts, ('c:\\', 'a', 'b'))
p = P('//a/b/c/d')
parts = p.parts
self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd'))
    def test_parent(self):
        """.parent stops at the anchor: drive, drive+root, or UNC share."""
        # Anchored
        P = self.cls
        p = P('z:a/b/c')
        self.assertEqual(p.parent, P('z:a/b'))
        self.assertEqual(p.parent.parent, P('z:a'))
        self.assertEqual(p.parent.parent.parent, P('z:'))
        self.assertEqual(p.parent.parent.parent.parent, P('z:'))
        p = P('z:/a/b/c')
        self.assertEqual(p.parent, P('z:/a/b'))
        self.assertEqual(p.parent.parent, P('z:/a'))
        self.assertEqual(p.parent.parent.parent, P('z:/'))
        self.assertEqual(p.parent.parent.parent.parent, P('z:/'))
        p = P('//a/b/c/d')
        self.assertEqual(p.parent, P('//a/b/c'))
        self.assertEqual(p.parent.parent, P('//a/b'))
        self.assertEqual(p.parent.parent.parent, P('//a/b'))
    def test_parents(self):
        """.parents never walks past the anchor (drive or UNC share)."""
        # Anchored
        P = self.cls
        p = P('z:a/b/')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('z:a'))
        self.assertEqual(par[1], P('z:'))
        self.assertEqual(list(par), [P('z:a'), P('z:')])
        with self.assertRaises(IndexError):
            par[2]
        p = P('z:/a/b/')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('z:/a'))
        self.assertEqual(par[1], P('z:/'))
        self.assertEqual(list(par), [P('z:/a'), P('z:/')])
        with self.assertRaises(IndexError):
            par[2]
        p = P('//a/b/c/d')
        par = p.parents
        self.assertEqual(len(par), 2)
        self.assertEqual(par[0], P('//a/b/c'))
        self.assertEqual(par[1], P('//a/b'))
        self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')])
        with self.assertRaises(IndexError):
            par[2]
def test_drive(self):
P = self.cls
self.assertEqual(P('c:').drive, 'c:')
self.assertEqual(P('c:a/b').drive, 'c:')
self.assertEqual(P('c:/').drive, 'c:')
self.assertEqual(P('c:/a/b/').drive, 'c:')
self.assertEqual(P('//a/b').drive, '\\\\a\\b')
self.assertEqual(P('//a/b/').drive, '\\\\a\\b')
self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b')
def test_root(self):
P = self.cls
self.assertEqual(P('c:').root, '')
self.assertEqual(P('c:a/b').root, '')
self.assertEqual(P('c:/').root, '\\')
self.assertEqual(P('c:/a/b/').root, '\\')
self.assertEqual(P('//a/b').root, '\\')
self.assertEqual(P('//a/b/').root, '\\')
self.assertEqual(P('//a/b/c/d').root, '\\')
def test_anchor(self):
P = self.cls
self.assertEqual(P('c:').anchor, 'c:')
self.assertEqual(P('c:a/b').anchor, 'c:')
self.assertEqual(P('c:/').anchor, 'c:\\')
self.assertEqual(P('c:/a/b/').anchor, 'c:\\')
self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\')
self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\')
self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\')
def test_name(self):
P = self.cls
self.assertEqual(P('c:').name, '')
self.assertEqual(P('c:/').name, '')
self.assertEqual(P('c:a/b').name, 'b')
self.assertEqual(P('c:/a/b').name, 'b')
self.assertEqual(P('c:a/b.py').name, 'b.py')
self.assertEqual(P('c:/a/b.py').name, 'b.py')
self.assertEqual(P('//My.py/Share.php').name, '')
self.assertEqual(P('//My.py/Share.php/a/b').name, 'b')
def test_suffix(self):
P = self.cls
self.assertEqual(P('c:').suffix, '')
self.assertEqual(P('c:/').suffix, '')
self.assertEqual(P('c:a/b').suffix, '')
self.assertEqual(P('c:/a/b').suffix, '')
self.assertEqual(P('c:a/b.py').suffix, '.py')
self.assertEqual(P('c:/a/b.py').suffix, '.py')
self.assertEqual(P('c:a/.hgrc').suffix, '')
self.assertEqual(P('c:/a/.hgrc').suffix, '')
self.assertEqual(P('c:a/.hg.rc').suffix, '.rc')
self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc')
self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '')
self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '')
self.assertEqual(P('//My.py/Share.php').suffix, '')
self.assertEqual(P('//My.py/Share.php/a/b').suffix, '')
def test_suffixes(self):
P = self.cls
self.assertEqual(P('c:').suffixes, [])
self.assertEqual(P('c:/').suffixes, [])
self.assertEqual(P('c:a/b').suffixes, [])
self.assertEqual(P('c:/a/b').suffixes, [])
self.assertEqual(P('c:a/b.py').suffixes, ['.py'])
self.assertEqual(P('c:/a/b.py').suffixes, ['.py'])
self.assertEqual(P('c:a/.hgrc').suffixes, [])
self.assertEqual(P('c:/a/.hgrc').suffixes, [])
self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('//My.py/Share.php').suffixes, [])
self.assertEqual(P('//My.py/Share.php/a/b').suffixes, [])
self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, [])
self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, [])
def test_stem(self):
P = self.cls
self.assertEqual(P('c:').stem, '')
self.assertEqual(P('c:.').stem, '')
self.assertEqual(P('c:..').stem, '..')
self.assertEqual(P('c:/').stem, '')
self.assertEqual(P('c:a/b').stem, 'b')
self.assertEqual(P('c:a/b.py').stem, 'b')
self.assertEqual(P('c:a/.hgrc').stem, '.hgrc')
self.assertEqual(P('c:a/.hg.rc').stem, '.hg')
self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar')
self.assertEqual(P('c:a/Some name. Ending with a dot.').stem,
'Some name. Ending with a dot.')
    def test_with_name(self):
        """with_name() rejects names that carry a drive, root or UNC anchor."""
        P = self.cls
        self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml'))
        self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml'))
        self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml'))
        self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml'))
        # No name component to replace
        self.assertRaises(ValueError, P('c:').with_name, 'd.xml')
        self.assertRaises(ValueError, P('c:/').with_name, 'd.xml')
        self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml')
        # Replacement name must be a plain component
        self.assertRaises(ValueError, P('c:a/b').with_name, 'd:')
        self.assertRaises(ValueError, P('c:a/b').with_name, 'd:e')
        self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e')
        self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share')
    def test_with_suffix(self):
        """with_suffix() rejects separators and drive letters in the suffix."""
        P = self.cls
        self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz'))
        self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz'))
        self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz'))
        self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz'))
        # Path doesn't have a "filename" component
        self.assertRaises(ValueError, P('').with_suffix, '.gz')
        self.assertRaises(ValueError, P('.').with_suffix, '.gz')
        self.assertRaises(ValueError, P('/').with_suffix, '.gz')
        self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz')
        # Invalid suffix
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '/')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d')
        self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d')
    def test_relative_to(self):
        """relative_to() matches drive, share and components case-insensitively."""
        P = self.cls
        # Drive-relative base
        p = P('C:Foo/Bar')
        self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('c:'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('c:foO')), P('Bar'))
        self.assertEqual(p.relative_to('c:foO'), P('Bar'))
        self.assertEqual(p.relative_to('c:foO/'), P('Bar'))
        self.assertEqual(p.relative_to(P('c:foO/baR')), P())
        self.assertEqual(p.relative_to('c:foO/baR'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P())
        self.assertRaises(ValueError, p.relative_to, '')
        self.assertRaises(ValueError, p.relative_to, P('d:'))
        self.assertRaises(ValueError, p.relative_to, P('/'))
        self.assertRaises(ValueError, p.relative_to, P('Foo'))
        self.assertRaises(ValueError, p.relative_to, P('/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz'))
        # Drive-absolute base: relative to 'c:' alone keeps the root
        p = P('C:/Foo/Bar')
        self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar'))
        self.assertEqual(p.relative_to('c:'), P('/Foo/Bar'))
        self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar')
        self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar')
        self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('c:/'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('c:/foO')), P('Bar'))
        self.assertEqual(p.relative_to('c:/foO'), P('Bar'))
        self.assertEqual(p.relative_to('c:/foO/'), P('Bar'))
        self.assertEqual(p.relative_to(P('c:/foO/baR')), P())
        self.assertEqual(p.relative_to('c:/foO/baR'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('C:/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz'))
        self.assertRaises(ValueError, p.relative_to, P('C:Foo'))
        self.assertRaises(ValueError, p.relative_to, P('d:'))
        self.assertRaises(ValueError, p.relative_to, P('d:/'))
        self.assertRaises(ValueError, p.relative_to, P('/'))
        self.assertRaises(ValueError, p.relative_to, P('/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//C/Foo'))
        # UNC paths
        p = P('//Server/Share/Foo/Bar')
        self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar'))
        self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar'))
        self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar'))
        self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P())
        self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P())
        # Unrelated paths
        self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'))
        self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'))
def test_is_absolute(self):
P = self.cls
# Under NT, only paths with both a drive and a root are absolute
self.assertFalse(P().is_absolute())
self.assertFalse(P('a').is_absolute())
self.assertFalse(P('a/b/').is_absolute())
self.assertFalse(P('/').is_absolute())
self.assertFalse(P('/a').is_absolute())
self.assertFalse(P('/a/b/').is_absolute())
self.assertFalse(P('c:').is_absolute())
self.assertFalse(P('c:a').is_absolute())
self.assertFalse(P('c:a/b/').is_absolute())
self.assertTrue(P('c:/').is_absolute())
self.assertTrue(P('c:/a').is_absolute())
self.assertTrue(P('c:/a/b/').is_absolute())
# UNC paths are absolute by definition
self.assertTrue(P('//a/b').is_absolute())
self.assertTrue(P('//a/b/').is_absolute())
self.assertTrue(P('//a/b/c').is_absolute())
self.assertTrue(P('//a/b/c/d').is_absolute())
    def test_join(self):
        """joinpath() on Windows paths: drive handling rules."""
        P = self.cls
        p = P('C:/a/b')
        # Relative argument: appended as usual.
        pp = p.joinpath('x/y')
        self.assertEqual(pp, P('C:/a/b/x/y'))
        # Rooted but driveless argument: keeps the drive, replaces the rest.
        pp = p.joinpath('/x/y')
        self.assertEqual(pp, P('C:/x/y'))
        # Joining with a different drive => the first path is ignored, even
        # if the second path is relative.
        pp = p.joinpath('D:x/y')
        self.assertEqual(pp, P('D:x/y'))
        pp = p.joinpath('D:/x/y')
        self.assertEqual(pp, P('D:/x/y'))
        pp = p.joinpath('//host/share/x/y')
        self.assertEqual(pp, P('//host/share/x/y'))
        # Joining with the same drive => the first path is appended to if
        # the second path is relative.
        pp = p.joinpath('c:x/y')
        self.assertEqual(pp, P('C:/a/b/x/y'))
        pp = p.joinpath('c:/x/y')
        self.assertEqual(pp, P('C:/x/y'))
    def test_div(self):
        # Basically the same as joinpath(): the '/' operator must follow the
        # same drive-handling rules.
        P = self.cls
        p = P('C:/a/b')
        self.assertEqual(p / 'x/y', P('C:/a/b/x/y'))
        self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y'))
        # Rooted but driveless RHS: keeps the drive, replaces the rest.
        self.assertEqual(p / '/x/y', P('C:/x/y'))
        self.assertEqual(p / '/x' / 'y', P('C:/x/y'))
        # Joining with a different drive => the first path is ignored, even
        # if the second path is relative.
        self.assertEqual(p / 'D:x/y', P('D:x/y'))
        self.assertEqual(p / 'D:' / 'x/y', P('D:x/y'))
        self.assertEqual(p / 'D:/x/y', P('D:/x/y'))
        self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y'))
        self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y'))
        # Joining with the same drive => the first path is appended to if
        # the second path is relative.
        self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y'))
        self.assertEqual(p / 'c:/x/y', P('C:/x/y'))
def test_is_reserved(self):
P = self.cls
self.assertIs(False, P('').is_reserved())
self.assertIs(False, P('/').is_reserved())
self.assertIs(False, P('/foo/bar').is_reserved())
self.assertIs(True, P('con').is_reserved())
self.assertIs(True, P('NUL').is_reserved())
self.assertIs(True, P('NUL.txt').is_reserved())
self.assertIs(True, P('com1').is_reserved())
self.assertIs(True, P('com9.bar').is_reserved())
self.assertIs(False, P('bar.com9').is_reserved())
self.assertIs(True, P('lpt1').is_reserved())
self.assertIs(True, P('lpt9.bar').is_reserved())
self.assertIs(False, P('bar.lpt9').is_reserved())
# Only the last component matters
self.assertIs(False, P('c:/NUL/con/baz').is_reserved())
# UNC paths are never reserved
self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
class PurePathTest(_BasePurePathTest, unittest.TestCase):
    """Tests specific to the flavour-dispatching PurePath class."""
    cls = pathlib.PurePath

    def test_concrete_class(self):
        # Instantiating PurePath yields the flavour matching the host OS.
        p = self.cls('a')
        self.assertIs(type(p),
            pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath)

    def test_different_flavours_unequal(self):
        # Same textual path, different flavours: never equal.
        p = pathlib.PurePosixPath('a')
        q = pathlib.PureWindowsPath('a')
        self.assertNotEqual(p, q)

    def test_different_flavours_unordered(self):
        # Ordering comparisons across flavours raise TypeError.
        p = pathlib.PurePosixPath('a')
        q = pathlib.PureWindowsPath('a')
        with self.assertRaises(TypeError):
            p < q
        with self.assertRaises(TypeError):
            p <= q
        with self.assertRaises(TypeError):
            p > q
        with self.assertRaises(TypeError):
            p >= q
#
# Tests for the concrete classes
#
# Make sure any symbolic links in the base test path are resolved
BASE = os.path.realpath(TESTFN)
join = lambda *x: os.path.join(BASE, *x)        # absolute path inside BASE
rel_join = lambda *x: os.path.join(TESTFN, *x)  # same path, relative to cwd

def symlink_skip_reason():
    """Return None if symlinks can be created here, else a reason string."""
    if not pathlib.supports_symlinks:
        return "no system support for symlinks"
    try:
        # Probe: actually try to create one (may fail e.g. for privilege
        # reasons even when nominally supported).
        os.symlink(__file__, BASE)
    except OSError as e:
        return str(e)
    else:
        support.unlink(BASE)
        return None
# Rebind the name to the computed value (a reason string, or None).
symlink_skip_reason = symlink_skip_reason()

only_nt = unittest.skipIf(os.name != 'nt',
                          'test requires a Windows-compatible system')
only_posix = unittest.skipIf(os.name == 'nt',
                             'test requires a POSIX-compatible system')
# Decorator skipping tests that need working symlink support.
with_symlinks = unittest.skipIf(symlink_skip_reason, symlink_skip_reason)
@only_posix
class PosixPathAsPureTest(PurePosixPathTest):
    # Re-run the pure-path test suite against the concrete PosixPath class.
    cls = pathlib.PosixPath
@only_nt
class WindowsPathAsPureTest(PureWindowsPathTest):
    # Re-run the pure-path test suite against the concrete WindowsPath class.
    cls = pathlib.WindowsPath
class _BasePathTest(object):
"""Tests for the FS-accessing functionalities of the Path classes."""
# (BASE)
# |
# |-- dirA/
# |-- linkC -> "../dirB"
# |-- dirB/
# | |-- fileB
# |-- linkD -> "../dirB"
# |-- dirC/
# | |-- fileC
# | |-- fileD
# |-- fileA
# |-- linkA -> "fileA"
# |-- linkB -> "dirB"
#
def setUp(self):
os.mkdir(BASE)
self.addCleanup(support.rmtree, BASE)
os.mkdir(join('dirA'))
os.mkdir(join('dirB'))
os.mkdir(join('dirC'))
os.mkdir(join('dirC', 'dirD'))
with open(join('fileA'), 'wb') as f:
f.write(b"this is file A\n")
with open(join('dirB', 'fileB'), 'wb') as f:
f.write(b"this is file B\n")
with open(join('dirC', 'fileC'), 'wb') as f:
f.write(b"this is file C\n")
with open(join('dirC', 'dirD', 'fileD'), 'wb') as f:
f.write(b"this is file D\n")
if not symlink_skip_reason:
# Relative symlinks
os.symlink('fileA', join('linkA'))
os.symlink('non-existing', join('brokenLink'))
self.dirlink('dirB', join('linkB'))
self.dirlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'))
# This one goes upwards but doesn't create a loop
self.dirlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'))
if os.name == 'nt':
# Workaround for http://bugs.python.org/issue13772
def dirlink(self, src, dest):
os.symlink(src, dest, target_is_directory=True)
else:
def dirlink(self, src, dest):
os.symlink(src, dest)
def assertSame(self, path_a, path_b):
self.assertTrue(os.path.samefile(str(path_a), str(path_b)),
"%r and %r don't point to the same file" %
(path_a, path_b))
def assertFileNotFound(self, func, *args, **kwargs):
with self.assertRaises(FileNotFoundError) as cm:
func(*args, **kwargs)
self.assertEqual(cm.exception.errno, errno.ENOENT)
def _test_cwd(self, p):
q = self.cls(os.getcwd())
self.assertEqual(p, q)
self.assertEqual(str(p), str(q))
self.assertIs(type(p), type(q))
self.assertTrue(p.is_absolute())
def test_cwd(self):
p = self.cls.cwd()
self._test_cwd(p)
def test_empty_path(self):
# The empty path points to '.'
p = self.cls('')
self.assertEqual(p.stat(), os.stat('.'))
def test_exists(self):
P = self.cls
p = P(BASE)
self.assertIs(True, p.exists())
self.assertIs(True, (p / 'dirA').exists())
self.assertIs(True, (p / 'fileA').exists())
if not symlink_skip_reason:
self.assertIs(True, (p / 'linkA').exists())
self.assertIs(True, (p / 'linkB').exists())
self.assertIs(False, (p / 'foo').exists())
self.assertIs(False, P('/xyzzy').exists())
def test_open_common(self):
p = self.cls(BASE)
with (p / 'fileA').open('r') as f:
self.assertIsInstance(f, io.TextIOBase)
self.assertEqual(f.read(), "this is file A\n")
with (p / 'fileA').open('rb') as f:
self.assertIsInstance(f, io.BufferedIOBase)
self.assertEqual(f.read().strip(), b"this is file A")
with (p / 'fileA').open('rb', buffering=0) as f:
self.assertIsInstance(f, io.RawIOBase)
self.assertEqual(f.read().strip(), b"this is file A")
def test_iterdir(self):
P = self.cls
p = P(BASE)
it = p.iterdir()
paths = set(it)
expected = ['dirA', 'dirB', 'dirC', 'fileA']
if not symlink_skip_reason:
expected += ['linkA', 'linkB', 'brokenLink']
self.assertEqual(paths, { P(BASE, q) for q in expected })
@with_symlinks
def test_iterdir_symlink(self):
# __iter__ on a symlink to a directory
P = self.cls
p = P(BASE, 'linkB')
paths = set(p.iterdir())
expected = { P(BASE, 'linkB', q) for q in ['fileB', 'linkD'] }
self.assertEqual(paths, expected)
def test_iterdir_nodir(self):
# __iter__ on something that is not a directory
p = self.cls(BASE, 'fileA')
with self.assertRaises(OSError) as cm:
next(p.iterdir())
# ENOENT or EINVAL under Windows, ENOTDIR otherwise
# (see issue #12802)
self.assertIn(cm.exception.errno, (errno.ENOTDIR,
errno.ENOENT, errno.EINVAL))
def test_glob_common(self):
def _check(glob, expected):
self.assertEqual(set(glob), { P(BASE, q) for q in expected })
P = self.cls
p = P(BASE)
it = p.glob("fileA")
self.assertIsInstance(it, collections.Iterator)
_check(it, ["fileA"])
_check(p.glob("fileB"), [])
_check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"])
if symlink_skip_reason:
_check(p.glob("*A"), ['dirA', 'fileA'])
else:
_check(p.glob("*A"), ['dirA', 'fileA', 'linkA'])
if symlink_skip_reason:
_check(p.glob("*B/*"), ['dirB/fileB'])
else:
_check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD',
'linkB/fileB', 'linkB/linkD'])
if symlink_skip_reason:
_check(p.glob("*/fileB"), ['dirB/fileB'])
else:
_check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB'])
def test_rglob_common(self):
def _check(glob, expected):
self.assertEqual(set(glob), { P(BASE, q) for q in expected })
P = self.cls
p = P(BASE)
it = p.rglob("fileA")
self.assertIsInstance(it, collections.Iterator)
# XXX cannot test because of symlink loops in the test setup
#_check(it, ["fileA"])
#_check(p.rglob("fileB"), ["dirB/fileB"])
#_check(p.rglob("*/fileA"), [""])
#_check(p.rglob("*/fileB"), ["dirB/fileB"])
#_check(p.rglob("file*"), ["fileA", "dirB/fileB"])
# No symlink loops here
p = P(BASE, "dirC")
_check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"])
_check(p.rglob("*/*"), ["dirC/dirD/fileD"])
def test_glob_dotdot(self):
# ".." is not special in globs
P = self.cls
p = P(BASE)
self.assertEqual(set(p.glob("..")), { P(BASE, "..") })
self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") })
self.assertEqual(set(p.glob("../xyzzy")), set())
def _check_resolve_relative(self, p, expected):
q = p.resolve()
self.assertEqual(q, expected)
def _check_resolve_absolute(self, p, expected):
q = p.resolve()
self.assertEqual(q, expected)
@with_symlinks
def test_resolve_common(self):
P = self.cls
p = P(BASE, 'foo')
with self.assertRaises(OSError) as cm:
p.resolve()
self.assertEqual(cm.exception.errno, errno.ENOENT)
# These are all relative symlinks
p = P(BASE, 'dirB', 'fileB')
self._check_resolve_relative(p, p)
p = P(BASE, 'linkA')
self._check_resolve_relative(p, P(BASE, 'fileA'))
p = P(BASE, 'dirA', 'linkC', 'fileB')
self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
p = P(BASE, 'dirB', 'linkD', 'fileB')
self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
# Now create absolute symlinks
d = tempfile.mkdtemp(suffix='-dirD')
self.addCleanup(support.rmtree, d)
os.symlink(os.path.join(d), join('dirA', 'linkX'))
os.symlink(join('dirB'), os.path.join(d, 'linkY'))
p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB')
self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB'))
@with_symlinks
def test_resolve_dot(self):
# See https://bitbucket.org/pitrou/pathlib/issue/9/pathresolve-fails-on-complex-symlinks
p = self.cls(BASE)
self.dirlink('.', join('0'))
self.dirlink(os.path.join('0', '0'), join('1'))
self.dirlink(os.path.join('1', '1'), join('2'))
q = p / '2'
self.assertEqual(q.resolve(), p)
def test_with(self):
p = self.cls(BASE)
it = p.iterdir()
it2 = p.iterdir()
next(it2)
with p:
pass
# I/O operation on closed path
self.assertRaises(ValueError, next, it)
self.assertRaises(ValueError, next, it2)
self.assertRaises(ValueError, p.open)
self.assertRaises(ValueError, p.resolve)
self.assertRaises(ValueError, p.absolute)
self.assertRaises(ValueError, p.__enter__)
def test_chmod(self):
p = self.cls(BASE) / 'fileA'
mode = p.stat().st_mode
# Clear writable bit
new_mode = mode & ~0o222
p.chmod(new_mode)
self.assertEqual(p.stat().st_mode, new_mode)
# Set writable bit
new_mode = mode | 0o222
p.chmod(new_mode)
self.assertEqual(p.stat().st_mode, new_mode)
# XXX also need a test for lchmod
def test_stat(self):
p = self.cls(BASE) / 'fileA'
st = p.stat()
self.assertEqual(p.stat(), st)
# Change file mode by flipping write bit
p.chmod(st.st_mode ^ 0o222)
self.addCleanup(p.chmod, st.st_mode)
self.assertNotEqual(p.stat(), st)
@with_symlinks
def test_lstat(self):
p = self.cls(BASE)/ 'linkA'
st = p.stat()
self.assertNotEqual(st, p.lstat())
def test_lstat_nosymlink(self):
p = self.cls(BASE) / 'fileA'
st = p.stat()
self.assertEqual(st, p.lstat())
@unittest.skipUnless(pwd, "the pwd module is needed for this test")
def test_owner(self):
p = self.cls(BASE) / 'fileA'
uid = p.stat().st_uid
try:
name = pwd.getpwuid(uid).pw_name
except KeyError:
self.skipTest(
"user %d doesn't have an entry in the system database" % uid)
self.assertEqual(name, p.owner())
@unittest.skipUnless(grp, "the grp module is needed for this test")
def test_group(self):
p = self.cls(BASE) / 'fileA'
gid = p.stat().st_gid
try:
name = grp.getgrgid(gid).gr_name
except KeyError:
self.skipTest(
"group %d doesn't have an entry in the system database" % gid)
self.assertEqual(name, p.group())
def test_unlink(self):
p = self.cls(BASE) / 'fileA'
p.unlink()
self.assertFileNotFound(p.stat)
self.assertFileNotFound(p.unlink)
def test_rmdir(self):
p = self.cls(BASE) / 'dirA'
for q in p.iterdir():
q.unlink()
p.rmdir()
self.assertFileNotFound(p.stat)
self.assertFileNotFound(p.unlink)
def test_rename(self):
P = self.cls(BASE)
p = P / 'fileA'
size = p.stat().st_size
# Renaming to another path
q = P / 'dirA' / 'fileAA'
p.rename(q)
self.assertEqual(q.stat().st_size, size)
self.assertFileNotFound(p.stat)
# Renaming to a str of a relative path
r = rel_join('fileAAA')
q.rename(r)
self.assertEqual(os.stat(r).st_size, size)
self.assertFileNotFound(q.stat)
def test_replace(self):
P = self.cls(BASE)
p = P / 'fileA'
size = p.stat().st_size
# Replacing a non-existing path
q = P / 'dirA' / 'fileAA'
p.replace(q)
self.assertEqual(q.stat().st_size, size)
self.assertFileNotFound(p.stat)
# Replacing another (existing) path
r = rel_join('dirB', 'fileB')
q.replace(r)
self.assertEqual(os.stat(r).st_size, size)
self.assertFileNotFound(q.stat)
def test_touch_common(self):
P = self.cls(BASE)
p = P / 'newfileA'
self.assertFalse(p.exists())
p.touch()
self.assertTrue(p.exists())
st = p.stat()
old_mtime = st.st_mtime
old_mtime_ns = st.st_mtime_ns
# Rewind the mtime sufficiently far in the past to work around
# filesystem-specific timestamp granularity.
os.utime(str(p), (old_mtime - 10, old_mtime - 10))
# The file mtime should be refreshed by calling touch() again
p.touch()
st = p.stat()
self.assertGreaterEqual(st.st_mtime_ns, old_mtime_ns)
self.assertGreaterEqual(st.st_mtime, old_mtime)
# Now with exist_ok=False
p = P / 'newfileB'
self.assertFalse(p.exists())
p.touch(mode=0o700, exist_ok=False)
self.assertTrue(p.exists())
self.assertRaises(OSError, p.touch, exist_ok=False)
def test_touch_nochange(self):
P = self.cls(BASE)
p = P / 'fileA'
p.touch()
with p.open('rb') as f:
self.assertEqual(f.read().strip(), b"this is file A")
def test_mkdir(self):
P = self.cls(BASE)
p = P / 'newdirA'
self.assertFalse(p.exists())
p.mkdir()
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
with self.assertRaises(OSError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.EEXIST)
def test_mkdir_parents(self):
# Creating a chain of directories
p = self.cls(BASE, 'newdirB', 'newdirC')
self.assertFalse(p.exists())
with self.assertRaises(OSError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.ENOENT)
p.mkdir(parents=True)
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
with self.assertRaises(OSError) as cm:
p.mkdir(parents=True)
self.assertEqual(cm.exception.errno, errno.EEXIST)
# test `mode` arg
mode = stat.S_IMODE(p.stat().st_mode) # default mode
p = self.cls(BASE, 'newdirD', 'newdirE')
p.mkdir(0o555, parents=True)
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
if os.name != 'nt':
# the directory's permissions follow the mode argument
self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode)
# the parent's permissions follow the default process settings
self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode)
@with_symlinks
def test_symlink_to(self):
P = self.cls(BASE)
target = P / 'fileA'
# Symlinking a path target
link = P / 'dirA' / 'linkAA'
link.symlink_to(target)
self.assertEqual(link.stat(), target.stat())
self.assertNotEqual(link.lstat(), target.stat())
# Symlinking a str target
link = P / 'dirA' / 'linkAAA'
link.symlink_to(str(target))
self.assertEqual(link.stat(), target.stat())
self.assertNotEqual(link.lstat(), target.stat())
self.assertFalse(link.is_dir())
# Symlinking to a directory
target = P / 'dirB'
link = P / 'dirA' / 'linkAAAA'
link.symlink_to(target, target_is_directory=True)
self.assertEqual(link.stat(), target.stat())
self.assertNotEqual(link.lstat(), target.stat())
self.assertTrue(link.is_dir())
self.assertTrue(list(link.iterdir()))
def test_is_dir(self):
P = self.cls(BASE)
self.assertTrue((P / 'dirA').is_dir())
self.assertFalse((P / 'fileA').is_dir())
self.assertFalse((P / 'non-existing').is_dir())
if not symlink_skip_reason:
self.assertFalse((P / 'linkA').is_dir())
self.assertTrue((P / 'linkB').is_dir())
self.assertFalse((P/ 'brokenLink').is_dir())
def test_is_file(self):
P = self.cls(BASE)
self.assertTrue((P / 'fileA').is_file())
self.assertFalse((P / 'dirA').is_file())
self.assertFalse((P / 'non-existing').is_file())
if not symlink_skip_reason:
self.assertTrue((P / 'linkA').is_file())
self.assertFalse((P / 'linkB').is_file())
self.assertFalse((P/ 'brokenLink').is_file())
def test_is_symlink(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_symlink())
self.assertFalse((P / 'dirA').is_symlink())
self.assertFalse((P / 'non-existing').is_symlink())
if not symlink_skip_reason:
self.assertTrue((P / 'linkA').is_symlink())
self.assertTrue((P / 'linkB').is_symlink())
self.assertTrue((P/ 'brokenLink').is_symlink())
def test_is_fifo_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_fifo())
self.assertFalse((P / 'dirA').is_fifo())
self.assertFalse((P / 'non-existing').is_fifo())
@unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required")
def test_is_fifo_true(self):
P = self.cls(BASE, 'myfifo')
os.mkfifo(str(P))
self.assertTrue(P.is_fifo())
self.assertFalse(P.is_socket())
self.assertFalse(P.is_file())
def test_is_socket_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_socket())
self.assertFalse((P / 'dirA').is_socket())
self.assertFalse((P / 'non-existing').is_socket())
@unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
def test_is_socket_true(self):
P = self.cls(BASE, 'mysock')
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.addCleanup(sock.close)
try:
sock.bind(str(P))
except OSError as e:
if "AF_UNIX path too long" in str(e):
self.skipTest("cannot bind Unix socket: " + str(e))
self.assertTrue(P.is_socket())
self.assertFalse(P.is_fifo())
self.assertFalse(P.is_file())
def test_is_block_device_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_block_device())
self.assertFalse((P / 'dirA').is_block_device())
self.assertFalse((P / 'non-existing').is_block_device())
def test_is_char_device_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_char_device())
self.assertFalse((P / 'dirA').is_char_device())
self.assertFalse((P / 'non-existing').is_char_device())
def test_is_char_device_true(self):
# Under Unix, /dev/null should generally be a char device
P = self.cls('/dev/null')
if not P.exists():
self.skipTest("/dev/null required")
self.assertTrue(P.is_char_device())
self.assertFalse(P.is_block_device())
self.assertFalse(P.is_file())
def test_pickling_common(self):
p = self.cls(BASE, 'fileA')
for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
dumped = pickle.dumps(p, proto)
pp = pickle.loads(dumped)
self.assertEqual(pp.stat(), p.stat())
def test_parts_interning(self):
P = self.cls
p = P('/usr/bin/foo')
q = P('/usr/local/bin')
# 'usr'
self.assertIs(p.parts[1], q.parts[1])
# 'bin'
self.assertIs(p.parts[2], q.parts[3])
def _check_complex_symlinks(self, link0_target):
# Test solving a non-looping chain of symlinks (issue #19887)
P = self.cls(BASE)
self.dirlink(os.path.join('link0', 'link0'), join('link1'))
self.dirlink(os.path.join('link1', 'link1'), join('link2'))
self.dirlink(os.path.join('link2', 'link2'), join('link3'))
self.dirlink(link0_target, join('link0'))
# Resolve absolute paths
p = (P / 'link0').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
p = (P / 'link1').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
p = (P / 'link2').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
p = (P / 'link3').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
# Resolve relative paths
old_path = os.getcwd()
os.chdir(BASE)
try:
p = self.cls('link0').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
p = self.cls('link1').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
p = self.cls('link2').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
p = self.cls('link3').resolve()
self.assertEqual(p, P)
self.assertEqual(str(p), BASE)
finally:
os.chdir(old_path)
@with_symlinks
def test_complex_symlinks_absolute(self):
self._check_complex_symlinks(BASE)
@with_symlinks
def test_complex_symlinks_relative(self):
self._check_complex_symlinks('.')
@with_symlinks
def test_complex_symlinks_relative_dot_dot(self):
self._check_complex_symlinks(os.path.join('dirA', '..'))
class PathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.Path
def test_concrete_class(self):
p = self.cls('a')
self.assertIs(type(p),
pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath)
def test_unsupported_flavour(self):
if os.name == 'nt':
self.assertRaises(NotImplementedError, pathlib.PosixPath)
else:
self.assertRaises(NotImplementedError, pathlib.WindowsPath)
@only_posix
class PosixPathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.PosixPath
def _check_symlink_loop(self, *args):
path = self.cls(*args)
with self.assertRaises(RuntimeError):
print(path.resolve())
def test_open_mode(self):
old_mask = os.umask(0)
self.addCleanup(os.umask, old_mask)
p = self.cls(BASE)
with (p / 'new_file').open('wb'):
pass
st = os.stat(join('new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
os.umask(0o022)
with (p / 'other_new_file').open('wb'):
pass
st = os.stat(join('other_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
def test_touch_mode(self):
old_mask = os.umask(0)
self.addCleanup(os.umask, old_mask)
p = self.cls(BASE)
(p / 'new_file').touch()
st = os.stat(join('new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
os.umask(0o022)
(p / 'other_new_file').touch()
st = os.stat(join('other_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
(p / 'masked_new_file').touch(mode=0o750)
st = os.stat(join('masked_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o750)
@with_symlinks
def test_resolve_loop(self):
# Loop detection for broken symlinks under POSIX
P = self.cls
# Loops with relative symlinks
os.symlink('linkX/inside', join('linkX'))
self._check_symlink_loop(BASE, 'linkX')
os.symlink('linkY', join('linkY'))
self._check_symlink_loop(BASE, 'linkY')
os.symlink('linkZ/../linkZ', join('linkZ'))
self._check_symlink_loop(BASE, 'linkZ')
# Loops with absolute symlinks
os.symlink(join('linkU/inside'), join('linkU'))
self._check_symlink_loop(BASE, 'linkU')
os.symlink(join('linkV'), join('linkV'))
self._check_symlink_loop(BASE, 'linkV')
os.symlink(join('linkW/../linkW'), join('linkW'))
self._check_symlink_loop(BASE, 'linkW')
def test_glob(self):
P = self.cls
p = P(BASE)
given = set(p.glob("FILEa"))
expect = set() if not support.fs_is_case_insensitive(BASE) else given
self.assertEqual(given, expect)
self.assertEqual(set(p.glob("FILEa*")), set())
def test_rglob(self):
P = self.cls
p = P(BASE, "dirC")
given = set(p.rglob("FILEd"))
expect = set() if not support.fs_is_case_insensitive(BASE) else given
self.assertEqual(given, expect)
self.assertEqual(set(p.rglob("FILEd*")), set())
@only_nt
class WindowsPathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.WindowsPath
def test_glob(self):
P = self.cls
p = P(BASE)
self.assertEqual(set(p.glob("FILEa")), { P(BASE, "fileA") })
def test_rglob(self):
P = self.cls
p = P(BASE, "dirC")
self.assertEqual(set(p.rglob("FILEd")), { P(BASE, "dirC/dirD/fileD") })
if __name__ == "__main__":
unittest.main()
|
trib3/xhtml2pdf
|
refs/heads/master
|
demo/djangoproject/ezpdf.py
|
166
|
#! /usr/bin/python
# -*- encoding: utf-8 -*-
from django.template.loader import get_template
from django.template import Context
from django.http import HttpResponse
import cStringIO as StringIO
from sx.pisa3 import pisaDocument
import cgi
def render_to_pdf(template_src, context_dict):
    '''
    Render the template with the given context and return an HTTP
    response containing the rendered content converted to PDF.
    '''
    template = get_template(template_src)
    context = Context(context_dict)
    html = template.render(context)
    result = StringIO.StringIO()
    # NOTE(review): assumes the rendered HTML fits in ISO-8859-1; characters
    # outside Latin-1 would raise UnicodeEncodeError here -- confirm.
    pdf = pisaDocument(StringIO.StringIO(html.encode("ISO-8859-1")), result)
    if not pdf.err:
        # 'mimetype' is the legacy (pre-Django-1.7) keyword for content_type.
        return HttpResponse(result.getvalue(), mimetype='application/pdf')
    # Conversion failed: echo the escaped HTML back so errors can be debugged.
    return HttpResponse('We had some errors<pre>%s</pre>' % cgi.escape(html))
|
gazpachoking/Flexget
|
refs/heads/develop
|
flexget/components/imdb/api.py
|
4
|
from __future__ import unicode_literals, division, absolute_import
from flask import jsonify
from flexget.api import api, APIResource
from flexget.api.app import etag
from flexget.components.imdb.utils import ImdbSearch
imdb_api = api.namespace('imdb', description='IMDB lookup endpoint')
class ObjectsContainer(object):
    # JSON schema describing a single IMDB search result object.
    movie_object = {
        'type': 'object',
        'properties': {
            'imdb_id': {'type': 'string'},
            'match': {'type': 'number'},
            'name': {'type': 'string'},
            'url': {'type': 'string'},
            'year': {'type': 'number'},
            'thumbnail': {'type': 'string'},
        },
        # 'thumbnail' is the only optional property.
        'required': ['imdb_id', 'match', 'name', 'url', 'year'],
        'additionalProperties': False,
    }
    # The endpoint replies with a (possibly empty) array of such objects.
    return_object = {'type': 'array', 'items': movie_object}
return_schema = api.schema_model('imdb_search_schema', ObjectsContainer.return_object)
@imdb_api.route('/search/<string:title>/')
@api.doc(params={'title': 'Movie name or IMDB ID'})
class IMDBMovieSearch(APIResource):
    # noinspection PyUnusedLocal
    @etag
    @api.response(200, model=return_schema)
    def get(self, title, session=None):
        """ Get a list of IMDB search result by name or ID"""
        raw_movies = ImdbSearch().smart_match(title, single_match=False)
        if not raw_movies:
            # No hits: reply with an empty JSON array rather than an error.
            return jsonify([])
        # Convert single movie to list to preserve consistent reply
        if not isinstance(raw_movies, list):
            raw_movies = [raw_movies]
        return jsonify(raw_movies)
|
ar7z1/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/azure/azure_rm_mysqlfirewallrule.py
|
25
|
#!/usr/bin/python
#
# Copyright (c) 2018 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mysqlfirewallrule
version_added: "2.8"
short_description: Manage MySQL firewall rule instance.
description:
- Create, update and delete instance of MySQL firewall rule.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
server_name:
description:
- The name of the server.
required: True
name:
description:
- The name of the MySQL firewall rule.
required: True
start_ip_address:
description:
- The start IP address of the MySQL firewall rule. Must be IPv4 format.
end_ip_address:
description:
- The end IP address of the MySQL firewall rule. Must be IPv4 format.
state:
description:
- Assert the state of the MySQL firewall rule. Use 'present' to create or update a rule and 'absent' to ensure it is not present.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Create (or update) MySQL firewall rule
azure_rm_mysqlfirewallrule:
resource_group: TestGroup
server_name: testserver
name: rule1
start_ip_address: 10.0.0.17
end_ip_address: 10.0.0.20
'''
RETURN = '''
id:
description:
- Resource ID
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/TestGroup/providers/Microsoft.DBforMySQL/servers/testserver/firewallRules/rule1
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from azure.mgmt.rdbms.mysql import MySQLManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
    # Enumeration of the operations exec_module() may decide to perform.
    NoAction, Create, Update, Delete = range(4)
class AzureRMFirewallRules(AzureRMModuleBase):
"""Configuration class for an Azure RM MySQL firewall rule resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
server_name=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
start_ip_address=dict(
type='str'
),
end_ip_address=dict(
type='str'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.server_name = None
self.name = None
self.start_ip_address = None
self.end_ip_address = None
self.results = dict(changed=False)
self.state = None
self.to_do = Actions.NoAction
super(AzureRMFirewallRules, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=False)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()):
if hasattr(self, key):
setattr(self, key, kwargs[key])
old_response = None
response = None
resource_group = self.get_resource_group(self.resource_group)
old_response = self.get_firewallrule()
if not old_response:
self.log("MySQL firewall rule instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("MySQL firewall rule instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
self.log("Need to check if MySQL firewall rule instance has to be deleted or may be updated")
if (self.start_ip_address is not None) and (self.start_ip_address != old_response['start_ip_address']):
self.to_do = Actions.Update
if (self.end_ip_address is not None) and (self.end_ip_address != old_response['end_ip_address']):
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the MySQL firewall rule instance")
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_firewallrule()
if not old_response:
self.results['changed'] = True
else:
self.results['changed'] = old_response.__ne__(response)
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("MySQL firewall rule instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_firewallrule()
# make sure instance is actually deleted, for some Azure resources, instance is hanging around
# for some time after deletion -- this should be really fixed in Azure
while self.get_firewallrule():
time.sleep(20)
else:
self.log("MySQL firewall rule instance unchanged")
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
return self.results
def create_update_firewallrule(self):
'''
Creates or updates MySQL firewall rule with the specified configuration.
:return: deserialized MySQL firewall rule instance state dictionary
'''
self.log("Creating / Updating the MySQL firewall rule instance {0}".format(self.name))
try:
response = self.mysql_client.firewall_rules.create_or_update(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name,
start_ip_address=self.start_ip_address,
end_ip_address=self.end_ip_address)
if isinstance(response, LROPoller):
response = self.get_poller_result(response)
except CloudError as exc:
self.log('Error attempting to create the MySQL firewall rule instance.')
self.fail("Error creating the MySQL firewall rule instance: {0}".format(str(exc)))
return response.as_dict()
def delete_firewallrule(self):
'''
Deletes specified MySQL firewall rule instance in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the MySQL firewall rule instance {0}".format(self.name))
try:
response = self.mysql_client.firewall_rules.delete(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
except CloudError as e:
self.log('Error attempting to delete the MySQL firewall rule instance.')
self.fail("Error deleting the MySQL firewall rule instance: {0}".format(str(e)))
return True
def get_firewallrule(self):
'''
Gets the properties of the specified MySQL firewall rule.
:return: deserialized MySQL firewall rule instance state dictionary
'''
self.log("Checking if the MySQL firewall rule instance {0} is present".format(self.name))
found = False
try:
response = self.mysql_client.firewall_rules.get(resource_group_name=self.resource_group,
server_name=self.server_name,
firewall_rule_name=self.name)
found = True
self.log("Response : {0}".format(response))
self.log("MySQL firewall rule instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the MySQL firewall rule instance.')
if found is True:
return response.as_dict()
return False
def main():
    """Main execution: instantiating the module class runs the full lifecycle."""
    AzureRMFirewallRules()
if __name__ == '__main__':
    main()
|
jsheedy/worldmap-dynamic-zoom
|
refs/heads/master
|
server/queries.py
|
1
|
import config
class Query():
    """Interface for per-database SQL query builders.

    Concrete subclasses return SQL strings; the base methods are no-ops.
    """

    def country_list(self, bbox):
        """Return SQL listing one fips code per country inside *bbox*."""
        pass

    def country(self, id):
        """Return SQL fetching the geometry of a single country."""
        pass
class World(Query):
    """SQL builder for the ``world`` database (table ``world_borders``)."""

    def country_list(self, bbox=None):
        """Return SQL selecting one fips per country, optionally bbox-filtered.

        NOTE(review): *bbox* values are interpolated directly into the SQL;
        confirm the caller only passes trusted numeric bounds.
        """
        pieces = ["""SELECT MAX(fips) from world_borders """]
        if bbox:
            envelope = ','.join(map(str, bbox))
            pieces.append(""" WHERE world_borders.geom && ST_MakeEnvelope(""" + envelope + ", 4326)")
        pieces.append(" GROUP BY fips")
        return ''.join(pieces)

    def country(self):
        """Return parameterized SQL (tolerance, fips) for one simplified country."""
        return """
        SELECT ST_AsGeoJSON(ST_SimplifyPreserveTopology(ST_Union(geom),%s)), MAX(name)
        FROM world_borders
        WHERE fips=%s
        GROUP BY fips"""
class GADM2(Query):
    """SQL builder stub for the ``gadm2`` database; queries not implemented yet."""

    def country_list(self, bbox):
        pass

    def country(self):
        pass
class InvalidConfigurationError(Exception): pass
# Select the query backend matching the configured database name; fail fast at
# import time so a misconfigured deployment is caught immediately.
if config.DB['NAME'] == 'world':
    db = World()
elif config.DB['NAME'] == 'gadm2':
    db = GADM2()
else:
    raise InvalidConfigurationError("config.db must define a NAME key containing one of (world, gadm2) in order to define queries")
|
thesquelched/libcloud
|
refs/heads/trunk
|
libcloud/compute/drivers/elastichosts.py
|
58
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
ElasticHosts Driver
"""
from libcloud.compute.types import Provider
from libcloud.compute.drivers.elasticstack import ElasticStackBaseNodeDriver
# API end-points
# Maps a region key -> display name, country, and API host used to build the
# connection for that region.
API_ENDPOINTS = {
    'lon-p': {
        'name': 'London Peer 1',
        'country': 'United Kingdom',
        'host': 'api-lon-p.elastichosts.com'
    },
    'lon-b': {
        'name': 'London BlueSquare',
        'country': 'United Kingdom',
        'host': 'api-lon-b.elastichosts.com'
    },
    'sat-p': {
        'name': 'San Antonio Peer 1',
        'country': 'United States',
        'host': 'api-sat-p.elastichosts.com'
    },
    'lax-p': {
        'name': 'Los Angeles Peer 1',
        'country': 'United States',
        'host': 'api-lax-p.elastichosts.com'
    },
    'sjc-c': {
        'name': 'San Jose (Silicon Valley)',
        'country': 'United States',
        'host': 'api-sjc-c.elastichosts.com'
    },
    'tor-p': {
        'name': 'Toronto Peer 1',
        'country': 'Canada',
        'host': 'api-tor-p.elastichosts.com'
    },
    # NOTE(review): key 'syd-y' maps to host 'api-syd-v...' — confirm the
    # key/host mismatch is intentional before changing either side.
    'syd-y': {
        'name': 'Sydney',
        'country': 'Australia',
        'host': 'api-syd-v.elastichosts.com'
    },
    # NOTE(review): key 'cn-1' points at the Hong Kong end-point (api-hkg-e).
    'cn-1': {
        'name': 'Hong Kong',
        'country': 'China',
        'host': 'api-hkg-e.elastichosts.com'
    }
}
# Default API end-point for the base connection class.
DEFAULT_REGION = 'sat-p'
# Retrieved from http://www.elastichosts.com/cloud-hosting/api
# Pre-installed drive images offered by ElasticHosts, keyed by drive UUID.
# 'supports_deployment' marks images usable with libcloud's deploy_node.
STANDARD_DRIVES = {
    '38df0986-4d85-4b76-b502-3878ffc80161': {
        'uuid': '38df0986-4d85-4b76-b502-3878ffc80161',
        'description': 'CentOS Linux 5.5',
        'size_gunzipped': '3GB',
        'supports_deployment': True,
    },
    '980cf63c-f21e-4382-997b-6541d5809629': {
        'uuid': '980cf63c-f21e-4382-997b-6541d5809629',
        'description': 'Debian Linux 5.0',
        'size_gunzipped': '1GB',
        'supports_deployment': True,
    },
    'aee5589a-88c3-43ef-bb0a-9cab6e64192d': {
        'uuid': 'aee5589a-88c3-43ef-bb0a-9cab6e64192d',
        'description': 'Ubuntu Linux 10.04',
        'size_gunzipped': '1GB',
        'supports_deployment': True,
    },
    '62f512cd-82c7-498e-88d8-a09ac2ef20e7': {
        'uuid': '62f512cd-82c7-498e-88d8-a09ac2ef20e7',
        'description': 'Ubuntu Linux 12.04',
        'size_gunzipped': '1GB',
        'supports_deployment': True,
    },
    'b9d0eb72-d273-43f1-98e3-0d4b87d372c0': {
        'uuid': 'b9d0eb72-d273-43f1-98e3-0d4b87d372c0',
        'description': 'Windows Web Server 2008',
        'size_gunzipped': '13GB',
        'supports_deployment': False,
    },
    '30824e97-05a4-410c-946e-2ba5a92b07cb': {
        'uuid': '30824e97-05a4-410c-946e-2ba5a92b07cb',
        'description': 'Windows Web Server 2008 R2',
        'size_gunzipped': '13GB',
        'supports_deployment': False,
    },
    '9ecf810e-6ad1-40ef-b360-d606f0444671': {
        'uuid': '9ecf810e-6ad1-40ef-b360-d606f0444671',
        'description': 'Windows Web Server 2008 R2 + SQL Server',
        'size_gunzipped': '13GB',
        'supports_deployment': False,
    },
    '10a88d1c-6575-46e3-8d2c-7744065ea530': {
        'uuid': '10a88d1c-6575-46e3-8d2c-7744065ea530',
        'description': 'Windows Server 2008 Standard R2',
        'size_gunzipped': '13GB',
        'supports_deployment': False,
    },
    '2567f25c-8fb8-45c7-95fc-bfe3c3d84c47': {
        'uuid': '2567f25c-8fb8-45c7-95fc-bfe3c3d84c47',
        'description': 'Windows Server 2008 Standard R2 + SQL Server',
        'size_gunzipped': '13GB',
        'supports_deployment': False,
    },
}
class ElasticHostsException(Exception):
    """Error raised by the ElasticHosts driver; the first arg is the message."""

    def __str__(self):
        return self.args[0]

    def __repr__(self):
        return "<ElasticHostsException '%s'>" % self.args[0]
class ElasticHostsNodeDriver(ElasticStackBaseNodeDriver):
    """
    Node Driver class for ElasticHosts
    """
    type = Provider.ELASTICHOSTS
    api_name = 'elastichosts'
    name = 'ElasticHosts'
    website = 'http://www.elastichosts.com/'
    features = {"create_node": ["generates_password"]}
    _standard_drives = STANDARD_DRIVES

    def __init__(self, key, secret=None, secure=True, host=None, port=None,
                 region=DEFAULT_REGION, **kwargs):
        # Region-pinned subclasses define _region; it wins over the argument.
        if hasattr(self, '_region'):
            region = self._region
        if region not in API_ENDPOINTS:
            raise ValueError('Invalid region: %s' % (region))
        # Remember whether the caller supplied an explicit host so that
        # _ex_connection_class_kwargs does not overwrite it with the region's
        # default end-point.
        self._host_argument_set = host is not None
        super(ElasticHostsNodeDriver, self).__init__(key=key, secret=secret,
                                                     secure=secure, host=host,
                                                     port=port,
                                                     region=region, **kwargs)

    def _ex_connection_class_kwargs(self):
        """
        Return the host value based on the user supplied region.
        """
        kwargs = {}
        if not self._host_argument_set:
            kwargs['host'] = API_ENDPOINTS[self.region]['host']
        return kwargs
# Convenience subclasses: each pins one region via the _region attribute so a
# user can pick an end-point by class instead of passing region= explicitly.
class ElasticHostsUK1NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the London Peer 1 end-point
    """
    name = 'ElasticHosts (lon-p)'
    _region = 'lon-p'


class ElasticHostsUK2NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the London Bluesquare end-point
    """
    name = 'ElasticHosts (lon-b)'
    _region = 'lon-b'


class ElasticHostsUS1NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the San Antonio Peer 1 end-point
    """
    name = 'ElasticHosts (sat-p)'
    _region = 'sat-p'


class ElasticHostsUS2NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the Los Angeles Peer 1 end-point
    """
    name = 'ElasticHosts (lax-p)'
    _region = 'lax-p'


class ElasticHostsUS3NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the San Jose (Silicon Valley) end-point
    """
    name = 'ElasticHosts (sjc-c)'
    _region = 'sjc-c'


class ElasticHostsCA1NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the Toronto Peer 1 end-point
    """
    name = 'ElasticHosts (tor-p)'
    _region = 'tor-p'


class ElasticHostsAU1NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the Sydney end-point
    """
    name = 'ElasticHosts (syd-y)'
    _region = 'syd-y'


class ElasticHostsCN1NodeDriver(ElasticHostsNodeDriver):
    """
    ElasticHosts node driver for the Hong Kong end-point
    """
    name = 'ElasticHosts (cn-1)'
    _region = 'cn-1'
|
x303597316/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/db_typecasts/__init__.py
|
12133432
| |
OpringaoDoTurno/airflow
|
refs/heads/master
|
airflow/api/auth/backend/__init__.py
|
1049
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
tedor/yandex-dns-manager
|
refs/heads/master
|
vendors/__init__.py
|
2
|
__author__ = 'Vitalyi'
|
django-nonrel/django
|
refs/heads/nonrel-1.6
|
tests/admin_scripts/complex_app/models/__init__.py
|
739
|
from .bar import Bar
from .foo import Foo
__all__ = ['Foo', 'Bar']
|
lamby/django-cache-toolbox
|
refs/heads/master
|
tests/__init__.py
|
12133432
| |
gabrielfalcao/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/django/contrib/localflavor/ca/__init__.py
|
12133432
| |
qgis/QGIS-Django
|
refs/heads/master
|
qgis-app/lib/templatetags/__init__.py
|
12133432
| |
lautriv/Just-Metadata
|
refs/heads/master
|
modules/intelgathering/__init__.py
|
12133432
| |
openstack/senlin
|
refs/heads/master
|
contrib/vdu/vdu/__init__.py
|
12133432
| |
sam-tsai/django
|
refs/heads/master
|
django/conf/locale/nl/__init__.py
|
12133432
| |
jdiez17/pagmo
|
refs/heads/master
|
PyGMO/problem/_tsp.py
|
1
|
from PyGMO.problem._problem import tsp, tsp_cs, tsp_vrplc, _tsp_encoding
from PyGMO import __extensions__
# tsp_ds needs the GTOP extension (spacecraft trajectory support); import and
# wire it up only when the extension was built.
if __extensions__["gtop"] is True:
    from PyGMO.problem._problem import tsp_ds
    tsp_ds.encoding_type = _tsp_encoding
# Renaming and placing the enums
tsp.encoding_type = _tsp_encoding
tsp_vrplc.encoding_type = _tsp_encoding
tsp_cs.encoding_type = _tsp_encoding
def _tsp_ctor(self, weights=None, type="cities"):
    """
    Constructs Travelling Salesman Problem (TSP or ATSP)
    The problem encoding can be of three different types as
    selected by the type kwarg

    1-"cities"
    This encoding represents the ids of the cities visited
    directly in the chromosome. It will
    thus create a constrained problem as only permutation of the
    cities ids are valid (e.g. [0,2,1,5,0] is not
    a valid chromosome)

    2-"randomkeys"
    This encoding, first introduced in the paper
    Bean, J. C. (1994). Genetic algorithms and random keys for
    sequencing and optimization. ORSA journal on computing, 6(2), 154-160.
    It creates a box constrained problem without any constraint.
    It essentially represents the tour as a sequence of doubles bounded
    in [0,1]. The tour is reconstructed by the argsort of the sequence.
    (e.g. [0.34,0.12,0.76,0.03] -> [3,1,0,2])

    3-"full"
    In the full encoding the TSP is represented as a integer linear
    programming problem. The details can be found in
    http://en.wikipedia.org/wiki/Travelling_salesman_problem

    Constructs a Travelling Salesman problem
    (Constrained Integer Single-Objective)

    USAGE: problem.tsp(weights = [0,1,2],[1,0,5],[2,5,0], type="randomkeys")

    * weights: Square matrix with zero diagonal entries containing the cities distances.
               Defaults to a 3-city example matrix.
    * type: encoding type. One of "cities","randomkeys","full"
    """
    # Build the example matrix per call instead of using a mutable default
    # argument (a list default is shared across calls and could be mutated).
    if weights is None:
        weights = [[0, 1, 2], [1, 0, 5], [2, 5, 0]]
    # We construct the arg list for the original constructor exposed by
    # boost_python
    from PyGMO.problem._problem import _tsp_encoding

    def encoding_type(x):
        # Translate the user-facing string into the exposed C++ enum value;
        # an unknown string raises KeyError.
        return {
            "cities": _tsp_encoding.CITIES,
            "randomkeys": _tsp_encoding.RANDOMKEYS,
            "full": _tsp_encoding.FULL
        }[x]
    arg_list = []
    arg_list.append(weights)
    arg_list.append(encoding_type(type))
    self._orig_init(*arg_list)
tsp._orig_init = tsp.__init__
tsp.__init__ = _tsp_ctor
def _tsp_cs_ctor(self, weights=None, values=None, max_path_length=2, type="cities"):
    """
    Constructs Travelling Salesman Problem City-Selection (TSP-CS)
    The problem encoding can be of three different types as
    selected by the type kwarg

    1-"cities"
    This encoding represents the ids of the cities visited
    directly in the chromosome. It will
    thus create a constrained problem as only permutation of the
    cities ids are valid (e.g. [0,2,1,5,0] is not
    a valid chromosome)

    2-"randomkeys"
    This encoding, first introduced in the paper
    Bean, J. C. (1994). Genetic algorithms and random keys for
    sequencing and optimization. ORSA journal on computing, 6(2), 154-160.
    It creates a box constrained problem without any constraint.
    It essentially represents the tour as a sequence of doubles bounded
    in [0,1]. The tour is reconstructed by the argsort of the sequence.
    (e.g. [0.34,0.12,0.76,0.03] -> [3,1,0,2])

    3-"full"
    In the full encoding the TSP is represented as a integer linear
    programming problem. The details can be found in
    http://en.wikipedia.org/wiki/Travelling_salesman_problem

    Constructs a Travelling Salesman problem
    (Constrained Integer Single-Objective)

    USAGE: problem.tsp_cs(weights=[[0, 1, 2], [1, 0, 5], [2, 5, 0]], values=[1, 1, 1], max_path_length=2, type="cities")

    * weights: Square matrix with zero diagonal entries containing the cities distances.
               Defaults to a 3-city example matrix.
    * values: The city values. Defaults to [1, 1, 1].
    * max_path_length: maximum length the salesman can walk
    * type: encoding type. One of "cities","randomkeys","full"
    """
    # Avoid shared mutable default arguments; the per-call defaults below are
    # identical to the documented example values.
    if weights is None:
        weights = [[0, 1, 2], [1, 0, 5], [2, 5, 0]]
    if values is None:
        values = [1, 1, 1]
    # We construct the arg list for the original constructor exposed by
    # boost_python
    from PyGMO.problem._problem import _tsp_encoding

    def encoding_type(x):
        # Translate the user-facing string into the exposed C++ enum value.
        return {
            "cities": _tsp_encoding.CITIES,
            "randomkeys": _tsp_encoding.RANDOMKEYS,
            "full": _tsp_encoding.FULL
        }[x]
    arg_list = []
    arg_list.append(weights)
    arg_list.append(values)
    arg_list.append(max_path_length)
    arg_list.append(encoding_type(type))
    self._orig_init(*arg_list)
tsp_cs._orig_init = tsp_cs.__init__
tsp_cs.__init__ = _tsp_cs_ctor
def _tsp_vrplc_ctor(self, weights=None, type="cities", capacity=1.1):
    """
    Constructs Vehicle routing problem with limited capacity.
    This is a variant to the TSP that asks to find n-tours of length
    smaller than the maximum vehicle capacity that visit all cities.
    The objective is to minimize n

    The problem encoding can be of three different types as
    selected by the type kwarg

    1-"cities"
    This encoding represents the ids of the cities visited
    directly in the chromosome. It will
    thus create a constrained problem as only permutation of the
    cities ids are valid (e.g. [0,2,1,5,0] is not
    a valid chromosome)

    2-"randomkeys"
    This encoding, first introduced in the paper
    Bean, J. C. (1994). Genetic algorithms and random keys for
    sequencing and optimization. ORSA journal on computing, 6(2), 154-160.
    It creates a box constrained problem without any constraint.
    It essentially represents the tour as a sequence of doubles bounded
    in [0,1]. The tour is reconstructed by the argsort of the sequence.
    (e.g. [0.34,0.12,0.76,0.03] -> [3,1,0,2])

    3-"full"
    In the full encoding the TSP is represented as a integer linear
    programming problem. The details can be found in
    http://en.wikipedia.org/wiki/Travelling_salesman_problem

    Constructs a Travelling Salesman problem
    (Constrained Integer Single-Objective)

    USAGE: problem.tsp(matrix = [0,1,2],[1,0,5],[2,5,0], type="randomkeys", capacity=1.1)

    * weights: Square matrix with zero diagonal entries containing the cities distances.
               Defaults to a 3-city example matrix.
    * type: encoding type. One of "cities","randomkeys","full"
    * capacity: maximum vehicle capacity
    """
    # Avoid a shared mutable default argument; build the example matrix per call.
    if weights is None:
        weights = [[0, 1, 2], [1, 0, 5], [2, 5, 0]]
    from PyGMO.problem._problem import _tsp_encoding

    def encoding_type(x):
        # Translate the user-facing string into the exposed C++ enum value.
        return {
            "cities": _tsp_encoding.CITIES,
            "randomkeys": _tsp_encoding.RANDOMKEYS,
            "full": _tsp_encoding.FULL
        }[x]
    # We construct the arg list for the original constructor exposed by
    # boost_python
    arg_list = []
    arg_list.append(weights)
    arg_list.append(encoding_type(type))
    arg_list.append(capacity)
    self._orig_init(*arg_list)
tsp_vrplc._orig_init = tsp_vrplc.__init__
tsp_vrplc.__init__ = _tsp_vrplc_ctor
def _plot_tsp(self, x, node_size=10, edge_color='r',
              edge_width=1, bias=None, node_color=None, pos=None):
    """
    Plots a tour represented in the chromosome x
    (using the same encoding of the self object)

    USAGE: problem._plot_tsp(x, node_size=10, edge_color='r',
        edge_width=1, bias=None, node_color=None, pos=None):

    * x: Crhomosome encoding the city tour.
        The encoding type used must be the same as that of self
    * node_size: size of the nodes in the graph visualization
    * edge_color: size of the edges in the graph visualization
    * edge_width: width of the edges in the graph visualization
    * bias: when the graoh node positions are not used,
        the plot tries to use
        a spring model to place the nodes. The spring
        constants depend on this
        bias parameter
    * node_color: color of the nodes in the graph visualization
    * pos: a dictionary containing the node positions
        (same format as networkx)

    Returns the dictionary of node positions actually used for the plot.
    """
    if not (self.verify_x(x) and self.feasibility_x(x)):
        # NOTE(review): "crhomosome" typo is in the user-facing message; it is
        # a runtime string and deliberately left unchanged here.
        raise Exception("crhomosome is unfeasible")
    from matplotlib import pyplot as plt
    import networkx as nx
    import numpy as np
    from PyGMO.problem import tsp
    fig = plt.gcf()
    axis = plt.gca()
    # We extract few informations on the problem
    weights = self.weights
    n_cities = len(weights[0])
    # Decode the chromosome into a plain city sequence regardless of encoding.
    if self.encoding == _tsp_encoding.RANDOMKEYS:
        edgelist = self.randomkeys2cities(x)
    elif self.encoding == _tsp_encoding.CITIES:
        edgelist = x
    elif self.encoding == _tsp_encoding.FULL:
        edgelist = self.full2cities(x)
    # We construct the list of edges (u,v) containing
    # the indices of the cities visited and we here distinguish between tsp types
    if type(self) == tsp:
        edgelist = [(edgelist[i], edgelist[i + 1]) for i in range(n_cities - 1)] + [(edgelist[-1], edgelist[0])]
    elif type(self) == tsp_cs:
        _, _, id1, id2 = self.find_city_subsequence(x)
        if id1 <= id2:
            # NOTE(review): when id2 == n_cities - 1, (id2 + 1) % n_cities is 0
            # and this slice becomes empty — confirm this wrap is intentional.
            edgelist = edgelist[id1:(id2 + 1) % n_cities]
        else:
            edgelist = edgelist[id1:] + edgelist[:id2 + 1]
        edgelist = [(edgelist[i], edgelist[i + 1]) for i in range(len(edgelist) - 1)]
    elif type(self) == tsp_vrplc:
        # Break the tour into sub-tours whenever accumulated length (stl)
        # exceeds the vehicle capacity.
        stl = 0
        chromosome = edgelist
        edgelist = [(chromosome[0], chromosome[1])]
        for i in range(1, n_cities - 1):
            stl += weights[int(chromosome[i])][int(chromosome[i + 1])]
            if stl > self.capacity:
                stl = 0
            else:
                edgelist += [(chromosome[i], chromosome[i + 1])]
    if bias is None:
        bias = max([max(d) for d in weights])
    # We create a networkx graph
    G = nx.Graph()
    # We fill in the vertices
    for i in range(n_cities):
        G.add_node(i)
    # We fill in all the edges
    for i in range(n_cities):
        for j in range(n_cities):
            if i <= j:
                continue
            G.add_edge(i, j, weight=bias / weights[i][j])
    # If cities coordinates are not passed as an input we try to calculate
    # the coordinates for an euclidian TSP (assuming symmetric weights)
    if pos is None:
        # assign the first two nodes: node 0 and node 1, node 0 is
        # chosen to be in the origin
        pos = {0: np.array([0, 0]), 1: np.array([weights[0][1], 0])}
        # algorithm checks during computation of the coordinates if the
        # problem is euclidian
        prob_is_eucl = True
        # we will have to store the first node that is not located in the
        # line constructed by the initial two nodes 0 and 1
        nil_idx = -1
        i = 2
        while (i < n_cities and prob_is_eucl is True):
            # we compute cos(alpha) where alpha is the angle enclosed
            # by the edge (0,1) and (0,i)
            cos_alpha = 0.5 * ((weights[0][i]) ** 2 + (weights[0][1]) ** 2 -
                               (weights[1][i]) ** 2) / (weights[0][i] * weights[0][1])
            if (cos_alpha < -1 or 1 < cos_alpha):
                prob_is_eucl = False
            else:
                # computes one of the two possible positions for node i
                pos[i] = np.array([weights[0][i] * cos_alpha,
                                   weights[0][i] * (1 - cos_alpha ** 2) ** (0.5)])
                omega = 1
                if abs(cos_alpha) != 1:
                    # as soon as one node is not aligned with edge (0,1)
                    # we have to orientate the plot the first node not aligned,
                    # named nil_idx, is chosen to have a positive second
                    # coordinate - every following node is then oriented
                    # accordingly
                    if nil_idx == -1:
                        nil_idx = i
                    elif abs(((pos[i][0] - pos[nil_idx][0]) ** 2 +
                              (pos[i][1] - pos[nil_idx][1]) ** 2) ** (0.5) -
                             weights[i][nil_idx]) > 1e-08 * weights[i][nil_idx]:
                        omega = -1
                pos[i][1] = omega * pos[i][1]  # orient node
                # We have to check the distance to all the previous
                # nodes to decide if the problem is euclidian
                for j in range(2, i):
                    if abs(((pos[i][0] - pos[j][0]) ** 2 +
                            (pos[i][1] - pos[j][1]) ** 2) ** (0.5) -
                           weights[i][j]) > 1e-08 * weights[i][j]:
                        prob_is_eucl = False
            i += 1
        # In case of a non euclidian TSP we create a spring model
        if prob_is_eucl is False:
            pos = nx.layout.spring_layout(G)
    if node_color is None:
        node_color = [0.4] * n_cities
    nx.draw_networkx_nodes(G, pos=pos, node_size=node_size,
                           cmap=plt.cm.Blues, node_color=node_color, ax=axis)
    nx.draw_networkx_edges(G, pos, edgelist=edgelist,
                           width=edge_width, alpha=1, edge_color=edge_color, ax=axis)
    fig.canvas.draw()
    plt.show()
    return pos
tsp.plot = _plot_tsp
tsp_cs.plot = _plot_tsp
tsp_vrplc.plot = _plot_tsp
# tsp_ds (TSP with a spacecraft flavour) is only available when the GTOP
# extension was built; wire its constructor up lazily.
if __extensions__["gtop"] is True:
    def _tsp_ds_ctor(self, planets, values, max_DV, epochs, type="cities"):
        """
        Constructs Travelling Salesman Problem City-Selection (TSP-CS)
        The problem encoding can be of three different types as
        selected by the type kwarg

        1-"cities"
        This encoding represents the ids of the cities visited
        directly in the chromosome. It will
        thus create a constrained problem as only permutation of the
        cities ids are valid (e.g. [0,2,1,5,0] is not
        a valid chromosome)

        2-"randomkeys"
        This encoding, first introduced in the paper
        Bean, J. C. (1994). Genetic algorithms and random keys for
        sequencing and optimization. ORSA journal on computing, 6(2), 154-160.
        It creates a box constrained problem without any constraint.
        It essentially represents the tour as a sequence of doubles bounded
        in [0,1]. The tour is reconstructed by the argsort of the sequence.
        (e.g. [0.34,0.12,0.76,0.03] -> [3,1,0,2])

        3-"full"
        In the full encoding the TSP is represented as a integer linear
        programming problem. The details can be found in
        http://en.wikipedia.org/wiki/Travelling_salesman_problem

        Constructs a Travelling Salesman problem
        (Constrained Integer Single-Objective)

        USAGE: problem.tsp_cs(planets, values, max_DV, epochs, type="cities"):

        * planets: list of planets
        * values: list of planets values
        * max_DV: maximum DV on-board
        * epochs: list of allowed epochs for the visit (in MJD2000)
        * type: encoding type. One of "cities","randomkeys","full"
        """
        # We construct the arg list for the original constructor exposed by
        # boost_python
        from PyGMO.problem._problem import _tsp_encoding

        def encoding_type(x):
            # Translate the user-facing string into the exposed C++ enum value.
            return {
                "cities": _tsp_encoding.CITIES,
                "randomkeys": _tsp_encoding.RANDOMKEYS,
                "full": _tsp_encoding.FULL
            }[x]
        arg_list = []
        arg_list.append(planets)
        arg_list.append(values)
        arg_list.append(max_DV)
        arg_list.append(epochs)
        arg_list.append(encoding_type(type))
        self._orig_init(*arg_list)
    tsp_ds._orig_init = tsp_ds.__init__
    tsp_ds.__init__ = _tsp_ds_ctor
|
nickdirienzo/flask-dance
|
refs/heads/master
|
flask_dance/contrib/meetup.py
|
3
|
from __future__ import unicode_literals
from flask_dance.consumer import OAuth2ConsumerBlueprint
from functools import partial
from flask.globals import LocalProxy, _lookup_app_object
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
__maintainer__ = "David Baumgold <david@davidbaumgold.com>"
def make_meetup_blueprint(
        key=None, secret=None, scope=None,
        redirect_url=None, redirect_to=None,
        login_url=None, authorized_url=None,
        session_class=None, backend=None):
    """
    Make a blueprint for authenticating with Meetup using OAuth 2. This requires
    an OAuth consumer from Meetup. You should either pass the key and secret to
    this constructor, or make sure that your Flask application config defines
    them, using the variables MEETUP_OAUTH_KEY and MEETUP_OAUTH_SECRET.

    Args:
        key (str): The OAuth consumer key for your application on Meetup
        secret (str): The OAuth consumer secret for your application on Meetup
        scope (str, optional): comma-separated list of scopes for the OAuth token
        redirect_url (str): the URL to redirect to after the authentication
            dance is complete
        redirect_to (str): if ``redirect_url`` is not defined, the name of the
            view to redirect to after the authentication dance is complete.
            The actual URL will be determined by :func:`flask.url_for`
        login_url (str, optional): the URL path for the ``login`` view.
            Defaults to ``/meetup``
        authorized_url (str, optional): the URL path for the ``authorized`` view.
            Defaults to ``/meetup/authorized``.
        session_class (class, optional): The class to use for creating a
            Requests session. Defaults to
            :class:`~flask_dance.consumer.requests.OAuth2Session`.
        backend: A storage backend class, or an instance of a storage
            backend class, to use for this blueprint. Defaults to
            :class:`~flask_dance.consumer.backend.session.SessionBackend`.

    :rtype: :class:`~flask_dance.consumer.OAuth2ConsumerBlueprint`
    :returns: A :ref:`blueprint <flask:blueprints>` to attach to your Flask app.
    """
    if not scope:
        scope = ["basic"]
    bp = OAuth2ConsumerBlueprint(
        "meetup", __name__,
        client_id=key,
        client_secret=secret,
        scope=scope,
        base_url="https://api.meetup.com/2/",
        authorization_url="https://secure.meetup.com/oauth2/authorize",
        token_url="https://secure.meetup.com/oauth2/access",
        redirect_url=redirect_url,
        redirect_to=redirect_to,
        login_url=login_url,
        authorized_url=authorized_url,
        session_class=session_class,
        backend=backend,
    )
    # Fall back to app config for credentials when not passed explicitly.
    bp.from_config["client_id"] = "MEETUP_OAUTH_KEY"
    bp.from_config["client_secret"] = "MEETUP_OAUTH_SECRET"

    @bp.before_app_request
    def set_applocal_session():
        # Expose this blueprint's OAuth session on the app context so the
        # module-level `meetup` LocalProxy can resolve it.
        ctx = stack.top
        ctx.meetup_oauth = bp.session

    return bp
meetup = LocalProxy(partial(_lookup_app_object, "meetup_oauth"))
|
nesdis/djongo
|
refs/heads/master
|
tests/django_tests/tests/v21/tests/schema/fields.py
|
68
|
from functools import partial
from django.db import models
from django.db.models.fields.related import (
RECURSIVE_RELATIONSHIP_CONSTANT, ManyToManyDescriptor, ManyToManyField,
ManyToManyRel, RelatedField, create_many_to_many_intermediary_model,
)
class CustomManyToManyField(RelatedField):
    """
    Ticket #24104 - Need to have a custom ManyToManyField,
    which is not an inheritor of ManyToManyField.
    """
    many_to_many = True

    def __init__(self, to, db_constraint=True, swappable=True, related_name=None, related_query_name=None,
                 limit_choices_to=None, symmetrical=None, through=None, through_fields=None, db_table=None, **kwargs):
        try:
            to._meta
        except AttributeError:
            # "to" may be a lazy reference such as "app.Model" or "self".
            to = str(to)
        kwargs['rel'] = ManyToManyRel(
            self, to,
            related_name=related_name,
            related_query_name=related_query_name,
            limit_choices_to=limit_choices_to,
            symmetrical=symmetrical if symmetrical is not None else (to == RECURSIVE_RELATIONSHIP_CONSTANT),
            through=through,
            through_fields=through_fields,
            db_constraint=db_constraint,
        )
        self.swappable = swappable
        self.db_table = db_table
        if kwargs['rel'].through is not None:
            assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
        super().__init__(**kwargs)

    def contribute_to_class(self, cls, name, **kwargs):
        # Hide the reverse accessor for symmetrical self-referential relations,
        # mirroring ManyToManyField's behaviour.
        if self.remote_field.symmetrical and (
                self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name):
            self.remote_field.related_name = "%s_rel_+" % name
        super().contribute_to_class(cls, name, **kwargs)
        # Auto-create the intermediary table unless one was supplied or the
        # model is abstract/swapped out.
        if not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped:
            self.remote_field.through = create_many_to_many_intermediary_model(self, cls)
        setattr(cls, self.name, ManyToManyDescriptor(self.remote_field))
        self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta)

    def get_internal_type(self):
        # Report as ManyToManyField so the schema editor treats it identically.
        return 'ManyToManyField'

    # Copy those methods from ManyToManyField because they don't call super() internally
    contribute_to_related_class = ManyToManyField.__dict__['contribute_to_related_class']
    _get_m2m_attr = ManyToManyField.__dict__['_get_m2m_attr']
    _get_m2m_reverse_attr = ManyToManyField.__dict__['_get_m2m_reverse_attr']
    _get_m2m_db_table = ManyToManyField.__dict__['_get_m2m_db_table']
# Trivial subclass used to verify that the schema machinery treats
# ManyToManyField subclasses the same as the base class.
class InheritedManyToManyField(ManyToManyField):
    pass
class MediumBlobField(models.BinaryField):
    """
    A MySQL BinaryField that uses a different blob size.
    """
    def db_type(self, connection):
        # Override the default BLOB column type with MySQL's 16MB variant.
        return 'MEDIUMBLOB'
|
SteveHNH/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/ovs/openvswitch_port.py
|
29
|
#!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, David Stygstra <david.stygstra@gmail.com>
# Portions copyright @ 2015 VMware, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: openvswitch_port
version_added: 1.4
author: "David Stygstra (@stygstra)"
short_description: Manage Open vSwitch ports
requirements: [ ovs-vsctl ]
description:
- Manage Open vSwitch ports
options:
bridge:
required: true
description:
- Name of bridge to manage
port:
required: true
description:
- Name of port to manage on the bridge
tag:
version_added: 2.2
required: false
description:
- VLAN tag for this port. Must be a value between
0 and 4095.
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the port should exist
timeout:
required: false
default: 5
description:
- How long to wait for ovs-vswitchd to respond
external_ids:
version_added: 2.0
required: false
default: {}
description:
- Dictionary of external_ids applied to a port.
set:
version_added: 2.0
required: false
default: None
description:
- Set a single property on a port.
'''
EXAMPLES = '''
# Creates port eth2 on bridge br-ex
- openvswitch_port:
bridge: br-ex
port: eth2
state: present
# Creates port eth6
- openvswitch_port:
bridge: bridge-loop
port: eth6
state: present
set: Interface eth6
# Creates port vlan10 with tag 10 on bridge br-ex
- openvswitch_port:
bridge: br-ex
port: vlan10
tag: 10
state: present
set: Interface vlan10
# Assign interface id server1-vifeth6 and mac address 00:00:5E:00:53:23
# to port vifeth6 and setup port to be managed by a controller.
- openvswitch_port:
bridge: br-int
port: vifeth6
state: present
args:
external_ids:
iface-id: '{{ inventory_hostname }}-vifeth6'
attached-mac: '00:00:5E:00:53:23'
vm-id: '{{ inventory_hostname }}'
iface-status: active
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
def _external_ids_to_dict(text):
text = text.strip()
if text == '{}':
return None
else:
d = {}
for kv in text[1:-1].split(','):
kv = kv.strip()
k, v = kv.split('=')
d[k] = v
return d
def _tag_to_str(text):
text = text.strip()
if text == '[]':
return None
else:
return text
def map_obj_to_commands(want, have, module):
    """Build the list of ovs-vsctl commands that turn *have* into *want*.

    *want*/*have* are dicts from map_params_to_obj/map_config_to_obj; *have*
    is empty ({}) when the port does not exist yet. Returns a possibly empty
    list of shell command strings (not yet executed).
    """
    commands = list()
    if module.params['state'] == 'absent':
        if have:
            # Port exists but should not: delete it.
            templatized_command = ("%(ovs-vsctl)s -t %(timeout)s del-port"
                                   " %(bridge)s %(port)s")
            command = templatized_command % module.params
            commands.append(command)
    else:
        if have:
            # Port already exists: emit only the settings that differ.
            if want['tag'] != have['tag']:
                templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                       " set port %(port)s tag=%(tag)s")
                command = templatized_command % module.params
                commands.append(command)
            if want['external_ids'] != have['external_ids']:
                for k, v in iteritems(want['external_ids']):
                    if (not have['external_ids']
                            or k not in have['external_ids']
                            or want['external_ids'][k] != have['external_ids'][k]):
                        if v is None:
                            # A None value means "remove this external_id key".
                            templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                                   " remove port %(port)s"
                                                   " external_ids " + k)
                            command = templatized_command % module.params
                            commands.append(command)
                        else:
                            templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                                   " set port %(port)s"
                                                   " external_ids:")
                            command = templatized_command % module.params
                            command += k + "=" + v
                            commands.append(command)
        else:
            # Port is missing: create it, then apply tag/set/external_ids.
            templatized_command = ("%(ovs-vsctl)s -t %(timeout)s add-port"
                                   " %(bridge)s %(port)s")
            command = templatized_command % module.params
            if want['tag']:
                templatized_command = " tag=%(tag)s"
                command += templatized_command % module.params
            if want['set']:
                templatized_command = " -- set %(set)s"
                command += templatized_command % module.params
            commands.append(command)
            if want['external_ids']:
                for k, v in iteritems(want['external_ids']):
                    templatized_command = ("%(ovs-vsctl)s -t %(timeout)s"
                                           " set port %(port)s external_ids:")
                    command = templatized_command % module.params
                    command += k + "=" + v
                    commands.append(command)
    return commands
def map_config_to_obj(module):
    """Read the current state of the configured port from Open vSwitch.

    Returns an empty dict when the port is not attached to the bridge;
    otherwise a dict with bridge, port, tag and external_ids keys.
    """
    def run(template):
        # Render an ovs-vsctl command from the module params and execute it.
        return module.run_command(template % module.params, check_rc=True)

    rc, out, err = run("%(ovs-vsctl)s -t %(timeout)s list-ports %(bridge)s")
    if rc != 0:
        module.fail_json(msg=err)
    obj = {}
    if module.params['port'] in out.splitlines():
        obj['bridge'] = module.params['bridge']
        obj['port'] = module.params['port']
        rc, out, err = run("%(ovs-vsctl)s -t %(timeout)s get"
                           " Port %(port)s tag")
        obj['tag'] = _tag_to_str(out)
        rc, out, err = run("%(ovs-vsctl)s -t %(timeout)s get"
                           " Port %(port)s external_ids")
        obj['external_ids'] = _external_ids_to_dict(out)
    return obj
def map_params_to_obj(module):
    """Collect the desired port settings from the module parameters."""
    keys = ('bridge', 'port', 'tag', 'external_ids', 'set')
    return dict((key, module.params[key]) for key in keys)
def main():
    """Module entry point: compute and (outside check mode) apply the
    ovs-vsctl commands that realize the requested port state."""
    spec = {
        'bridge': {'required': True},
        'port': {'required': True},
        'state': {'default': 'present', 'choices': ['present', 'absent']},
        'timeout': {'default': 5, 'type': 'int'},
        'external_ids': {'default': None, 'type': 'dict'},
        'tag': {'default': None},
        'set': {'required': False, 'default': None}
    }
    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)

    # Resolve the ovs-vsctl binary once and stash it in params so the
    # templatized commands can interpolate it.
    module.params["ovs-vsctl"] = module.get_bin_path("ovs-vsctl", True)

    desired = map_params_to_obj(module)
    current = map_config_to_obj(module)
    commands = map_obj_to_commands(desired, current, module)

    result = {'changed': bool(commands), 'commands': commands}
    if commands and not module.check_mode:
        for command in commands:
            module.run_command(command, check_rc=True)
    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
vganapath/rally
|
refs/heads/master
|
rally/plugins/openstack/context/nova/flavors.py
|
6
|
# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from novaclient import exceptions as nova_exceptions
from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import osclients
from rally.task import context
LOG = logging.getLogger(__name__)
@context.configure(name="flavors", order=340)
class FlavorsGenerator(context.Context):
    """Context creates a list of flavors."""
    # Schema for the context configuration: a list of flavor specs.
    # "name" and "ram" are mandatory; the other sizing fields get defaults
    # from FlavorConfig when omitted.
    CONFIG_SCHEMA = {
        "type": "array",
        "$schema": consts.JSON_SCHEMA,
        "items": {
            "type": "object",
            "properties": {
                "name": {
                    "type": "string",
                },
                "ram": {
                    "type": "integer",
                    "minimum": 1
                },
                "vcpus": {
                    "type": "integer",
                    "minimum": 1
                },
                "disk": {
                    "type": "integer",
                    "minimum": 0
                },
                "swap": {
                    "type": "integer",
                    "minimum": 0
                },
                "ephemeral": {
                    "type": "integer",
                    "minimum": 0
                },
                "extra_specs": {
                    "type": "object",
                    "additionalProperties": {
                        "type": "string"
                    }
                }
            },
            "additionalProperties": False,
            "required": ["name", "ram"]
        }
    }
    @logging.log_task_wrapper(LOG.info, _("Enter context: `flavors`"))
    def setup(self):
        """Create list of flavors."""
        self.context["flavors"] = {}
        clients = osclients.Clients(self.context["admin"]["credential"])
        for flavor_config in self.config:
            # extra_specs is applied separately via set_keys() after the
            # flavor exists, so pull it out before the remaining keys are
            # forwarded to flavors.create() (FlavorConfig ignores it).
            extra_specs = flavor_config.get("extra_specs")
            flavor_config = FlavorConfig(**flavor_config)
            try:
                flavor = clients.nova().flavors.create(**flavor_config)
            except nova_exceptions.Conflict as e:
                # Name collision: reuse the pre-existing flavor. Note the
                # `continue` means it is NOT recorded in
                # self.context["flavors"] and so is not cleaned up later.
                LOG.warning("Using already existing flavor %s" %
                            flavor_config["name"])
                if logging.is_debug():
                    LOG.exception(e)
                continue
            if extra_specs:
                flavor.set_keys(extra_specs)
            self.context["flavors"][flavor_config["name"]] = flavor.to_dict()
            LOG.debug("Created flavor with id '%s'" % flavor.id)
    @logging.log_task_wrapper(LOG.info, _("Exit context: `flavors`"))
    def cleanup(self):
        """Delete created flavors."""
        clients = osclients.Clients(self.context["admin"]["credential"])
        for flavor in self.context["flavors"].values():
            # ExceptionLogger logs (rather than propagates) failures so one
            # bad flavor does not abort the rest of the cleanup; the delete
            # itself is attempted up to 3 times via rutils.retry.
            with logging.ExceptionLogger(
                    LOG, _("Can't delete flavor %s") % flavor["id"]):
                rutils.retry(3, clients.nova().flavors.delete, flavor["id"])
                LOG.debug("Flavor is deleted %s" % flavor["id"])
class FlavorConfig(dict):
    def __init__(self, name, ram, vcpus=1, disk=0, swap=0, ephemeral=0,
                 extra_specs=None):
        """Flavor configuration for context and flavor & image validation code.

        Context code uses this class to supply default values for flavor
        creation. Validation code treats an instance as a Flavor when
        checking image validity against a flavor the context will create.

        :param name: name of the newly created flavor
        :param ram: RAM amount for the flavor (MBs)
        :param vcpus: VCPUs amount for the flavor
        :param disk: disk amount for the flavor (GBs)
        :param swap: swap amount for the flavor (MBs)
        :param ephemeral: ephemeral disk amount for the flavor (GBs)
        :param extra_specs: is ignored
        """
        values = {"name": name, "ram": ram, "vcpus": vcpus, "disk": disk,
                  "swap": swap, "ephemeral": ephemeral}
        super(FlavorConfig, self).__init__(**values)
        # Mirror the mapping entries as attributes so the object can be
        # used both as a dict (flavors.create(**cfg)) and as a flavor-like
        # object (cfg.ram).
        self.__dict__.update(self)
|
cretin45/htc-kernel-ruby
|
refs/heads/ics
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
|
12980
|
# SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
	"""Top-level window rendering scheduler traces as a zoomable, scrollable
	band of rectangles (one band per rectangle index from the tracer)."""
	# Layout constants, in pixels: vertical offset of the first band, band
	# height and inter-band spacing, and the height of the event-marker
	# strip drawn along the top of a band.
	Y_OFFSET = 100
	RECT_HEIGHT = 100
	RECT_SPACE = 50
	EVENT_MARKING_WIDTH = 5
	def __init__(self, sched_tracer, title, parent = None, id = -1):
		wx.Frame.__init__(self, parent, id, title)
		(self.screen_width, self.screen_height) = wx.GetDisplaySize()
		self.screen_width -= 10
		self.screen_height -= 10
		self.zoom = 0.5
		self.scroll_scale = 20
		self.sched_tracer = sched_tracer
		# Register back-reference so the tracer can call paint/update hooks.
		self.sched_tracer.set_root_win(self)
		(self.ts_start, self.ts_end) = sched_tracer.interval()
		self.update_width_virtual()
		self.nr_rects = sched_tracer.nr_rectangles() + 1
		self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		# whole window panel
		self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
		# scrollable container
		self.scroll = wx.ScrolledWindow(self.panel)
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
		self.scroll.EnableScrolling(True, True)
		self.scroll.SetFocus()
		# scrollable drawing area
		self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
		# Paint/key/mouse events are bound on both the drawing panel and its
		# container so input is handled wherever the focus lands.
		self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Fit()
		self.Fit()
		self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
		# Summary text widget, lazily created by update_summary().
		self.txt = None
		self.Show(True)
	def us_to_px(self, val):
		# Convert a time delta to pixels (scaled by current zoom).
		# NOTE(review): name suggests input is microseconds -- confirm
		# against the tracer's interval() units.
		return val / (10 ** 3) * self.zoom
	def px_to_us(self, val):
		# Inverse of us_to_px().
		return (val / self.zoom) * (10 ** 3)
	def scroll_start(self):
		# Current scroll origin, in pixels.
		(x, y) = self.scroll.GetViewStart()
		return (x * self.scroll_scale, y * self.scroll_scale)
	def scroll_start_us(self):
		# Current horizontal scroll origin, in time units.
		(x, y) = self.scroll_start()
		return self.px_to_us(x)
	def paint_rectangle_zone(self, nr, color, top_color, start, end):
		"""Draw one rectangle in band `nr` covering [start, end], with an
		optional `top_color` event-marker strip above the main body."""
		offset_px = self.us_to_px(start - self.ts_start)
		width_px = self.us_to_px(end - self.ts_start)
		offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		width_py = RootFrame.RECT_HEIGHT
		dc = self.dc
		if top_color is not None:
			(r, g, b) = top_color
			top_color = wx.Colour(r, g, b)
			brush = wx.Brush(top_color, wx.SOLID)
			dc.SetBrush(brush)
			dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
			# Shrink the main rectangle so it sits below the marker strip.
			width_py -= RootFrame.EVENT_MARKING_WIDTH
			offset_py += RootFrame.EVENT_MARKING_WIDTH
		(r ,g, b) = color
		color = wx.Colour(r, g, b)
		brush = wx.Brush(color, wx.SOLID)
		dc.SetBrush(brush)
		dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
	def update_rectangles(self, dc, start, end):
		# Delegate drawing of the visible time window to the tracer.
		start += self.ts_start
		end += self.ts_start
		self.sched_tracer.fill_zone(start, end)
	def on_paint(self, event):
		dc = wx.PaintDC(self.scroll_panel)
		self.dc = dc
		width = min(self.width_virtual, self.screen_width)
		(x, y) = self.scroll_start()
		start = self.px_to_us(x)
		end = self.px_to_us(x + width)
		self.update_rectangles(dc, start, end)
	def rect_from_ypixel(self, y):
		# Map a window y coordinate to a band index; -1 if the click falls
		# above the first band or in the spacing between bands.
		y -= RootFrame.Y_OFFSET
		rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
			return -1
		return rect
	def update_summary(self, txt):
		# Replace (not append to) the summary text below the drawing area.
		if self.txt:
			self.txt.Destroy()
		self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
	def on_mouse_down(self, event):
		(x, y) = event.GetPositionTuple()
		rect = self.rect_from_ypixel(y)
		if rect == -1:
			return
		t = self.px_to_us(x) + self.ts_start
		self.sched_tracer.mouse_down(rect, t)
	def update_width_virtual(self):
		self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
	def __zoom(self, x):
		# Re-derive the virtual width for the new zoom factor, then scroll
		# so the time `x` stays at the left edge of the view.
		self.update_width_virtual()
		(xpos, ypos) = self.scroll.GetViewStart()
		xpos = self.us_to_px(x) / self.scroll_scale
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
		self.Refresh()
	def zoom_in(self):
		x = self.scroll_start_us()
		self.zoom *= 2
		self.__zoom(x)
	def zoom_out(self):
		x = self.scroll_start_us()
		self.zoom /= 2
		self.__zoom(x)
	def on_key_press(self, event):
		# '+'/'-' zoom; arrow keys scroll one scroll unit at a time.
		key = event.GetRawKeyCode()
		if key == ord("+"):
			self.zoom_in()
			return
		if key == ord("-"):
			self.zoom_out()
			return
		key = event.GetKeyCode()
		(x, y) = self.scroll.GetViewStart()
		if key == wx.WXK_RIGHT:
			self.scroll.Scroll(x + 1, y)
		elif key == wx.WXK_LEFT:
			self.scroll.Scroll(x - 1, y)
		elif key == wx.WXK_DOWN:
			self.scroll.Scroll(x, y + 1)
		elif key == wx.WXK_UP:
			self.scroll.Scroll(x, y - 1)
|
chromium/chromium
|
refs/heads/master
|
third_party/tensorflow-text/src/tensorflow_text/python/ops/wordshape_ops_test.py
|
7
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Tests for wordshape ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
from tensorflow_text.python.ops import wordshape_ops
@test_util.run_all_in_graph_and_eager_modes
class Utf8CharsOpTest(test.TestCase):
  """Exercises wordshape_ops.wordshape over each WordShape pattern.
  Methods whose names start with ``SKIP_`` are deliberately disabled:
  the test loader only collects methods named ``test*``.
  """
  def testDashShape(self):
    test_string = [
        u"a-b", u"a\u2010b".encode("utf-8"), u"a\u2013b".encode("utf-8"),
        u"a\u2e3ab".encode("utf-8"), u"abc".encode("utf-8")
    ]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.HAS_PUNCTUATION_DASH)
    self.assertAllEqual(shapes, [True, True, True, True, False])
  def testNoDigits(self):
    test_string = [u"abc", u"a\u06f3m".encode("utf-8")]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_NO_DIGITS)
    self.assertAllEqual(shapes, [True, False])
  def testSomeDigits(self):
    # HAS_SOME_DIGITS means a mix of digits and non-digits (all-digit and
    # no-digit tokens are both False below).
    test_string = [
        u"abc", u"a\u06f3m".encode("utf-8"), u"90\u06f3".encode("utf-8"),
        u"a9b8c7", u"9ab87c", u"\u06f3m\u06f3"
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_SOME_DIGITS)
    self.assertAllEqual(shapes, [False, True, False, True, True, True])
  def testSomeDigitAndCurrency(self):
    # Passing a list of patterns yields one boolean per pattern per token.
    test_string = [
        u"abc", u"a\u06f3m".encode("utf-8"), u"90\u06f3".encode("utf-8"),
        u"a9b8c7", u"$9ab87c$", u"\u06f3m\u06f3"
    ]
    pattern_list = [
        wordshape_ops.WordShape.HAS_SOME_DIGITS,
        wordshape_ops.WordShape.HAS_CURRENCY_SYMBOL
    ]
    shapes = wordshape_ops.wordshape(test_string, pattern=pattern_list)
    self.assertAllEqual(shapes, [[False, False], [True, False], [False, False],
                                 [True, False], [True, True], [True, False]])
  def testOnlyDigits(self):
    test_string = [u"abc", u"a9b".encode("utf-8"), u"90\u06f3".encode("utf-8")]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_ONLY_DIGITS)
    self.assertAllEqual(shapes, [False, False, True])
  def testNumericValue(self):
    test_string = [u"98.6", u"-0.3", u"2.783E4", u"e4", u"1e10"]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.IS_NUMERIC_VALUE)
    self.assertAllEqual(shapes, [True, True, True, False, True])
  def SKIP_testWhitespace(self):
    test_string = [
        u" ", u"\v", u"\r\n", u"\u3000".encode("utf-8"), u" a", u"abc", u"a\nb",
        u"\u3000 \n".encode("utf-8")
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.IS_WHITESPACE)
    self.assertAllEqual(shapes,
                        [True, True, True, True, False, False, False, True])
  def testNoPunct(self):
    test_string = [u"abc", u"a;m".encode("utf-8")]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.HAS_NO_PUNCT_OR_SYMBOL)
    self.assertAllEqual(shapes, [True, False])
  def testSomePunct(self):
    test_string = [
        u"abc", u"a;m".encode("utf-8"), u".,!".encode("utf-8"), u"a@b.c,",
        u".ab8;c", u"\u0f08m\u0f08"
    ]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.HAS_SOME_PUNCT_OR_SYMBOL)
    self.assertAllEqual(shapes, [False, True, False, True, True, True])
  def testAllPunct(self):
    test_string = [u"abc", u"a;b".encode("utf-8"), u";,\u0f08".encode("utf-8")]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.IS_PUNCT_OR_SYMBOL)
    self.assertAllEqual(shapes, [False, False, True])
  def testLeadingPunct(self):
    test_string = [u"abc", u";b", u"b;", u";,\u0f08".encode("utf-8")]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.BEGINS_WITH_PUNCT_OR_SYMBOL)
    self.assertAllEqual(shapes, [False, True, False, True])
  def testTrailingPunct(self):
    test_string = [u"abc", u";b", u"b;", u";,\u0f08".encode("utf-8")]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.ENDS_WITH_PUNCT_OR_SYMBOL)
    self.assertAllEqual(shapes, [False, False, True, True])
  def SKIP_testSentenceTerminal(self):
    test_string = [u"abc", u".b", u"b.", u"b,", u"b!!!", u"abc?!"]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.ENDS_WITH_SENTENCE_TERMINAL)
    self.assertAllEqual(shapes, [False, False, True, False, True, True])
  def SKIP_testMultipleSentenceTerminal(self):
    test_string = [u"abc", u".b", u"b.", u"b,", u"b!!!", u"abc?!"]
    shapes = wordshape_ops.wordshape(
        test_string,
        wordshape_ops.WordShape.ENDS_WITH_MULTIPLE_SENTENCE_TERMINAL)
    self.assertAllEqual(shapes, [False, False, False, False, True, True])
  def SKIP_testTerminalPunct(self):
    test_string = [u"abc", u".b", u"b.", u"b,", u"b!!!", u"abc?!"]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.ENDS_WITH_TERMINAL_PUNCT)
    self.assertAllEqual(shapes, [False, False, True, True, True, True])
  def SKIP_testMultipleTerminalPunct(self):
    test_string = [u"abc", u".b", u"b.", u"b,,", u"b!!!", u"abc?!"]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.ENDS_WITH_MULTIPLE_TERMINAL_PUNCT)
    self.assertAllEqual(shapes, [False, False, False, True, True, True])
  def testEllipsis(self):
    test_string = [u"abc", u"abc...", u"...abc", u"abc\u2026".encode("utf-8")]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.ENDS_WITH_ELLIPSIS)
    self.assertAllEqual(shapes, [False, True, False, True])
  def testEndsWithEmoticon(self):
    test_string = [u"abc", u":-)", u"O:)", u"8)x", u":\u3063C", u"abc:-)"]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.ENDS_WITH_EMOTICON)
    self.assertAllEqual(shapes, [False, True, True, False, True, True])
  def testIsEmoticon(self):
    test_string = [u"abc", u":-)", u"O:)", u"8)x", u":\u3063C", u"abc:-)"]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.IS_EMOTICON)
    self.assertAllEqual(shapes, [False, True, False, False, True, False])
  def testEmoji(self):
    test_string = [
        u"\U0001f604m".encode("utf-8"), u"m\u2605m".encode("utf-8"), u"O:)",
        u"m\U0001f604".encode("utf-8"), u"\u2105k".encode("utf-8")
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_EMOJI)
    self.assertAllEqual(shapes, [True, True, False, True, False])
  # This is by no means exhaustive, but it's a broad and diverse sample
  # to more thoroughly test the emoji regex.
  def testExtendedEmojis(self):
    test_string = [
        "‼",
        "⁉",
        "ℹ",
        "↘",
        "↩",
        "⌚",
        "⌛",
        "⏳",
        "⌨",
        "⏏",
        "⏩",
        "⏺",
        "⏰",
        "⏱",
        "⏲",
        "🕰",
        "Ⓜ",
        "▪",
        "⬛",
        "⬜",
        "✂",
        "✅",
        "✈",
        "✉",
        "✊",
        "✊🏿",
        "✋",
        "✌",
        "🤘🏾",
        "🤞🏿",
        "✍",
        "✏",
        "✒",
        "✔",
        "✝",
        "✡",
        "✨",
        "✳",
        "✴",
        "❄",
        "❇",
        "❌",
        "❎",
        "❓",
        "❔",
        "❗",
        "❕",
        "❣",
        "❤",
        "➕",
        "➖",
        "➗",
        "⤴",
        "⤵",
        "⬅",
        "⭐",
        "⭕",
        "〰",
        "〽",
        "㊗",
        "🀄",
        "🃏",
        "🅰",
        "🅱",
        "🅾",
        "🅿",
        "🆎",
        "🆑",
        "🆒",
        "🆔",
        "🆗",
        "🆘",
        "🆙",
        "🆚",
        "🈁",
        "🈂",
        "🈚",
        "🈯",
        "🈴",
        "🈳",
        "🈺",
        "🉐",
        "🉑",
        "🌍",
        "🏔",
        "🍾",
        "🐯",
        "🐆",
        "🦇",
        "🦅",
        "🐝",
        "🦖",
        "🐉",
        "🦠",
        "🔎",
        "⚗",
        "🕯",
        "💡",
        "📽",
        "📡",
        "🧮",
        "🔋",
        "📲",
        "☎",
        "🥁",
        "🎧",
        "🎼",
        "🔊",
        "💍",
        "👗",
        "🕶",
        "🎭",
        "🔮",
        "🧬",
        "🔬",
        "🤹",
        "🚵",
        "🧗",
        "🧗🏼♀️",
        "🧗🏿♂️",
        "🥋",
        "🎳",
        "🏈",
        "🏅",
        "🎑",
        "🎉",
        "🎄",
        "🌊",
        "⚡",
        "🌖",
        "🚀",
        "🚠",
        "🛩",
        "🛴",
        "🏎",
        "🚅",
        "🌆",
        "🕌",
        "🕍",
        "⛪",
        "🗽",
        "🏘",
        "🍵",
        "🍫",
        "🦑",
        "🍱",
        "🥦",
        "🥑",
        "🌴",
        "🌼",
        "🦂",
        "🐬",
        "🥀",
        "🧖🏾",
        "🧕🏿",
        "🧔🏼",
        "🧒🏾",
        "🧛",
        "🧝🏻",
        "🧞",
        "🧟",
        "🧙🏾",
        "🧚🏻",
        "💃🏽",
        "👯",
        "🧘",
        "🦱",
        "👪",
        "👩👩👧👦",
        "👨🏿🤝👨🏻",
        "🕵️♀️",
        "🧑🚀",
        "👩✈️",
        "🧑🏿⚕️",
        "🧑🏾⚖️",
        "🧠",
        "👁️🗨️",
        "🙉",
        "🤗",
        "👏",
        "💏",
        "🧯",
        "🛒",
        "🧺",
        "🧷",
        "💊",
        "🧲",
        "⛓",
        "⚖",
        "🛡",
        "🏹",
        "🎣",
        "⚔",
        "🔨",
        "📌",
        "📊",
        "📈",
        "💹",
        "💸",
        "💵",
        "📜",
        "📚",
        "📆",
        "💼",
        "📝",
        "📬",
        "🔏",
        "🔓",
        "🔑",
        "🗃",
        "🚿",
        "🛏",
        "🗿",
        "🏧",
        "🚮",
        "🚰",
        "♿",
        "🚻",
        "🚾",
        "🛄",
        "⚠",
        "🚸",
        "⛔",
        "🚭",
        "☣",
        "🔃",
        "🔚",
        "🔚",
        "⚛",
        "♈",
        "🔆",
        "🎦",
        "⚕",
        "♻",
        "⚜",
        "💠",
        "🏁",
        "🚩",
        "🎌",
        "🏴☠️",
        "🇺🇸",
        "🇨🇭",
        "🇺🇦",
        "🇿🇼",
        "🇦🇴",
        "🇦🇨",
        "🇦🇶",
        "🇺🇳",
        "🇪🇺",
        "🇧🇿",
        "🇵🇲",
        "🇮🇴",
        "🇻🇮",
        "🇨🇽",
        "🏴",
        "🇧🇱",
        u"\U0001fa70".encode("utf-8"),  # ballet shoes.
        u"\U0001fa7a".encode("utf-8"),  # stethoscope.
        u"\U0001fa80".encode("utf-8"),  # yo-yo.
        u"\U0001fa82".encode("utf-8"),  # parachute.
        u"\U0001fa86".encode("utf-8"),  # nesting dolls.
        u"\U0001fa90".encode("utf-8"),  # ringed planet.
        u"\U0001fa97".encode("utf-8"),  # accordion.
        u"\U0001fa99".encode("utf-8"),  # coin.
        u"\U0001fa9c".encode("utf-8"),  # ladder.
        u"\U0001fa9f".encode("utf-8"),  # window.
        u"\U0001faa1".encode("utf-8"),  # sewing needle.
        u"\U0001faa8".encode("utf-8"),  # rock.
        u"\U0001fab0".encode("utf-8"),  # fly.
        u"\U0001fab4".encode("utf-8"),  # potted plant.
        u"\U0001fab6".encode("utf-8"),  # feather.
        u"\U0001fac0".encode("utf-8"),  # anatomical heart.
        u"\U0001fac2".encode("utf-8"),  # people hugging.
        u"\U0001fad0".encode("utf-8"),  # blueberries.
        u"\U0001fad2".encode("utf-8"),  # olive.
        u"\U0001fad6".encode("utf-8"),  # teapot.
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_EMOJI)
    self.assertAllEqual(shapes, [True] * len(test_string))
  def testAcronym(self):
    test_string = [u"abc", u"A.B.", u"A.B.C.)", u"ABC"]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.IS_ACRONYM_WITH_PERIODS)
    self.assertAllEqual(shapes, [False, True, False, False])
  def testAllUppercase(self):
    test_string = [u"abc", u"ABc", u"ABC"]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.IS_UPPERCASE)
    self.assertAllEqual(shapes, [False, False, True])
  def testAllLowercase(self):
    test_string = [u"abc", u"ABc", u"ABC"]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.IS_LOWERCASE)
    self.assertAllEqual(shapes, [True, False, False])
  def testMixedCase(self):
    test_string = [u"abc", u"ABc", u"ABC", u"abC"]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_MIXED_CASE)
    self.assertAllEqual(shapes, [False, True, False, True])
  def testMixedCaseLetters(self):
    # Unlike HAS_MIXED_CASE, trailing punctuation disqualifies the token.
    test_string = [u"abc", u"ABc", u"ABC", u"abC", u"abC."]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.IS_MIXED_CASE_LETTERS)
    self.assertAllEqual(shapes, [False, True, False, True, False])
  def testTitleCase(self):
    test_string = [
        u"abc", u"ABc", u"ABC", u"Abc", u"aBcd", u"\u01c8bc".encode("utf-8")
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_TITLE_CASE)
    self.assertAllEqual(shapes, [False, False, False, True, False, True])
  def SKIP_testNoQuotes(self):
    test_string = [
        u"abc", u"\"ABc", u"ABC'", u"Abc\u201c".encode("utf-8"), u"aBcd"
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_NO_QUOTES)
    self.assertAllEqual(shapes, [True, False, False, False, True])
  def testOpenQuote(self):
    test_string = [
        u"''", u"ABc\"", u"\uff07".encode("utf-8"), u"\u2018".encode("utf-8"),
        u"aBcd", u"``"
    ]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.BEGINS_WITH_OPEN_QUOTE)
    self.assertAllEqual(shapes, [False, False, True, True, False, True])
  def testCloseQuote(self):
    test_string = [
        u"''", u"ABc\"", u"\u300f".encode("utf-8"), u"\u2018".encode("utf-8"),
        u"aBcd", u"``"
    ]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.ENDS_WITH_CLOSE_QUOTE)
    self.assertAllEqual(shapes, [True, True, True, False, False, False])
  def SKIP_testQuote(self):
    test_string = [
        u"''", u"ABc\"", u"\uff07".encode("utf-8"), u"\u2018".encode("utf-8"),
        u"aBcd", u"``", u"\u300d".encode("utf-8")
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_QUOTE)
    self.assertAllEqual(shapes, [True, True, True, True, False, True, True])
  def testMathSymbol(self):
    test_string = [u"''", u"\u003c", u"\uff07".encode("utf-8")]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_MATH_SYMBOL)
    self.assertAllEqual(shapes, [False, True, False])
  def testCurrencySymbol(self):
    test_string = [u"''", u"ABc$", u"$\uff07".encode("utf-8")]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.HAS_CURRENCY_SYMBOL)
    self.assertAllEqual(shapes, [False, True, True])
  def testCurrencySymbolAtBeginning(self):
    test_string = [u"''", u"ABc$", u"$ABc", u"A$Bc"]
    shapes = wordshape_ops.wordshape(
        test_string, wordshape_ops.WordShape.HAS_CURRENCY_SYMBOL)
    self.assertAllEqual(shapes, [False, True, True, True])
  def testNonLetters(self):
    test_string = [
        u"''", u"ABc", u"\uff07".encode("utf-8"), u"\u2018".encode("utf-8"),
        u"aBcd", u"`#ab", u"\u300d".encode("utf-8")
    ]
    shapes = wordshape_ops.wordshape(test_string,
                                     wordshape_ops.WordShape.HAS_NON_LETTER)
    self.assertAllEqual(shapes, [True, False, True, True, False, True, True])
  def testMultipleShapes(self):
    test_string = [u"abc", u"ABc", u"ABC"]
    shapes = wordshape_ops.wordshape(test_string, [
        wordshape_ops.WordShape.IS_UPPERCASE,
        wordshape_ops.WordShape.IS_LOWERCASE
    ])
    self.assertAllEqual(shapes, [[False, True], [False, False], [True, False]])
  def testNonShapePassedToShapeArg(self):
    # The op validates its pattern argument and rejects non-WordShape input.
    test_string = [u"abc", u"ABc", u"ABC"]
    with self.assertRaises(TypeError):
      wordshape_ops.wordshape(test_string, "This is not a Shape")
# Allow running this test file directly as a standalone binary.
if __name__ == "__main__":
  test.main()
|
deepakselvaraj/federated-horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/project/routers/tables.py
|
12
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse # noqa
from django.template.defaultfilters import title # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import messages
from horizon import tables
from neutronclient.common import exceptions as q_ext
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class DeleteRouter(tables.DeleteAction):
    """Table action that deletes one or more routers via Neutron."""
    data_type_singular = _("Router")
    data_type_plural = _("Routers")
    redirect_url = "horizon:project:routers:index"
    def delete(self, request, obj_id):
        """Delete a single router, reporting failures to the user.

        A NeutronClientException (e.g. router still has interfaces)
        redirects back to the index with an error; any other exception is
        handled generically.
        """
        obj = self.table.get_object_by_id(obj_id)
        name = self.table.get_object_display(obj)
        try:
            api.neutron.router_delete(request, obj_id)
        except q_ext.NeutronClientException as e:
            # NOTE(review): this branch interpolates the exception text
            # (e.message) into the 'Unable to delete router "%s"' message,
            # while the branch below uses the router name -- looks
            # inconsistent; confirm intent before changing the string.
            msg = _('Unable to delete router "%s"') % e.message
            LOG.info(msg)
            messages.error(request, msg)
            redirect = reverse(self.redirect_url)
            raise exceptions.Http302(redirect, message=msg)
        except Exception as e:
            msg = _('Unable to delete router "%s"') % name
            LOG.info(msg)
            exceptions.handle(request, msg)
    def allowed(self, request, router=None):
        # The action is always offered for any router row.
        return True
class CreateRouter(tables.LinkAction):
    """Table-level action that opens the 'create router' modal form."""
    name = "create"
    verbose_name = _("Create Router")
    url = "horizon:project:routers:create"
    classes = ("ajax-modal", "btn-create")
class SetGateway(tables.LinkAction):
    """Row action opening the 'set gateway' modal for a router."""
    name = "setgateway"
    verbose_name = _("Set Gateway")
    url = "horizon:project:routers:setgateway"
    classes = ("ajax-modal", "btn-camera")

    def allowed(self, request, datum=None):
        # Only offered for routers that have no external gateway yet.
        return not datum.external_gateway_info
class ClearGateway(tables.BatchAction):
    """Batch action that removes the external gateway from routers."""
    name = "cleargateway"
    action_present = _("Clear")
    action_past = _("Cleared")
    data_type_singular = _("Gateway")
    data_type_plural = _("Gateways")
    classes = ('btn-danger', 'btn-cleargateway')
    redirect_url = "horizon:project:routers:index"

    def action(self, request, obj_id):
        """Clear the gateway of one router, redirecting with an error
        message on failure."""
        router = self.table.get_object_by_id(obj_id)
        display_name = self.table.get_object_display(router)
        try:
            api.neutron.router_remove_gateway(request, obj_id)
        except Exception as e:
            msg = (_('Unable to clear gateway for router '
                     '"%(name)s": "%(msg)s"')
                   % {"name": display_name, "msg": e.message})
            LOG.info(msg)
            redirect = reverse(self.redirect_url)
            exceptions.handle(request, msg, redirect=redirect)

    def get_success_url(self, request):
        return reverse(self.redirect_url)

    def allowed(self, request, datum=None):
        # Clearing only makes sense when a gateway is currently set.
        return bool(datum.external_gateway_info)
class UpdateRow(tables.Row):
    """Row implementation that refreshes itself asynchronously (AJAX)."""
    ajax = True
    def get_data(self, request, router_id):
        """Re-fetch a single router so its row can be updated in place."""
        router = api.neutron.router_get(request, router_id)
        return router
def get_external_network(router):
    """Return the router's external network for display, or "-" when no
    gateway is set."""
    gateway_info = router.external_gateway_info
    return gateway_info['network'] if gateway_info else "-"
class RoutersTable(tables.DataTable):
    """Table listing the project's routers with status and gateway info."""
    name = tables.Column("name",
                         verbose_name=_("Name"),
                         link="horizon:project:routers:detail")
    # status=True marks this column as driving the AJAX row-update polling.
    status = tables.Column("status",
                           filters=(title,),
                           verbose_name=_("Status"),
                           status=True)
    ext_net = tables.Column(get_external_network,
                            verbose_name=_("External Network"))
    def get_object_display(self, obj):
        """Use the router's name as its display label in messages."""
        return obj.name
    class Meta:
        # NOTE(review): table slugs are conventionally lowercase
        # ("routers"); the capitalized value may be intentional -- confirm
        # before renaming.
        name = "Routers"
        verbose_name = _("Routers")
        status_columns = ["status"]
        row_class = UpdateRow
        table_actions = (CreateRouter, DeleteRouter)
        row_actions = (SetGateway, ClearGateway, DeleteRouter)
|
Nick-OpusVL/odoo
|
refs/heads/8.0
|
openerp/addons/base/ir/ir_actions.py
|
174
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from functools import partial
import logging
import operator
import os
import time
import datetime
import dateutil
import openerp
from openerp import SUPERUSER_ID
from openerp import tools
from openerp import workflow
import openerp.api
from openerp.osv import fields, osv
from openerp.osv.orm import browse_record
import openerp.report.interface
from openerp.report.report_sxw import report_sxw, report_rml
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
import openerp.workflow
_logger = logging.getLogger(__name__)
class actions(osv.osv):
    """Base model backing the shared ir_actions table; concrete
    ir.actions.* models inherit from it."""
    _name = 'ir.actions.actions'
    _table = 'ir_actions'
    _order = 'name'
    _columns = {
        'name': fields.char('Name', required=True),
        'type': fields.char('Action Type', required=True),
        'usage': fields.char('Action Usage'),
        'help': fields.text('Action description',
                            help='Optional help text for the users with a description of the target view, such as its usage and purpose.',
                            translate=True),
    }
    _defaults = {
        'usage': lambda *a: False,
    }
    def unlink(self, cr, uid, ids, context=None):
        """unlink ir.action.todo which are related to actions which will be deleted.
        NOTE: ondelete cascade will not work on ir.actions.actions so we will need to do it manually."""
        todo_obj = self.pool.get('ir.actions.todo')
        if not ids:
            return True
        # Normalize a single id to a list (Python 2: ids may be int or long).
        if isinstance(ids, (int, long)):
            ids = [ids]
        todo_ids = todo_obj.search(cr, uid, [('action_id', 'in', ids)], context=context)
        todo_obj.unlink(cr, uid, todo_ids, context=context)
        return super(actions, self).unlink(cr, uid, ids, context=context)
class ir_actions_report_xml(osv.osv):
def _report_content(self, cursor, user, ids, name, arg, context=None):
res = {}
for report in self.browse(cursor, user, ids, context=context):
data = report[name + '_data']
if not data and report[name[:-8]]:
fp = None
try:
fp = tools.file_open(report[name[:-8]], mode='rb')
data = fp.read()
except:
data = False
finally:
if fp:
fp.close()
res[report.id] = data
return res
def _report_content_inv(self, cursor, user, id, name, value, arg, context=None):
self.write(cursor, user, id, {name+'_data': value}, context=context)
def _report_sxw(self, cursor, user, ids, name, arg, context=None):
res = {}
for report in self.browse(cursor, user, ids, context=context):
if report.report_rml:
res[report.id] = report.report_rml.replace('.rml', '.sxw')
else:
res[report.id] = False
return res
def _lookup_report(self, cr, name):
"""
Look up a report definition.
"""
opj = os.path.join
# First lookup in the deprecated place, because if the report definition
# has not been updated, it is more likely the correct definition is there.
# Only reports with custom parser sepcified in Python are still there.
if 'report.' + name in openerp.report.interface.report_int._reports:
new_report = openerp.report.interface.report_int._reports['report.' + name]
else:
cr.execute("SELECT * FROM ir_act_report_xml WHERE report_name=%s", (name,))
r = cr.dictfetchone()
if r:
if r['report_type'] in ['qweb-pdf', 'qweb-html']:
return r['report_name']
elif r['report_rml'] or r['report_rml_content_data']:
if r['parser']:
kwargs = { 'parser': operator.attrgetter(r['parser'])(openerp.addons) }
else:
kwargs = {}
new_report = report_sxw('report.'+r['report_name'], r['model'],
opj('addons',r['report_rml'] or '/'), header=r['header'], register=False, **kwargs)
elif r['report_xsl'] and r['report_xml']:
new_report = report_rml('report.'+r['report_name'], r['model'],
opj('addons',r['report_xml']),
r['report_xsl'] and opj('addons',r['report_xsl']), register=False)
else:
raise Exception, "Unhandled report type: %s" % r
else:
raise Exception, "Required report does not exist: %s" % name
return new_report
def render_report(self, cr, uid, res_ids, name, data, context=None):
"""
Look up a report definition and render the report for the provided IDs.
"""
new_report = self._lookup_report(cr, name)
if isinstance(new_report, (str, unicode)): # Qweb report
# The only case where a QWeb report is rendered with this method occurs when running
# yml tests originally written for RML reports.
if openerp.tools.config['test_enable'] and not tools.config['test_report_directory']:
# Only generate the pdf when a destination folder has been provided.
return self.pool['report'].get_html(cr, uid, res_ids, new_report, data=data, context=context), 'html'
else:
return self.pool['report'].get_pdf(cr, uid, res_ids, new_report, data=data, context=context), 'pdf'
else:
return new_report.create(cr, uid, res_ids, data, context)
_name = 'ir.actions.report.xml'
_inherit = 'ir.actions.actions'
_table = 'ir_act_report_xml'
_sequence = 'ir_actions_id_seq'
_order = 'name'
_columns = {
'type': fields.char('Action Type', required=True),
'name': fields.char('Name', required=True, translate=True),
'model': fields.char('Model', required=True),
'report_type': fields.selection([('qweb-pdf', 'PDF'),
('qweb-html', 'HTML'),
('controller', 'Controller'),
('pdf', 'RML pdf (deprecated)'),
('sxw', 'RML sxw (deprecated)'),
('webkit', 'Webkit (deprecated)'),
], 'Report Type', required=True, help="HTML will open the report directly in your browser, PDF will use wkhtmltopdf to render the HTML into a PDF file and let you download it, Controller allows you to define the url of a custom controller outputting any kind of report."),
'report_name': fields.char('Template Name', required=True, help="For QWeb reports, name of the template used in the rendering. The method 'render_html' of the model 'report.template_name' will be called (if any) to give the html. For RML reports, this is the LocalService name."),
'groups_id': fields.many2many('res.groups', 'res_groups_report_rel', 'uid', 'gid', 'Groups'),
# options
'multi': fields.boolean('On Multiple Doc.', help="If set to true, the action will not be displayed on the right toolbar of a form view."),
'attachment_use': fields.boolean('Reload from Attachment', help='If you check this, then the second time the user prints with same attachment name, it returns the previous report.'),
'attachment': fields.char('Save as Attachment Prefix', help='This is the filename of the attachment used to store the printing result. Keep empty to not save the printed reports. You can use a python expression with the object and time variables.'),
# Deprecated rml stuff
'usage': fields.char('Action Usage'),
'header': fields.boolean('Add RML Header', help="Add or not the corporate RML header"),
'parser': fields.char('Parser Class'),
'auto': fields.boolean('Custom Python Parser'),
'report_xsl': fields.char('XSL Path'),
'report_xml': fields.char('XML Path'),
'report_rml': fields.char('Main Report File Path/controller', help="The path to the main report file/controller (depending on Report Type) or NULL if the content is in another data field"),
'report_file': fields.related('report_rml', type="char", required=False, readonly=False, string='Report File', help="The path to the main report file (depending on Report Type) or NULL if the content is in another field", store=True),
'report_sxw': fields.function(_report_sxw, type='char', string='SXW Path'),
'report_sxw_content_data': fields.binary('SXW Content'),
'report_rml_content_data': fields.binary('RML Content'),
'report_sxw_content': fields.function(_report_content, fnct_inv=_report_content_inv, type='binary', string='SXW Content',),
'report_rml_content': fields.function(_report_content, fnct_inv=_report_content_inv, type='binary', string='RML Content'),
}
_defaults = {
'type': 'ir.actions.report.xml',
'multi': False,
'auto': True,
'header': True,
'report_sxw_content': False,
'report_type': 'pdf',
'attachment': False,
}
class ir_actions_act_window(osv.osv):
    """Window action: opens a model in one or more views (tree, form, ...)."""
    _name = 'ir.actions.act_window'
    _table = 'ir_act_window'
    _inherit = 'ir.actions.actions'
    _sequence = 'ir_actions_id_seq'
    _order = 'name'

    def _check_model(self, cr, uid, ids, context=None):
        """Constraint helper: both res_model and src_model (when set) must be
        models known to the registry."""
        for action in self.browse(cr, uid, ids, context):
            if action.res_model not in self.pool:
                return False
            if action.src_model and action.src_model not in self.pool:
                return False
        return True

    def _invalid_model_msg(self, cr, uid, ids, context=None):
        """Error message used by the _check_model constraint."""
        return _('Invalid model name in the action definition.')

    _constraints = [
        (_check_model, _invalid_model_msg, ['res_model','src_model'])
    ]

    def _views_get_fnc(self, cr, uid, ids, name, arg, context=None):
        """Returns an ordered list of the specific view modes that should be
        enabled when displaying the result of this action, along with the
        ID of the specific view to use for each mode, if any were required.

        This function hides the logic of determining the precedence between
        the view_modes string, the view_ids o2m, and the view_id m2o that can
        be set on the action.

        :rtype: dict in the form { action_id: list of pairs (tuples) }
        :return: { action_id: [(view_id, view_mode), ...], ... }, where view_mode
                 is one of the possible values for ir.ui.view.type and view_id
                 is the ID of a specific view to use for this mode, or False for
                 the default one.
        """
        res = {}
        for act in self.browse(cr, uid, ids):
            # explicit view_ids entries come first, in their own order
            res[act.id] = [(view.view_id.id, view.view_mode) for view in act.view_ids]
            view_ids_modes = [view.view_mode for view in act.view_ids]
            modes = act.view_mode.split(',')
            # modes listed in view_mode but with no explicit view get the
            # default view (False), except the one matching view_id
            missing_modes = [mode for mode in modes if mode not in view_ids_modes]
            if missing_modes:
                if act.view_id and act.view_id.type in missing_modes:
                    # reorder missing modes to put view_id first if present
                    missing_modes.remove(act.view_id.type)
                    res[act.id].append((act.view_id.id, act.view_id.type))
                res[act.id].extend([(False, mode) for mode in missing_modes])
        return res

    def _search_view(self, cr, uid, ids, name, arg, context=None):
        """Compute the (stringified) fields_view_get of the action's search
        view, falling back to the model's default search view."""
        res = {}
        for act in self.browse(cr, uid, ids, context=context):
            field_get = self.pool[act.res_model].fields_view_get(cr, uid,
                act.search_view_id and act.search_view_id.id or False,
                'search', context=context)
            res[act.id] = str(field_get)
        return res

    _columns = {
        'name': fields.char('Action Name', required=True, translate=True),
        'type': fields.char('Action Type', required=True),
        'view_id': fields.many2one('ir.ui.view', 'View Ref.', ondelete='set null'),
        'domain': fields.char('Domain Value',
            help="Optional domain filtering of the destination data, as a Python expression"),
        'context': fields.char('Context Value', required=True,
            help="Context dictionary as Python expression, empty by default (Default: {})"),
        'res_id': fields.integer('Record ID', help="Database ID of record to open in form view, when ``view_mode`` is set to 'form' only"),
        'res_model': fields.char('Destination Model', required=True,
            help="Model name of the object to open in the view window"),
        'src_model': fields.char('Source Model',
            help="Optional model name of the objects on which this action should be visible"),
        'target': fields.selection([('current','Current Window'),('new','New Window'),('inline','Inline Edit'),('inlineview','Inline View')], 'Target Window'),
        'view_mode': fields.char('View Mode', required=True,
            help="Comma-separated list of allowed view modes, such as 'form', 'tree', 'calendar', etc. (Default: tree,form)"),
        'view_type': fields.selection((('tree','Tree'),('form','Form')), string='View Type', required=True,
            help="View type: Tree type to use for the tree view, set to 'tree' for a hierarchical tree view, or 'form' for a regular list view"),
        'usage': fields.char('Action Usage',
            help="Used to filter menu and home actions from the user form."),
        'view_ids': fields.one2many('ir.actions.act_window.view', 'act_window_id', 'Views'),
        'views': fields.function(_views_get_fnc, type='binary', string='Views',
            help="This function field computes the ordered list of views that should be enabled " \
                 "when displaying the result of an action, federating view mode, views and " \
                 "reference view. The result is returned as an ordered list of pairs (view_id,view_mode)."),
        'limit': fields.integer('Limit', help='Default limit for the list view'),
        'auto_refresh': fields.integer('Auto-Refresh',
            help='Add an auto-refresh on the view'),
        'groups_id': fields.many2many('res.groups', 'ir_act_window_group_rel',
            'act_id', 'gid', 'Groups'),
        'search_view_id': fields.many2one('ir.ui.view', 'Search View Ref.'),
        'filter': fields.boolean('Filter'),
        'auto_search':fields.boolean('Auto Search'),
        'search_view' : fields.function(_search_view, type='text', string='Search View'),
        'multi': fields.boolean('Restrict to lists', help="If checked and the action is bound to a model, it will only appear in the More menu on list views"),
    }

    _defaults = {
        'type': 'ir.actions.act_window',
        'view_type': 'form',
        'view_mode': 'tree,form',
        'context': '{}',
        'limit': 80,
        'target': 'current',
        'auto_refresh': 0,
        'auto_search':True,
        'multi': False,
    }

    def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'):
        """ call the method get_empty_list_help of the model and set the window action help message
        """
        ids_int = isinstance(ids, (int, long))
        if ids_int:
            ids = [ids]
        results = super(ir_actions_act_window, self).read(cr, uid, ids, fields=fields, context=context, load=load)

        if not fields or 'help' in fields:
            for res in results:
                model = res.get('res_model')
                if model and self.pool.get(model):
                    ctx = dict(context or {})
                    # let the model customize the "no record" help message
                    res['help'] = self.pool[model].get_empty_list_help(cr, uid, res.get('help', ""), context=ctx)
        if ids_int:
            return results[0]
        return results

    def for_xml_id(self, cr, uid, module, xml_id, context=None):
        """ Returns the act_window object created for the provided xml_id

        :param module: the module the act_window originates in
        :param xml_id: the namespace-less id of the action (the @id
                       attribute from the XML file)
        :return: A read() view of the ir.actions.act_window
        """
        dataobj = self.pool.get('ir.model.data')
        data_id = dataobj._get_id (cr, SUPERUSER_ID, module, xml_id)
        res_id = dataobj.browse(cr, uid, data_id, context).res_id
        return self.read(cr, uid, [res_id], [], context)[0]
# Selectable view types for act_window views (must stay consistent with the
# types accepted by ir.ui.view).
VIEW_TYPES = [
    ('tree', 'Tree'),
    ('form', 'Form'),
    ('graph', 'Graph'),
    ('calendar', 'Calendar'),
    ('gantt', 'Gantt'),
    ('kanban', 'Kanban')]
class ir_actions_act_window_view(osv.osv):
    """Ordered mapping of an act_window to one specific view per view mode."""
    _name = 'ir.actions.act_window.view'
    _table = 'ir_act_window_view'
    _rec_name = 'view_id'
    _order = 'sequence'
    _columns = {
        'sequence': fields.integer('Sequence'),
        'view_id': fields.many2one('ir.ui.view', 'View'),
        'view_mode': fields.selection(VIEW_TYPES, string='View Type', required=True),
        'act_window_id': fields.many2one('ir.actions.act_window', 'Action', ondelete='cascade'),
        'multi': fields.boolean('On Multiple Doc.',
            help="If set to true, the action will not be displayed on the right toolbar of a form view."),
    }
    _defaults = {
        'multi': False,
    }

    def _auto_init(self, cr, context=None):
        """Extend table creation to enforce at most one view per
        (action, view_mode) pair via a unique index."""
        super(ir_actions_act_window_view, self)._auto_init(cr, context)
        # create the index only once; _auto_init runs at every registry load
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'act_window_view_unique_mode_per_action\'')
        if not cr.fetchone():
            cr.execute('CREATE UNIQUE INDEX act_window_view_unique_mode_per_action ON ir_act_window_view (act_window_id, view_mode)')
class ir_actions_act_window_close(osv.osv):
    """Pseudo-action instructing the client to close the current window or
    dialog; adds no columns and shares the base ir_actions table."""
    _name = 'ir.actions.act_window_close'
    _inherit = 'ir.actions.actions'
    _table = 'ir_actions'
    _defaults = {
        'type': 'ir.actions.act_window_close',
    }
class ir_actions_act_url(osv.osv):
    """Action that opens a URL, either in a new window or in place."""
    _name = 'ir.actions.act_url'
    _table = 'ir_act_url'
    _inherit = 'ir.actions.actions'
    _sequence = 'ir_actions_id_seq'
    _order = 'name'
    _columns = {
        'name': fields.char('Action Name', required=True, translate=True),
        'type': fields.char('Action Type', required=True),
        'url': fields.text('Action URL',required=True),
        'target': fields.selection((
            ('new', 'New Window'),
            ('self', 'This Window')),
            'Action Target', required=True
        )
    }
    _defaults = {
        'type': 'ir.actions.act_url',
        'target': 'new'
    }
class ir_actions_server(osv.osv):
    """ Server actions model. Server actions work on a base model and offer
    various types of actions that can be executed automatically, for example
    using base action rules, or manually, by adding the action in the 'More'
    contextual menu.

    Since OpenERP 8.0 a 'Create Menu Action' button is available on the
    action form view. It creates an entry in the More menu of the base model.
    This allows to create server actions and run them in mass mode easily through
    the interface.

    The available actions are :

    - 'Execute Python Code': a block of python code that will be executed
    - 'Trigger a Workflow Signal': send a signal to a workflow
    - 'Run a Client Action': choose a client action to launch
    - 'Create or Copy a new Record': create a new record with new values, or
      copy an existing record in your database
    - 'Write on a Record': update the values of a record
    - 'Execute several actions': define an action that triggers several other
      server actions
    """
    _name = 'ir.actions.server'
    _table = 'ir_act_server'
    _inherit = 'ir.actions.actions'
    _sequence = 'ir_actions_id_seq'
    # when several actions apply, they run in ascending sequence order
    _order = 'sequence,name'
def _select_objects(self, cr, uid, context=None):
model_pool = self.pool.get('ir.model')
ids = model_pool.search(cr, uid, [], limit=None)
res = model_pool.read(cr, uid, ids, ['model', 'name'])
return [(r['model'], r['name']) for r in res] + [('', '')]
def _get_states(self, cr, uid, context=None):
""" Override me in order to add new states in the server action. Please
note that the added key length should not be higher than already-existing
ones. """
return [('code', 'Execute Python Code'),
('trigger', 'Trigger a Workflow Signal'),
('client_action', 'Run a Client Action'),
('object_create', 'Create or Copy a new Record'),
('object_write', 'Write on a Record'),
('multi', 'Execute several actions')]
    def _get_states_wrapper(self, cr, uid, context=None):
        # Indirection used by the 'state' selection field so that subclasses
        # overriding _get_states are taken into account at field evaluation.
        return self._get_states(cr, uid, context)
_columns = {
'name': fields.char('Action Name', required=True, translate=True),
'condition': fields.char('Condition',
help="Condition verified before executing the server action. If it "
"is not verified, the action will not be executed. The condition is "
"a Python expression, like 'object.list_price > 5000'. A void "
"condition is considered as always True. Help about python expression "
"is given in the help tab."),
'state': fields.selection(_get_states_wrapper, 'Action To Do', required=True,
help="Type of server action. The following values are available:\n"
"- 'Execute Python Code': a block of python code that will be executed\n"
"- 'Trigger a Workflow Signal': send a signal to a workflow\n"
"- 'Run a Client Action': choose a client action to launch\n"
"- 'Create or Copy a new Record': create a new record with new values, or copy an existing record in your database\n"
"- 'Write on a Record': update the values of a record\n"
"- 'Execute several actions': define an action that triggers several other server actions\n"
"- 'Send Email': automatically send an email (available in email_template)"),
'usage': fields.char('Action Usage'),
'type': fields.char('Action Type', required=True),
# Generic
'sequence': fields.integer('Sequence',
help="When dealing with multiple actions, the execution order is "
"based on the sequence. Low number means high priority."),
'model_id': fields.many2one('ir.model', 'Base Model', required=True, ondelete='cascade',
help="Base model on which the server action runs."),
'model_name': fields.related('model_id', 'model', type='char',
string='Model Name', readonly=True),
'menu_ir_values_id': fields.many2one('ir.values', 'More Menu entry', readonly=True,
help='More menu entry.', copy=False),
# Client Action
'action_id': fields.many2one('ir.actions.actions', 'Client Action',
help="Select the client action that has to be executed."),
# Python code
'code': fields.text('Python Code',
help="Write Python code that the action will execute. Some variables are "
"available for use; help about pyhon expression is given in the help tab."),
# Workflow signal
'use_relational_model': fields.selection([('base', 'Use the base model of the action'),
('relational', 'Use a relation field on the base model')],
string='Target Model', required=True),
'wkf_transition_id': fields.many2one('workflow.transition', string='Signal to Trigger',
help="Select the workflow signal to trigger."),
'wkf_model_id': fields.many2one('ir.model', 'Target Model',
help="The model that will receive the workflow signal. Note that it should have a workflow associated with it."),
'wkf_model_name': fields.related('wkf_model_id', 'model', type='char', string='Target Model Name', store=True, readonly=True),
'wkf_field_id': fields.many2one('ir.model.fields', string='Relation Field',
oldname='trigger_obj_id',
help="The field on the current object that links to the target object record (must be a many2one, or an integer field with the record ID)"),
# Multi
'child_ids': fields.many2many('ir.actions.server', 'rel_server_actions',
'server_id', 'action_id',
string='Child Actions',
help='Child server actions that will be executed. Note that the last return returned action value will be used as global return value.'),
# Create/Copy/Write
'use_create': fields.selection([('new', 'Create a new record in the Base Model'),
('new_other', 'Create a new record in another model'),
('copy_current', 'Copy the current record'),
('copy_other', 'Choose and copy a record in the database')],
string="Creation Policy", required=True,
help=""),
'crud_model_id': fields.many2one('ir.model', 'Target Model',
oldname='srcmodel_id',
help="Model for record creation / update. Set this field only to specify a different model than the base model."),
'crud_model_name': fields.related('crud_model_id', 'model', type='char',
string='Create/Write Target Model Name',
store=True, readonly=True),
'ref_object': fields.reference('Reference record', selection=_select_objects, size=128,
oldname='copy_object'),
'link_new_record': fields.boolean('Attach the new record',
help="Check this if you want to link the newly-created record "
"to the current record on which the server action runs."),
'link_field_id': fields.many2one('ir.model.fields', 'Link using field',
oldname='record_id',
help="Provide the field where the record id is stored after the operations."),
'use_write': fields.selection([('current', 'Update the current record'),
('expression', 'Update a record linked to the current record using python'),
('other', 'Choose and Update a record in the database')],
string='Update Policy', required=True,
help=""),
'write_expression': fields.char('Expression',
oldname='write_id',
help="Provide an expression that, applied on the current record, gives the field to update."),
'fields_lines': fields.one2many('ir.server.object.lines', 'server_id',
string='Value Mapping',
copy=True),
# Fake fields used to implement the placeholder assistant
'model_object_field': fields.many2one('ir.model.fields', string="Field",
help="Select target field from the related document model.\n"
"If it is a relationship field you will be able to select "
"a target field at the destination of the relationship."),
'sub_object': fields.many2one('ir.model', 'Sub-model', readonly=True,
help="When a relationship field is selected as first field, "
"this field shows the document model the relationship goes to."),
'sub_model_object_field': fields.many2one('ir.model.fields', 'Sub-field',
help="When a relationship field is selected as first field, "
"this field lets you select the target field within the "
"destination document model (sub-model)."),
'copyvalue': fields.char('Placeholder Expression', help="Final placeholder expression, to be copy-pasted in the desired template field."),
# Fake fields used to implement the ID finding assistant
'id_object': fields.reference('Record', selection=_select_objects, size=128),
'id_value': fields.char('Record ID'),
}
_defaults = {
'state': 'code',
'condition': 'True',
'type': 'ir.actions.server',
'sequence': 5,
'code': """# Available locals:
# - time, datetime, dateutil: Python libraries
# - env: Odoo Environement
# - model: Model of the record on which the action is triggered
# - object: Record on which the action is triggered if there is one, otherwise None
# - workflow: Workflow engine
# - Warning: Warning Exception to use with raise
# To return an action, assign: action = {...}""",
'use_relational_model': 'base',
'use_create': 'new',
'use_write': 'current',
}
def _check_expression(self, cr, uid, expression, model_id, context):
""" Check python expression (condition, write_expression). Each step of
the path must be a valid many2one field, or an integer field for the last
step.
:param str expression: a python expression, beginning by 'obj' or 'object'
:param int model_id: the base model of the server action
:returns tuple: (is_valid, target_model_name, error_msg)
"""
if not model_id:
return (False, None, 'Your expression cannot be validated because the Base Model is not set.')
# fetch current model
current_model_name = self.pool.get('ir.model').browse(cr, uid, model_id, context).model
# transform expression into a path that should look like 'object.many2onefield.many2onefield'
path = expression.split('.')
initial = path.pop(0)
if initial not in ['obj', 'object']:
return (False, None, 'Your expression should begin with obj or object.\nAn expression builder is available in the help tab.')
# analyze path
while path:
step = path.pop(0)
field = self.pool[current_model_name]._fields.get(step)
if not field:
return (False, None, 'Part of the expression (%s) is not recognized as a column in the model %s.' % (step, current_model_name))
ftype = field.type
if ftype not in ['many2one', 'int']:
return (False, None, 'Part of the expression (%s) is not a valid column type (is %s, should be a many2one or an int)' % (step, ftype))
if ftype == 'int' and path:
return (False, None, 'Part of the expression (%s) is an integer field that is only allowed at the end of an expression' % (step))
if ftype == 'many2one':
current_model_name = field.comodel_name
return (True, current_model_name, None)
def _check_write_expression(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.write_expression and record.model_id:
correct, model_name, message = self._check_expression(cr, uid, record.write_expression, record.model_id.id, context=context)
if not correct:
_logger.warning('Invalid expression: %s' % message)
return False
return True
_constraints = [
(_check_write_expression,
'Incorrect Write Record Expression',
['write_expression']),
(partial(osv.Model._check_m2m_recursion, field_name='child_ids'),
'Recursion found in child server actions',
['child_ids']),
]
def on_change_model_id(self, cr, uid, ids, model_id, wkf_model_id, crud_model_id, context=None):
""" When changing the action base model, reset workflow and crud config
to ease value coherence. """
values = {
'use_create': 'new',
'use_write': 'current',
'use_relational_model': 'base',
'wkf_model_id': model_id,
'wkf_field_id': False,
'crud_model_id': model_id,
}
if model_id:
values['model_name'] = self.pool.get('ir.model').browse(cr, uid, model_id, context).model
return {'value': values}
def on_change_wkf_wonfig(self, cr, uid, ids, use_relational_model, wkf_field_id, wkf_model_id, model_id, context=None):
""" Update workflow type configuration
- update the workflow model (for base (model_id) /relational (field.relation))
- update wkf_transition_id to False if workflow model changes, to force
the user to choose a new one
"""
values = {}
if use_relational_model == 'relational' and wkf_field_id:
field = self.pool['ir.model.fields'].browse(cr, uid, wkf_field_id, context=context)
new_wkf_model_id = self.pool.get('ir.model').search(cr, uid, [('model', '=', field.relation)], context=context)[0]
values['wkf_model_id'] = new_wkf_model_id
else:
values['wkf_model_id'] = model_id
return {'value': values}
def on_change_wkf_model_id(self, cr, uid, ids, wkf_model_id, context=None):
""" When changing the workflow model, update its stored name also """
wkf_model_name = False
if wkf_model_id:
wkf_model_name = self.pool.get('ir.model').browse(cr, uid, wkf_model_id, context).model
values = {'wkf_transition_id': False, 'wkf_model_name': wkf_model_name}
return {'value': values}
def on_change_crud_config(self, cr, uid, ids, state, use_create, use_write, ref_object, crud_model_id, model_id, context=None):
""" Wrapper on CRUD-type (create or write) on_change """
if state == 'object_create':
return self.on_change_create_config(cr, uid, ids, use_create, ref_object, crud_model_id, model_id, context=context)
elif state == 'object_write':
return self.on_change_write_config(cr, uid, ids, use_write, ref_object, crud_model_id, model_id, context=context)
else:
return {}
def on_change_create_config(self, cr, uid, ids, use_create, ref_object, crud_model_id, model_id, context=None):
""" When changing the object_create type configuration:
- `new` and `copy_current`: crud_model_id is the same as base model
- `new_other`: user choose crud_model_id
- `copy_other`: disassemble the reference object to have its model
- if the target model has changed, then reset the link field that is
probably not correct anymore
"""
values = {}
if use_create == 'new':
values['crud_model_id'] = model_id
elif use_create == 'new_other':
pass
elif use_create == 'copy_current':
values['crud_model_id'] = model_id
elif use_create == 'copy_other' and ref_object:
ref_model, ref_id = ref_object.split(',')
ref_model_id = self.pool['ir.model'].search(cr, uid, [('model', '=', ref_model)], context=context)[0]
values['crud_model_id'] = ref_model_id
if values.get('crud_model_id') != crud_model_id:
values['link_field_id'] = False
return {'value': values}
def on_change_write_config(self, cr, uid, ids, use_write, ref_object, crud_model_id, model_id, context=None):
""" When changing the object_write type configuration:
- `current`: crud_model_id is the same as base model
- `other`: disassemble the reference object to have its model
- `expression`: has its own on_change, nothing special here
"""
values = {}
if use_write == 'current':
values['crud_model_id'] = model_id
elif use_write == 'other' and ref_object:
ref_model, ref_id = ref_object.split(',')
ref_model_id = self.pool['ir.model'].search(cr, uid, [('model', '=', ref_model)], context=context)[0]
values['crud_model_id'] = ref_model_id
elif use_write == 'expression':
pass
if values.get('crud_model_id') != crud_model_id:
values['link_field_id'] = False
return {'value': values}
def on_change_write_expression(self, cr, uid, ids, write_expression, model_id, context=None):
""" Check the write_expression and update crud_model_id accordingly """
values = {}
if write_expression:
valid, model_name, message = self._check_expression(cr, uid, write_expression, model_id, context=context)
else:
valid, model_name, message = True, None, False
if model_id:
model_name = self.pool['ir.model'].browse(cr, uid, model_id, context).model
if not valid:
return {
'warning': {
'title': 'Incorrect expression',
'message': message or 'Invalid expression',
}
}
if model_name:
ref_model_id = self.pool['ir.model'].search(cr, uid, [('model', '=', model_name)], context=context)[0]
values['crud_model_id'] = ref_model_id
return {'value': values}
return {'value': {}}
def on_change_crud_model_id(self, cr, uid, ids, crud_model_id, context=None):
""" When changing the CRUD model, update its stored name also """
crud_model_name = False
if crud_model_id:
crud_model_name = self.pool.get('ir.model').browse(cr, uid, crud_model_id, context).model
values = {'link_field_id': False, 'crud_model_name': crud_model_name}
return {'value': values}
def _build_expression(self, field_name, sub_field_name):
""" Returns a placeholder expression for use in a template field,
based on the values provided in the placeholder assistant.
:param field_name: main field name
:param sub_field_name: sub field name (M2O)
:return: final placeholder expression
"""
expression = ''
if field_name:
expression = "object." + field_name
if sub_field_name:
expression += "." + sub_field_name
return expression
def onchange_sub_model_object_value_field(self, cr, uid, ids, model_object_field, sub_model_object_field=False, context=None):
result = {
'sub_object': False,
'copyvalue': False,
'sub_model_object_field': False,
}
if model_object_field:
fields_obj = self.pool.get('ir.model.fields')
field_value = fields_obj.browse(cr, uid, model_object_field, context)
if field_value.ttype in ['many2one', 'one2many', 'many2many']:
res_ids = self.pool.get('ir.model').search(cr, uid, [('model', '=', field_value.relation)], context=context)
sub_field_value = False
if sub_model_object_field:
sub_field_value = fields_obj.browse(cr, uid, sub_model_object_field, context)
if res_ids:
result.update({
'sub_object': res_ids[0],
'copyvalue': self._build_expression(field_value.name, sub_field_value and sub_field_value.name or False),
'sub_model_object_field': sub_model_object_field or False,
})
else:
result.update({
'copyvalue': self._build_expression(field_value.name, False),
})
return {'value': result}
def onchange_id_object(self, cr, uid, ids, id_object, context=None):
if id_object:
ref_model, ref_id = id_object.split(',')
return {'value': {'id_value': ref_id}}
return {'value': {'id_value': False}}
def create_action(self, cr, uid, ids, context=None):
""" Create a contextual action for each of the server actions. """
for action in self.browse(cr, uid, ids, context=context):
ir_values_id = self.pool.get('ir.values').create(cr, SUPERUSER_ID, {
'name': _('Run %s') % action.name,
'model': action.model_id.model,
'key2': 'client_action_multi',
'value': "ir.actions.server,%s" % action.id,
}, context)
action.write({
'menu_ir_values_id': ir_values_id,
})
return True
def unlink_action(self, cr, uid, ids, context=None):
""" Remove the contextual actions created for the server actions. """
for action in self.browse(cr, uid, ids, context=context):
if action.menu_ir_values_id:
try:
self.pool.get('ir.values').unlink(cr, SUPERUSER_ID, action.menu_ir_values_id.id, context)
except Exception:
raise osv.except_osv(_('Warning'), _('Deletion of the action record failed.'))
return True
def run_action_client_action(self, cr, uid, action, eval_context=None, context=None):
if not action.action_id:
raise osv.except_osv(_('Error'), _("Please specify an action to launch!"))
return self.pool[action.action_id.type].read(cr, uid, [action.action_id.id], context=context)[0]
    def run_action_code_multi(self, cr, uid, action, eval_context=None, context=None):
        # Execute the action's python code; if the code binds a variable
        # named 'action', it becomes the server action's return value.
        # NOTE(review): ``eval`` here takes ``mode``/``nocopy`` kwargs, so it
        # is presumably openerp's safe_eval bound at module level -- confirm
        # at the top of the file.
        eval(action.code.strip(), eval_context, mode="exec", nocopy=True)  # nocopy allows to return 'action'
        if 'action' in eval_context:
            return eval_context['action']
def run_action_trigger(self, cr, uid, action, eval_context=None, context=None):
""" Trigger a workflow signal, depending on the use_relational_model:
- `base`: base_model_pool.signal_workflow(cr, uid, context.get('active_id'), <TRIGGER_NAME>)
- `relational`: find the related model and object, using the relational
field, then target_model_pool.signal_workflow(cr, uid, target_id, <TRIGGER_NAME>)
"""
# weird signature and calling -> no self.env, use action param's
record = action.env[action.model_id.model].browse(context['active_id'])
if action.use_relational_model == 'relational':
record = getattr(record, action.wkf_field_id.name)
if not isinstance(record, openerp.models.BaseModel):
record = action.env[action.wkf_model_id.model].browse(record)
record.signal_workflow(action.wkf_transition_id.signal)
def run_action_multi(self, cr, uid, action, eval_context=None, context=None):
res = False
for act in action.child_ids:
result = self.run(cr, uid, [act.id], context=context)
if result:
res = result
return res
def run_action_object_write(self, cr, uid, action, eval_context=None, context=None):
    """ Write server action.

     - 1. evaluate the value mapping
     - 2. depending on the write configuration:

      - `current`: id = active_id
      - `other`: id = from reference object
      - `expression`: id = from expression evaluation
    """
    values = {}
    for mapping_line in action.fields_lines:
        values[mapping_line.col1.name] = mapping_line.eval_value(eval_context=eval_context)[mapping_line.id]

    if action.use_write == 'current':
        model = action.model_id.model
        ref_id = context.get('active_id')
    elif action.use_write == 'other':
        model = action.crud_model_id.model
        ref_id = action.ref_object.id
    elif action.use_write == 'expression':
        model = action.crud_model_id.model
        # `eval` is the module-level (safe) eval used throughout this file
        target = eval(action.write_expression, eval_context)
        if isinstance(target, browse_record):
            ref_id = getattr(target, 'id')
        else:
            ref_id = int(target)

    self.pool[model].write(cr, uid, [ref_id], values, context=context)
def run_action_object_create(self, cr, uid, action, eval_context=None, context=None):
    """ Create and Copy server action.

     - 1. evaluate the value mapping
     - 2. depending on the write configuration:

      - `new`: new record in the base model
      - `copy_current`: copy the current record (id = active_id) + gives custom values
      - `new_other`: new record in target model
      - `copy_other`: copy the current record (id from reference object)
        + gives custom values
    """
    values = {}
    for mapping_line in action.fields_lines:
        values[mapping_line.col1.name] = mapping_line.eval_value(eval_context=eval_context)[mapping_line.id]

    if action.use_create in ('new', 'copy_current'):
        model = action.model_id.model
    elif action.use_create in ('new_other', 'copy_other'):
        model = action.crud_model_id.model

    target_pool = self.pool[model]
    if action.use_create == 'copy_current':
        res_id = target_pool.copy(cr, uid, context.get('active_id'), values, context=context)
    elif action.use_create == 'copy_other':
        res_id = target_pool.copy(cr, uid, action.ref_object.id, values, context=context)
    else:
        res_id = target_pool.create(cr, uid, values, context=context)

    if action.link_new_record and action.link_field_id:
        # store the new record's id on the chosen field of the active record
        self.pool[action.model_id.model].write(
            cr, uid, [context.get('active_id')],
            {action.link_field_id.name: res_id})
def _get_eval_context(self, cr, uid, action, context=None):
    """ Prepare the context used when evaluating python code, like the
    condition or code server actions.

    :param action: the current server action
    :type action: browse record
    :returns: dict -- evaluation context given to (safe_)eval """
    obj_pool = self.pool[action.model_id.model]
    env = openerp.api.Environment(cr, uid, context)
    model = env[action.model_id.model]
    # `obj` is only bound when the action runs on its own model with a
    # concrete active record; otherwise evaluated code sees None.
    obj = None
    if context.get('active_model') == action.model_id.model and context.get('active_id'):
        obj = model.browse(context['active_id'])
    return {
        # python libs
        'time': time,
        'datetime': datetime,
        'dateutil': dateutil,
        # orm
        'env': env,
        'model': model,
        'workflow': workflow,
        # Exceptions
        'Warning': openerp.exceptions.Warning,
        # record
        # TODO: When porting to master move badly named obj and object to
        # deprecated and define record (active_id) and records (active_ids)
        'object': obj,
        'obj': obj,
        # Deprecated use env or model instead
        'self': obj_pool,
        'pool': self.pool,
        'cr': cr,
        'uid': uid,
        'context': context,
        'user': env.user,
    }
def run(self, cr, uid, ids, context=None):
    """ Runs the server action. For each server action, the condition is
    checked. Note that a void (``False``) condition is considered as always
    valid. If it is verified, the run_action_<STATE> method is called. This
    allows easy overriding of the server actions.

    :param dict context: context should contain following keys

                         - active_id: id of the current object (single mode)
                         - active_model: current model that should equal the action's model

                         The following keys are optional:

                         - active_ids: ids of the current records (mass mode). If active_ids
                           and active_id are present, active_ids is given precedence.

    :return: an action_id to be executed, or False is finished correctly without
             return action
    """
    if context is None:
        context = {}
    res = False
    for action in self.browse(cr, uid, ids, context):
        eval_context = self._get_eval_context(cr, uid, action, context=context)
        condition = action.condition
        if condition is False:
            # Void (aka False) conditions are considered as True
            condition = True
        # a run_action_<STATE>_multi handler takes precedence over the
        # per-record run_action_<STATE> handler
        if hasattr(self, 'run_action_%s_multi' % action.state):
            run_context = eval_context['context']
            expr = eval(str(condition), eval_context)
            if not expr:
                continue
            # call the multi method
            func = getattr(self, 'run_action_%s_multi' % action.state)
            res = func(cr, uid, action, eval_context=eval_context, context=run_context)

        elif hasattr(self, 'run_action_%s' % action.state):
            func = getattr(self, 'run_action_%s' % action.state)
            active_id = context.get('active_id')
            active_ids = context.get('active_ids', [active_id] if active_id else [])
            for active_id in active_ids:
                # run context dedicated to a particular active_id
                run_context = dict(context, active_ids=[active_id], active_id=active_id)
                # the condition is re-evaluated for every active_id
                eval_context["context"] = run_context
                expr = eval(str(condition), eval_context)
                if not expr:
                    continue
                # call the single method related to the action: run_action_<STATE>
                res = func(cr, uid, action, eval_context=eval_context, context=run_context)
    return res
class ir_server_object_lines(osv.osv):
    """One (field, value) mapping line used by write/create server actions."""
    _name = 'ir.server.object.lines'
    _description = 'Server Action value mapping'
    _sequence = 'ir_actions_id_seq'
    _columns = {
        'server_id': fields.many2one('ir.actions.server', 'Related Server Action', ondelete='cascade'),
        'col1': fields.many2one('ir.model.fields', 'Field', required=True),
        'value': fields.text('Value', required=True, help="Expression containing a value specification. \n"
                             "When Formula type is selected, this field may be a Python expression "
                             " that can use the same values as for the condition field on the server action.\n"
                             "If Value type is selected, the value will be used directly without evaluation."),
        'type': fields.selection([
            ('value', 'Value'),
            ('equation', 'Python expression')
        ], 'Evaluation Type', required=True, change_default=True),
    }
    _defaults = {
        'type': 'value',
    }

    def eval_value(self, cr, uid, ids, eval_context=None, context=None):
        """Return {line_id: evaluated value} for the given line ids.

        'equation' lines are evaluated (presumably via openerp's safe_eval --
        confirm the module-level `eval` binding); literal values targeting
        many2one/integer columns are cast to int when possible, and kept as
        raw strings otherwise.
        """
        res = dict.fromkeys(ids, False)
        for line in self.browse(cr, uid, ids, context=context):
            expr = line.value
            if line.type == 'equation':
                expr = eval(line.value, eval_context)
            elif line.col1.ttype in ['many2one', 'integer']:
                try:
                    expr = int(line.value)
                except Exception:
                    # non-numeric literal: keep the raw string
                    pass
            res[line.id] = expr
        return res
# Selection values for ir.actions.todo: wizard lifecycle states and launch modes.
TODO_STATES = [('open', 'To Do'),
               ('done', 'Done')]

TODO_TYPES = [('manual', 'Launch Manually'),('once', 'Launch Manually Once'),
              ('automatic', 'Launch Automatically')]
class ir_actions_todo(osv.osv):
    """
    Configuration Wizards
    """
    _name = 'ir.actions.todo'
    _description = "Configuration Wizards"
    _columns={
        'action_id': fields.many2one(
            'ir.actions.actions', 'Action', select=True, required=True),
        'sequence': fields.integer('Sequence'),
        'state': fields.selection(TODO_STATES, string='Status', required=True),
        'name': fields.char('Name'),
        'type': fields.selection(TODO_TYPES, 'Type', required=True,
            help="""Manual: Launched manually.
Automatic: Runs whenever the system is reconfigured.
Launch Manually Once: after having been launched manually, it sets automatically to Done."""),
        'groups_id': fields.many2many('res.groups', 'res_groups_action_rel', 'uid', 'gid', 'Groups'),
        'note': fields.text('Text', translate=True),
    }
    _defaults={
        'state': 'open',
        'sequence': 10,
        'type': 'manual',
    }
    _order="sequence,id"

    def name_get(self, cr, uid, ids, context=None):
        # a todo is displayed through the name of the action it launches
        return [(rec.id, rec.action_id.name) for rec in self.browse(cr, uid, ids, context=context)]

    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
        # search on the related action's name, consistently with name_get()
        if args is None:
            args = []
        if name:
            ids = self.search(cr, user, [('action_id', operator, name)] + args, limit=limit)
            return self.name_get(cr, user, ids, context=context)
        return super(ir_actions_todo, self).name_search(cr, user, name, args=args, operator=operator, context=context, limit=limit)

    def action_launch(self, cr, uid, ids, context=None):
        """ Launch Action of Wizard"""
        wizard_id = ids and ids[0] or False
        wizard = self.browse(cr, uid, wizard_id, context=context)
        if wizard.type in ('automatic', 'once'):
            # 'once' and 'automatic' wizards are marked done as soon as launched
            wizard.write({'state': 'done'})

        # Load action
        act_type = wizard.action_id.type

        res = self.pool[act_type].read(cr, uid, [wizard.action_id.id], [], context=context)[0]
        if act_type != 'ir.actions.act_window':
            return res
        res.setdefault('context','{}')
        res['nodestroy'] = True

        # Open a specific record when res_id is provided in the context
        # NOTE(review): `eval` of the stored context string -- presumably safe_eval.
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        ctx = eval(res['context'], {'user': user})
        if ctx.get('res_id'):
            res.update({'res_id': ctx.pop('res_id')})

        # disable log for automatic wizards
        if wizard.type == 'automatic':
            ctx.update({'disable_log': True})
        res.update({'context': ctx})

        return res

    def action_open(self, cr, uid, ids, context=None):
        """ Sets configuration wizard in TODO state"""
        return self.write(cr, uid, ids, {'state': 'open'}, context=context)

    def progress(self, cr, uid, context=None):
        """ Returns a dict with 3 keys {todo, done, total}.

        These keys all map to integers and provide the number of todos
        marked as open, the total number of todos and the number of
        todos not open (which is basically a shortcut to total-todo)

        :rtype: dict
        """
        user_groups = set(map(
            lambda x: x.id,
            self.pool['res.users'].browse(cr, uid, [uid], context=context)[0].groups_id))
        def groups_match(todo):
            """ Checks if the todo's groups match those of the current user
            """
            return not todo.groups_id \
                   or bool(user_groups.intersection((
                       group.id for group in todo.groups_id)))
        # 'done' actually counts every todo whose state is not 'open'
        done = filter(
            groups_match,
            self.browse(cr, uid,
                        self.search(cr, uid, [('state', '!=', 'open')], context=context),
                        context=context))
        total = filter(
            groups_match,
            self.browse(cr, uid,
                        self.search(cr, uid, [], context=context),
                        context=context))
        return {
            'done': len(done),
            'total': len(total),
            'todo': len(total) - len(done)
        }
class ir_actions_act_client(osv.osv):
    """Client action: an arbitrary tag interpreted by the web client."""
    _name = 'ir.actions.client'
    _inherit = 'ir.actions.actions'
    _table = 'ir_act_client'
    _sequence = 'ir_actions_id_seq'
    _order = 'name'

    def _get_params(self, cr, uid, ids, field_name, arg, context):
        """Function getter for `params`: deserialize `params_store`."""
        result = {}
        # Need to remove bin_size from context, to obtains the binary and not the length.
        context = dict(context, bin_size_params_store=False)
        for record in self.browse(cr, uid, ids, context=context):
            # NOTE(review): evaluates the stored repr -- presumably safe_eval.
            result[record.id] = record.params_store and eval(record.params_store, {'uid': uid}) or False
        return result

    def _set_params(self, cr, uid, id, field_name, field_value, arg, context):
        """Function setter for `params`: dicts are stored as their repr()."""
        if isinstance(field_value, dict):
            self.write(cr, uid, id, {'params_store': repr(field_value)}, context=context)
        else:
            self.write(cr, uid, id, {'params_store': field_value}, context=context)

    _columns = {
        'name': fields.char('Action Name', required=True, translate=True),
        'tag': fields.char('Client action tag', required=True,
                           help="An arbitrary string, interpreted by the client"
                                " according to its own needs and wishes. There "
                                "is no central tag repository across clients."),
        'res_model': fields.char('Destination Model',
            help="Optional model, mostly used for needactions."),
        'context': fields.char('Context Value', required=True,
            help="Context dictionary as Python expression, empty by default (Default: {})"),
        'params': fields.function(_get_params, fnct_inv=_set_params,
                                  type='binary',
                                  string="Supplementary arguments",
                                  help="Arguments sent to the client along with"
                                       "the view tag"),
        'params_store': fields.binary("Params storage", readonly=True)
    }
    _defaults = {
        'type': 'ir.actions.client',
        'context': '{}',
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
abcsds/tw
|
refs/heads/master
|
dashboard/views.py
|
1
|
from django.shortcuts import render, redirect
from .models import *
from twitter_stream.models import FilterTerm
def dashboard(request):
    """Render the sentiment dashboard.

    Keeps only English tweets (others are deleted), lazily computes each
    tweet's sentiment score from the WordScore table, and renders the
    dashboard template with the tweets, the overall score and the current
    stream filter terms.
    """
    tweets = []
    overall = 0
    # Build the word -> score lookup once instead of re-scanning the whole
    # WordScore table for every word of every tweet (accidental O(words*table)).
    # Duplicate words accumulate, matching the previous summing behaviour.
    scores = {}
    for obj in WordScore.objects.all():
        scores[obj.word] = scores.get(obj.word, 0) + obj.score
    for tweet in SenTweet.objects.all():
        if tweet.lang != 'en':
            # Only English tweets are scored; everything else is discarded.
            tweet.delete()
            continue
        tweets.append(tweet)
        # Sentiment analysis: only compute when not already stored (0 is
        # falsy, so neutral tweets are re-scored; kept for compatibility).
        if not tweet.sentiment:
            try:
                words = tweet.text.split(" ")
                tweet.sentiment = sum(scores.get(word, 0) for word in words)
                tweet.save()
            except Exception:
                # Default to a neutral score instead of crashing the view.
                # (The old code raised NameError here, which made its own
                # `tweet.sentiment = 0` fallback unreachable.)
                tweet.sentiment = 0
    # NOTE: the old `except SenTweet.DoesNotExist` around the loop was dead
    # code -- queryset iteration never raises DoesNotExist, and its handler
    # referenced an unimported Http404.
    for tweet in SenTweet.objects.all():
        overall += tweet.sentiment
    context = {'tweets': tweets, 'overall': overall, 'terms': FilterTerm.objects.all()}
    return render(request, 'dashboard/dashboard.html', context)
# View form to upload file
def wordlist(request):
    """Render the upload form for the tab-delimited word/score list."""
    return render(request, 'dashboard/wordlist.html', {})
def uploadWordlist(request):
    """Import a tab-delimited ``word<TAB>score`` file into WordScore.

    Re-renders the upload form with an error message when no file was
    posted; duplicate or invalid rows are skipped silently.
    """
    if not request.FILES:
        return render(request, 'dashboard/wordlist.html',
                      {'error_message': "File not uploaded; try again."})
    for line in request.FILES['wordlist']:
        # The file is tab-delimited: one word and its integer score per line.
        word, raw_score = line.split("\t")
        entry = WordScore(word=word, score=int(raw_score), frequency=0)
        try:
            entry.validate_unique()
            entry.save()
        except:
            # duplicate (or otherwise invalid) entry: skip it
            continue
    return redirect('dashboard:dashboard')
# View form to upload file
def stopwords(request):
    """Render the upload form for a stopwords file."""
    return render(request, 'dashboard/stopwords.html', {})
# Upload a stopwords.csv
def uploadStopwords(request):
    """Import a one-word-per-line stopwords file into StopWord.

    Re-renders the upload form with an error message when no file was
    posted; duplicate entries and blank lines are skipped silently.
    """
    if request.FILES == {}:
        return render(request, 'dashboard/stopwords.html',
                      {'error_message': "File not uploaded; try again."})
    for line in request.FILES['stopwords']:
        # Strip the line terminator: the old code stored every stopword with
        # its trailing newline, so the words never matched anything.
        word = line.strip()
        if not word:
            continue
        model = StopWord(word=word)
        try:
            model.validate_unique()
            model.save()
        except:
            # duplicate entry: skip it
            continue
    return redirect('dashboard:dashboard')
|
vim-IDE/python-mode
|
refs/heads/develop
|
pymode/libs/pep8.py
|
7
|
#!/usr/bin/env python
# pep8.py - Check Python source code formatting, according to PEP 8
# Copyright (C) 2006-2009 Johann C. Rocholl <johann@rocholl.net>
# Copyright (C) 2009-2014 Florent Xicluna <florent.xicluna@gmail.com>
# Copyright (C) 2014-2015 Ian Lee <ianlee1521@gmail.com>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
r"""
Check Python source code formatting, according to PEP 8.
For usage and a list of options, try this:
$ python pep8.py -h
This program and its regression test suite live here:
http://github.com/jcrocholl/pep8
Groups of errors and warnings:
E errors
W warnings
100 indentation
200 whitespace
300 blank lines
400 imports
500 line length
600 deprecation
700 statements
900 syntax error
"""
from __future__ import with_statement
import os
import sys
import re
import time
import inspect
import keyword
import tokenize
from optparse import OptionParser
from fnmatch import fnmatch
try:
from configparser import RawConfigParser
from io import TextIOWrapper
except ImportError:
from ConfigParser import RawConfigParser
__version__ = '1.6.3a0'

# Default command-line behaviour: paths to skip and codes ignored by default.
DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox'
DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704'
try:
    if sys.platform == 'win32':
        USER_CONFIG = os.path.expanduser(r'~\.pep8')
    else:
        USER_CONFIG = os.path.join(
            os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'),
            'pep8'
        )
except ImportError:
    USER_CONFIG = None

PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8')
TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite')
MAX_LINE_LENGTH = 79
REPORT_FORMAT = {
    'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s',
    'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s',
}

# Flag for compile() to get an AST instead of bytecode.
PyCF_ONLY_AST = 1024
SINGLETONS = frozenset(['False', 'None', 'True'])
KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS
UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-'])
ARITHMETIC_OP = frozenset(['**', '*', '/', '//', '+', '-'])
WS_OPTIONAL_OPERATORS = ARITHMETIC_OP.union(['^', '&', '|', '<<', '>>', '%'])
WS_NEEDED_OPERATORS = frozenset([
    '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>',
    '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '='])
WHITESPACE = frozenset(' \t')
NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
SKIP_TOKENS = NEWLINE.union([tokenize.INDENT, tokenize.DEDENT])
# ERRORTOKEN is triggered by backticks in Python 3
SKIP_COMMENTS = SKIP_TOKENS.union([tokenize.COMMENT, tokenize.ERRORTOKEN])
BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines']

# Pre-compiled regexes shared by the check functions below.
INDENT_REGEX = re.compile(r'([ \t]*)')
RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,')
RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,.*,\s*\w+\s*$')
ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b')
DOCSTRING_REGEX = re.compile(r'u?r?["\']')
EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
# NOTE(review): upstream pep8 uses two spaces in the first alternative
# ("(?:  |\t)"); verify this wasn't mangled in transit.
WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)')
COMPARE_SINGLETON_REGEX = re.compile(r'\b(None|False|True)?\s*([=!]=)'
                                     r'\s*(?(1)|(None|False|True))\b')
COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^][)(}{ ]+\s+(in|is)\s')
COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type'
                                r'|\s*\(\s*([^)]*[^ )])\s*\))')
KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS))
OPERATOR_REGEX = re.compile(r'(?:[^,\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)')
LAMBDA_REGEX = re.compile(r'\blambda\b')
HUNK_REGEX = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$')

# Work around Python < 2.6 behaviour, which does not generate NL after
# a comment which is on a line by itself.
COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
##############################################################################
# Plugins (check functions) for physical lines
##############################################################################
def tabs_or_spaces(physical_line, indent_char):
    r"""Never mix tabs and spaces.

    The most popular way of indenting Python is with spaces only.  The
    second-most popular way is with tabs only.  Code indented with a mixture
    of tabs and spaces should be converted to using spaces exclusively.  When
    invoking the Python command line interpreter with the -t option, it issues
    warnings about code that illegally mixes tabs and spaces.  When using -tt
    these warnings become errors.  These options are highly recommended!

    Okay: if a == 0:\n        a = 1\n        b = 1
    E101: if a == 0:\n        a = 1\n\tb = 1
    """
    # Walk the leading whitespace and flag the first character that differs
    # from the file's dominant indent character.
    leading = INDENT_REGEX.match(physical_line).group(1)
    offset = 0
    for char in leading:
        if char != indent_char:
            return offset, "E101 indentation contains mixed spaces and tabs"
        offset += 1
def tabs_obsolete(physical_line):
    r"""For new projects, spaces-only are strongly recommended over tabs.

    Okay: if True:\n    return
    W191: if True:\n\treturn
    """
    leading = INDENT_REGEX.match(physical_line).group(1)
    tab_position = leading.find('\t')
    if tab_position != -1:
        return tab_position, "W191 indentation contains tabs"
def trailing_whitespace(physical_line):
    r"""Trailing whitespace is superfluous.

    The warning returned varies on whether the line itself is blank, for easier
    filtering for those who want to indent their blank lines.

    Okay: spam(1)\n#
    W291: spam(1) \n#
    W293: class Foo(object):\n    \n    bang = 12
    """
    # Drop line terminators first: newline, carriage return, form feed.
    for terminator in ('\n', '\r', '\x0c'):
        physical_line = physical_line.rstrip(terminator)
    stripped = physical_line.rstrip(' \t\v')
    if stripped == physical_line:
        return None
    if stripped:
        return len(stripped), "W291 trailing whitespace"
    return 0, "W293 blank line contains whitespace"
def trailing_blank_lines(physical_line, lines, line_number, total_lines):
    r"""Trailing blank lines are superfluous.

    Okay: spam(1)
    W391: spam(1)\n

    However the last line should end with a new line (warning W292).
    """
    # Only the very last physical line of the file is of interest here.
    if line_number != total_lines:
        return None
    without_trailing = physical_line.rstrip()
    if not without_trailing:
        return 0, "W391 blank line at end of file"
    if without_trailing == physical_line:
        return len(physical_line), "W292 no newline at end of file"
def maximum_line_length(physical_line, max_line_length, multiline):
    r"""Limit all lines to a maximum of 79 characters.

    There are still many devices around that are limited to 80 character
    lines; plus, limiting windows to 80 characters makes it possible to have
    several windows side-by-side.  The default wrapping on such devices looks
    ugly.  Therefore, please limit all lines to a maximum of 79 characters.
    For flowing long blocks of text (docstrings or comments), limiting the
    length to 72 characters is recommended.

    Reports error E501.
    """
    line = physical_line.rstrip()
    length = len(line)
    if length <= max_line_length:
        return None
    if noqa(line):
        return None
    # Special case for long URLs in multi-line docstrings or comments,
    # but still report the error when the 72 first chars are whitespaces.
    chunks = line.split()
    if ((len(chunks) == 1 and multiline) or
            (len(chunks) == 2 and chunks[0] == '#')) and \
            len(line) - len(chunks[-1]) < max_line_length - 7:
        return None
    if hasattr(line, 'decode'):   # Python 2
        # The line could contain multi-byte characters
        try:
            length = len(line.decode('utf-8'))
        except UnicodeError:
            pass
    if length > max_line_length:
        return (max_line_length, "E501 line too long "
                "(%d > %d characters)" % (length, max_line_length))
##############################################################################
# Plugins (check functions) for logical lines
##############################################################################
def blank_lines(logical_line, blank_lines, indent_level, line_number,
                blank_before, previous_logical, previous_indent_level):
    r"""Separate top-level function and class definitions with two blank lines.

    Method definitions inside a class are separated by a single blank line.

    Extra blank lines may be used (sparingly) to separate groups of related
    functions.  Blank lines may be omitted between a bunch of related
    one-liners (e.g. a set of dummy implementations).

    Use blank lines in functions, sparingly, to indicate logical sections.

    Okay: def a():\n    pass\n\n\ndef b():\n    pass
    Okay: def a():\n    pass\n\n\n# Foo\n# Bar\n\ndef b():\n    pass

    E301: class Foo:\n    b = 0\n    def bar():\n        pass
    E302: def a():\n    pass\n\ndef b(n):\n    pass
    E303: def a():\n    pass\n\n\n\ndef b(n):\n    pass
    E303: def a():\n\n\n\n    pass
    E304: @decorator\n\ndef a():\n    pass
    """
    if line_number < 3 and not previous_logical:
        return  # the first couple of lines need no preceding blanks
    if previous_logical.startswith('@'):
        # a decorator must be glued to the definition it decorates
        if blank_lines:
            yield 0, "E304 blank lines found after function decorator"
    elif blank_lines > 2 or (indent_level and blank_lines == 2):
        yield 0, "E303 too many blank lines (%d)" % blank_lines
    elif logical_line.startswith(('def ', 'class ', '@')):
        if not indent_level:
            # top-level definition: exactly two blanks expected
            if blank_before != 2:
                yield 0, "E302 expected 2 blank lines, found %d" % blank_before
        elif not (blank_before or previous_indent_level < indent_level or
                  DOCSTRING_REGEX.match(previous_logical)):
            yield 0, "E301 expected 1 blank line, found 0"
def extraneous_whitespace(logical_line):
    r"""Avoid extraneous whitespace.

    Avoid extraneous whitespace in these situations:
    - Immediately inside parentheses, brackets or braces.
    - Immediately before a comma, semicolon, or colon.

    Okay: spam(ham[1], {eggs: 2})
    E201: spam( ham[1], {eggs: 2})
    E201: spam(ham[ 1], {eggs: 2})
    E201: spam(ham[1], { eggs: 2})
    E202: spam(ham[1], {eggs: 2} )
    E202: spam(ham[1 ], {eggs: 2})
    E202: spam(ham[1], {eggs: 2 })

    E203: if x == 4: print x, y; x, y = y , x
    E203: if x == 4: print x, y ; x, y = y, x
    E203: if x == 4 : print x, y; x, y = y, x
    """
    for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(logical_line):
        found = match.start()
        symbol = match.group().strip()
        if match.group() == symbol + ' ':
            # space right after an opening bracket
            yield found + 1, "E201 whitespace after '%s'" % symbol
        elif logical_line[found - 1] != ',':
            # space before a closer or separator (closers get E202,
            # ',' ';' ':' get E203)
            code = 'E202' if symbol in '}])' else 'E203'
            yield found, "%s whitespace before '%s'" % (code, symbol)
def whitespace_around_keywords(logical_line):
    r"""Avoid extraneous whitespace around keywords.

    Okay: True and False
    E271: True and  False
    E272: True  and False
    E273: True and\tFalse
    E274: True\tand False
    """
    for match in KEYWORD_REGEX.finditer(logical_line):
        # group 1 is the whitespace before the keyword, group 2 after it
        for group, tab_msg, spaces_msg in (
                (1, "E274 tab before keyword",
                 "E272 multiple spaces before keyword"),
                (2, "E273 tab after keyword",
                 "E271 multiple spaces after keyword")):
            whitespace = match.group(group)
            if '\t' in whitespace:
                yield match.start(group), tab_msg
            elif len(whitespace) > 1:
                yield match.start(group), spaces_msg
def missing_whitespace(logical_line):
    r"""Each comma, semicolon or colon should be followed by whitespace.

    Okay: [a, b]
    Okay: (3,)
    Okay: a[1:4]
    Okay: a[:4]
    Okay: a[1:]
    Okay: a[1:4:2]
    E231: ['a','b']
    E231: foo(bar,baz)
    E231: [{'a':'b'}]
    """
    line = logical_line
    for index, char in enumerate(line[:-1]):
        if char not in ',;:':
            continue
        next_char = line[index + 1]
        if next_char in WHITESPACE:
            continue
        before = line[:index]
        if char == ':' and before.count('[') > before.count(']') and \
                before.rfind('{') < before.rfind('['):
            continue  # Slice syntax, no space required
        if char == ',' and next_char == ')':
            continue  # Allow tuple with only one element: (3,)
        yield index, "E231 missing whitespace after '%s'" % char
def indentation(logical_line, previous_logical, indent_char,
                indent_level, previous_indent_level):
    r"""Use 4 spaces per indentation level.

    For really old code that you don't want to mess up, you can continue to
    use 8-space tabs.

    Okay: a = 1
    Okay: if a == 0:\n    a = 1
    E111:   a = 1
    E114:   # a = 1

    Okay: for item in items:\n    pass
    E112: for item in items:\npass
    E115: for item in items:\n# Hi\n    pass

    Okay: a = 1\nb = 2
    E113: a = 1\n    b = 2
    E116: a = 1\n    # b = 2
    """
    # Comment-only lines report the E114/E115/E116 variants (code + 3).
    comment_only = not logical_line
    c = 3 if comment_only else 0
    tmpl = "E11%d %s (comment)" if comment_only else "E11%d %s"
    if indent_level % 4:
        yield 0, tmpl % (1 + c, "indentation is not a multiple of four")
    expecting_indent = previous_logical.endswith(':')
    if expecting_indent:
        if indent_level <= previous_indent_level:
            yield 0, tmpl % (2 + c, "expected an indented block")
    elif indent_level > previous_indent_level:
        yield 0, tmpl % (3 + c, "unexpected indentation")
def continued_indentation(logical_line, tokens, indent_level, hang_closing,
                          indent_char, noqa, verbose):
    r"""Continuation lines indentation.

    Continuation lines should align wrapped elements either vertically
    using Python's implicit line joining inside parentheses, brackets
    and braces, or using a hanging indent.

    When using a hanging indent these considerations should be applied:
    - there should be no arguments on the first line, and
    - further indentation should be used to clearly distinguish itself as a
      continuation line.

    Okay: a = (\n)
    E123: a = (\n    )

    Okay: a = (\n    42)
    E121: a = (\n   42)
    E122: a = (\n42)
    E123: a = (\n    42\n    )
    E124: a = (24,\n     42\n)
    E125: if (\n    b):\n    pass
    E126: a = (\n        42)
    E127: a = (24,\n      42)
    E128: a = (24,\n    42)
    E129: if (a or\n    b):\n    pass
    E131: a = (\n    42\n 24)
    """
    first_row = tokens[0][2][0]
    nrows = 1 + tokens[-1][2][0] - first_row
    if noqa or nrows == 1:
        return

    # indent_next tells us whether the next block is indented; assuming
    # that it is indented by 4 spaces, then we should not allow 4-space
    # indents on the final continuation line; in turn, some other
    # indents are allowed to have an extra 4 spaces.
    indent_next = logical_line.endswith(':')

    row = depth = 0
    # a hanging indent of 4 is always fine; 8 is also accepted in tab-indented files
    valid_hangs = (4,) if indent_char != '\t' else (4, 8)
    # remember how many brackets were opened on each line
    parens = [0] * nrows
    # relative indents of physical lines
    rel_indent = [0] * nrows
    # for each depth, collect a list of opening rows
    open_rows = [[0]]
    # for each depth, memorize the hanging indentation
    hangs = [None]
    # visual indents
    indent_chances = {}
    last_indent = tokens[0][2]
    visual_indent = None
    last_token_multiline = False
    # for each depth, memorize the visual indent column
    indent = [last_indent[1]]
    if verbose >= 3:
        print(">>> " + tokens[0][4].rstrip())

    for token_type, text, start, end, line in tokens:

        # a row change that is not the tail of a multiline token starts a
        # new continuation line
        newline = row < start[0] - first_row
        if newline:
            row = start[0] - first_row
            newline = not last_token_multiline and token_type not in NEWLINE

        if newline:
            # this is the beginning of a continuation line.
            last_indent = start
            if verbose >= 3:
                print("... " + line.rstrip())

            # record the initial indent.
            rel_indent[row] = expand_indent(line) - indent_level

            # identify closing bracket
            close_bracket = (token_type == tokenize.OP and text in ']})')

            # is the indent relative to an opening bracket line?
            for open_row in reversed(open_rows[depth]):
                hang = rel_indent[row] - rel_indent[open_row]
                hanging_indent = hang in valid_hangs
                if hanging_indent:
                    break
            if hangs[depth]:
                hanging_indent = (hang == hangs[depth])
            # is there any chance of visual indent?
            visual_indent = (not close_bracket and hang > 0 and
                             indent_chances.get(start[1]))

            # the elif chain below picks exactly one diagnosis (or none)
            # for this continuation line
            if close_bracket and indent[depth]:
                # closing bracket for visual indent
                if start[1] != indent[depth]:
                    yield (start, "E124 closing bracket does not match "
                           "visual indentation")
            elif close_bracket and not hang:
                # closing bracket matches indentation of opening bracket's line
                if hang_closing:
                    yield start, "E133 closing bracket is missing indentation"
            elif indent[depth] and start[1] < indent[depth]:
                if visual_indent is not True:
                    # visual indent is broken
                    yield (start, "E128 continuation line "
                           "under-indented for visual indent")
            elif hanging_indent or (indent_next and rel_indent[row] == 8):
                # hanging indent is verified
                if close_bracket and not hang_closing:
                    yield (start, "E123 closing bracket does not match "
                           "indentation of opening bracket's line")
                hangs[depth] = hang
            elif visual_indent is True:
                # visual indent is verified
                indent[depth] = start[1]
            elif visual_indent in (text, str):
                # ignore token lined up with matching one from a previous line
                pass
            else:
                # indent is broken
                if hang <= 0:
                    error = "E122", "missing indentation or outdented"
                elif indent[depth]:
                    error = "E127", "over-indented for visual indent"
                elif not close_bracket and hangs[depth]:
                    error = "E131", "unaligned for hanging indent"
                else:
                    hangs[depth] = hang
                    if hang > 4:
                        error = "E126", "over-indented for hanging indent"
                    else:
                        error = "E121", "under-indented for hanging indent"
                yield start, "%s continuation line %s" % error

        # look for visual indenting
        if (parens[row] and
                token_type not in (tokenize.NL, tokenize.COMMENT) and
                not indent[depth]):
            indent[depth] = start[1]
            indent_chances[start[1]] = True
            if verbose >= 4:
                print("bracket depth %s indent to %s" % (depth, start[1]))
        # deal with implicit string concatenation
        elif (token_type in (tokenize.STRING, tokenize.COMMENT) or
              text in ('u', 'ur', 'b', 'br')):
            indent_chances[start[1]] = str
        # special case for the "if" statement because len("if (") == 4
        elif not indent_chances and not row and not depth and text == 'if':
            indent_chances[end[1] + 1] = True
        elif text == ':' and line[end[1]:].isspace():
            open_rows[depth].append(row)

        # keep track of bracket depth
        if token_type == tokenize.OP:
            if text in '([{':
                depth += 1
                indent.append(0)
                hangs.append(None)
                if len(open_rows) == depth:
                    open_rows.append([])
                open_rows[depth].append(row)
                parens[row] += 1
                if verbose >= 4:
                    print("bracket depth %s seen, col %s, visual min = %s" %
                          (depth, start[1], indent[depth]))
            elif text in ')]}' and depth > 0:
                # parent indents should not be more than this one
                prev_indent = indent.pop() or last_indent[1]
                hangs.pop()
                for d in range(depth):
                    if indent[d] > prev_indent:
                        indent[d] = 0
                for ind in list(indent_chances):
                    if ind >= prev_indent:
                        del indent_chances[ind]
                del open_rows[depth + 1:]
                depth -= 1
                if depth:
                    indent_chances[indent[depth]] = True
                for idx in range(row, -1, -1):
                    if parens[idx]:
                        parens[idx] -= 1
                        break
            assert len(indent) == depth + 1
            if start[1] not in indent_chances:
                # allow to line up tokens
                indent_chances[start[1]] = text

        last_token_multiline = (start[0] != end[0])
        if last_token_multiline:
            rel_indent[end[0] - first_row] = rel_indent[row]

    if indent_next and expand_indent(line) == indent_level + 4:
        pos = (start[0], indent[0] + 4)
        if visual_indent:
            code = "E129 visually indented line"
        else:
            code = "E125 continuation line"
        yield pos, "%s with same indent as next logical line" % code
def whitespace_before_parameters(logical_line, tokens):
    r"""Avoid extraneous whitespace.
    Avoid extraneous whitespace in the following situations:
    - before the open parenthesis that starts the argument list of a
      function call.
    - before the open parenthesis that starts an indexing or slicing.
    Okay: spam(1)
    E211: spam (1)
    Okay: dict['key'] = list[index]
    E211: dict ['key'] = list[index]
    E211: dict['key'] = list [index]
    """
    prev_type, prev_text, __, prev_end, __ = tokens[0]
    for index, token in enumerate(tokens[1:], start=1):
        token_type, text, start, end, __ = token
        # Candidate: an opening bracket separated by whitespace from a
        # preceding name or closing bracket.
        candidate = (token_type == tokenize.OP and
                     text in '([' and
                     start != prev_end and
                     (prev_type == tokenize.NAME or prev_text in '}])'))
        if candidate:
            # Syntax "class A (B):" is allowed, but avoid it
            after_class = (index >= 2 and tokens[index - 2][1] == 'class')
            # Allow "return (a.foo for a in range(5))"
            if not after_class and not keyword.iskeyword(prev_text):
                yield prev_end, "E211 whitespace before '%s'" % text
        prev_type, prev_text, prev_end = token_type, text, end
def whitespace_around_operator(logical_line):
    r"""Avoid extraneous whitespace around an operator.
    Okay: a = 12 + 3
    E221: a = 4 + 5
    E222: a = 4 + 5
    E223: a = 4\t+ 5
    E224: a = 4 +\t5
    """
    for match in OPERATOR_REGEX.finditer(logical_line):
        before, after = match.groups()
        # Check the whitespace on each side of the operator in turn,
        # reporting tabs first, then runs of multiple spaces.
        sides = (
            (match.start(1), before,
             "E223 tab before operator",
             "E221 multiple spaces before operator"),
            (match.start(2), after,
             "E224 tab after operator",
             "E222 multiple spaces after operator"),
        )
        for offset, whitespace, tab_msg, multi_msg in sides:
            if '\t' in whitespace:
                yield offset, tab_msg
            elif len(whitespace) > 1:
                yield offset, multi_msg
def missing_whitespace_around_operator(logical_line, tokens):
    r"""Surround operators with a single space on either side.
    - Always surround these binary operators with a single space on
    either side: assignment (=), augmented assignment (+=, -= etc.),
    comparisons (==, <, >, !=, <=, >=, in, not in, is, is not),
    Booleans (and, or, not).
    - If operators with different priorities are used, consider adding
    whitespace around the operators with the lowest priorities.
    Okay: i = i + 1
    Okay: submitted += 1
    Okay: x = x * 2 - 1
    Okay: hypot2 = x * x + y * y
    Okay: c = (a + b) * (a - b)
    Okay: foo(bar, key='word', *args, **kwargs)
    Okay: alpha[:-i]
    E225: i=i+1
    E225: submitted +=1
    E225: x = x /2 - 1
    E225: z = x **y
    E226: c = (a+b) * (a-b)
    E226: hypot2 = x*x + y*y
    E227: c = a|b
    E228: msg = fmt%(errno, errmsg)
    """
    # State machine over the token stream.  `need_space` is tri-valued:
    #   False          -> no pending check
    #   True           -> space is mandatory after the previous operator
    #   (pos, optional)-> space is optional; `optional` records whether the
    #                     *opening* side already had a space, so the trailing
    #                     side must match it.
    parens = 0
    need_space = False
    prev_type = tokenize.OP
    prev_text = prev_end = None
    for token_type, text, start, end, line in tokens:
        if token_type in SKIP_COMMENTS:
            continue
        # Track paren depth so '=' inside a call is treated as a keyword arg.
        # 'lambda' opens an implicit "paren" for its default-value commas.
        if text in ('(', 'lambda'):
            parens += 1
        elif text == ')':
            parens -= 1
        if need_space:
            if start != prev_end:
                # Found a (probably) needed space
                if need_space is not True and not need_space[1]:
                    # Optional-space operator with space after but not
                    # before: inconsistent, so it's an E225.
                    yield (need_space[0],
                           "E225 missing whitespace around operator")
                need_space = False
            elif text == '>' and prev_text in ('<', '-'):
                # Tolerate the ">" operator when it is preceded by a '<' or
                # a '-' (Deal with Python 3's annotated return value "->")
                pass
            else:
                if need_space is True or need_space[1]:
                    # A needed trailing space was not found
                    yield prev_end, "E225 missing whitespace around operator"
                elif prev_text != '**':
                    # Optional-space operator with no space on either side:
                    # report the priority-aware code (E226/E227/E228).
                    code, optype = 'E226', 'arithmetic'
                    if prev_text == '%':
                        code, optype = 'E228', 'modulo'
                    elif prev_text not in ARITHMETIC_OP:
                        code, optype = 'E227', 'bitwise or shift'
                    yield (need_space[0], "%s missing whitespace "
                           "around %s operator" % (code, optype))
                need_space = False
        elif token_type == tokenize.OP and prev_end is not None:
            if text == '=' and parens:
                # Allow keyword args or defaults: foo(bar=None).
                pass
            elif text in WS_NEEDED_OPERATORS:
                need_space = True
            elif text in UNARY_OPERATORS:
                # Check if the operator is being used as a binary operator
                # Allow unary operators: -123, -x, +1.
                # Allow argument unpacking: foo(*args, **kwargs).
                if (prev_text in '}])' if prev_type == tokenize.OP
                        else prev_text not in KEYWORDS):
                    need_space = None
            elif text in WS_OPTIONAL_OPERATORS:
                need_space = None
            if need_space is None:
                # Surrounding space is optional, but ensure that
                # trailing space matches opening space
                need_space = (prev_end, start != prev_end)
            elif need_space and start == prev_end:
                # A needed opening space was not found
                yield prev_end, "E225 missing whitespace around operator"
                need_space = False
        prev_type = token_type
        prev_text = text
        prev_end = end
def whitespace_around_comma(logical_line):
    r"""Avoid extraneous whitespace after a comma or a colon.
    Note: these checks are disabled by default
    Okay: a = (1, 2)
    E241: a = (1, 2)
    E242: a = (1,\t2)
    """
    for m in WHITESPACE_AFTER_COMMA_REGEX.finditer(logical_line):
        # Report the first whitespace character after the separator.
        offset = m.start() + 1
        separator = m.group()[0]
        if '\t' in m.group():
            yield offset, "E242 tab after '%s'" % separator
        else:
            yield offset, "E241 multiple spaces after '%s'" % separator
def whitespace_around_named_parameter_equals(logical_line, tokens):
    r"""Don't use spaces around the '=' sign in function arguments.
    Don't use spaces around the '=' sign when used to indicate a
    keyword argument or a default parameter value.
    Okay: def complex(real, imag=0.0):
    Okay: return magic(r=real, i=imag)
    Okay: boolean(a == b)
    Okay: boolean(a != b)
    Okay: boolean(a <= b)
    Okay: boolean(a >= b)
    Okay: def foo(arg: int = 42):
    E251: def complex(real, imag = 0.0):
    E251: return magic(r = real, i = imag)
    """
    message = "E251 unexpected spaces around keyword / parameter equals"
    paren_depth = 0
    pending_after_check = False  # just saw a keyword '='; next token must abut
    prev_end = None
    annotated_func_arg = False
    in_def = logical_line.startswith('def')
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.NL:
            continue
        if pending_after_check:
            pending_after_check = False
            if start != prev_end:
                yield (prev_end, message)
        if token_type == tokenize.OP:
            if text == '(':
                paren_depth += 1
            elif text == ')':
                paren_depth -= 1
            elif in_def and text == ':' and paren_depth == 1:
                # Annotated argument: "arg: int = 42" legitimately has
                # spaces around '='.
                annotated_func_arg = True
            elif paren_depth == 1 and text == ',':
                annotated_func_arg = False
            elif paren_depth and text == '=' and not annotated_func_arg:
                pending_after_check = True
                if start != prev_end:
                    yield (prev_end, message)
            if not paren_depth:
                annotated_func_arg = False
        prev_end = end
def whitespace_before_comment(logical_line, tokens):
    r"""Separate inline comments by at least two spaces.
    An inline comment is a comment on the same line as a statement. Inline
    comments should be separated by at least two spaces from the statement.
    They should start with a # and a single space.
    Each line of a block comment starts with a # and a single space
    (unless it is indented text inside the comment).
    Okay: x = x + 1 # Increment x
    Okay: x = x + 1 # Increment x
    Okay: # Block comment
    E261: x = x + 1 # Increment x
    E262: x = x + 1 #Increment x
    E262: x = x + 1 # Increment x
    E265: #Block comment
    E266: ### Block comment
    """
    # prev_end tracks the end position of the last non-comment, non-NL token
    # so we can measure the gap before an inline comment.
    prev_end = (0, 0)
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            # Anything before the '#' on this physical line makes it inline.
            inline_comment = line[:start[1]].strip()
            if inline_comment:
                if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
                    yield (prev_end,
                           "E261 at least two spaces before inline comment")
            # symbol = leading '#...' run, comment = text after first space.
            symbol, sp, comment = text.partition(' ')
            # bad_prefix is falsy for a well-formed "# ..." comment;
            # otherwise it holds the first offending character (or '#').
            bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#')
            if inline_comment:
                if bad_prefix or comment[:1] in WHITESPACE:
                    yield start, "E262 inline comment should start with '# '"
            elif bad_prefix and (bad_prefix != '!' or start[0] > 1):
                # '#!' is tolerated only as a shebang on the first line.
                if bad_prefix != '#':
                    yield start, "E265 block comment should start with '# '"
                elif comment:
                    yield start, "E266 too many leading '#' for block comment"
        elif token_type != tokenize.NL:
            prev_end = end
def imports_on_separate_lines(logical_line):
    r"""Imports should usually be on separate lines.
    Okay: import os\nimport sys
    E401: import sys, os
    Okay: from subprocess import Popen, PIPE
    Okay: from myclas import MyClass
    Okay: from foo.bar.yourclass import YourClass
    Okay: import myclass
    Okay: import foo.bar.yourclass
    """
    if not logical_line.startswith('import '):
        return
    comma = logical_line.find(',')
    # A semicolon before the comma means the comma belongs to a later
    # statement, not to this import.
    if comma > -1 and ';' not in logical_line[:comma]:
        yield comma, "E401 multiple imports on one line"
def module_imports_on_top_of_file(
        logical_line, indent_level, checker_state, noqa):
    r"""Imports are always put at the top of the file, just after any module
    comments and docstrings, and before module globals and constants.
    Okay: import os
    Okay: # this is a comment\nimport os
    Okay: '''this is a module docstring'''\nimport os
    Okay: r'''this is a module docstring'''\nimport os
    Okay: try:\n import x\nexcept:\n pass\nelse:\n pass\nimport y
    Okay: try:\n import x\nexcept:\n pass\nfinally:\n pass\nimport y
    E402: a=1\nimport os
    E402: 'One string'\n"Two string"\nimport os
    E402: a=1\nfrom sys import x
    Okay: if x:\n import os
    """
    def is_string_literal(line):
        # Strip an optional u/b prefix, then an optional r prefix, and
        # check whether a quote follows.
        if line[0] in 'uUbB':
            line = line[1:]
        if line and line[0] in 'rR':
            line = line[1:]
        return line and (line[0] == '"' or line[0] == "'")

    allowed_try_keywords = ('try', 'except', 'else', 'finally')
    # Indented code, blank logical lines and noqa-suppressed lines are
    # all exempt from this check.
    if indent_level or not logical_line or noqa:
        return
    line = logical_line
    if line.startswith(('import ', 'from ')):
        if checker_state.get('seen_non_imports', False):
            yield 0, "E402 module level import not at top of file"
    elif line.startswith(allowed_try_keywords):
        # Allow try, except, else, finally keywords intermixed with
        # imports in order to support conditional importing.
        return
    elif is_string_literal(line):
        # The first literal is a docstring, allow it. Otherwise, report error.
        if checker_state.get('seen_docstring', False):
            checker_state['seen_non_imports'] = True
        else:
            checker_state['seen_docstring'] = True
    else:
        checker_state['seen_non_imports'] = True
def compound_statements(logical_line):
    r"""Compound statements (on the same line) are generally discouraged.
    While sometimes it's okay to put an if/for/while with a small body
    on the same line, never do this for multi-clause statements.
    Also avoid folding such long lines!
    Always use a def statement instead of an assignment statement that
    binds a lambda expression directly to a name.
    Okay: if foo == 'blah':\n do_blah_thing()
    Okay: do_one()
    Okay: do_two()
    Okay: do_three()
    E701: if foo == 'blah': do_blah_thing()
    E701: for x in lst: total += x
    E701: while t < 10: t = delay()
    E701: if foo == 'blah': do_blah_thing()
    E701: else: do_non_blah_thing()
    E701: try: something()
    E701: finally: cleanup()
    E701: if foo == 'blah': one(); two(); three()
    E702: do_one(); do_two(); do_three()
    E703: do_four(); # useless semicolon
    E704: def f(x): return 2*x
    E731: f = lambda x: 2*x
    """
    line = logical_line
    last_char = len(line) - 1
    # Scan every ':' that is not the last character of the line.
    found = line.find(':')
    while -1 < found < last_char:
        before = line[:found]
        # Balanced-bracket heuristics: skip colons inside dict literals,
        # slices and (since annotations) parentheses.
        if ((before.count('{') <= before.count('}') and # {'a': 1} (dict)
            before.count('[') <= before.count(']') and # [1:2] (slice)
            before.count('(') <= before.count(')'))): # (annotation)
            lambda_kw = LAMBDA_REGEX.search(before)
            if lambda_kw:
                # Yes, but is it a lambda definition, or its default argument?
                before = line[:lambda_kw.start()].rstrip()
                if before[-1:] == '=' and isidentifier(before[:-1].strip()):
                    yield 0, ("E731 do not assign a lambda expression, use a "
                              "def")
                # A lambda's ':' ends the useful scan for this line.
                break
            if before.startswith('def '):
                yield 0, "E704 multiple statements on one line (def)"
            else:
                yield found, "E701 multiple statements on one line (colon)"
        found = line.find(':', found + 1)
    # Scan semicolons: mid-line ones are E702, a trailing one is E703.
    found = line.find(';')
    while -1 < found:
        if found < last_char:
            yield found, "E702 multiple statements on one line (semicolon)"
        else:
            yield found, "E703 statement ends with a semicolon"
        found = line.find(';', found + 1)
def explicit_line_join(logical_line, tokens):
    r"""Avoid explicit line join between brackets.
    The preferred way of wrapping long lines is by using Python's implied line
    continuation inside parentheses, brackets and braces. Long lines can be
    broken over multiple lines by wrapping expressions in parentheses. These
    should be used in preference to using a backslash for line continuation.
    E502: aaa = [123, \\n 123]
    E502: aaa = ("bbb " \\n "ccc")
    Okay: aaa = [123,\n 123]
    Okay: aaa = ("bbb "\n "ccc")
    Okay: aaa = "bbb " \\n "ccc"
    Okay: aaa = 123 # \\
    """
    # State carried between tokens:
    #   parens    - current bracket nesting depth
    #   backslash - position of a trailing backslash on the previous
    #               physical line, or None
    #   comment   - whether a comment was seen (a '\' in a comment is not
    #               a line join)
    prev_start = prev_end = parens = 0
    comment = False
    backslash = None
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            comment = True
        if start[0] != prev_start and parens and backslash and not comment:
            # New physical line, inside brackets, after a backslash join.
            yield backslash, "E502 the backslash is redundant between brackets"
        if end[0] != prev_end:
            # Token ends on a new physical line: record whether that line
            # ends with a continuation backslash.
            if line.rstrip('\r\n').endswith('\\'):
                backslash = (end[0], len(line.splitlines()[-1]) - 1)
            else:
                backslash = None
            prev_start = prev_end = end[0]
        else:
            prev_start = start[0]
        if token_type == tokenize.OP:
            if text in '([{':
                parens += 1
            elif text in ')]}':
                parens -= 1
def break_around_binary_operator(logical_line, tokens):
    r"""
    Avoid breaks before binary operators.
    The preferred place to break around a binary operator is after the
    operator, not before it.
    W503: (width == 0\n + height == 0)
    W503: (width == 0\n and height == 0)
    Okay: (width == 0 +\n height == 0)
    Okay: foo(\n -x)
    Okay: foo(x\n [])
    Okay: x = '''\n''' + ''
    Okay: foo(x,\n -y)
    Okay: foo(x, # comment\n -y)
    """
    def is_binary_operator(token_type, text):
        # The % character is strictly speaking a binary operator, but the
        # common usage seems to be to put it next to the format parameters,
        # after a line break.
        return ((token_type == tokenize.OP or text in ['and', 'or']) and
                text not in "()[]{},:.;@=%")

    line_break = False
    unary_context = True
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            continue
        if token_type != tokenize.STRING and ('\n' in text or '\r' in text):
            # A NL/NEWLINE token: remember the break, keep the context.
            line_break = True
            continue
        if (line_break and not unary_context and
                is_binary_operator(token_type, text)):
            yield start, "W503 line break before binary operator"
        # After '([{,;' the next operator is unary, not binary.
        unary_context = text in '([{,;'
        line_break = False
def comparison_to_singleton(logical_line, noqa):
    r"""Comparison to singletons should use "is" or "is not".
    Comparisons to singletons like None should always be done
    with "is" or "is not", never the equality operators.
    Okay: if arg is not None:
    E711: if arg != None:
    E711: if None == arg:
    E712: if arg == True:
    E712: if False == arg:
    Also, beware of writing if x when you really mean if x is not None --
    e.g. when testing whether a variable or argument that defaults to None was
    set to some other value. The other value might have a type (such as a
    container) that could be false in a boolean context!
    """
    if noqa:
        return
    match = COMPARE_SINGLETON_REGEX.search(logical_line)
    if not match:
        return
    # The singleton may appear on either side of the comparison.
    singleton = match.group(1) or match.group(3)
    same = (match.group(2) == '==')
    msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton)
    if singleton in ('None',):
        code = 'E711'
    else:
        code = 'E712'
        # Suggest the plain truthiness test for True/False comparisons.
        nonzero = ((singleton == 'True' and same) or
                   (singleton == 'False' and not same))
        msg += " or 'if %scond:'" % ('' if nonzero else 'not ')
    yield match.start(2), ("%s comparison to %s should be %s" %
                           (code, singleton, msg))
def comparison_negative(logical_line):
    r"""Negative comparison should be done using "not in" and "is not".
    Okay: if x not in y:\n pass
    Okay: assert (X in Y or X is Z)
    Okay: if not (X in Y):\n pass
    Okay: zz = x is not y
    E713: Z = not X in Y
    E713: if not X.B in Y:\n pass
    E714: if not X is Y:\n pass
    E714: Z = not X.B is Y
    """
    match = COMPARE_NEGATIVE_REGEX.search(logical_line)
    if not match:
        return
    # group(2) tells us which comparison operator followed the 'not'.
    offset = match.start(1)
    if match.group(2) == 'in':
        yield offset, "E713 test for membership should be 'not in'"
    else:
        yield offset, "E714 test for object identity should be 'is not'"
def comparison_type(logical_line, noqa):
    r"""Object type comparisons should always use isinstance().
    Do not compare types directly.
    Okay: if isinstance(obj, int):
    E721: if type(obj) is type(1):
    When checking if an object is a string, keep in mind that it might be a
    unicode string too! In Python 2.3, str and unicode have a common base
    class, basestring, so you can do:
    Okay: if isinstance(obj, basestring):
    Okay: if type(a1) is type(b1):
    """
    match = COMPARE_TYPE_REGEX.search(logical_line)
    if not match or noqa:
        return
    inst = match.group(1)
    if inst and isidentifier(inst) and inst not in SINGLETONS:
        return  # Allow comparison for types which are not obvious
    yield match.start(), "E721 do not compare types, use 'isinstance()'"
def python_3000_has_key(logical_line, noqa):
    r"""The {}.has_key() method is removed in Python 3: use the 'in' operator.
    Okay: if "alph" in d:\n print d["alph"]
    W601: assert d.has_key('alph')
    """
    if noqa:
        return
    offset = logical_line.find('.has_key(')
    if offset > -1:
        yield offset, "W601 .has_key() is deprecated, use 'in'"
def python_3000_raise_comma(logical_line):
    r"""When raising an exception, use "raise ValueError('message')".
    The older form is removed in Python 3.
    Okay: raise DummyError("Message")
    W602: raise DummyError, "Message"
    """
    match = RAISE_COMMA_REGEX.match(logical_line)
    # "raise E, V, tb" re-raise forms are tolerated (RERAISE pattern).
    if not match or RERAISE_COMMA_REGEX.match(logical_line):
        return
    yield match.end() - 1, "W602 deprecated form of raising exception"
def python_3000_not_equal(logical_line):
    r"""New code should always use != instead of <>.
    The older syntax is removed in Python 3.
    Okay: if a != 'no':
    W603: if a <> 'no':
    """
    offset = logical_line.find('<>')
    if offset != -1:
        yield offset, "W603 '<>' is deprecated, use '!='"
def python_3000_backticks(logical_line):
    r"""Backticks are removed in Python 3: use repr() instead.
    Okay: val = repr(1 + 2)
    W604: val = `1 + 2`
    """
    offset = logical_line.find('`')
    if offset != -1:
        yield offset, "W604 backticks are deprecated, use 'repr()'"
##############################################################################
# Helper functions
##############################################################################
# Version-dependent helpers: on Python 2, str.encode() returns a str, so
# '' == ''.encode() holds; on Python 3 it returns bytes and the test fails.
if '' == ''.encode():
    # Python 2: implicit encoding.
    def readlines(filename):
        """Read the source code."""
        with open(filename, 'rU') as f:
            return f.readlines()
    isidentifier = re.compile(r'[a-zA-Z_]\w*$').match
    stdin_get_value = sys.stdin.read
else:
    # Python 3
    def readlines(filename):
        """Read the source code."""
        try:
            with open(filename, 'rb') as f:
                # Honour the PEP 263 coding cookie when decoding.
                (coding, lines) = tokenize.detect_encoding(f.readline)
                f = TextIOWrapper(f, coding, line_buffering=True)
                return [l.decode(coding) for l in lines] + f.readlines()
        except (LookupError, SyntaxError, UnicodeError):
            # Fall back if file encoding is improperly declared
            with open(filename, encoding='latin-1') as f:
                return f.readlines()
    isidentifier = str.isidentifier
    def stdin_get_value():
        return TextIOWrapper(sys.stdin.buffer, errors='ignore').read()
# Matcher for the "# noqa" / "# nopep8" suppression comment.
noqa = re.compile(r'# no(?:qa|pep8)\b', re.I).search
def expand_indent(line):
    r"""Return the amount of indentation.
    Tabs are expanded to the next multiple of 8.
    >>> expand_indent(' ')
    4
    >>> expand_indent('\t')
    8
    >>> expand_indent(' \t')
    8
    >>> expand_indent(' \t')
    16
    """
    if '\t' not in line:
        # Fast path: no tabs, so the indent width is just the count of
        # leading whitespace characters.
        return len(line) - len(line.lstrip())
    width = 0
    for char in line:
        if char == '\t':
            # A tab jumps to the next multiple of 8.
            width = (width // 8 + 1) * 8
        elif char == ' ':
            width += 1
        else:
            break
    return width
def mute_string(text):
    """Replace contents with 'xxx' to prevent syntax matching.
    >>> mute_string('"abc"')
    '"xxx"'
    >>> mute_string("'''abc'''")
    "'''xxx'''"
    >>> mute_string("r'abc'")
    "r'xxx'"
    """
    # Skip any string modifiers (e.g. u or r) plus the opening quote:
    # the first occurrence of the closing quote character marks the end
    # of the prefix.
    begin = text.index(text[-1]) + 1
    finish = len(text) - 1
    if text[-3:] in ('"""', "'''"):
        # Triple quotes: widen the preserved delimiters by two chars.
        begin += 2
        finish -= 2
    return text[:begin] + 'x' * (finish - begin) + text[finish:]
def parse_udiff(diff, patterns=None, parent='.'):
    """Return a dictionary of matching lines."""
    # For each file of the diff, the entry key is the filename,
    # and the value is a set of row numbers to consider.
    rv = {}
    path = nrows = None
    for line in diff.splitlines():
        if nrows:
            # Still consuming the current hunk: count only lines that
            # exist in the new file (everything except removals).
            if line[:1] != '-':
                nrows -= 1
            continue
        if line[:3] == '@@ ':
            # Hunk header: "@@ -a,b +row,nrows @@" (counts default to 1).
            hunk_match = HUNK_REGEX.match(line)
            (row, nrows) = [int(g or '1') for g in hunk_match.groups()]
            rv[path].update(range(row, row + nrows))
        elif line[:3] == '+++':
            # New-file header: start collecting rows for this path.
            path = line[4:].split('\t', 1)[0]
            if path[:2] == 'b/':
                path = path[2:]
            rv[path] = set()
    return dict([(os.path.join(parent, path), rows)
                 for (path, rows) in rv.items()
                 if rows and filename_match(path, patterns)])
def normalize_paths(value, parent=os.curdir):
    """Parse a comma-separated list of paths.
    Return a list of absolute paths.
    """
    if not value:
        return []
    if isinstance(value, list):
        # Already parsed; pass through untouched.
        return value
    normalized = []
    for raw in value.split(','):
        raw = raw.strip()
        if '/' in raw:
            # Only multi-component paths are made absolute; bare names
            # (e.g. glob patterns) are kept as-is.
            raw = os.path.abspath(os.path.join(parent, raw))
        normalized.append(raw.rstrip('/'))
    return normalized
def filename_match(filename, patterns, default=True):
    """Check if patterns contains a pattern that matches filename.
    If patterns is unspecified, this always returns True.
    """
    if not patterns:
        return default
    for pattern in patterns:
        if fnmatch(filename, pattern):
            return True
    return False
def _is_eol_token(token):
    # True for NEWLINE/NL tokens, or when the rest of the physical line
    # after the token is only a backslash continuation.
    return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n'
if COMMENT_WITH_NL:
    # On tokenizers that merge the trailing newline into COMMENT tokens,
    # a comment spanning the whole line also ends the physical line.
    def _is_eol_token(token, _eol_token=_is_eol_token):
        return _eol_token(token) or (token[0] == tokenize.COMMENT and
                                     token[1] == token[4])
##############################################################################
# Framework to run all checks
##############################################################################
# Registry of check callables, keyed by kind.  Each inner dict maps a check
# (function or class) to a tuple of (error codes, argument names); it is
# populated by register_check().
_checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}}
def _get_parameters(function):
if sys.version_info >= (3, 3):
return list(inspect.signature(function).parameters)
else:
return inspect.getargspec(function)[0]
def register_check(check, codes=None):
    """Register a new check object."""
    def _add_check(check, kind, codes, args):
        # Merge codes if the check was already registered.
        if check in _checks[kind]:
            _checks[kind][check][0].extend(codes or [])
        else:
            _checks[kind][check] = (codes or [''], args)
    if inspect.isfunction(check):
        args = _get_parameters(check)
        # The first parameter name selects the check kind.
        if args and args[0] in ('physical_line', 'logical_line'):
            if codes is None:
                # Error codes are harvested from the docstring (e.g. E501).
                codes = ERRORCODE_REGEX.findall(check.__doc__ or '')
            _add_check(check, args[0], codes, args)
    elif inspect.isclass(check):
        # Tree (AST) checks are classes constructed with (self, tree, ...).
        if _get_parameters(check.__init__)[:2] == ['self', 'tree']:
            _add_check(check, 'tree', codes, None)
def init_checks_registry():
    """Register all globally visible functions.
    The first argument name is either 'physical_line' or 'logical_line'.
    """
    mod = inspect.getmodule(register_check)
    for (name, function) in inspect.getmembers(mod, inspect.isfunction):
        register_check(function)
# Populate the registry at import time.
init_checks_registry()
class Checker(object):
    """Load a Python source file, tokenize it, check coding style."""
    def __init__(self, filename=None, lines=None,
                 options=None, report=None, **kwargs):
        if options is None:
            options = StyleGuide(kwargs).options
        else:
            assert not kwargs
        self._io_error = None
        self._physical_checks = options.physical_checks
        self._logical_checks = options.logical_checks
        self._ast_checks = options.ast_checks
        self.max_line_length = options.max_line_length
        self.multiline = False   # in a multiline string?
        self.hang_closing = options.hang_closing
        self.verbose = options.verbose
        self.filename = filename
        # Dictionary where a checker can store its custom state.
        self._checker_states = {}
        # Source selection: explicit lines, stdin ('-' or no filename),
        # or read from disk.
        if filename is None:
            self.filename = 'stdin'
            self.lines = lines or []
        elif filename == '-':
            self.filename = 'stdin'
            self.lines = stdin_get_value().splitlines(True)
        elif lines is None:
            try:
                self.lines = readlines(filename)
            except IOError:
                # Remember the error; it is reported later by
                # generate_tokens() as E902.
                (exc_type, exc) = sys.exc_info()[:2]
                self._io_error = '%s: %s' % (exc_type.__name__, exc)
                self.lines = []
        else:
            self.lines = lines
        if self.lines:
            ord0 = ord(self.lines[0][0])
            if ord0 in (0xef, 0xfeff):  # Strip the UTF-8 BOM
                if ord0 == 0xfeff:
                    self.lines[0] = self.lines[0][1:]
                elif self.lines[0][:3] == '\xef\xbb\xbf':
                    self.lines[0] = self.lines[0][3:]
        self.report = report or options.report
        self.report_error = self.report.error
    def report_invalid_syntax(self):
        """Check if the syntax is valid."""
        # Extract a (row, col) offset from the exception args when present.
        (exc_type, exc) = sys.exc_info()[:2]
        if len(exc.args) > 1:
            offset = exc.args[1]
            if len(offset) > 2:
                offset = offset[1:3]
        else:
            offset = (1, 0)
        self.report_error(offset[0], offset[1] or 0,
                          'E901 %s: %s' % (exc_type.__name__, exc.args[0]),
                          self.report_invalid_syntax)
    def readline(self):
        """Get the next line from the input buffer."""
        if self.line_number >= self.total_lines:
            return ''
        line = self.lines[self.line_number]
        self.line_number += 1
        # Remember the first indent character seen (space or tab) so the
        # indentation checks can detect mixing.
        if self.indent_char is None and line[:1] in WHITESPACE:
            self.indent_char = line[0]
        return line
    def run_check(self, check, argument_names):
        """Run a check plugin."""
        # Checks declare the state they need via parameter names; fetch
        # each one off this Checker instance.
        arguments = []
        for name in argument_names:
            arguments.append(getattr(self, name))
        return check(*arguments)
    def init_checker_state(self, name, argument_names):
        """ Prepares a custom state for the specific checker plugin."""
        if 'checker_state' in argument_names:
            self.checker_state = self._checker_states.setdefault(name, {})
    def check_physical(self, line):
        """Run all physical checks on a raw input line."""
        self.physical_line = line
        for name, check, argument_names in self._physical_checks:
            self.init_checker_state(name, argument_names)
            result = self.run_check(check, argument_names)
            if result is not None:
                (offset, text) = result
                self.report_error(self.line_number, offset, text, check)
                if text[:4] == 'E101':
                    # Indentation style changed: resync the indent char.
                    self.indent_char = line[0]
    def build_tokens_line(self):
        """Build a logical line from tokens."""
        # `mapping` pairs offsets in the logical line with (row, col)
        # positions in the physical source, for error reporting.
        logical = []
        comments = []
        length = 0
        prev_row = prev_col = mapping = None
        for token_type, text, start, end, line in self.tokens:
            if token_type in SKIP_TOKENS:
                continue
            if not mapping:
                mapping = [(0, start)]
            if token_type == tokenize.COMMENT:
                comments.append(text)
                continue
            if token_type == tokenize.STRING:
                # Hide string contents so checks don't match inside them.
                text = mute_string(text)
            if prev_row:
                (start_row, start_col) = start
                if prev_row != start_row:    # different row
                    prev_text = self.lines[prev_row - 1][prev_col - 1]
                    if prev_text == ',' or (prev_text not in '{[(' and
                                            text not in '}])'):
                        text = ' ' + text
                elif prev_col != start_col:  # different column
                    text = line[prev_col:start_col] + text
            logical.append(text)
            length += len(text)
            mapping.append((length, end))
        (prev_row, prev_col) = end
        self.logical_line = ''.join(logical)
        self.noqa = comments and noqa(''.join(comments))
        return mapping
    def check_logical(self):
        """Build a line from tokens and run all logical checks on it."""
        self.report.increment_logical_line()
        mapping = self.build_tokens_line()
        if not mapping:
            return
        (start_row, start_col) = mapping[0][1]
        start_line = self.lines[start_row - 1]
        self.indent_level = expand_indent(start_line[:start_col])
        if self.blank_before < self.blank_lines:
            self.blank_before = self.blank_lines
        if self.verbose >= 2:
            print(self.logical_line[:80].rstrip())
        for name, check, argument_names in self._logical_checks:
            if self.verbose >= 4:
                print('   ' + name)
            self.init_checker_state(name, argument_names)
            for offset, text in self.run_check(check, argument_names) or ():
                if not isinstance(offset, tuple):
                    # Translate a logical-line offset back to a physical
                    # (row, col) position using the token mapping.
                    for token_offset, pos in mapping:
                        if offset <= token_offset:
                            break
                    offset = (pos[0], pos[1] + offset - token_offset)
                self.report_error(offset[0], offset[1], text, check)
        if self.logical_line:
            # Remember state for blank-line / indentation checks.
            self.previous_indent_level = self.indent_level
            self.previous_logical = self.logical_line
        self.blank_lines = 0
        self.tokens = []
    def check_ast(self):
        """Build the file's AST and run all AST checks."""
        try:
            tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST)
        except (ValueError, SyntaxError, TypeError):
            return self.report_invalid_syntax()
        for name, cls, __ in self._ast_checks:
            checker = cls(tree, self.filename)
            for lineno, offset, text, check in checker.run():
                if not self.lines or not noqa(self.lines[lineno - 1]):
                    self.report_error(lineno, offset, text, check)
    def generate_tokens(self):
        """Tokenize the file, run physical line checks and yield tokens."""
        if self._io_error:
            self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
        tokengen = tokenize.generate_tokens(self.readline)
        try:
            for token in tokengen:
                if token[2][0] > self.total_lines:
                    return
                self.maybe_check_physical(token)
                yield token
        except (SyntaxError, tokenize.TokenError):
            self.report_invalid_syntax()
    def maybe_check_physical(self, token):
        """If appropriate (based on token), check current physical line(s)."""
        # Called after every token, but act only on end of line.
        if _is_eol_token(token):
            # Obviously, a newline token ends a single physical line.
            self.check_physical(token[4])
        elif token[0] == tokenize.STRING and '\n' in token[1]:
            # Less obviously, a string that contains newlines is a
            # multiline string, either triple-quoted or with internal
            # newlines backslash-escaped. Check every physical line in the
            # string *except* for the last one: its newline is outside of
            # the multiline string, so we consider it a regular physical
            # line, and will check it like any other physical line.
            #
            # Subtleties:
            # - we don't *completely* ignore the last line; if it contains
            #   the magical "# noqa" comment, we disable all physical
            #   checks for the entire multiline string
            # - have to wind self.line_number back because initially it
            #   points to the last line of the string, and we want
            #   check_physical() to give accurate feedback
            if noqa(token[4]):
                return
            self.multiline = True
            self.line_number = token[2][0]
            for line in token[1].split('\n')[:-1]:
                self.check_physical(line + '\n')
                self.line_number += 1
            self.multiline = False
    def check_all(self, expected=None, line_offset=0):
        """Run all checks on the input file."""
        self.report.init_file(self.filename, self.lines, expected, line_offset)
        self.total_lines = len(self.lines)
        if self._ast_checks:
            self.check_ast()
        self.line_number = 0
        self.indent_char = None
        self.indent_level = self.previous_indent_level = 0
        self.previous_logical = ''
        self.tokens = []
        self.blank_lines = self.blank_before = 0
        parens = 0
        for token in self.generate_tokens():
            # Buffer tokens until a logical line is complete, tracking
            # bracket depth so NL tokens inside brackets don't end it.
            self.tokens.append(token)
            token_type, text = token[0:2]
            if self.verbose >= 3:
                if token[2][0] == token[3][0]:
                    pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
                else:
                    pos = 'l.%s' % token[3][0]
                print('l.%s\t%s\t%s\t%r' %
                      (token[2][0], pos, tokenize.tok_name[token[0]], text))
            if token_type == tokenize.OP:
                if text in '([{':
                    parens += 1
                elif text in '}])':
                    parens -= 1
            elif not parens:
                if token_type in NEWLINE:
                    if token_type == tokenize.NEWLINE:
                        self.check_logical()
                        self.blank_before = 0
                    elif len(self.tokens) == 1:
                        # The physical line contains only this token.
                        self.blank_lines += 1
                        del self.tokens[0]
                    else:
                        self.check_logical()
                elif COMMENT_WITH_NL and token_type == tokenize.COMMENT:
                    if len(self.tokens) == 1:
                        # The comment also ends a physical line.  This works around
                        # Python < 2.6 behaviour, which does not generate NL after
                        # a comment which is on a line by itself.
                        token = list(token)
                        token[1] = text.rstrip('\r\n')
                        token[3] = (token[2][0], token[2][1] + len(token[1]))
                        self.tokens = [tuple(token)]
                        self.check_logical()
        if self.tokens:
            # Flush any trailing tokens (file without final newline).
            self.check_physical(self.lines[-1])
            self.check_logical()
        return self.report.get_file_results()
class BaseReport(object):
    """Collect the results of the checks."""
    # Subclasses set this to True to announce each file before its errors.
    print_filename = False
    def __init__(self, options):
        self._benchmark_keys = options.benchmark_keys
        self._ignore_code = options.ignore_code
        # Results
        self.elapsed = 0
        self.total_errors = 0
        self.counters = dict.fromkeys(self._benchmark_keys, 0)
        self.messages = {}
    def start(self):
        """Start the timer."""
        self._start_time = time.time()
    def stop(self):
        """Stop the timer."""
        self.elapsed = time.time() - self._start_time
    def init_file(self, filename, lines, expected, line_offset):
        """Signal a new file."""
        self.filename = filename
        self.lines = lines
        self.expected = expected or ()
        self.line_offset = line_offset
        self.file_errors = 0
        self.counters['files'] += 1
        self.counters['physical lines'] += len(lines)
    def increment_logical_line(self):
        """Signal a new logical line."""
        self.counters['logical lines'] += 1
    def error(self, line_number, offset, text, check):
        """Report an error, according to options."""
        # `text` is formatted "XNNN message"; the first 4 chars are the code.
        code = text[:4]
        if self._ignore_code(code):
            return
        if code in self.counters:
            self.counters[code] += 1
        else:
            # First occurrence: also remember the message for statistics.
            self.counters[code] = 1
            self.messages[code] = text[5:]
        # Don't care about expected errors or warnings
        if code in self.expected:
            return
        if self.print_filename and not self.file_errors:
            print(self.filename)
        self.file_errors += 1
        self.total_errors += 1
        return code
    def get_file_results(self):
        """Return the count of errors and warnings for this file."""
        return self.file_errors
    def get_count(self, prefix=''):
        """Return the total count of errors and warnings."""
        return sum([self.counters[key]
                    for key in self.messages if key.startswith(prefix)])
    def get_statistics(self, prefix=''):
        """Get statistics for message codes that start with the prefix.
        prefix='' matches all errors and warnings
        prefix='E' matches all errors
        prefix='W' matches all warnings
        prefix='E4' matches all errors that have to do with imports
        """
        return ['%-7s %s %s' % (self.counters[key], key, self.messages[key])
                for key in sorted(self.messages) if key.startswith(prefix)]
    def print_statistics(self, prefix=''):
        """Print overall statistics (number of errors and warnings)."""
        for line in self.get_statistics(prefix):
            print(line)
    def print_benchmark(self):
        """Print benchmark numbers."""
        print('%-7.2f %s' % (self.elapsed, 'seconds elapsed'))
        if self.elapsed:
            for key in self._benchmark_keys:
                print('%-7d %s per second (%d total)' %
                      (self.counters[key] / self.elapsed, key,
                       self.counters[key]))
class FileReport(BaseReport):
    """Report that only prints the names of the files containing problems."""
    print_filename = True
class StandardReport(BaseReport):
    """Collect and print the results of the checks."""

    def __init__(self, options):
        super(StandardReport, self).__init__(options)
        user_format = options.format
        self._fmt = REPORT_FORMAT.get(user_format.lower(), user_format)
        self._repeat = options.repeat
        self._show_source = options.show_source
        self._show_pep8 = options.show_pep8

    def init_file(self, filename, lines, expected, line_offset):
        """Reset the per-file deferred output, then delegate to the base."""
        self._deferred_print = []
        return super(StandardReport, self).init_file(
            filename, lines, expected, line_offset)

    def error(self, line_number, offset, text, check):
        """Record the error for deferred printing, according to options."""
        code = super(StandardReport, self).error(line_number, offset,
                                                 text, check)
        if code and (self._repeat or self.counters[code] == 1):
            self._deferred_print.append(
                (line_number, offset, code, text[5:], check.__doc__))
        return code

    def get_file_results(self):
        """Print the deferred results and return the count for this file."""
        self._deferred_print.sort()
        for line_number, offset, code, text, doc in self._deferred_print:
            values = {
                'path': self.filename,
                'row': self.line_offset + line_number,
                'col': offset + 1,
                'code': code,
                'text': text,
            }
            print(self._fmt % values)
            if self._show_source:
                if line_number > len(self.lines):
                    source_line = ''
                else:
                    source_line = self.lines[line_number - 1]
                print(source_line.rstrip())
                print(re.sub(r'\S', ' ', source_line[:offset]) + '^')
            if self._show_pep8 and doc:
                print(' ' + doc.strip())
            # stdout is block buffered when not attached to a tty, and a
            # line can be broken at a buffer boundary when other processes
            # write to the same file; flush after every print to keep each
            # reported line intact (safe while len(line) < the typical
            # 8192-byte buffer).
            sys.stdout.flush()
        return self.file_errors
class DiffReport(StandardReport):
    """Collect and print the results for the changed lines only."""

    def __init__(self, options):
        super(DiffReport, self).__init__(options)
        self._selected = options.selected_lines

    def error(self, line_number, offset, text, check):
        """Report the error only when the line is part of the diff."""
        if line_number in self._selected[self.filename]:
            return super(DiffReport, self).error(
                line_number, offset, text, check)
class StyleGuide(object):
    """Initialize a PEP-8 instance with few options.

    Entry point for library use: builds the option set (from keyword
    arguments, a dict, or the command line), selects the report class,
    and exposes ``check_files`` / ``input_file`` / ``input_dir``.
    """

    def __init__(self, *args, **kwargs):
        # build options from the command line
        self.checker_class = kwargs.pop('checker_class', Checker)
        parse_argv = kwargs.pop('parse_argv', False)
        config_file = kwargs.pop('config_file', False)
        parser = kwargs.pop('parser', None)
        # build options from dict
        options_dict = dict(*args, **kwargs)
        arglist = None if parse_argv else options_dict.get('paths', None)
        options, self.paths = process_options(
            arglist, parse_argv, config_file, parser)
        if options_dict:
            # Explicit keyword options override anything parsed above.
            options.__dict__.update(options_dict)
            if 'paths' in options_dict:
                self.paths = options_dict['paths']

        self.runner = self.input_file
        self.options = options
        if not options.reporter:
            options.reporter = BaseReport if options.quiet else StandardReport

        options.select = tuple(options.select or ())
        if not (options.select or options.ignore or
                options.testsuite or options.doctest) and DEFAULT_IGNORE:
            # The default choice: ignore controversial checks
            options.ignore = tuple(DEFAULT_IGNORE.split(','))
        else:
            # Ignore all checks which are not explicitly selected
            options.ignore = ('',) if options.select else tuple(options.ignore)
        options.benchmark_keys = BENCHMARK_KEYS[:]
        options.ignore_code = self.ignore_code
        options.physical_checks = self.get_checks('physical_line')
        options.logical_checks = self.get_checks('logical_line')
        options.ast_checks = self.get_checks('tree')
        self.init_report()

    def init_report(self, reporter=None):
        """Initialize the report instance."""
        self.options.report = (reporter or self.options.reporter)(self.options)
        return self.options.report

    def check_files(self, paths=None):
        """Run all checks on the paths."""
        if paths is None:
            paths = self.paths
        report = self.options.report
        runner = self.runner
        report.start()
        try:
            for path in paths:
                if os.path.isdir(path):
                    self.input_dir(path)
                elif not self.excluded(path):
                    runner(path)
        except KeyboardInterrupt:
            # Ctrl-C stops the run but still returns the partial report.
            print('... stopped')
        report.stop()
        return report

    def input_file(self, filename, lines=None, expected=None, line_offset=0):
        """Run all checks on a Python source file."""
        if self.options.verbose:
            print('checking %s' % filename)
        fchecker = self.checker_class(
            filename, lines=lines, options=self.options)
        return fchecker.check_all(expected=expected, line_offset=line_offset)

    def input_dir(self, dirname):
        """Check all files in this directory and all subdirectories."""
        dirname = dirname.rstrip('/')
        if self.excluded(dirname):
            return 0
        counters = self.options.report.counters
        verbose = self.options.verbose
        filepatterns = self.options.filename
        runner = self.runner
        for root, dirs, files in os.walk(dirname):
            if verbose:
                print('directory ' + root)
            counters['directories'] += 1
            # Removing entries from ``dirs`` in place prunes os.walk so that
            # excluded subtrees are never descended into.
            for subdir in sorted(dirs):
                if self.excluded(subdir, root):
                    dirs.remove(subdir)
            for filename in sorted(files):
                # contain a pattern that matches?
                if ((filename_match(filename, filepatterns) and
                     not self.excluded(filename, root))):
                    runner(os.path.join(root, filename))

    def excluded(self, filename, parent=None):
        """Check if the file should be excluded.

        Check if 'options.exclude' contains a pattern that matches filename.
        """
        if not self.options.exclude:
            return False
        basename = os.path.basename(filename)
        if filename_match(basename, self.options.exclude):
            return True
        if parent:
            filename = os.path.join(parent, filename)
        filename = os.path.abspath(filename)
        return filename_match(filename, self.options.exclude)

    def ignore_code(self, code):
        """Check if the error code should be ignored.

        If 'options.select' contains a prefix of the error code,
        return False.  Else, if 'options.ignore' contains a prefix of
        the error code, return True.
        """
        # Short codes (e.g. 'E', 'E5') are kept if any selected code
        # starts with them, so category-level selection works.
        if len(code) < 4 and any(s.startswith(code)
                                 for s in self.options.select):
            return False
        return (code.startswith(self.options.ignore) and
                not code.startswith(self.options.select))

    def get_checks(self, argument_name):
        """Get all the checks for this category.

        Find all globally visible functions where the first argument name
        starts with argument_name and which contain selected tests.
        """
        checks = []
        for check, attrs in _checks[argument_name].items():
            (codes, args) = attrs
            # Keep the check if at least one of its codes is not ignored.
            if any(not (code and self.ignore_code(code)) for code in codes):
                checks.append((check.__name__, check, args))
        return sorted(checks)
def get_parser(prog='pep8', version=__version__):
    """Create the command-line OptionParser with all pep8 options.

    ``parser.config_options`` lists the option names that may also be set
    from a configuration file (see ``read_config``).
    """
    parser = OptionParser(prog=prog, version=version,
                          usage="%prog [options] input ...")
    parser.config_options = [
        'exclude', 'filename', 'select', 'ignore', 'max-line-length',
        'hang-closing', 'count', 'format', 'quiet', 'show-pep8',
        'show-source', 'statistics', 'verbose']
    parser.add_option('-v', '--verbose', default=0, action='count',
                      help="print status messages, or debug with -vv")
    parser.add_option('-q', '--quiet', default=0, action='count',
                      help="report only file names, or nothing with -qq")
    parser.add_option('-r', '--repeat', default=True, action='store_true',
                      help="(obsolete) show all occurrences of the same error")
    parser.add_option('--first', action='store_false', dest='repeat',
                      help="show first occurrence of each error")
    parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE,
                      help="exclude files or directories which match these "
                           "comma separated patterns (default: %default)")
    parser.add_option('--filename', metavar='patterns', default='*.py',
                      help="when parsing directories, only check filenames "
                           "matching these comma separated patterns "
                           "(default: %default)")
    parser.add_option('--select', metavar='errors', default='',
                      help="select errors and warnings (e.g. E,W6)")
    parser.add_option('--ignore', metavar='errors', default='',
                      help="skip errors and warnings (e.g. E4,W) "
                           "(default: %s)" % DEFAULT_IGNORE)
    parser.add_option('--show-source', action='store_true',
                      help="show source code for each error")
    parser.add_option('--show-pep8', action='store_true',
                      help="show text of PEP 8 for each error "
                           "(implies --first)")
    parser.add_option('--statistics', action='store_true',
                      help="count errors and warnings")
    parser.add_option('--count', action='store_true',
                      help="print total number of errors and warnings "
                           "to standard error and set exit code to 1 if "
                           "total is not null")
    parser.add_option('--max-line-length', type='int', metavar='n',
                      default=MAX_LINE_LENGTH,
                      help="set maximum allowed line length "
                           "(default: %default)")
    parser.add_option('--hang-closing', action='store_true',
                      help="hang closing bracket instead of matching "
                           "indentation of opening bracket's line")
    parser.add_option('--format', metavar='format', default='default',
                      help="set the error format [default|pylint|<custom>]")
    parser.add_option('--diff', action='store_true',
                      help="report only lines changed according to the "
                           "unified diff received on STDIN")
    # Testing-only options are exposed only when running from a source
    # checkout that contains the test suite directory.
    group = parser.add_option_group("Testing Options")
    if os.path.exists(TESTSUITE_PATH):
        group.add_option('--testsuite', metavar='dir',
                         help="run regression tests from dir")
        group.add_option('--doctest', action='store_true',
                         help="run doctest on myself")
    group.add_option('--benchmark', action='store_true',
                     help="measure processing speed")
    return parser
def read_config(options, args, arglist, parser):
    """Read and parse configurations

    If a config file is specified on the command line with the "--config"
    option, then only it is used for configuration.

    Otherwise, the user configuration (~/.config/pep8) and any local
    configurations in the current directory or above will be merged together
    (in that order) using the read method of ConfigParser.
    """
    config = RawConfigParser()

    cli_conf = options.config

    local_dir = os.curdir

    if USER_CONFIG and os.path.isfile(USER_CONFIG):
        if options.verbose:
            print('user configuration: %s' % USER_CONFIG)
        config.read(USER_CONFIG)

    # Walk up from the common prefix of the input paths looking for the
    # nearest project-level configuration file (PROJECT_CONFIG entries).
    parent = tail = args and os.path.abspath(os.path.commonprefix(args))
    while tail:
        if config.read(os.path.join(parent, fn) for fn in PROJECT_CONFIG):
            local_dir = parent
            if options.verbose:
                print('local configuration: in %s' % parent)
            break
        (parent, tail) = os.path.split(parent)

    if cli_conf and os.path.isfile(cli_conf):
        if options.verbose:
            print('cli configuration: %s' % cli_conf)
        config.read(cli_conf)

    pep8_section = parser.prog
    if config.has_section(pep8_section):
        # Map each option's dest name to its type (or action, for flags).
        option_list = dict([(o.dest, o.type or o.action)
                            for o in parser.option_list])

        # First, read the default values
        (new_options, __) = parser.parse_args([])

        # Second, parse the configuration
        for opt in config.options(pep8_section):
            if opt.replace('_', '-') not in parser.config_options:
                print(" unknown option '%s' ignored" % opt)
                continue
            if options.verbose > 1:
                print(" %s = %s" % (opt, config.get(pep8_section, opt)))
            normalized_opt = opt.replace('-', '_')
            opt_type = option_list[normalized_opt]
            if opt_type in ('int', 'count'):
                value = config.getint(pep8_section, opt)
            elif opt_type == 'string':
                value = config.get(pep8_section, opt)
                if normalized_opt == 'exclude':
                    # Exclusion patterns are resolved relative to the
                    # directory the configuration was found in.
                    value = normalize_paths(value, local_dir)
            else:
                assert opt_type in ('store_true', 'store_false')
                value = config.getboolean(pep8_section, opt)
            setattr(new_options, normalized_opt, value)

        # Third, overwrite with the command-line options
        (options, __) = parser.parse_args(arglist, values=new_options)
    options.doctest = options.testsuite = False
    return options
def process_options(arglist=None, parse_argv=False, config_file=None,
                    parser=None):
    """Process options passed either via arglist or via command line args.

    Passing in the ``config_file`` parameter allows other tools, such as flake8
    to specify their own options to be processed in pep8.

    Returns an ``(options, args)`` pair where ``args`` is the list of
    paths to check.
    """
    if not parser:
        parser = get_parser()
    if not parser.has_option('--config'):
        group = parser.add_option_group("Configuration", description=(
            "The project options are read from the [%s] section of the "
            "tox.ini file or the setup.cfg file located in any parent folder "
            "of the path(s) being processed. Allowed options are: %s." %
            (parser.prog, ', '.join(parser.config_options))))
        group.add_option('--config', metavar='path', default=config_file,
                         help="user config file location")
    # Don't read the command line if the module is used as a library.
    if not arglist and not parse_argv:
        arglist = []
    # If parse_argv is True and arglist is None, arguments are
    # parsed from the command line (sys.argv)
    (options, args) = parser.parse_args(arglist)
    options.reporter = None

    if options.ensure_value('testsuite', False):
        args.append(options.testsuite)
    elif not options.ensure_value('doctest', False):
        if parse_argv and not args:
            # With no paths given, default to '.' when a diff or a project
            # config file is present; otherwise it's a usage error.
            if options.diff or any(os.path.exists(name)
                                   for name in PROJECT_CONFIG):
                args = ['.']
            else:
                parser.error('input not specified')
        options = read_config(options, args, arglist, parser)
        # -q (quiet == 1) on the command line switches to file-name-only output.
        options.reporter = parse_argv and options.quiet == 1 and FileReport

    # Split the comma-separated option strings into lists.
    options.filename = options.filename and options.filename.split(',')
    options.exclude = normalize_paths(options.exclude)
    options.select = options.select and options.select.split(',')
    options.ignore = options.ignore and options.ignore.split(',')

    if options.diff:
        options.reporter = DiffReport
        stdin = stdin_get_value()
        options.selected_lines = parse_udiff(stdin, options.filename, args[0])
        args = sorted(options.selected_lines)

    return options, args
def _main():
    """Parse options and run checks on Python source.

    Exits with status 1 when any error was reported (after optionally
    writing the total count to stderr with --count).
    """
    import signal

    # Handle "Broken pipe" gracefully
    try:
        signal.signal(signal.SIGPIPE, lambda signum, frame: sys.exit(1))
    except AttributeError:
        pass    # not supported on Windows

    pep8style = StyleGuide(parse_argv=True)
    options = pep8style.options

    if options.doctest or options.testsuite:
        # Regression/self-test mode; only available from a source checkout.
        from testsuite.support import run_tests
        report = run_tests(pep8style)
    else:
        report = pep8style.check_files()

    if options.statistics:
        report.print_statistics()

    if options.benchmark:
        report.print_benchmark()

    if options.testsuite and not options.quiet:
        report.print_results()

    if report.total_errors:
        if options.count:
            sys.stderr.write(str(report.total_errors) + '\n')
        sys.exit(1)
# Allow running this module directly as a command-line tool.
if __name__ == '__main__':
    _main()
|
addon-onlinetv/plugin.video.onlinetv
|
refs/heads/master
|
resources/lib/__init__.py
|
12133432
| |
GiladE/birde
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/conf/locale/te/__init__.py
|
12133432
| |
FHannes/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/module/before/src/p1/__init__.py
|
12133432
| |
dronefly/dronefly.github.io
|
refs/heads/master
|
flask/lib/python2.7/site-packages/wtforms/ext/appengine/__init__.py
|
177
|
import warnings

# Importing this package only triggers the standard deprecation notice;
# the real implementation now lives in the wtforms-appengine package.
_DEPRECATION_MESSAGE = (
    'wtforms.ext.appengine is deprecated, and will be removed in WTForms 3.0. '
    'The package has been split out into its own package, wtforms-appengine: '
    'https://github.com/wtforms/wtforms-appengine '
)
warnings.warn(_DEPRECATION_MESSAGE, DeprecationWarning)
|
Fokusnica/codecombat
|
refs/heads/master
|
scripts/analytics/mixpanelGetEvent.py
|
97
|
# Get mixpanel event data via export API
# Useful for debugging Mixpanel data weirdness

# Levels whose events we want to inspect; the second assignment deliberately
# narrows the list for this debugging session.
targetLevels = ['dungeons-of-kithgard', 'the-raised-sword', 'endangered-burl']
targetLevels = ['dungeons-of-kithgard']
# Ordered funnel of event names; the first entry is the funnel start event.
eventFunnel = ['Started Level', 'Saw Victory']
# eventFunnel = ['Saw Victory']
# eventFunnel = ['Started Level']

import sys
from pprint import pprint
from datetime import datetime, timedelta
from mixpanel import Mixpanel

try:
    import json
except ImportError:
    # Fall back to simplejson where the stdlib json module is unavailable.
    import simplejson as json

# NOTE: mixpanel dates are by day and inclusive
# E.g. '2014-12-08' is any date that day, up to 2014-12-09 12am

if __name__ == '__main__':
    # NOTE(review): `len(sys.argv) is 3` relies on CPython small-int identity;
    # `len(sys.argv) != 3` would be the robust comparison.
    if not len(sys.argv) is 3:
        print "Script format: <script> <api_key> <api_secret>"
    else:
        scriptStart = datetime.now()
        api_key = sys.argv[1]
        api_secret = sys.argv[2]
        api = Mixpanel(
            api_key = api_key,
            api_secret = api_secret
        )
        # Query window (inclusive on both ends, see NOTE above).
        startDate = '2015-01-01'
        endDate = '2015-01-26'
        startEvent = eventFunnel[0]
        endEvent = eventFunnel[-1]
        print("Requesting data for {0} to {1}".format(startDate, endDate))
        # Raw export API: the response is one JSON event per line.
        data = api.request(['export'], {
            # 'where': '"539c630f30a67c3b05d98d95" == properties["id"]',
            # 'where': "('539c630f30a67c3b05d98d95' == properties['id'] or '539c630f30a67c3b05d98d95' == properties['distinct_id'])",
            'event': eventFunnel,
            'from_date': startDate,
            'to_date': endDate
        })
        weirdUserIDs = []
        eventUsers = {}
        # level -> event -> user -> earliest day the event was seen
        levelEventUserDayMap = {}
        # level -> user -> event -> earliest day the event was seen
        levelUserEventDayMap = {}
        lines = data.split('\n')
        print "Received %d entries" % len(lines)
        for line in lines:
            try:
                if len(line) is 0: continue
                eventData = json.loads(line)
                # pprint(eventData)
                # break
                eventName = eventData['event']
                if not eventName in eventFunnel:
                    print 'Unexpected event ' + eventName
                    break
                if not 'properties' in eventData:
                    print('no properties, skpping')
                    continue
                properties = eventData['properties']
                if not 'distinct_id' in properties:
                    print('no distinct_id, skpping')
                    continue
                user = properties['distinct_id']
                if not 'time' in properties:
                    print('no time, skpping')
                    continue
                time = properties['time']
                # NOTE(review): assumes the exported timestamps are PST and
                # shifts +8h to get UTC -- confirm against Mixpanel docs.
                pst = datetime.fromtimestamp(int(properties['time']))
                utc = pst + timedelta(0, 8 * 60 * 60)
                dateCreated = utc.isoformat()
                day = dateCreated[0:10]
                if day < startDate or day > endDate:
                    print "Skipping {0}".format(day)
                    continue
                # Level id may come as 'levelID' or as a display name 'level'.
                if 'levelID' in properties:
                    level = properties['levelID']
                elif 'level' in properties:
                    level = properties['level'].lower().replace(' ', '-')
                else:
                    print("Unkonwn level for", eventName)
                    print(properties)
                    break
                if not level in targetLevels: continue
                # if user != "539c630f30a67c3b05d98d95": continue
                pprint(eventData)
                # if user == "54c1fc3a08652d5305442c6b":
                #     pprint(eventData)
                #     break
                # if '-' in user:
                #     weirdUserIDs.append(user)
                #     # pprint(eventData)
                #     # break
                #     continue
                # print level
                # Record the earliest day each (level, event, user) was seen.
                if not level in levelEventUserDayMap: levelEventUserDayMap[level] = {}
                if not eventName in levelEventUserDayMap[level]: levelEventUserDayMap[level][eventName] = {}
                if not user in levelEventUserDayMap[level][eventName] or levelEventUserDayMap[level][eventName][user] > day:
                    levelEventUserDayMap[level][eventName][user] = day
                if not user in eventUsers: eventUsers[user] = True
                if not level in levelUserEventDayMap: levelUserEventDayMap[level] = {}
                if not user in levelUserEventDayMap[level]: levelUserEventDayMap[level][user] = {}
                if not eventName in levelUserEventDayMap[level][user] or levelUserEventDayMap[level][user][eventName] > day:
                    levelUserEventDayMap[level][user][eventName] = day
            except:
                # NOTE(review): bare except aborts the whole loop on the first
                # malformed line -- acceptable for a debugging script.
                print "Unexpected error:", sys.exc_info()[0]
                print line
                break

        # pprint(levelEventUserDayMap)
        print("Weird user IDs: {0}".format(len(weirdUserIDs)))
        for level in levelEventUserDayMap:
            for event in levelEventUserDayMap[level]:
                print("{0} {1} {2}".format(level, event, len(levelEventUserDayMap[level][event])))
        print("Users: {0}".format(len(eventUsers)))

        noStartDayUsers = []
        levelFunnelData = {}
        # Attribute each user's funnel events to the day they first started
        # the funnel; users without a start event are tallied separately.
        for level in levelUserEventDayMap:
            for user in levelUserEventDayMap[level]:
                # 6455
                # for event in levelUserEventDayMap[level][user]:
                #     day = levelUserEventDayMap[level][user][event]
                #     if not level in levelFunnelData: levelFunnelData[level] = {}
                #     if not day in levelFunnelData[level]: levelFunnelData[level][day] = {}
                #     if not event in levelFunnelData[level][day]: levelFunnelData[level][day][event] = 0
                #     levelFunnelData[level][day][event] += 1
                # 5382
                funnelStartDay = None
                for event in levelUserEventDayMap[level][user]:
                    day = levelUserEventDayMap[level][user][event]
                    if not level in levelFunnelData: levelFunnelData[level] = {}
                    if not day in levelFunnelData[level]: levelFunnelData[level][day] = {}
                    if not event in levelFunnelData[level][day]: levelFunnelData[level][day][event] = 0
                    if eventFunnel[0] == event:
                        levelFunnelData[level][day][event] += 1
                        funnelStartDay = day
                        break
                if funnelStartDay:
                    for event in levelUserEventDayMap[level][user]:
                        if not event in levelFunnelData[level][funnelStartDay]:
                            levelFunnelData[level][funnelStartDay][event] = 0
                        if eventFunnel[0] != event:
                            levelFunnelData[level][funnelStartDay][event] += 1
                    # Make sure every funnel step has an entry, even if zero.
                    for i in range(1, len(eventFunnel)):
                        event = eventFunnel[i]
                        if not event in levelFunnelData[level][funnelStartDay]:
                            levelFunnelData[level][funnelStartDay][event] = 0
                else:
                    noStartDayUsers.append(user)
        pprint(levelFunnelData)

        print("No start day count: {0}".format(len(noStartDayUsers)))
        noStartDayUsers.sort()
        for i in range(len(noStartDayUsers)):
            if i > 50: break
            print(noStartDayUsers[i])

        print("Script runtime: {0}".format(datetime.now() - scriptStart))
|
dsavransky/EXOSIMS
|
refs/heads/master
|
EXOSIMS/SimulatedUniverse/KnownRVPlanetsUniverse.py
|
1
|
from EXOSIMS.Prototypes.SimulatedUniverse import SimulatedUniverse
import numpy as np
import astropy.units as u
from astropy.time import Time
class KnownRVPlanetsUniverse(SimulatedUniverse):
    """
    Simulated universe implementation intended to work with the Known RV planet
    planetary population and target list implementations.

    Args:
        specs:
            user specified values

    """

    def __init__(self, **specs):
        SimulatedUniverse.__init__(self, **specs)

    def gen_physical_properties(self, missionStart=60634, **specs):
        """Generates the planetary systems' physical properties.  Populates arrays
        of the orbital elements, albedos, masses and radii of all planets, and
        generates indices that map from planet to parent star.

        All parameters are generated by adding consistent error terms to the
        catalog values for each planet.

        Args:
            missionStart (float):
                Mission start time (MJD) used to compute the initial mean
                anomaly.  Defaults to 60634.
        """
        PPop = self.PlanetPopulation
        PPMod = self.PlanetPhysicalModel
        TL = self.TargetList

        # Go through the target list and pick out the planets belonging to those hosts
        starinds = np.array([])
        planinds = np.array([])
        for j,name in enumerate(TL.Name):
            tmp = np.where(PPop.hostname == name)[0]
            planinds = np.hstack((planinds,tmp))
            starinds = np.hstack((starinds,[j]*len(tmp)))
        planinds = planinds.astype(int)
        starinds = starinds.astype(int)

        # map planets to stars in standard format
        self.plan2star = starinds
        self.sInds = np.unique(self.plan2star)
        self.nPlans = len(planinds)

        # populate parameters: catalog value + normally distributed error term
        self.a = PPop.sma[planinds] + np.random.normal(size=self.nPlans)\
                *PPop.smaerr[planinds]  # semi-major axis
        # ensure sampling did not make it negative
        self.a[self.a <= 0] = PPop.sma[planinds][self.a <= 0]
        self.e = PPop.eccen[planinds] + np.random.normal(size=self.nPlans)\
                *PPop.eccenerr[planinds]  # eccentricity
        # clamp sampled eccentricity into [0, 0.9]
        self.e[self.e < 0.] = 0.
        self.e[self.e > 0.9] = 0.9
        Itmp, Otmp, self.w = PPop.gen_angles(self.nPlans)
        self.I = PPop.allplanetdata['pl_orbincl'][planinds] + np.random.normal\
                (size=self.nPlans)*PPop.allplanetdata['pl_orbinclerr1'][planinds]
        # masked (missing) catalog inclinations fall back to the drawn values
        self.I[self.I.mask] = Itmp[self.I.mask].to('deg').value
        self.I = self.I.data*u.deg  # inclination
        lper = PPop.allplanetdata['pl_orblper'][planinds] + \
                np.random.normal(size=self.nPlans)*PPop.allplanetdata['pl_orblpererr1'][planinds]
        self.O = lper.data*u.deg - self.w  # longitude of ascending node
        # missing longitudes of periastron leave NaNs; use the drawn values
        self.O[np.isnan(self.O)] = Otmp[np.isnan(self.O)]
        self.p = PPMod.calc_albedo_from_sma(self.a,PPop.prange)  # albedo
        self.Mp = PPop.mass[planinds]  # mass first!
        self.Rp = PPMod.calc_radius_from_mass(self.Mp)  # radius from mass
        # where the catalog has a measured radius, prefer it over the model
        self.Rmask = ~PPop.radiusmask[planinds]
        self.Rp[self.Rmask] = PPop.radius[planinds][self.Rmask]
        self.Rperr1 = PPop.radiuserr1[planinds][self.Rmask]
        self.Rperr2 = PPop.radiuserr2[planinds][self.Rmask]

        # calculate period
        missionStart = Time(float(missionStart), format='mjd', scale='tai')
        T = PPop.period[planinds] + np.random.normal(size=self.nPlans)\
                *PPop.perioderr[planinds]
        T[T <= 0] = PPop.period[planinds][T <= 0]
        # calculate initial mean anomaly from time of periastron passage
        tper = Time(PPop.tper[planinds].value + (np.random.normal(size=self.nPlans)\
                *PPop.tpererr[planinds]).to('day').value, format='jd', scale='tai')
        self.M0 = ((missionStart - tper)/T % 1)*360*u.deg
|
XeCycle/indico
|
refs/heads/master
|
indico/MaKaC/webinterface/pages/conferences.py
|
2
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
import collections
from flask import session, request
import os
import re
from datetime import timedelta, datetime
from xml.sax.saxutils import quoteattr
import MaKaC.webinterface.wcomponents as wcomponents
import MaKaC.webinterface.urlHandlers as urlHandlers
import MaKaC.webinterface.linking as linking
import MaKaC.webinterface.navigation as navigation
import MaKaC.schedule as schedule
import MaKaC.conference as conference
import MaKaC.common.filters as filters
from MaKaC.common.utils import isStringHTML
import MaKaC.common.utils
import MaKaC.review as review
from MaKaC.review import AbstractTextField
from MaKaC.webinterface.pages.base import WPDecorated
from MaKaC.webinterface.common.tools import strip_ml_tags, escape_html
from MaKaC.webinterface.common.abstractStatusWrapper import AbstractStatusList
from MaKaC.webinterface.common.contribStatusWrapper import ContribStatusList
from MaKaC.common.output import outputGenerator
from MaKaC.webinterface.common.timezones import TimezoneRegistry
from MaKaC.PDFinterface.base import PDFSizes
from pytz import timezone
from MaKaC.common.timezoneUtils import nowutc, DisplayTZ
from MaKaC.conference import EventCloner
from MaKaC.badgeDesignConf import BadgeDesignConfiguration
from MaKaC.posterDesignConf import PosterDesignConfiguration
from MaKaC.webinterface.pages import main
from MaKaC.webinterface.pages import base
import MaKaC.common.info as info
from indico.util.i18n import i18nformat, _, ngettext
from indico.util.date_time import format_time, format_date, format_datetime
from indico.util.string import safe_upper
from MaKaC.common.fossilize import fossilize
from MaKaC.fossils.conference import IConferenceEventInfoFossil
from MaKaC.common.Conversion import Conversion
from indico.modules import ModuleHolder
from indico.modules.auth.util import url_for_logout
from MaKaC.conference import Session, Contribution
from indico.core.config import Config
from MaKaC.common.utils import formatDateTime
from MaKaC.webinterface.general import WebFactory
from MaKaC.common.TemplateExec import render
from indico.core import signals
from indico.core.db.sqlalchemy.principals import PrincipalType
from indico.modules.events.layout import layout_settings
from indico.modules.events.layout.util import (build_menu_entry_name, get_css_url, get_menu_entry_by_name,
menu_entries_for_event)
from indico.modules.users.util import get_user_by_email
from indico.util import json
from indico.util.signals import values_from_signal
from indico.util.string import to_unicode
from indico.web.flask.util import url_for
from indico.web.menu import render_sidemenu
# Matches internal names of lecture-series parts, e.g. "part1", "part12".
LECTURE_SERIES_RE = re.compile(r'^part\d+$')
def stringToDate(str):
    """Parse a ``DD-MonthName-YYYY`` string into a naive ``datetime``."""
    # Don't delete this dictionary inside comment. Its purpose is to
    # add the dictionary in the language dictionary during the extraction!
    # months = { _("January"): 1, _("February"): 2, _("March"): 3, _("April"): 4,
    #            _("May"): 5, _("June"): 6, _("July"): 7, _("August"): 8,
    #            _("September"): 9, _("October"): 10, _("November"): 11, _("December"): 12 }
    month_names = ("January", "February", "March", "April",
                   "May", "June", "July", "August",
                   "September", "October", "November", "December")
    months = dict(zip(month_names, range(1, 13)))
    day, month, year = str.split("-")
    return datetime(int(year), months[month], int(day))
class WPConferenceBase(base.WPDecorated):
    """Base page for conference views: resolves the display timezone, the
    screen start/end dates and builds the window title from them."""

    def __init__(self, rh, conference, **kwargs):
        WPDecorated.__init__(self, rh, **kwargs)
        self._navigationTarget = self._conf = conference
        tz = self._tz = DisplayTZ(rh._aw, self._conf).getDisplayTZ()
        sDate = self.sDate = self._conf.getAdjustedScreenStartDate(tz)
        eDate = self.eDate = self._conf.getAdjustedScreenEndDate(tz)
        # Single-day event: "(8 March 2015)"; multi-day events get an interval.
        dates = " (%s)" % format_date(sDate, format='long')
        if sDate.strftime("%d%B%Y") != eDate.strftime("%d%B%Y"):
            if sDate.strftime("%B%Y") == eDate.strftime("%B%Y"):
                # Same month and year: compact "(05-08 March 2015)" form.
                dates = " (%s-%s)" % (sDate.strftime("%d"), format_date(eDate, format='long'))
            else:
                dates = " (%s - %s)" % (format_date(sDate, format='long'), format_date(eDate, format='long'))
        self._setTitle("%s %s" % (strip_ml_tags(self._conf.getTitle()), dates))

    def _getFooter(self):
        """Build the page footer showing the last-modification date."""
        wc = wcomponents.WFooter()
        p = {"modificationDate": format_datetime(self._conf.getModificationDate(), format='d MMMM yyyy H:mm'),
             "subArea": self._getSiteArea()
             }
        return wc.getHTML(p)

    def getLogoutURL(self):
        # After logout, send the user back to this conference's display page.
        return url_for_logout(str(urlHandlers.UHConferenceDisplay.getURL(self._conf)))
class WPConferenceDisplayBase(WPConferenceBase):
    """Intermediate base class; currently adds nothing over WPConferenceBase."""
    pass
class WPConferenceDefaultDisplayBase( WPConferenceBase):
navigationEntry = None
menu_entry_plugin = None
menu_entry_name = None
def getJSFiles(self):
return (WPConferenceBase.getJSFiles(self) + self._includeJSPackage('Display') +
self._includeJSPackage('MaterialEditor'))
def _getFooter( self ):
wc = wcomponents.WFooter()
p = {"modificationDate": format_datetime(self._conf.getModificationDate(), format='d MMMM yyyy H:mm'),
"subArea": self._getSiteArea()}
cid = self._conf.getUrlTag().strip() or self._conf.getId()
p["shortURL"] = Config.getInstance().getShortEventURL() + cid
return wc.getHTML(p)
def _getHeader( self ):
"""
"""
wc = wcomponents.WConferenceHeader( self._getAW(), self._conf )
return wc.getHTML( { "loginURL": self.getLoginURL(),\
"logoutURL": self.getLogoutURL(),\
"confId": self._conf.getId(), \
"dark": True} )
@property
def sidemenu_option(self):
if not self.menu_entry_name:
return None
name = build_menu_entry_name(self.menu_entry_name, self.menu_entry_plugin)
entry = get_menu_entry_by_name(name, self._conf)
if entry:
return entry.id
def _getNavigationBarHTML(self):
item=None
if self.navigationEntry:
item = self.navigationEntry()
itemList = []
while item is not None:
if itemList == []:
itemList.insert(0, wcomponents.WTemplated.htmlText(item.getTitle()) )
else:
itemList.insert(0, """<a href=%s>%s</a>"""%( quoteattr(str(item.getURL(self._navigationTarget))), wcomponents.WTemplated.htmlText(item.getTitle()) ) )
item = item.getParent(self._navigationTarget)
itemList.insert(0, i18nformat("""<a href=%s> _("Home")</a>""")%quoteattr(str(urlHandlers.UHConferenceDisplay.getURL(self._conf))) )
return " > ".join(itemList)
def _applyConfDisplayDecoration( self, body ):
drawer = wcomponents.WConfTickerTapeDrawer(self._conf, self._tz)
frame = WConfDisplayFrame( self._getAW(), self._conf )
frameParams = {
"confModifURL": urlHandlers.UHConferenceModification.getURL(self._conf),
"logoURL": self.logo_url,
"currentURL": request.url,
"nowHappening": drawer.getNowHappeningHTML(),
"simpleTextAnnouncement": drawer.getSimpleText(),
'active_menu_entry_id': self.sidemenu_option
}
if self.event.has_logo:
frameParams["logoURL"] = self.logo_url
body = """
<div class="confBodyBox clearfix">
<div>
<div></div>
<div class="breadcrumps">%s</div>
</div>
<!--Main body-->
<div class="mainContent">
<div class="col2">
%s
</div>
</div>
</div>""" % (self._getNavigationBarHTML(), body)
return frame.getHTML(body, frameParams)
def _getHeadContent(self):
path = self._getBaseURL()
try:
timestamp = os.stat(__file__).st_mtime
except OSError:
timestamp = 0
printCSS = '<link rel="stylesheet" type="text/css" href="{}/css/Conf_Basic.css?{}">'.format(path, timestamp)
theme_url = get_css_url(self._conf.as_event)
if theme_url:
printCSS += '<link rel="stylesheet" type="text/css" href="{url}">'.format(url=theme_url)
# Include MathJax
return '\n'.join([
printCSS,
WConfMetadata(self._conf).getHTML(), # confMetadata
render('js/mathjax.config.js.tpl'), # mathJax
'\n'.join('<script src="{0}" type="text/javascript"></script>'.format(url)
for url in self._asset_env['mathjax_js'].urls())
])
    def _applyDecoration( self, body ):
        """Cache event/logo references (read later by the frame-building
        helpers), wrap *body* in the conference frame, then apply the
        standard page decoration."""
        self.event = self._conf.as_event
        self.logo_url = self.event.logo_url if self.event.has_logo else None
        body = self._applyConfDisplayDecoration( body )
        return WPConferenceBase._applyDecoration(self, to_unicode(body))
class WConfMetadata(wcomponents.WTemplated):
    """Renders the social/meta tags (site name, image, description) for an event page."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        minfo = info.HelperMaKaCInfo.getMaKaCInfoInstance()
        event = self._conf.as_event

        wvars['site_name'] = minfo.getTitle()
        wvars['fb_config'] = minfo.getSocialAppConfig().get('facebook', {})
        if event.has_logo:
            wvars['image'] = event.logo_url
        else:
            wvars['image'] = Config.getInstance().getSystemIconURL("logo_indico")
        # Truncate first, then strip markup, so the preview text stays short.
        wvars['description'] = strip_ml_tags(self._conf.getDescription()[:500])
        return wvars
class WConfDisplayFrame(wcomponents.WTemplated):
    """Outer frame of the public event page: logo, title, date range,
    location, display menu and the caller-supplied body."""

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf
        self.event = self._conf.as_event

    def getHTML(self, body, params):
        # The body is stashed on the instance so getVars() can expose it.
        self._body = body
        return wcomponents.WTemplated.getHTML( self, params )

    def getVars(self):
        vars = wcomponents.WTemplated.getVars( self )
        vars["logo"] = ""
        if self.event.has_logo:
            vars["logoURL"] = self.event.logo_url
            vars["logo"] = "<img src=\"%s\" alt=\"%s\" border=\"0\" class=\"confLogo\" >"%(vars["logoURL"], escape_html(self._conf.getTitle(), escape_quotes = True))
        vars["confTitle"] = self._conf.getTitle()
        vars["displayURL"] = urlHandlers.UHConferenceDisplay.getURL(self._conf)
        vars["imgConferenceRoom"] = Config.getInstance().getSystemIconURL( "conferenceRoom" )
        tz = DisplayTZ(self._aw,self._conf).getDisplayTZ()
        adjusted_sDate = self._conf.getAdjustedScreenStartDate(tz)
        adjusted_eDate = self._conf.getAdjustedScreenEndDate(tz)
        vars["timezone"] = tz
        # Default "from X to Y" wording; collapsed below for single-day and
        # same-month events.
        vars["confDateInterval"] = i18nformat("""_("from") %s _("to") %s""") % (
            format_date(adjusted_sDate, format='long'), format_date(adjusted_eDate, format='long'))
        if adjusted_sDate.strftime("%d%B%Y") == \
                adjusted_eDate.strftime("%d%B%Y"):
            vars["confDateInterval"] = format_date(adjusted_sDate, format='long')
        elif adjusted_sDate.strftime("%B%Y") == adjusted_eDate.strftime("%B%Y"):
            vars["confDateInterval"] = "%s-%s %s"%(adjusted_sDate.day, adjusted_eDate.day, format_date(adjusted_sDate, format='MMMM yyyy'))
        vars["confLocation"] = ""
        if self._conf.getLocationList():
            vars["confLocation"] = self._conf.getLocationList()[0].getName()
        vars["body"] = self._body
        vars["supportEmail"] = ""
        vars["supportTelephone"] = ""
        vars['menu'] = menu_entries_for_event(self._conf)
        vars['support_info'] = self._conf.getSupportInfo()
        # Colors are stored with a leading '#', which the template re-adds.
        vars["bgColorCode"] = layout_settings.get(self._conf, 'header_background_color').replace("#", "")
        vars["textColorCode"] = layout_settings.get(self._conf, 'header_text_color').replace("#", "")
        vars["confId"] = self._conf.getId()
        vars["conf"] = self._conf
        return vars
class WConfDisplayMenu(wcomponents.WTemplated):
    """Thin wrapper holding the menu used by the conference display pages."""
    def __init__(self, menu):
        wcomponents.WTemplated.__init__(self)
        self._menu = menu
class WConfDetailsBase( wcomponents.WTemplated ):
    """Common template data for the event "overview" details block
    (dates, location, chairs, attachments, registration status)."""

    def __init__(self, aw, conf):
        self._conf = conf
        self._aw = aw

    def getVars( self ):
        vars = wcomponents.WTemplated.getVars( self )
        tz = DisplayTZ(self._aw,self._conf).getDisplayTZ()
        vars["timezone"] = tz
        description = self._conf.getDescription()
        # description_html tells the template whether to render as raw HTML.
        vars["description_html"] = isStringHTML(description)
        vars["description"] = description
        sdate, edate = self._conf.getAdjustedScreenStartDate(tz), self._conf.getAdjustedScreenEndDate(tz)
        fsdate, fedate = format_date(sdate, format='medium'), format_date(edate, format='medium')
        fstime, fetime = sdate.strftime("%H:%M"), edate.strftime("%H:%M")
        vars["dateInterval"] = (fsdate, fstime, fedate, fetime)
        vars["location"] = None
        vars["address"] = None
        vars["room"] = None
        location = self._conf.getLocation()
        if location:
            vars["location"] = location.getName()
            vars["address"] = location.getAddress()
            room = self._conf.getRoom()
            if room and room.getName():
                roomLink = linking.RoomLinker().getHTMLLink(room, location)
                vars["room"] = roomLink
        vars["chairs"] = self._conf.getChairList()
        vars["attachments"] = self._conf.attached_items
        vars["conf"] = self._conf
        # NOTE(review): local 'info' shadows the MaKaC 'info' module used
        # elsewhere in this file; harmless here, but easy to trip over.
        info = self._conf.getContactInfo()
        vars["moreInfo_html"] = isStringHTML(info)
        vars["moreInfo"] = info
        vars["actions"] = ''
        vars["isSubmitter"] = self._conf.as_event.can_manage(session.user, 'submit')
        regform = self._conf.getRegistrationForm()
        if regform:
            vars["registration_enabled"] = regform.isActivated()
            vars["in_registration_period"] = regform.inRegistrationPeriod(nowutc())
            vars["in_modification_period"] = regform.inModificationPeriod()
            vars["registration_deadline"] = format_date(regform.getEndRegistrationDate())
            vars["modification_deadline"] = format_date(regform.getModificationEndDate())
            vars["ticket_enabled"] = regform.getETicket().isEnabled()
            if session.avatar:
                vars["registrant"] = session.avatar.getRegistrantById(self._conf.getId())
        return vars
class WConfDetailsFull(WConfDetailsBase):
    """Full event details view; inherits all behaviour from WConfDetailsBase."""
    pass
#---------------------------------------------------------------------------
class WConfDetails:
    """Facade that delegates rendering of the event details to WConfDetailsFull."""

    def __init__(self, aw, conf):
        self._conf = conf
        self._aw = aw

    def getHTML( self, params ):
        """Render the details block, forwarding *params* verbatim."""
        renderer = WConfDetailsFull( self._aw, self._conf )
        return renderer.getHTML( params )
class WPConferenceDisplay(WPConferenceDefaultDisplayBase):
    """Public "overview" page of a conference."""
    menu_entry_name = 'overview'

    def getCSSFiles(self):
        return (WPConferenceDefaultDisplayBase.getCSSFiles(self)
                + self._asset_env['eventservices_sass'].urls()
                + self._asset_env['event_display_sass'].urls())

    def _getBody(self, params):
        wc = WConfDetails(self._getAW(), self._conf)
        # URL generators the details template uses for management links.
        pars = {"modifyURL": urlHandlers.UHConferenceModification.getURL(self._conf),
                "sessionModifyURLGen": urlHandlers.UHSessionModification.getURL,
                "contribModifyURLGen": urlHandlers.UHContributionModification.getURL,
                "subContribModifyURLGen": urlHandlers.UHSubContribModification.getURL}
        return wc.getHTML(pars)

    def _getFooter(self):
        wc = wcomponents.WEventFooter(self._conf)
        return wc.getHTML()
class WSentMail (wcomponents.WTemplated):
    """Confirmation page shown after an e-mail has been sent."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        # Link back to the event's public display page.
        wvars["BackURL"] = urlHandlers.UHConferenceDisplay.getURL(self._conf)
        return wvars
class WPSentEmail( WPConferenceDefaultDisplayBase ):
    """Page wrapper for the "e-mail sent" confirmation."""
    def _getBody(self,params):
        wc = WSentMail(self._conf)
        return wc.getHTML()
class WEmail(wcomponents.WTemplated):
    """Compose-e-mail form pre-filled with sender, recipients and defaults."""
    def __init__(self,conf,user,toUsers):
        self._conf = conf
        self._from = user
        self._to = toUsers

    def getVars(self):
        vars = wcomponents.WTemplated.getVars( self )
        # NOTE(review): the guard checks the "from" key but fills "FromName";
        # looks asymmetric — confirm against the template which keys it reads.
        if vars.get("from", None) is None :
            vars["FromName"] = self._from
        vars["fromUser"] = self._from
        vars["toUsers"] = self._to
        # Only supply defaults for keys the caller did not already set.
        if vars.get("postURL",None) is None :
            vars["postURL"]=urlHandlers.UHConferenceSendEmail.getURL(self._to)
        if vars.get("subject", None) is None :
            vars["subject"]=""
        if vars.get("body", None) is None :
            vars["body"]=""
        return vars
class WPEMail ( WPConferenceDefaultDisplayBase ):
    """Page wrapper for the compose-e-mail form."""
    def _getBody(self,params):
        toemail = params["emailto"]
        wc = WEmail(self._conf, self._getAW().getUser(), toemail)
        # Sender/recipient fields are fixed by the request, so lock them.
        params["fromDisabled"] = True
        params["toDisabled"] = True
        params["ccDisabled"] = True
        return wc.getHTML(params)
class WPXSLConferenceDisplay(WPConferenceBase):
    """Render an event through an XSL stylesheet (XML output pipeline);
    header/footer/decoration are suppressed because the XSL produces the
    complete document."""
    menu_entry_name = 'overview'

    def __init__(self, rh, conference, view, type, params):
        WPConferenceBase.__init__(self, rh, conference)
        self._params = params
        self._view = view
        self._conf = conference
        self._type = type
        self._firstDay = params.get("firstDay")
        self._lastDay = params.get("lastDay")
        self._daysPerRow = params.get("daysPerRow")

    def _getFooter(self):
        """No footer: the stylesheet emits the whole document."""
        return ""

    def _getHTMLHeader(self):
        return ""

    def _applyDecoration(self, body):
        """No decoration: return the stylesheet output as-is."""
        return to_unicode(body)

    def _getHTMLFooter(self):
        return ""

    def _getBodyVariables(self):
        # URL generators and day-range parameters handed to the output
        # generator / stylesheet.
        pars = { \
        "modifyURL": urlHandlers.UHConferenceModification.getURL( self._conf ), \
        "iCalURL": urlHandlers.UHConferenceToiCal.getURL(self._conf), \
        "cloneURL": urlHandlers.UHConfClone.getURL( self._conf ), \
        "sessionModifyURLGen": urlHandlers.UHSessionModification.getURL, \
        "contribModifyURLGen": urlHandlers.UHContributionModification.getURL, \
        "subContribModifyURLGen": urlHandlers.UHSubContribModification.getURL}
        pars.update({ 'firstDay' : self._firstDay, 'lastDay' : self._lastDay, 'daysPerRow' : self._daysPerRow })
        return pars

    def _getBody(self, params):
        """Run the output generator with the XSL file for the selected view."""
        body_vars = self._getBodyVariables()
        view = self._view
        outGen = outputGenerator(self._getAW())
        styleMgr = info.HelperMaKaCInfo.getMaKaCInfoInstance().getStyleManager()
        if styleMgr.existsXSLFile(self._view):
            # Contributions are included unless a coarser detail level was
            # explicitly requested.
            if self._params.get("detailLevel", "") == "contribution" or self._params.get("detailLevel", "") == "":
                includeContribution = 1
            else:
                includeContribution = 0
            body = outGen.getFormattedOutput(self._rh, self._conf, styleMgr.getXSLPath(self._view), body_vars, 1,
                                             includeContribution, 1, 1, self._params.get("showSession", ""),
                                             self._params.get("showDate", ""))
            return body
        else:
            return _("Cannot find the %s stylesheet") % view
class WPTPLConferenceDisplay(WPXSLConferenceDisplay, object):
    """
    Overrides XSL related functions in WPXSLConferenceDisplay
    class and re-implements them using normal Indico templates.
    """

    def __init__(self, rh, conference, view, type, params):
        WPXSLConferenceDisplay.__init__(self, rh, conference, view, type, params)
        imagesBaseURL = Config.getInstance().getImagesBaseURL()
        # File-extension -> icon/category map used by the templates to pick
        # an icon for attached material.
        self._types = {
            "pdf"   :{"mapsTo" : "pdf",   "imgURL" : os.path.join(imagesBaseURL, "pdf_small.png"),  "imgAlt" : "pdf file"},
            "doc"   :{"mapsTo" : "doc",   "imgURL" : os.path.join(imagesBaseURL, "word.png"),       "imgAlt" : "word file"},
            "docx"  :{"mapsTo" : "doc",   "imgURL" : os.path.join(imagesBaseURL, "word.png"),       "imgAlt" : "word file"},
            "ppt"   :{"mapsTo" : "ppt",   "imgURL" : os.path.join(imagesBaseURL, "powerpoint.png"), "imgAlt" : "powerpoint file"},
            "pptx"  :{"mapsTo" : "ppt",   "imgURL" : os.path.join(imagesBaseURL, "powerpoint.png"), "imgAlt" : "powerpoint file"},
            "xls"   :{"mapsTo" : "xls",   "imgURL" : os.path.join(imagesBaseURL, "excel.png"),      "imgAlt" : "excel file"},
            "xlsx"  :{"mapsTo" : "xls",   "imgURL" : os.path.join(imagesBaseURL, "excel.png"),      "imgAlt" : "excel file"},
            "sxi"   :{"mapsTo" : "odp",   "imgURL" : os.path.join(imagesBaseURL, "impress.png"),    "imgAlt" : "presentation file"},
            "odp"   :{"mapsTo" : "odp",   "imgURL" : os.path.join(imagesBaseURL, "impress.png"),    "imgAlt" : "presentation file"},
            "sxw"   :{"mapsTo" : "odt",   "imgURL" : os.path.join(imagesBaseURL, "writer.png"),     "imgAlt" : "writer file"},
            "odt"   :{"mapsTo" : "odt",   "imgURL" : os.path.join(imagesBaseURL, "writer.png"),     "imgAlt" : "writer file"},
            "sxc"   :{"mapsTo" : "ods",   "imgURL" : os.path.join(imagesBaseURL, "calc.png"),       "imgAlt" : "spreadsheet file"},
            "ods"   :{"mapsTo" : "ods",   "imgURL" : os.path.join(imagesBaseURL, "calc.png"),       "imgAlt" : "spreadsheet file"},
            "other" :{"mapsTo" : "other", "imgURL" : os.path.join(imagesBaseURL, "file_small.png"), "imgAlt" : "unknown type file"},
            "link"  :{"mapsTo" : "link",  "imgURL" : os.path.join(imagesBaseURL, "link.png"),       "imgAlt" : "link"}
        }

    def _getVariables(self, conf):
        """Collect everything the meeting/lecture display template needs:
        basic event data, attachments, and the (filtered) timetable entries."""
        wvars = {}
        styleMgr = info.HelperMaKaCInfo.getMaKaCInfoInstance().getStyleManager()
        wvars['INCLUDE'] = '../include'
        wvars['accessWrapper'] = accessWrapper = self._rh._aw
        wvars['conf'] = conf
        if conf.getOwnerList():
            wvars['category'] = conf.getOwnerList()[0].getName()
        else:
            wvars['category'] = ''
        timezoneUtil = DisplayTZ(accessWrapper, conf)
        tz = timezoneUtil.getDisplayTZ()
        wvars['startDate'] = conf.getAdjustedStartDate(tz)
        wvars['endDate'] = conf.getAdjustedEndDate(tz)
        wvars['timezone'] = tz
        if conf.getParticipation().displayParticipantList() :
            wvars['participants'] = conf.getParticipation().getPresentParticipantListText()
        # Split attached folders into lecture-series folders and the rest;
        # the "Internal Page Files" folder is hidden from the material list.
        attached_items = conf.attached_items
        lectures, folders = [], []
        for folder in attached_items.get('folders', []):
            if LECTURE_SERIES_RE.match(folder.title):
                lectures.append(folder)
            elif folder.title != "Internal Page Files":
                folders.append(folder)
        # NOTE(review): assumes lecture folder titles carry a number after a
        # 4-char prefix; `cmp=` is Python 2 only.
        cmp_title_number = lambda x, y: int(x.title[4:]) - int(y.title[4:])
        wvars.update({
            'files': attached_items.get('files', []),
            'folders': folders,
            'lectures': sorted(lectures, cmp=cmp_title_number)
        })
        if (conf.getType() in ("meeting", "simple_event")
                and conf.getParticipation().isAllowedForApplying()
                and conf.getStartDate() > nowutc()
                and not conf.getParticipation().isFull()):
            wvars['registrationOpen'] = True
        wvars['supportEmailCaption'] = conf.getSupportInfo().getCaption()
        wvars['types'] = self._types
        wvars['entries'] = []
        confSchedule = conf.getSchedule()
        showSession = self._params.get("showSession","all")
        detailLevel = self._params.get("detailLevel", "contribution")
        showDate = self._params.get("showDate", "all")
        # Filter by day
        if showDate == "all":
            entrylist = confSchedule.getEntries()
        else:
            entrylist = confSchedule.getEntriesOnDay(timezone(tz).localize(stringToDate(showDate)))
        # Check entries filters and access rights
        for entry in entrylist:
            sessionCand = entry.getOwner().getOwner()
            # Filter by session
            if isinstance(sessionCand, Session) and (showSession != "all" and sessionCand.getId() != showSession):
                continue
            # Hide/Show contributions
            if isinstance(entry.getOwner(), Contribution) and detailLevel != "contribution":
                continue
            if entry.getOwner().canView(accessWrapper):
                if type(entry) is schedule.BreakTimeSchEntry:
                    newItem = entry
                else:
                    newItem = entry.getOwner()
                wvars['entries'].append(newItem)
        # Two stable sorts: final order is (startDate, title), with ties on
        # the primary key keeping later-ending entries first.
        wvars['entries'].sort(key=lambda entry: entry.getEndDate(), reverse=True)
        wvars['entries'].sort(key=lambda entry: (entry.getStartDate(),
                                                 entry.getFullTitle() if hasattr(entry, 'getFullTitle') else None))
        wvars["daysPerRow"] = self._daysPerRow
        wvars["firstDay"] = self._firstDay
        wvars["lastDay"] = self._lastDay
        wvars["currentUser"] = self._rh._aw.getUser()
        wvars["reportNumberSystems"] = Config.getInstance().getReportNumberSystems()
        return wvars

    def _getItemType(self, item):
        """Map an item's class name to the coarse type names the templates use."""
        itemClass = item.__class__.__name__
        if itemClass == 'BreakTimeSchEntry':
            return 'Break'
        elif itemClass == 'SessionSlot':
            return 'Session'
        elif itemClass == 'AcceptedContribution':
            return 'Contribution'
        else:
            # return Conference, Contribution or SubContribution
            return itemClass

    def _extractInfoForButton(self, item):
        """Build the id/URL/permission info dict driving the per-item
        management buttons (modify, minutes, material upload) in the display."""
        info = {}
        for key in ['sessId', 'slotId', 'contId', 'subContId']:
            info[key] = 'null'
        info['confId'] = self._conf.getId()

        itemType = self._getItemType(item)
        info['uploadURL'] = 'Indico.Urls.UploadAction.%s' % itemType.lower()

        if itemType == 'Conference':
            info['parentProtection'] = item.getAccessController().isProtected()
            if item.canModify(self._rh._aw):
                info["modifyLink"] = urlHandlers.UHConferenceModification.getURL(item)
                info["minutesLink"] = True
                info["materialLink"] = True
                info["cloneLink"] = urlHandlers.UHConfClone.getURL(item)
            elif item.as_event.can_manage(session.user, 'submit'):
                info["minutesLink"] = True
                info["materialLink"] = True

        elif itemType == 'Session':
            sess = item.getSession()
            info['parentProtection'] = sess.getAccessController().isProtected()
            if sess.canModify(self._rh._aw) or sess.canCoordinate(self._rh._aw):
                info["modifyLink"] = urlHandlers.UHSessionModification.getURL(item)
            info['slotId'] = item.getId()
            info['sessId'] = sess.getId()
            if sess.canModify(self._rh._aw) or sess.canCoordinate(self._rh._aw):
                info["minutesLink"] = True
                info["materialLink"] = True
                url = urlHandlers.UHSessionModifSchedule.getURL(sess)
                ttLink = "%s#%s.s%sl%s" % (url, sess.getStartDate().strftime('%Y%m%d'), sess.getId(), info['slotId'])
                info["sessionTimetableLink"] = ttLink

        elif itemType == 'Contribution':
            info['parentProtection'] = item.getAccessController().isProtected()
            if item.canModify(self._rh._aw):
                info["modifyLink"] = urlHandlers.UHContributionModification.getURL(item)
            if item.canModify(self._rh._aw) or item.canUserSubmit(self._rh._aw.getUser()):
                info["minutesLink"] = True
                info["materialLink"] = True
            info["contId"] = item.getId()
            owner = item.getOwner()
            if self._getItemType(owner) == 'Session':
                info['sessId'] = owner.getId()

        elif itemType == 'SubContribution':
            info['parentProtection'] = item.getContribution().getAccessController().isProtected()
            if item.canModify(self._rh._aw):
                info["modifyLink"] = urlHandlers.UHSubContributionModification.getURL(item)
            if item.canModify(self._rh._aw) or item.canUserSubmit(self._rh._aw.getUser()):
                info["minutesLink"] = True
                info["materialLink"] = True
            info["subContId"] = item.getId()
            info["contId"] = item.getContribution().getId()
            owner = item.getOwner()
            if self._getItemType(owner) == 'Session':
                info['sessId'] = owner.getId()

        return info

    def _getHTMLHeader( self ):
        # Unlike the XSL variant, the template variant wants the normal header.
        return WPConferenceBase._getHTMLHeader(self)

    def _getHeadContent( self ):
        """Assemble <head> markup: base CSS, common event CSS, view-specific
        CSS, optional theme CSS, metadata and MathJax."""
        config = Config.getInstance()
        styleMgr = info.HelperMaKaCInfo.getMaKaCInfoInstance().getStyleManager()
        htdocs = config.getHtdocsDir()
        baseurl = self._getBaseURL()
        # First include the default Indico stylesheet
        try:
            timestamp = os.stat(__file__).st_mtime
        except OSError:
            timestamp = 0
        styleText = """<link rel="stylesheet" href="%s/css/%s?%d">\n""" % \
            (baseurl, Config.getInstance().getCssStylesheetName(), timestamp)
        # Then the common event display stylesheet
        if os.path.exists("%s/css/events/common.css" % htdocs):
            styleText += """        <link rel="stylesheet" href="%s/css/events/common.css?%d">\n""" % (baseurl,
                                                                                                       timestamp)
        # And finally the specific display stylesheet
        if styleMgr.existsCSSFile(self._view):
            cssPath = os.path.join(baseurl, 'css', 'events', styleMgr.getCSSFilename(self._view))
            styleText += """<link rel="stylesheet" href="%s?%d">\n""" % (cssPath, timestamp)

        theme_url = get_css_url(self._conf.as_event)
        if theme_url:
            link = '<link rel="stylesheet" type="text/css" href="{url}">'.format(url=theme_url)
            styleText += link

        confMetadata = WConfMetadata(self._conf).getHTML()

        mathJax = render('js/mathjax.config.js.tpl') + \
                  '\n'.join(['<script src="{0}" type="text/javascript"></script>'.format(url) for url in
                             self._asset_env['mathjax_js'].urls()])

        return styleText + confMetadata + mathJax

    def _getFooter( self ):
        """Event footer with modification date and optional short URL."""
        wc = wcomponents.WEventFooter(self._conf)
        p = {"modificationDate":format_datetime(self._conf.getModificationDate(), format='d MMMM yyyy H:mm'),"subArea": self._getSiteArea(),"dark":True}
        if Config.getInstance().getShortEventURL():
            id=self._conf.getUrlTag().strip()
            if not id:
                id = self._conf.getId()
            p["shortURL"] =  Config.getInstance().getShortEventURL() + id
        return wc.getHTML(p)

    def _getHeader( self ):
        """Pick the header widget matching the event type."""
        if self._type == "simple_event":
            wc = wcomponents.WMenuSimpleEventHeader( self._getAW(), self._conf )
        elif self._type == "meeting":
            wc = wcomponents.WMenuMeetingHeader( self._getAW(), self._conf )
        else:
            wc = wcomponents.WMenuConferenceHeader( self._getAW(), self._conf )
        return wc.getHTML( { "loginURL": self.getLoginURL(),\
                             "logoutURL": self.getLogoutURL(),\
                             "confId": self._conf.getId(),\
                             "currentView": self._view,\
                             "type": self._type,\
                             "selectedDate": self._params.get("showDate",""),\
                             "selectedSession": self._params.get("showSession",""),\
                             "detailLevel": self._params.get("detailLevel",""),\
                             "filterActive": self._params.get("filterActive",""),\
                             "dark": True } )

    def getCSSFiles(self):
        return (WPConferenceBase.getCSSFiles(self) +
                self._asset_env['eventservices_sass'].urls() +
                self._asset_env['event_display_sass'].urls())

    def getJSFiles(self):
        modules = WPConferenceBase.getJSFiles(self)

        # TODO: find way to check if the user is able to manage
        # anything inside the conference (sessions, ...)
        modules += (self._includeJSPackage('Management') +
                    self._includeJSPackage('MaterialEditor') +
                    self._includeJSPackage('Display') +
                    self._asset_env['modules_vc_js'].urls() +
                    self._asset_env['modules_event_display_js'].urls() +
                    self._asset_env['zero_clipboard_js'].urls())
        return modules

    def _applyDecoration( self, body ):
        """Print view (frame=no / fr=no) gets a bare print frame instead of
        the full page decoration."""
        if self._params.get("frame","")=="no" or self._params.get("fr","")=="no":
            return to_unicode(WPrintPageFrame().getHTML({"content":body}))
        return WPConferenceBase._applyDecoration(self, body)

    def _getHTMLFooter( self ):
        if self._params.get("frame","")=="no" or self._params.get("fr","")=="no":
            return ""
        return WPConferenceBase._getHTMLFooter(self)

    @staticmethod
    def getLocationInfo(item, roomLink=True, fullName=False):
        """Return a tuple (location, room, url) containing
        information about the location of the item."""
        minfo = info.HelperMaKaCInfo.getMaKaCInfoInstance()
        location = item.getLocation().getName() if item.getLocation() else ""
        customRoom = item.getRoom()
        if not customRoom:
            roomName = ''
        elif fullName and location and Config.getInstance().getIsRoomBookingActive():
            # if we want the full name and we have a RB DB to search in
            roomName = customRoom.getFullName()
            if not roomName:
                customRoom.retrieveFullName(location) # try to fetch the full name
                roomName = customRoom.getFullName() or customRoom.getName()
        else:
            roomName = customRoom.getName()
        # TODO check if the following if is required
        if roomName in ['', '0--', 'Select:']:
            roomName = ''
        if roomLink:
            url = linking.RoomLinker().getURL(item.getRoom(), item.getLocation())
        else:
            url = ""
        return (location, roomName, url)

    def _getBody(self, params):
        """Return main information about the event."""
        if self._view != 'xml':
            vars = self._getVariables(self._conf)
            # Formatting helpers exposed directly to the template.
            vars['getTime'] = lambda date : format_time(date.time(), format="HH:mm")
            vars['isTime0H0M'] = lambda date : (date.hour, date.minute) == (0,0)
            vars['getDate'] = lambda date : format_date(date, format='yyyy-MM-dd')
            vars['prettyDate'] = lambda date : format_date(date, format='full')
            vars['prettyDuration'] = MaKaC.common.utils.prettyDuration
            vars['parseDate'] = MaKaC.common.utils.parseDate
            vars['isStringHTML'] = MaKaC.common.utils.isStringHTML
            vars['extractInfoForButton'] = lambda item : self._extractInfoForButton(item)
            vars['getItemType'] = lambda item : self._getItemType(item)
            vars['getLocationInfo'] = WPTPLConferenceDisplay.getLocationInfo
            vars['dumps'] = json.dumps
            vars['timedelta'] = timedelta
        else:
            # XML view: hand the raw XML to the template instead.
            outGen = outputGenerator(self._rh._aw)
            varsForGenerator = self._getBodyVariables()
            vars = {}
            vars['xml'] = outGen._getBasicXML(self._conf, varsForGenerator, 1, 1, 1, 1)

        styleMgr = info.HelperMaKaCInfo.getMaKaCInfoInstance().getStyleManager()
        if styleMgr.existsTPLFile(self._view):
            fileName = os.path.splitext(styleMgr.getTemplateFilename(self._view))[0]
            body = wcomponents.WTemplated(os.path.join("events", fileName)).getHTML(vars)
        else:
            return _("Template could not be found.")
        return body
class WPrintPageFrame (wcomponents.WTemplated):
    """Minimal frame used for the print view (frame=no)."""
    pass
class WConfDisplayBodyBase(wcomponents.WTemplated):
    """Base for display-page bodies whose title comes from the event menu."""
    def _getTitle(self):
        # _linkname is defined by each subclass and matched against the menu.
        entry = get_menu_entry_by_name(self._linkname, self._conf)
        return entry.localized_title
class WConfProgram(WConfDisplayBodyBase):
    """Public "scientific programme" body: description plus track list."""

    _linkname = 'program'

    def __init__(self, aw, conf):
        self._conf = conf
        self._aw = aw

    def buildTrackData(self, track):
        """
        Returns a dict representing the data of the track and its Sub-tracks
        should it have any.
        """
        description = track.getDescription()

        formattedTrack = {
            'title': track.getTitle(),
            'description': description
        }

        # Only coordinators (with CFA active) get a management link; full
        # managers go to the track modification page, others to abstracts.
        if track.getConference().getAbstractMgr().isActive() and \
           track.getConference().hasEnabledSection("cfa") and \
           track.canCoordinate(self._aw):

            if track.getConference().canModify(self._aw):
                formattedTrack['url'] = urlHandlers.UHTrackModification.getURL(track)
            else:
                formattedTrack['url'] = urlHandlers.UHTrackModifAbstracts.getURL(track)

        return formattedTrack

    def getVars(self):
        pvars = wcomponents.WTemplated.getVars(self)
        pvars["body_title"] = self._getTitle()
        pvars['description'] = self._conf.getProgramDescription()
        pvars['program'] = [self.buildTrackData(t) for t in self._conf.getTrackList()]
        pvars['pdf_url'] = urlHandlers.UHConferenceProgramPDF.getURL(self._conf)
        return pvars
class WPConferenceProgram(WPConferenceDefaultDisplayBase):
    """Display page for the event's scientific programme."""

    menu_entry_name = 'program'

    def _getBody(self, params):
        content = WConfProgram(self._getAW(), self._conf)
        return content.getHTML()
class WConferenceTimeTable(WConfDisplayBodyBase):
    """Public timetable body: serializes the schedule to JSON for the
    client-side timetable widget."""

    _linkname = 'timetable'

    def __init__(self, conference, aw):
        self._conf = conference
        self._aw = aw

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        tz = DisplayTZ(self._aw, self._conf).getDisplayTZ()
        sf = schedule.ScheduleToJson.process(self._conf.getSchedule(),
                                             tz, self._aw,
                                             useAttrCache=True,
                                             hideWeekends=True)
        # TODO: Move to beginning of file when proved useful
        # ujson is used when available, falling back to stdlib json.
        try:
            import ujson
            jsonf = ujson.encode
        except ImportError:
            jsonf = json.dumps
        wvars["ttdata"] = jsonf(sf)
        eventInfo = fossilize(self._conf, IConferenceEventInfoFossil, tz=tz)
        eventInfo['isCFAEnabled'] = self._conf.getAbstractMgr().isActive()
        wvars['eventInfo'] = eventInfo
        wvars['timetableLayout'] = wvars.get('ttLyt', '')
        return wvars
class WPConferenceTimeTable(WPConferenceDefaultDisplayBase):
    """Page wrapper for the public conference timetable."""
    navigationEntry = navigation.NEConferenceTimeTable
    menu_entry_name = 'timetable'

    def getJSFiles(self):
        return WPConferenceDefaultDisplayBase.getJSFiles(self) + \
               self._includeJSPackage('Timetable')

    def _getHeadContent(self):
        content = WPConferenceDefaultDisplayBase._getHeadContent(self)
        return content + '<link rel="stylesheet" type="text/css" href="{}/css/timetable.css">'.format(
            self._getBaseURL())

    def _getBody( self, params ):
        wc = WConferenceTimeTable( self._conf, self._getAW() )
        return wc.getHTML(params)
class WPMeetingTimeTable( WPTPLConferenceDisplay ):
    """Timetable page for meeting-style events."""

    def getJSFiles(self):
        # NOTE(review): calls WPXSLConferenceDisplay.getJSFiles although the
        # base class is WPTPLConferenceDisplay — this skips the management/
        # display JS packages that WPTPLConferenceDisplay adds; confirm this
        # is intentional.
        return WPXSLConferenceDisplay.getJSFiles(self) + \
               self._includeJSPackage('Timetable')

    def _getBody( self, params ):
        wc = WConferenceTimeTable( self._conf, self._getAW() )
        return wc.getHTML(params)
class WPConferenceModifBase(main.WPMainBase):
    """Base page for all event-management (modification) pages: management
    header, navigation drawer and the management side menu."""

    _userData = ['favorite-user-ids']

    def __init__(self, rh, conference, **kwargs):
        main.WPMainBase.__init__(self, rh, **kwargs)
        self._navigationTarget = self._conf = conference

    def getJSFiles(self):
        return main.WPMainBase.getJSFiles(self) + \
               self._includeJSPackage('Management') + \
               self._includeJSPackage('MaterialEditor')

    def getCSSFiles(self):
        return main.WPMainBase.getCSSFiles(self) + self._asset_env['event_management_sass'].urls()

    def _getSiteArea(self):
        return "ModificationArea"

    def _getHeader( self ):
        """Management-area header with login/logout links."""
        wc = wcomponents.WHeader( self._getAW() )
        return wc.getHTML( { "subArea": self._getSiteArea(), \
                             "loginURL": self._escapeChars(str(self.getLoginURL())),\
                             "logoutURL": self._escapeChars(str(self.getLogoutURL())) } )

    def _getNavigationDrawer(self):
        pars = {"target": self._conf, "isModif": True }
        return wcomponents.WNavigationDrawer( pars, bgColor="white" )

    def _applyFrame(self, body):
        """Wrap *body* in the event-management frame, including the side menu
        highlighted at self.sidemenu_option."""
        frame = wcomponents.WConferenceModifFrame(self._conf, self._getAW())

        params = {
            "categDisplayURLGen": urlHandlers.UHCategoryDisplay.getURL,
            "confDisplayURLGen": urlHandlers.UHConferenceDisplay.getURL,
            "event": "Conference",
            "sideMenu": render_sidemenu('event-management-sidemenu', active_item=self.sidemenu_option, old_style=True,
                                        event=self._conf.as_event)
        }

        wf = self._rh.getWebFactory()
        if wf:
            # A web factory (meeting/lecture) overrides the event label.
            params["event"] = wf.getName()
        return frame.getHTML(body, **params)

    def _getBody( self, params ):
        return self._applyFrame( self._getPageContent( params ) )

    def _getTabContent( self, params ):
        return "nothing"

    def _getPageContent( self, params ):
        return "nothing"
class WPConferenceModifAbstractBase( WPConferenceModifBase ):
    """Base page for the abstracts-management section; builds the tab bar
    (setup, preview, abstract list, book of abstracts, reviewing)."""

    sidemenu_option = 'abstracts'

    def __init__(self, rh, conf):
        WPConferenceModifBase.__init__(self, rh, conf)

    def _createTabCtrl(self):
        self._tabCtrl = wcomponents.TabControl()

        self._tabCFA = self._tabCtrl.newTab( "cfasetup", _("Setup"), urlHandlers.UHConfModifCFA.getURL( self._conf ) )
        self._tabCFAPreview = self._tabCtrl.newTab("cfapreview", _("Preview"), urlHandlers.UHConfModifCFAPreview.getURL(self._conf))
        self._tabAbstractList = self._tabCtrl.newTab( "abstractList", _("List of Abstracts"), urlHandlers.UHConfAbstractList.getURL( self._conf ) )
        self._tabBOA = self._tabCtrl.newTab("boa", _("Book of Abstracts Setup"), urlHandlers.UHConfModAbstractBook.getURL(self._conf))
        self._tabCFAR = self._tabCtrl.newTab("reviewing", _("Reviewing"), urlHandlers.UHAbstractReviewingSetup.getURL(self._conf))

        # Create subtabs for the reviewing
        self._subTabARSetup = self._tabCFAR.newSubTab( "revsetup", _("Settings"),\
                urlHandlers.UHAbstractReviewingSetup.getURL(self._conf))
        self._subTabARTeam = self._tabCFAR.newSubTab( "revteam", _("Team"),\
                urlHandlers.UHAbstractReviewingTeam.getURL(self._conf))
        self._subTabARNotifTpl = self._tabCFAR.newSubTab( "notiftpl", _("Notification templates"),\
                urlHandlers.UHAbstractReviewingNotifTpl.getURL(self._conf))

        # Everything is greyed out when the call-for-abstracts section is off.
        if not self._conf.hasEnabledSection("cfa"):
            self._tabBOA.disable()
            self._tabCFA.disable()
            self._tabAbstractList.disable()
            self._tabCFAPreview.disable()
            self._tabCFAR.disable()

        self._setActiveTab()

    def _getPageContent(self, params):
        self._createTabCtrl()

        return wcomponents.WTabControl( self._tabCtrl, self._getAW() ).getHTML( self._getTabContent( params ) )

    def _getTabContent(self, params):
        return "nothing"

    def _setActiveTab(self):
        # Subclasses select their tab here.
        pass
class WConfModifMainData(wcomponents.WTemplated):
    """Management-area "general settings" overview for an event.

    Collects everything shown on the main modification page: title,
    description, dates (incl. "screen" dates), location, contribution
    types, chairpersons, report numbers and short-URL settings.
    """

    def __init__(self, conference, ct, rh):
        self._conf = conference
        self._ct = ct
        self._rh = rh

    def _getChairPersonsList(self):
        """Fossilize the chair list, flagging per chair whether the
        "grant manager/submitter rights" checkboxes should be shown
        (hidden when the chair already holds the corresponding right)."""
        result = fossilize(self._conf.getChairList())
        for chair in result:
            user = get_user_by_email(chair['email'])
            chair['showManagerCB'] = True
            chair['showSubmitterCB'] = True
            email_submitters = {x.email for x in self._conf.as_event.acl_entries
                                if x.type == PrincipalType.email and x.has_management_role('submit', explicit=True)}
            if chair['email'] in email_submitters or (user and self._conf.as_event.can_manage(user, 'submit',
                                                                                              explicit_role=True)):
                chair['showSubmitterCB'] = False
            email_managers = {x.email for x in self._conf.as_event.acl_entries if x.type == PrincipalType.email}
            if chair['email'] in email_managers or (user and self._conf.as_event.can_manage(user, explicit_role=True)):
                chair['showManagerCB'] = False
        return result

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        vars["defaultStyle"] = self._conf.getDefaultStyle()
        vars["visibility"] = self._conf.getVisibility()
        vars["dataModificationURL"]=quoteattr(str(urlHandlers.UHConfDataModif.getURL(self._conf)))
        vars["addTypeURL"]=urlHandlers.UHConfAddContribType.getURL(self._conf)
        vars["removeTypeURL"]=urlHandlers.UHConfRemoveContribType.getURL(self._conf)
        vars["title"]=self._conf.getTitle()
        # The original branched on isStringHTML() but rendered the description
        # identically in both branches, so the check collapses to a default.
        vars["description"] = self._conf.getDescription() or ""
        ###################################
        # Fermi timezone awareness        #
        ###################################
        tz = self._conf.getTimezone()
        vars["timezone"] = tz
        vars["startDate"]=formatDateTime(self._conf.getAdjustedStartDate())
        vars["endDate"]=formatDateTime(self._conf.getAdjustedEndDate())
        ###################################
        # Fermi timezone awareness(end)   #
        ###################################
        vars["chairText"] = self.htmlText(self._conf.getChairmanText())
        place=self._conf.getLocation()
        vars["locationName"]=vars["locationAddress"]=""
        if place:
            vars["locationName"]=self.htmlText(place.getName())
            vars["locationAddress"]=self.htmlText(place.getAddress())
        room=self._conf.getRoom()
        vars["locationRoom"]=""
        if room:
            vars["locationRoom"]=self.htmlText(room.getName())
        # Plain-text contact info is wrapped in <pre> for display.
        if isStringHTML(self._conf.getContactInfo()):
            vars["contactInfo"]=self._conf.getContactInfo()
        else:
            vars["contactInfo"] = """<table class="tablepre"><tr><td><pre>%s</pre></td></tr></table>""" % self._conf.getContactInfo()
        vars["supportEmailCaption"] = self._conf.getSupportInfo().getCaption()
        vars["supportEmail"] = i18nformat("""--_("not set")--""")
        if self._conf.getSupportInfo().hasEmail():
            vars["supportEmail"] = self.htmlText(self._conf.getSupportInfo().getEmail())
        # One checkbox + edit link + description per contribution type.
        typeList = []
        for contrib_type in self._conf.getContribTypeList():
            typeList.append("""<input type="checkbox" name="types" value="%s"><a href="%s">%s</a><br>
<table><tr><td width="30"></td><td><font><pre>%s</pre></font></td></tr></table>"""%( \
                contrib_type.getId(), \
                str(urlHandlers.UHConfEditContribType.getURL(contrib_type)), \
                contrib_type.getName(), \
                contrib_type.getDescription()))
        vars["typeList"] = "".join(typeList)
        #------------------------------------------------------
        vars["reportNumbersTable"]=wcomponents.WReportNumbersTable(self._conf).getHTML()
        vars["eventType"] = self._conf.getType()
        vars["keywords"] = self._conf.getKeywords()
        vars["shortURLBase"] = Config.getInstance().getShortEventURL()
        vars["shortURLTag"] = self._conf.getUrlTag()
        vars["screenDatesURL"] = urlHandlers.UHConfScreenDatesEdit.getURL(self._conf)
        # "Screen" dates may be overridden for display; flag them when they
        # differ from the real event dates.
        ssdate = format_datetime(self._conf.getAdjustedScreenStartDate(), format='EEEE d MMMM yyyy H:mm')
        if self._conf.getScreenStartDate() == self._conf.getStartDate():
            ssdate += i18nformat(""" <i> _("(normal)")</i>""")
        else:
            ssdate += i18nformat(""" <font color='red'>_("(modified)")</font>""")
        sedate = format_datetime(self._conf.getAdjustedScreenEndDate(), format='EEEE d MMMM yyyy H:mm')
        if self._conf.getScreenEndDate() == self._conf.getEndDate():
            sedate += i18nformat(""" <i> _("(normal)")</i>""")
        else:
            sedate += i18nformat(""" <font color='red'> _("(modified)")</font>""")
        vars['rbActive'] = Config.getInstance().getIsRoomBookingActive()
        vars["screenDates"] = "%s -> %s" % (ssdate, sedate)
        vars["timezoneList"] = TimezoneRegistry.getList()
        vars["chairpersons"] = self._getChairPersonsList()
        # Reuse the location/room objects fetched above instead of re-querying.
        vars["currentLocation"] = { 'location': place.getName() if place else "",
                                    'room': room.name if room else "",
                                    'address': place.getAddress() if place else "" }
        return vars
class WPConferenceModificationClosed( WPConferenceModifBase ):

    def __init__(self, rh, target):
        WPConferenceModifBase.__init__(self, rh, target)

    def _getPageContent(self, params):
        """Render the 'event locked' notice, with an unlock button when the
        current user is allowed to unlock the event."""
        from indico.modules.events.management import can_lock
        unlockable = can_lock(self._conf, session.user)
        parts = [_("The event is currently locked so it cannot be modified.")]
        if unlockable:
            parts.append(_("If you unlock the event, you will be able to modify it again."))
        return wcomponents.WClosed().getHTML({
            "message": ' '.join(parts),
            "postURL": url_for('event_management.unlock', self._conf),
            "showUnlockButton": unlockable,
            "unlockButtonCaption": _("Unlock event"),
        })
class WPConferenceModification( WPConferenceModifBase ):

    sidemenu_option = 'general'

    def __init__(self, rh, target, ct=None):
        WPConferenceModifBase.__init__(self, rh, target)
        self._ct = ct

    def _getPageContent(self, params):
        """Render the general settings page for the event."""
        component = WConfModifMainData(self._conf, self._ct, self._rh)
        return component.getHTML({"type": params.get("type", ""),
                                  "conferenceId": self._conf.getId()})
class WConfModScreenDatesEdit(wcomponents.WTemplated):
    """Form for overriding the dates *displayed* for an event ("screen
    dates") without changing its real timetable dates."""

    def __init__(self,conf):
        self._conf=conf

    def getVars(self):
        """Prefill the form with the real dates and, when a screen-date
        override exists, switch the radio selection to the override."""
        vars=wcomponents.WTemplated.getVars(self)
        vars["postURL"]=quoteattr(str(urlHandlers.UHConfScreenDatesEdit.getURL(self._conf)))
        ###################################
        # Fermi timezone awareness        #
        ###################################
        # Real event dates adjusted to the event's own timezone.
        csd = self._conf.getAdjustedStartDate()
        ced = self._conf.getAdjustedEndDate()
        ###################################
        # Fermi timezone awareness(end)   #
        ###################################
        vars["conf_start_date"]=self.htmlText(format_datetime(csd, format='EEEE d MMMM yyyy H:mm'))
        vars["conf_end_date"]=self.htmlText(format_datetime(ced, format='EEEE d MMMM yyyy H:mm'))
        # Default: "use conference dates" radio selected, fields prefilled
        # with the real start date.
        vars["start_date_own_sel"]=""
        vars["start_date_conf_sel"]=" checked"
        vars["sDay"],vars["sMonth"],vars["sYear"]=csd.day,csd.month,csd.year
        vars["sHour"],vars["sMin"]=csd.hour,csd.minute
        if self._conf.getScreenStartDate() != self._conf.getStartDate():
            # A custom screen start date exists: select the "own date" radio
            # and prefill with the override instead.
            # NOTE(review): this branch wraps values in quoteattr(str(...))
            # while the default branch stores raw ints - the template
            # apparently accepts both; confirm before unifying.
            vars["start_date_own_sel"]=" checked"
            vars["start_date_conf_sel"]=""
            sd=self._conf.getAdjustedScreenStartDate()
            vars["sDay"]=quoteattr(str(sd.day))
            vars["sMonth"]=quoteattr(str(sd.month))
            vars["sYear"]=quoteattr(str(sd.year))
            vars["sHour"]=quoteattr(str(sd.hour))
            vars["sMin"]=quoteattr(str(sd.minute))
        # Same handling for the end date.
        vars["end_date_own_sel"]=""
        vars["end_date_conf_sel"]=" checked"
        vars["eDay"],vars["eMonth"],vars["eYear"]=ced.day,ced.month,ced.year
        vars["eHour"],vars["eMin"]=ced.hour,ced.minute
        if self._conf.getScreenEndDate() != self._conf.getEndDate():
            vars["end_date_own_sel"]=" checked"
            vars["end_date_conf_sel"]=""
            ed=self._conf.getAdjustedScreenEndDate()
            vars["eDay"]=quoteattr(str(ed.day))
            vars["eMonth"]=quoteattr(str(ed.month))
            vars["eYear"]=quoteattr(str(ed.year))
            vars["eHour"]=quoteattr(str(ed.hour))
            vars["eMin"]=quoteattr(str(ed.minute))
        return vars
class WPScreenDatesEdit(WPConferenceModification):

    def _getPageContent(self, params):
        """Render the screen-dates edit form."""
        return WConfModScreenDatesEdit(self._conf).getHTML()
class WConferenceDataModificationAdditionalInfo(wcomponents.WTemplated):
    """Extra 'contact info' section shown on the data-modification form."""

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars["contactInfo"] = self._conf.getContactInfo()
        return tpl_vars
class WConferenceDataModification(wcomponents.WTemplated):
    """Form component for editing an event's basic data (title, dates,
    timezone, type, style, visibility, location, support info, ...).

    Cleanups vs. the previous revision (no behavioral change for the
    template): removed an unused ``minfo`` lookup, removed the dead
    ``locName``/``locRoom``/``room`` computations whose results were never
    exported, removed a duplicated ``vars["locator"]`` assignment, and
    renamed locals that shadowed the ``type``/``id`` builtins.
    """

    def __init__( self, conference, rh ):
        self._conf = conference
        self._rh = rh

    def _getVisibilityHTML(self):
        """Build the <option> list for event visibility: 'Nowhere', one level
        per parent category (skipping the root), then 'Everywhere'."""
        visibility = self._conf.getVisibility()
        topcat = self._conf.getOwnerList()[0]
        level = 0
        selected = ""
        if visibility == 0:
            selected = "selected"
        vis = [ i18nformat("""<option value="0" %s> _("Nowhere")</option>""") % selected]
        while topcat:
            level += 1
            selected = ""
            if level == visibility:
                selected = "selected"
            if topcat.getId() != "0":
                from MaKaC.common.TemplateExec import truncateTitle
                vis.append("""<option value="%s" %s>%s</option>""" % (level, selected, truncateTitle(topcat.getName(), 120)))
            topcat = topcat.getOwner()
        selected = ""
        if visibility > level:
            selected = "selected"
        vis.append( i18nformat("""<option value="999" %s> _("Everywhere")</option>""") % selected)
        # Options were collected bottom-up; present them top-down.
        vis.reverse()
        return "".join(vis)

    def getVars(self):
        vars = wcomponents.WTemplated.getVars( self )

        navigator = ""
        styleMgr = info.HelperMaKaCInfo.getMaKaCInfoInstance().getStyleManager()
        conf_type = self._conf.getType()
        vars["timezoneOptions"] = TimezoneRegistry.getShortSelectItemsHTML(self._conf.getTimezone())
        # Display-style selector: fall back to "static" when no valid
        # default style is configured for this event type.
        styles = styleMgr.getExistingStylesForEventType(conf_type)
        styleoptions = ""
        defStyle = self._conf.getDefaultStyle()
        if defStyle not in styles:
            defStyle = ""
        for styleId in styles:
            if styleId == defStyle or (defStyle == "" and styleId == "static"):
                selected = "selected"
            else:
                selected = ""
            styleoptions += "<option value=\"%s\" %s>%s</option>" % (styleId,selected,styleMgr.getStyleName(styleId))
        vars["conference"] = self._conf
        vars["useRoomBookingModule"] = Config.getInstance().getIsRoomBookingActive()
        vars["styleOptions"] = styleoptions

        # Event-type selector: "conference" plus every registered web factory
        # ("simple_event" is labelled "lecture" in the UI).
        import MaKaC.webinterface.webFactoryRegistry as webFactoryRegistry
        wr = webFactoryRegistry.WebFactoryRegistry()
        types = [ "conference" ]
        for fact in wr.getFactoryList():
            types.append(fact.getId())
        vars["types"] = ""
        for type_id in types:
            typetext = type_id
            if typetext == "simple_event":
                typetext = "lecture"
            if self._conf.getType() == type_id:
                vars["types"] += "<option value=\"%s\" selected>%s" % (type_id,typetext)
            else:
                vars["types"] += "<option value=\"%s\">%s" % (type_id,typetext)
        vars["title"] = quoteattr( self._conf.getTitle() )
        vars["description"] = self._conf.getDescription()
        vars["keywords"] = self._conf.getKeywords()
        # Date fields, adjusted to the event's timezone.
        tz = self._conf.getTimezone()
        vars["sDay"] = str( self._conf.getAdjustedStartDate(tz).day )
        vars["sMonth"] = str( self._conf.getAdjustedStartDate(tz).month )
        vars["sYear"] = str( self._conf.getAdjustedStartDate(tz).year )
        vars["sHour"] = str( self._conf.getAdjustedStartDate(tz).hour )
        vars["sMinute"] = str( self._conf.getAdjustedStartDate(tz).minute )
        vars["eDay"] = str( self._conf.getAdjustedEndDate(tz).day )
        vars["eMonth"] = str( self._conf.getAdjustedEndDate(tz).month )
        vars["eYear"] = str( self._conf.getAdjustedEndDate(tz).year )
        vars["eHour"] = str( self._conf.getAdjustedEndDate(tz).hour )
        vars["eMinute"] = str( self._conf.getAdjustedEndDate(tz).minute )
        vars["chairText"] = quoteattr( self._conf.getChairmanText() )
        vars["orgText"] = quoteattr( self._conf.getOrgText() )
        vars["visibility"] = self._getVisibilityHTML()
        vars["shortURLTag"] = quoteattr( self._conf.getUrlTag() )
        # Only the location *address* is exported to the template.
        locAddress = ""
        location = self._conf.getLocation()
        if location:
            locAddress = location.getAddress()

        vars["locator"] = self._conf.getLocator().getWebForm()

        vars["locationAddress"] = locAddress

        vars["supportCaption"] = quoteattr(self._conf.getSupportInfo().getCaption())
        vars["supportEmail"] = quoteattr( self._conf.getSupportInfo().getEmail() )
        vars["event_type"] = ""
        vars["navigator"] = navigator
        eventType = self._conf.getType()
        if eventType == "conference":
            vars["additionalInfo"] = WConferenceDataModificationAdditionalInfo(self._conf).getHTML(vars)
        else:
            vars["additionalInfo"] = ""
        return vars
class WPConfDataModif( WPConferenceModification ):

    def _getPageContent(self, params):
        """Render the event data-modification form."""
        form = WConferenceDataModification(self._conf, self._rh)
        return form.getHTML({
            "postURL": urlHandlers.UHConfPerformDataModif.getURL(self._conf),
            "type": params.get("type"),
        })
class WConfModifScheduleGraphic(wcomponents.WTemplated):
    """Template component feeding the graphical (client-side) timetable
    management view."""

    def __init__(self, conference, customLinks, **params):
        wcomponents.WTemplated.__init__(self, **params)
        self._conf = conference
        # Extra timetable buttons contributed by plugins.
        self._customLinks = customLinks

    def getVars( self ):
        vars=wcomponents.WTemplated.getVars(self)
        ################################
        # Fermi timezone awareness     #
        ################################
        # Dates below are rendered in the event's own timezone.
        tz = self._conf.getTimezone()
        vars["timezone"]= tz
        vars["start_date"]=self._conf.getAdjustedStartDate().strftime("%a %d/%m")
        vars["end_date"]=self._conf.getAdjustedEndDate().strftime("%a %d/%m")
        #################################
        # Fermi timezone awareness(end) #
        #################################
        vars["editURL"]=quoteattr(str(urlHandlers.UHConfModScheduleDataEdit.getURL(self._conf)))
        # Full timetable serialized for the JS editor; mgmtMode enables the
        # editing affordances client-side.
        vars['ttdata'] = schedule.ScheduleToJson.process(self._conf.getSchedule(), tz, None,
                                                         days = None, mgmtMode = True)
        vars['customLinks'] = self._customLinks
        eventInfo = fossilize(self._conf, IConferenceEventInfoFossil, tz = tz)
        eventInfo['isCFAEnabled'] = self._conf.getAbstractMgr().isActive()
        vars['eventInfo'] = eventInfo
        return vars
class WPConfModifScheduleGraphic( WPConferenceModifBase ):
    """Management page hosting the graphical timetable editor."""

    sidemenu_option = 'timetable'
    _userData = ['favorite-user-list', 'favorite-user-ids']

    def __init__(self, rh, conf):
        WPConferenceModifBase.__init__(self, rh, conf)
        self._contrib = None

    def getJSFiles(self):
        base_files = WPConferenceModifBase.getJSFiles(self)
        return base_files + self._includeJSPackage('Timetable')

    def _getSchedule(self):
        # Plugins may contribute extra timetable buttons via this signal.
        links = dict(values_from_signal(signals.event.timetable_buttons.send(self)))
        return WConfModifScheduleGraphic(self._conf, links)

    def _getTTPage( self, params ):
        return self._getSchedule().getHTML(params)

    def _getPageContent(self, params):
        return self._getTTPage(params)
#------------------------------------------------------------------------------
class WPConfModifSchedule( WPConferenceModifBase ):
    # Legacy (non-graphic) schedule management page base: only marks the
    # schedule tab active.  NOTE(review): self._tabSchedule is expected to be
    # created by a _createTabCtrl defined elsewhere - confirm before reuse.

    def _setActiveTab( self ):
        self._tabSchedule.setActive()
#------------------------------------------------------------------------------
class WConfModScheduleDataEdit(wcomponents.WTemplated):
    """Form for editing the event's real start/end dates."""

    def __init__(self,conf):
        self._conf=conf

    def getVars(self):
        """Prefill the date fields (as strings) with the event's current
        dates adjusted to its timezone."""
        vars=wcomponents.WTemplated.getVars(self)
        vars["postURL"]=quoteattr(str(urlHandlers.UHConfModScheduleDataEdit.getURL(self._conf)))
        #######################################
        # Fermi timezone awareness            #
        #######################################
        csd = self._conf.getAdjustedStartDate()
        ced = self._conf.getAdjustedEndDate()
        #######################################
        # Fermi timezone awareness(end)       #
        #######################################
        vars["sDay"],vars["sMonth"],vars["sYear"]=str(csd.day),str(csd.month),str(csd.year)
        vars["sHour"],vars["sMin"]=str(csd.hour),str(csd.minute)
        vars["eDay"],vars["eMonth"],vars["eYear"]=str(ced.day),str(ced.month),str(ced.year)
        vars["eHour"],vars["eMin"]=str(ced.hour),str(ced.minute)
        return vars
class WPModScheduleDataEdit(WPConfModifSchedule):

    def _getPageContent(self, params):
        """Render the schedule-dates edit form."""
        return WConfModScheduleDataEdit(self._conf).getHTML()
class WConfModifACSessionCoordinatorRights(wcomponents.WTemplated):
    """Renders the list of optional session-coordinator rights, each with a
    toggle link and a tick/cross icon reflecting its current state.

    Fixed: removed a dead ``url`` assignment before the loop that was
    immediately overwritten on the first iteration.
    """

    def __init__(self,conf):
        self._conf = conf

    def getVars( self ):
        vars = wcomponents.WTemplated.getVars(self)
        html=[]
        scr = conference.SessionCoordinatorRights()
        for rightKey in scr.getRightKeys():
            # Each right toggles via its own URL (rightId parameter).
            url = urlHandlers.UHConfModifCoordinatorRights.getURL(self._conf)
            url.addParam("rightId", rightKey)
            if self._conf.hasSessionCoordinatorRight(rightKey):
                imgurl=Config.getInstance().getSystemIconURL("tick")
            else:
                imgurl=Config.getInstance().getSystemIconURL("cross")
            html.append("""
                        <a href=%s><img class="imglink" src=%s></a> %s
                        """%(quoteattr(str(url)), quoteattr(str(imgurl)), scr.getRight(rightKey)))
        vars["optionalRights"]="<br>".join(html)
        return vars
class WConfModifAC:
    """Aggregates the access-control frames (modification control,
    registrars, access control, domains, protection tools and
    session-coordinator rights) into the protection page body.
    NOTE: old-style class; kept as-is for compatibility.
    """

    def __init__(self, conference, eventType, user):
        self.__conf = conference
        self._eventType = eventType
        # Stored but not read by getHTML; kept for interface compatibility.
        self.__user = user

    def getHTML( self, params ):
        ac = wcomponents.WConfAccessControlFrame().getHTML( self.__conf,\
                                            params["setVisibilityURL"])
        dc = ""
        # The domain-control frame only applies to public (unprotected) events.
        if not self.__conf.isProtected():
            dc = "<br>%s"%wcomponents.WDomainControlFrame( self.__conf ).getHTML()

        mc = wcomponents.WConfModificationControlFrame().getHTML( self.__conf) + "<br>"

        # Registrars frame only for full conferences.
        if self._eventType == "conference":
            rc = wcomponents.WConfRegistrarsControlFrame().getHTML(self.__conf) + "<br>"
        else:
            rc = ""

        tf = ""
        if self._eventType in ["conference", "meeting"]:
            tf = "<br>%s" % wcomponents.WConfProtectionToolsFrame(self.__conf).getHTML()

        cr = ""
        if self._eventType == "conference":
            cr = "<br>%s" % WConfModifACSessionCoordinatorRights(self.__conf).getHTML()

        return """<br><table width="100%%" class="ACtab"><tr><td>%s%s%s%s%s%s<br></td></tr></table>""" % (mc, rc, ac, dc, tf, cr)
class WPConfModifAC(WPConferenceModifBase):

    sidemenu_option = 'protection'

    def __init__(self, rh, conf):
        WPConferenceModifBase.__init__(self, rh, conf)
        # Meetings/lectures come through a web factory; plain conferences
        # have none.
        factory = self._rh.getWebFactory()
        self._eventType = factory.getId() if factory is not None else "conference"
        self._user = self._rh._getUser()

    def _getPageContent(self, params):
        """Render the protection/access-control page."""
        component = WConfModifAC(self._conf, self._eventType, self._user)
        return component.getHTML({
            'setVisibilityURL': urlHandlers.UHConfSetVisibility.getURL(self._conf),
        })
class WPConfModifToolsBase(WPConferenceModifBase):
    """Base page for the 'utilities' section (poster/badge printing tabs)."""

    sidemenu_option = 'utilities'

    def _createTabCtrl(self):
        self._tabCtrl = wcomponents.TabControl()
        self._tabPosters = self._tabCtrl.newTab("posters", _("Posters"), \
                urlHandlers.UHConfModifPosterPrinting.getURL(self._conf))
        self._tabBadges = self._tabCtrl.newTab("badges", _("Badges/Tablesigns"), \
                urlHandlers.UHConfModifBadgePrinting.getURL(self._conf))
        self._setActiveTab()
        # Give the event-type web factory a chance to customise the tabs.
        wf = self._rh.getWebFactory()
        if wf:
            wf.customiseToolsTabCtrl(self._tabCtrl)

    def _getPageContent(self, params):
        self._createTabCtrl()
        html = wcomponents.WTabControl(self._tabCtrl, self._getAW()).getHTML(self._getTabContent(params))
        return html

    def _setActiveTab(self):
        # Subclasses override to select their own tab.
        pass

    def _getTabContent(self, params):
        # Subclasses override with the actual tab body.
        return "nothing"
class WPConfCloneConfirm(WPConferenceModifBase):
    """Confirmation page shown before a multi-event clone is executed."""

    def __init__(self, rh, conf, nbClones):
        # Fixed: call the declared base class' __init__.  The original
        # called WPConfModifToolsBase.__init__, which only worked because
        # that class defines no __init__ of its own and the call resolved
        # to this same inherited method.
        WPConferenceModifBase.__init__(self, rh, conf)
        self._nbClones = nbClones

    def _getPageContent(self, params):
        msg = _("This action will create {0} new events. Are you sure you want to proceed").format(self._nbClones)

        wc = wcomponents.WConfirmation()
        url = urlHandlers.UHConfPerformCloning.getURL(self._conf)
        # Forward every submitted clone parameter to the confirmation target.
        for key, value in self._rh._getRequestParams().items():
            url.addParam(key, value)
        return wc.getHTML(msg, url, {}, True,
                          confirmButtonCaption=_("Yes"), cancelButtonCaption=_("No"))
#---------------------------------------------------------------------------
class WPConferenceModifParticipantBase(WPConferenceModifBase):
    """Base page for the participants section; builds the shared tab set."""

    sidemenu_option = 'participants'

    def __init__(self, rh, conf):
        WPConferenceModifBase.__init__(self, rh, conf)

    def _createTabCtrl(self):
        self._tabCtrl = wcomponents.TabControl()

        self._tabParticipantsSetup = self._tabCtrl.newTab("participantsetup", _("Setup"), urlHandlers.UHConfModifParticipantsSetup.getURL(self._conf))
        self._tabParticipantsList = self._tabCtrl.newTab("participantsList", _("Participants"), urlHandlers.UHConfModifParticipants.getURL(self._conf))
        self._tabStatistics = self._tabCtrl.newTab("statistics", _("Statistics"), urlHandlers.UHConfModifParticipantsStatistics.getURL(self._conf))
        # The pending tab only exists while the event has not started AND
        # there are pending participants; the declined tab only when the
        # declined list is non-empty.
        if self._conf.getParticipation().getPendingParticipantList() and nowutc() < self._conf.getStartDate():
            self._tabParticipantsPendingList = self._tabCtrl.newTab("pendingList", _("Pending"), urlHandlers.UHConfModifParticipantsPending.getURL(self._conf), className="pendingTab")
        if self._conf.getParticipation().getDeclinedParticipantList():
            self._tabParticipantsDeclinedList = self._tabCtrl.newTab("declinedList", _("Declined"), urlHandlers.UHConfModifParticipantsDeclined.getURL(self._conf))
        self._setActiveTab()

    def _getPageContent(self, params):
        self._createTabCtrl()
        return wcomponents.WTabControl(self._tabCtrl, self._getAW()).getHTML(self._getTabContent(params))

    def getJSFiles(self):
        return WPConferenceModifBase.getJSFiles(self) + \
            self._includeJSPackage('Display')

    def _getTabContent(self, params):
        # Subclasses override with the actual tab body.
        return "nothing"

    def _setActiveTab(self):
        # Subclasses override to select their own tab.
        pass
class WConferenceParticipant(wcomponents.WTemplated):
    """Template fragment for a single event participant."""

    def __init__(self, conference, participant):
        self._conf = conference
        self._participant = participant

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars.update({"conference": self._conf,
                         "participant": self._participant})
        return tpl_vars
class WConferenceParticipantPending(wcomponents.WTemplated):
    """Template fragment for a single pending participant."""

    def __init__(self, conference, id, pending):
        self._conf = conference
        self._id = id
        self._pending = pending

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars.update({"conference": self._conf,
                         "id": self._id,
                         "pending": self._pending})
        return tpl_vars
class WConferenceParticipantsSetup(wcomponents.WTemplated):
    """Settings form for the participants module of an event."""

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        participation = self._conf.getParticipation()
        tpl_vars["confId"] = self._conf.getId()
        tpl_vars["isObligatory"] = participation.isObligatory()
        tpl_vars["allowDisplay"] = participation.displayParticipantList()
        tpl_vars["addedInfo"] = participation.isAddedInfo()
        tpl_vars["allowForApply"] = participation.isAllowedForApplying()
        tpl_vars["autoAccept"] = participation.isAutoAccept()
        tpl_vars["numMaxParticipants"] = participation.getNumMaxParticipants()
        tpl_vars["notifyMgrNewParticipant"] = participation.isNotifyMgrNewParticipant()
        return tpl_vars
class WPConfModifParticipantsSetup(WPConferenceModifParticipantBase):

    def _setActiveTab(self):
        self._tabParticipantsSetup.setActive()

    def _getTabContent(self, params):
        """Render the participants setup tab."""
        return WConferenceParticipantsSetup(self._conf).getHTML(params)
class WConferenceParticipants(wcomponents.WTemplated):
    """Participants list tab for an event.

    NOTE(review): ``vars["hasStarted"]`` is computed with ``<``, i.e. it is
    True while the event has *not* started - the opposite of what the name
    suggests (sibling components compute "conferenceStarted" with ``>``).
    The template may depend on the current value, so it is flagged here
    rather than flipped; confirm against the template before changing.
    """

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)

        vars["selectAll"] = Config.getInstance().getSystemIconURL("checkAll")
        vars["deselectAll"] = Config.getInstance().getSystemIconURL("uncheckAll")

        vars["participantsAction"] = str(urlHandlers.UHConfModifParticipantsAction.getURL(self._conf))
        # See class NOTE: True means the event has not started yet.
        vars["hasStarted"] = nowutc() < self._conf.getStartDate()
        # NOTE(review): self._rh is not set in __init__; presumably injected
        # before getVars runs - confirm against the caller.
        vars["currentUser"] = self._rh._aw.getUser()
        vars["numberParticipants"] = len(self._conf.getParticipation().getParticipantList())
        vars["conf"] = self._conf
        vars["excelIconURL"] = quoteattr(str(Config.getInstance().getSystemIconURL("excel")))
        return vars
class WPConfModifParticipants(WPConferenceModifParticipantBase):

    def _setActiveTab(self):
        self._tabParticipantsList.setActive()

    def _getTabContent(self, params):
        """Render the participants list tab."""
        return WConferenceParticipants(self._conf).getHTML(params)
class WConferenceParticipantsPending(wcomponents.WTemplated):
    """Listing of participants whose application is still pending."""

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        config = Config.getInstance()
        tpl_vars["selectAll"] = config.getSystemIconURL("checkAll")
        tpl_vars["deselectAll"] = config.getSystemIconURL("uncheckAll")
        tpl_vars["pending"] = self._getPendingParticipantsList()
        tpl_vars["numberPending"] = self._conf.getParticipation().getPendingNumber()
        tpl_vars["conf"] = self._conf
        tpl_vars["conferenceStarted"] = nowutc() > self._conf.getStartDate()
        # NOTE(review): self._rh is not set in __init__; presumably injected
        # before getVars runs - confirm against the caller.
        tpl_vars["currentUser"] = self._rh._aw.getUser()
        return tpl_vars

    def _getPendingParticipantsList(self):
        """Return (key, pending participant) pairs for every pending entry."""
        participation = self._conf.getParticipation()
        return [(key, participation.getPendingParticipantByKey(key))
                for key in participation.getPendingParticipantList().keys()]
class WPConfModifParticipantsPending(WPConferenceModifParticipantBase):

    def _setActiveTab(self):
        self._tabParticipantsPendingList.setActive()

    def _getTabContent(self, params):
        """Render the pending-participants tab (takes no template params)."""
        return WConferenceParticipantsPending(self._conf).getHTML()
class WConferenceParticipantsDeclined(wcomponents.WTemplated):
    """Listing of participants who declined their invitation."""

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars["declined"] = self._getDeclinedParticipantsList()
        tpl_vars["numberDeclined"] = self._conf.getParticipation().getDeclinedNumber()
        return tpl_vars

    def _getDeclinedParticipantsList(self):
        """Return (key, participant) pairs for every declined entry."""
        participation = self._conf.getParticipation()
        return [(key, participation.getDeclinedParticipantByKey(key))
                for key in participation.getDeclinedParticipantList().keys()]
class WPConfModifParticipantsDeclined(WPConferenceModifParticipantBase):

    def _setActiveTab(self):
        self._tabParticipantsDeclinedList.setActive()

    def _getTabContent(self, params):
        """Render the declined-participants tab (takes no template params)."""
        return WConferenceParticipantsDeclined(self._conf).getHTML()
class WConferenceParticipantsStatistics(wcomponents.WTemplated):
    """Aggregate counters for the event's participants module."""

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        participation = self._conf.getParticipation()
        tpl_vars["invited"] = participation.getInvitedNumber()
        tpl_vars["rejected"] = participation.getRejectedNumber()
        tpl_vars["added"] = participation.getAddedNumber()
        tpl_vars["refused"] = participation.getRefusedNumber()
        tpl_vars["pending"] = participation.getPendingNumber()
        tpl_vars["declined"] = participation.getDeclinedNumber()
        tpl_vars["conferenceStarted"] = nowutc() > self._conf.getStartDate()
        tpl_vars["present"] = participation.getPresentNumber()
        tpl_vars["absent"] = participation.getAbsentNumber()
        tpl_vars["excused"] = participation.getExcusedNumber()
        return tpl_vars
class WPConfModifParticipantsStatistics(WPConferenceModifParticipantBase):

    def _setActiveTab(self):
        self._tabStatistics.setActive()

    def _getTabContent(self, params):
        """Render the participant statistics tab."""
        return WConferenceParticipantsStatistics(self._conf).getHTML(params)
class WPConfModifParticipantsInvitationBase(WPConferenceDisplayBase):
    """Base page for the invitation accept/refuse screens shown to invitees."""

    def _getHeader(self):
        """Render the simple event header (dark, static view)."""
        wc = wcomponents.WMenuSimpleEventHeader(self._getAW(), self._conf)
        # NOTE(review): WebFactory.getId() is called on what appears to be a
        # class/module imported elsewhere in this file - confirm it is the
        # intended source of the "type" value here.
        return wc.getHTML({"loginURL": self.getLoginURL(),\
                           "logoutURL": self.getLogoutURL(),\
                           "confId": self._conf.getId(),\
                           "currentView": "static",\
                           "type": WebFactory.getId(),\
                           "dark": True})

    def _getBody(self, params):
        return '<div style="margin:10px">{0}</div>'.format(self._getContent(params))
class WPConfModifParticipantsInvite(WPConfModifParticipantsInvitationBase):

    def _getContent(self, params):
        """Render the accept/reject confirmation for an invited participant."""
        url = urlHandlers.UHConfParticipantsInvitation.getURL(self._conf)
        url.addParam("participantId", params["participantId"])
        prompt = _("Please indicate whether you want to accept or reject the invitation to '{0}'").format(self._conf.getTitle())
        return wcomponents.WConfirmation().getHTML(prompt, url, {},
                                                   confirmButtonCaption=_("Accept"),
                                                   cancelButtonCaption=_("Reject"),
                                                   severity="accept")
#---------------------------------------------------------------------------
class WPConfModifParticipantsRefuse(WPConfModifParticipantsInvitationBase):

    def _getContent( self, params ):
        """Render the refusal confirmation for an invited participant."""
        prompt = i18nformat("""
        <font size="+2"> _("Are you sure you want to refuse to attend the '%s'")?</font>
              """)%(self._conf.getTitle())

        url = urlHandlers.UHConfParticipantsRefusal.getURL( self._conf )
        url.addParam("participantId", params["participantId"])
        return wcomponents.WConfirmation().getHTML(prompt, url, {},
                                                   confirmButtonCaption=_("Refuse"),
                                                   cancelButtonCaption=_("Cancel"))
#---------------------------------------------------------------------------
class WConfModifListings( wcomponents.WTemplated ):
    """Entry page for the 'lists' section: links to the various listings."""

    def __init__( self, conference ):
        self.__conf = conference

    def getVars( self ):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        # All three entries share the same "listing" icon.
        listing_icon = quoteattr(str(Config.getInstance().getSystemIconURL("listing")))
        tpl_vars["pendingQueuesIconURL"] = listing_icon
        tpl_vars["pendingQueuesURL"] = quoteattr(str(urlHandlers.UHConfModifPendingQueues.getURL(self.__conf)))
        tpl_vars["allSessionsConvenersIconURL"] = listing_icon
        tpl_vars["allSessionsConvenersURL"] = quoteattr(str(urlHandlers.UHConfAllSessionsConveners.getURL(self.__conf)))
        tpl_vars["allSpeakersIconURL"] = listing_icon
        tpl_vars["allSpeakersURL"] = quoteattr(str(urlHandlers.UHConfAllSpeakers.getURL(self.__conf)))
        return tpl_vars
class WPConfModifListings(WPConferenceModifBase):
    """Lists section page with sub-tabs for speakers, conveners and pending
    people."""

    sidemenu_option = 'lists'

    def __init__(self, rh, conference):
        WPConferenceModifBase.__init__(self, rh, conference)
        self._createTabCtrl()

    def _createTabCtrl(self):
        self._tabCtrl = wcomponents.TabControl()
        self._subTabSpeakers = self._tabCtrl.newTab('speakers',
                                                    _('All Contribution Speakers'),
                                                    urlHandlers.UHConfAllSpeakers.getURL(self._conf))
        self._subTabConveners = self._tabCtrl.newTab('conveners',
                                                     _('All Session Conveners'),
                                                     urlHandlers.UHConfAllSessionsConveners.getURL(self._conf))
        self._subTabUsers = self._tabCtrl.newTab('users',
                                                 _('People Pending'),
                                                 urlHandlers.UHConfModifPendingQueues.getURL(self._conf))

    def _getPageContent(self, params):
        self._setActiveTab()
        return wcomponents.WTabControl(self._tabCtrl, self._getAW()).getHTML(self._getTabContent(params))

    def _setActiveTab(self):
        # Default to the pending-people tab; subclasses override.
        self._subTabUsers.setActive()
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
class WConferenceClone(wcomponents.WTemplated):
    """Clone-event form: event title/id plus day/month/year selectors
    defaulting to today's date."""

    def __init__(self, conference):
        self.__conf = conference

    def _getSelectDay(self):
        """Return <OPTION> markup for days 1-31, today's day preselected."""
        today = datetime.today()
        sd = ""
        for day in range(1, 32):
            selected = "selected=\"selected\"" if today.day == day else ""
            sd += "<OPTION VALUE=\"%d\" %s>%d\n" % (day, selected, day)
        return sd

    def _getSelectMonth(self):
        """Return <OPTION> markup for the 12 months (localized labels),
        the current month preselected."""
        month = [ "January", "February", "March", "April", "May", "June",
                  "July", "August", "September", "October", "November", "December"]
        today = datetime.today()
        sm = ""
        for i, name in enumerate(month, 1):
            selected = "selected=\"selected\"" if today.month == i else ""
            sm += "\t<OPTION VALUE=\"%d\" %s>%s\n" % (i, selected, _(name))
        return sm

    def _getSelectYear(self):
        """Return <OPTION> markup for the selectable years.

        Fixed: the original hard-coded the range 1995-2014, so from 2015 on
        the current year could neither be offered nor preselected.  The
        range now always extends a few years past today while keeping the
        original years available.
        """
        today = datetime.today()
        sy = ""
        for year in range(1995, max(2015, today.year + 6)):
            selected = "selected=\"selected\"" if today.year == year else ""
            sy += "\t<OPTION VALUE=\"%d\" %s>%d\n" % (year, selected, year)
        return sy

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        vars["confTitle"] = self.__conf.getTitle()
        vars["confId"] = self.__conf.getId()
        vars["selectDay"] = self._getSelectDay()
        vars["selectMonth"] = self._getSelectMonth()
        vars["selectYear"] = self._getSelectYear()
        return vars
class WPConfClone(WPConferenceModifBase):
    """Clone-event page: the form plus the checkbox list of what to clone."""

    def _getPageContent(self, params):
        p = WConferenceClone(self._conf)
        pars = {"cancelURL": urlHandlers.UHConfModifTools.getURL(self._conf),
                "cloning": urlHandlers.UHConfPerformCloning.getURL(self._conf),
                "cloneOptions": i18nformat("""<li><input type="checkbox" name="cloneTracks" id="cloneTracks" value="1" />_("Tracks")</li>
                     <li><input type="checkbox" name="cloneTimetable" id="cloneTimetable" value="1" />_("Full timetable")</li>
                     <li><ul style="list-style-type: none;"><li><input type="checkbox" name="cloneSessions" id="cloneSessions" value="1" />_("Sessions")</li></ul></li>
                     <li><input type="checkbox" name="cloneRegistration" id="cloneRegistration" value="1" >_("Registration")</li>""") }
        # Plugins may contribute additional clone options.
        pars['cloneOptions'] += EventCloner.get_plugin_items(self._conf)
        return p.getHTML(pars)
class WConferenceAllSessionsConveners(wcomponents.WTemplated):
    """Listing of every session/slot convener of the event, grouped per
    person (one entry per index key with all their sessions)."""

    def __init__(self, conference):
        self.__conf = conference

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        vars["confTitle"] = self.__conf.getTitle()
        vars["confId"] = self.__conf.getId()
        vars["convenerSelectionAction"] = quoteattr(str(urlHandlers.UHConfAllSessionsConvenersAction.getURL(self.__conf)))
        vars["contribSetIndex"] = 'index'
        vars["convenerNumber"] = str(len(self.__conf.getAllSessionsConvenerList()))
        vars["conveners"] = self._getAllConveners()
        return vars

    def _getTimetableURL(self, convener):
        """Deep link into the session timetable, anchored at the convener's
        slot when the convener is slot-bound, otherwise at the session's
        start day."""
        url = urlHandlers.UHSessionModifSchedule.getURL(self.__conf)
        url.addParam("sessionId", convener.getSession().getId())
        if hasattr(convener, "getSlot"):
            timetable = "#" + str(convener.getSlot().getStartDate().strftime("%Y%m%d")) + ".s%sl%s" % (convener.getSession().getId(), convener.getSlot().getId())
        else:
            timetable = "#" + str(convener.getSession().getStartDate().strftime("%Y%m%d"))
        return "%s%s" % (url, timetable)

    def _getAllConveners(self):
        """Return one dict per distinct convener with name, email and the
        list of sessions/slots they convene."""
        convenersFormatted = []
        convenersDict = self.__conf.getAllSessionsConvenerList()

        for key, conveners in convenersDict.iteritems():
            data = None
            for convener in convenersDict[key]:
                # All entries under one key describe the same person; the
                # first one provides the personal data.
                if not data:
                    data = {
                        'email': convener.getEmail(),
                        'name': convener.getFullName() or '',
                        'sessions': []
                    }
                sessionData = {
                    'title': '',
                    'urlTimetable': self._getTimetableURL(convener),
                    'urlSessionModif': None
                }
                if isinstance(convener, conference.SlotChair):
                    # Slot conveners are titled "Session: Block" style and
                    # get no session-modification link.
                    title = convener.getSlot().getTitle() or "Block %s" % convener.getSlot().getId()
                    sessionData['title'] = convener.getSession().getTitle() + ': ' + title
                else:
                    url = urlHandlers.UHSessionModification.getURL(self.__conf)
                    url.addParam('sessionId', convener.getSession().getId())
                    sessionData['urlSessionModif'] = str(url)
                    sessionData['title'] = convener.getSession().getTitle() or ''

                data['sessions'].append(sessionData)

            convenersFormatted.append(data)
        return convenersFormatted
class WPConfAllSessionsConveners(WPConfModifListings):

    def _setActiveTab(self):
        self._subTabConveners.setActive()

    def _getTabContent(self, params):
        """Render the all-session-conveners listing."""
        return WConferenceAllSessionsConveners(self._conf).getHTML()
#---------------------------------------------------------------------------------------
class WConfModifAllContribParticipants(wcomponents.WTemplated):
    """Listing of all contribution participants (e.g. speakers) with the
    contributions each person takes part in, built from a participation
    index."""

    def __init__(self, conference, partIndex):
        self._title = _("All participants list")
        self._conf = conference
        self._order = ""
        self._dispopts = ["Email", "Contributions"]
        self._partIndex = partIndex

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        # Kept for _getURL(); supplied by the caller via template params.
        self._url = vars["participantMainPageURL"]
        vars["speakers"] = self._getAllParticipants()
        vars["participantNumber"] = str(len(self._partIndex.getParticipationKeys()))
        return vars

    def _getAllParticipants(self):
        """Return one dict per indexed person with name, email and their
        contributions (title + management URL)."""
        speakers = []

        for key in self._partIndex.getParticipationKeys():
            participationList = self._partIndex.getById(key)

            if participationList:
                # All entries in the list belong to the same person; the
                # first one provides the personal data.
                participant = participationList[0]

                pData = {
                    'name': participant.getFullName(),
                    'email': participant.getEmail(),
                    'contributions': []
                }

                for participation in participationList:
                    contribution = participation.getContribution()

                    if contribution:
                        pData['contributions'].append({
                            'title': contribution.getTitle(),
                            'url': str(urlHandlers.UHContributionModification.getURL(contribution))
                        })

                speakers.append(pData)

        return speakers

    def _getURL(self):
        return self._url
class WPConfAllSpeakers(WPConfModifListings):

    def _setActiveTab(self):
        self._subTabSpeakers.setActive()

    def _getTabContent(self, params):
        """Render the all-speakers listing based on the speaker index."""
        component = WConfModifAllContribParticipants(self._conf, self._conf.getSpeakerIndex())
        tpl_params = {
            "title": _("All speakers list"),
            "participantMainPageURL": urlHandlers.UHConfAllSpeakers.getURL(self._conf),
            "participantSelectionAction": quoteattr(str(urlHandlers.UHConfAllSpeakersAction.getURL(self._conf))),
        }
        return component.getHTML(tpl_params)
class WPEMailContribParticipants(WPConfModifListings):
    """Page hosting the email-composition form for contribution participants."""

    def __init__(self, rh, conf, participantList):
        WPConfModifListings.__init__(self, rh, conf)
        self._participantList = participantList

    def _getPageContent(self, params):
        form = WEmailToContribParticipants(self._conf, self._getAW().getUser(),
                                           self._participantList)
        return form.getHTML()
class WEmailToContribParticipants(wcomponents.WTemplated):
    """Compose-email form addressed to a list of contribution participants.

    Fixed: the bare ``except:`` around the sender lookup (which silently
    swallowed every error) is narrowed to the case it was guarding against,
    and the unused ``toIds`` accumulator is gone.
    """

    def __init__(self,conf,user,contribParticipantList):
        self._conf = conf
        try:
            self._fromemail = user.getEmail()
        except AttributeError:
            # No logged-in user (or one without an email accessor): leave
            # the sender field blank.
            self._fromemail = ""
        self._contribParticipantList = contribParticipantList

    def getVars(self):
        vars = wcomponents.WTemplated.getVars( self )
        # Drop empty addresses from the recipient list.
        toEmails = [email for email in self._contribParticipantList if len(email) > 0]
        vars["From"] = self._fromemail
        vars["toEmails"]= ", ".join(toEmails)
        vars["emails"]= ",".join(toEmails)
        vars["postURL"]=urlHandlers.UHContribParticipantsSendEmail.getURL(self._conf)
        vars["subject"]=""
        vars["body"]=""
        return vars
#---------------------------------------------------------------------------------------
class WPEMailConveners(WPConfModifListings):
    """Page hosting the email-composition form for session conveners."""

    def __init__(self, rh, conf, convenerList):
        WPConfModifListings.__init__(self, rh, conf)
        self._convenerList = convenerList

    def _getPageContent(self, params):
        form = WEmailToConveners(self._conf, self._getAW().getUser(),
                                 self._convenerList)
        return form.getHTML()
class WEmailToConveners(wcomponents.WTemplated):
    """Compose-email form addressed to a list of session conveners.

    Fixed: the bare ``except:`` around the sender lookup (which silently
    swallowed every error) is narrowed to the case it was guarding against,
    and the unused ``toIds`` accumulator is gone.
    """

    def __init__(self,conf,user,convenerList):
        self._conf = conf
        try:
            self._fromemail = user.getEmail()
        except AttributeError:
            # No logged-in user (or one without an email accessor): leave
            # the sender field blank.
            self._fromemail = ""
        self._convenerList = convenerList

    def getVars(self):
        vars = wcomponents.WTemplated.getVars( self )
        # Drop empty addresses from the recipient list.
        toEmails = [email for email in self._convenerList if len(email) > 0]
        vars["From"] = self._fromemail
        vars["toEmails"]= ", ".join(toEmails)
        vars["emails"]= ",".join(toEmails)
        vars["postURL"]=urlHandlers.UHConvenersSendEmail.getURL(self._conf)
        vars["subject"]=""
        vars["body"]=""
        return vars
#---------------------------------------------------------------------------------------
class WConvenerSentMail(wcomponents.WTemplated):
    """Confirmation component shown after mailing the session conveners."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        back_url = urlHandlers.UHConfAllSessionsConveners.getURL(self._conf)
        tpl_vars["BackURL"] = quoteattr(str(back_url))
        return tpl_vars
class WPConvenerSentEmail(WPConfModifListings):
    """Confirmation page displayed after sending e-mail to conveners."""

    def _getTabContent(self, params):
        return WConvenerSentMail(self._conf).getHTML()
class WContribParticipationSentMail(wcomponents.WTemplated):
    """Confirmation component shown after mailing contribution speakers."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        back_url = urlHandlers.UHConfAllSpeakers.getURL(self._conf)
        tpl_vars["BackURL"] = quoteattr(str(back_url))
        return tpl_vars
class WPContribParticipationSentEmail(WPConfModifListings):
    """Confirmation page displayed after sending e-mail to speakers."""

    def _getTabContent(self, params):
        return WContribParticipationSentMail(self._conf).getHTML()
class WConfModifCFA(wcomponents.WTemplated):
    """Management component for the conference call-for-abstracts (CFA) settings page."""
    def __init__(self, conference):
        self._conf = conference
    def _getAbstractFieldsHTML(self, vars):
        # Render the table of configurable abstract-submission fields with
        # enable/disable icons, up/down reorder arrows and a remove checkbox.
        # ``vars`` must provide the "enablePic"/"disablePic" icon URLs.
        abMgr = self._conf.getAbstractMgr()
        enabledText = _("Click to disable")
        disabledText = _("Click to enable")
        laf = []
        urlRemove = str(urlHandlers.UHConfModifCFARemoveOptFld.getURL(self._conf))
        laf.append("""<form action="" method="POST">""")
        for af in abMgr.getAbstractFieldsMgr().getFields():
            urlUp = urlHandlers.UHConfModifCFAAbsFieldUp.getURL(self._conf)
            urlUp.addParam("fieldId", af.getId())
            urlDown = urlHandlers.UHConfModifCFAAbsFieldDown.getURL(self._conf)
            urlDown.addParam("fieldId", af.getId())
            if af.isMandatory():
                mandatoryText = _("mandatory")
            else:
                mandatoryText = _("optional")
            maxCharText = ""
            # Only text fields carry a length limitation worth displaying.
            if isinstance(af, AbstractTextField):
                maxCharText = " - "
                if int(af.getMaxLength()) != 0:
                    maxCharText += _("max: %s %s.") % (af.getMaxLength(), af.getLimitation())
                else:
                    maxCharText += _("not limited")
            addInfo = "(%s%s)" % (mandatoryText, maxCharText)
            url = urlHandlers.UHConfModifCFAOptFld.getURL(self._conf)
            url.addParam("fieldId", af.getId())
            url = quoteattr("%s#optional" % str(url))
            if self._conf.getAbstractMgr().hasEnabledAbstractField(af.getId()):
                icon = vars["enablePic"]
                textIcon = enabledText
            else:
                icon = vars["disablePic"]
                textIcon = disabledText
            # The built-in "content" field can never be removed.
            if af.getId() == "content":
                removeButton = ""
            else:
                removeButton = "<input type=\"checkbox\" name=\"fieldId\" value=\"%s\">" % af.getId()
            laf.append("""
                            <tr>
                                <td>
                                  <a href=%s><img src=%s alt="%s" class="imglink"></a> <a href=%s><img src=%s border="0" alt=""></a><a href=%s><img src=%s border="0" alt=""></a>
                                </td>
                                <td width="1%%">%s</td>
                                <td>
                                  <a class="edit-field" href="#" data-id=%s data-fieldType=%s>%s</a> %s
                                </td>
                            </tr>
                            """ % (
                                url,
                                icon,
                                textIcon,
                                quoteattr(str(urlUp)),
                                quoteattr(str(Config.getInstance().getSystemIconURL("upArrow"))),
                                quoteattr(str(urlDown)),
                                quoteattr(str(Config.getInstance().getSystemIconURL("downArrow"))),
                                removeButton,
                                af.getId(),
                                af.getType(),
                                af.getCaption(),
                                addInfo))
        laf.append(i18nformat("""
            <tr>
              <td align="right" colspan="3">
                <input type="submit" value="_("remove")" onClick="this.form.action='%s';" class="btn">
                <input id="add-field-button" type="submit" value="_("add")" class="btn">
              </td>
            </tr>
            </form>""") % urlRemove)
        # NOTE(review): the template above already emits "</form>", so this
        # line appears to produce a duplicate closing tag -- confirm intent
        # before changing.
        laf.append("</form>")
        return "".join(laf)
    def getVars(self):
        # Expose the current CFA configuration (status, dates, notification
        # lists, action URLs and the fields table) to the template.
        vars = wcomponents.WTemplated.getVars(self)
        abMgr = self._conf.getAbstractMgr()
        vars["iconDisabled"] = str(Config.getInstance().getSystemIconURL("disabledSection"))
        vars["iconEnabled"] = str(Config.getInstance().getSystemIconURL("enabledSection"))
        vars["multipleTracks"] = abMgr.getMultipleTracks()
        vars["areTracksMandatory"] = abMgr.areTracksMandatory()
        vars["canAttachFiles"] = abMgr.canAttachFiles()
        vars["showSelectAsSpeaker"] = abMgr.showSelectAsSpeaker()
        vars["isSelectSpeakerMandatory"] = abMgr.isSelectSpeakerMandatory()
        vars["showAttachedFilesContribList"] = abMgr.showAttachedFilesContribList()
        vars["multipleUrl"] = urlHandlers.UHConfCFASwitchMultipleTracks.getURL(self._conf)
        vars["mandatoryUrl"] = urlHandlers.UHConfCFAMakeTracksMandatory.getURL(self._conf)
        vars["attachUrl"] = urlHandlers.UHConfCFAAllowAttachFiles.getURL(self._conf)
        vars["showSpeakerUrl"] = urlHandlers.UHConfCFAShowSelectAsSpeaker.getURL(self._conf)
        vars["speakerMandatoryUrl"] = urlHandlers.UHConfCFASelectSpeakerMandatory.getURL(self._conf)
        vars["showAttachedFilesUrl"] = urlHandlers.UHConfCFAAttachedFilesContribList.getURL(self._conf)
        vars["setStatusURL"] = urlHandlers.UHConfCFAChangeStatus.getURL(self._conf)
        vars["dataModificationURL"] = urlHandlers.UHCFADataModification.getURL(self._conf)
        # When the CFA is enabled show the submission period and notification
        # lists; otherwise render every field disabled/blank.
        if abMgr.getCFAStatus():
            vars["changeTo"] = "False"
            vars["status"] = _("ENABLED")
            vars["changeStatus"] = _("DISABLE")
            vars["startDate"] = format_date(abMgr.getStartSubmissionDate(), format='full')
            vars["endDate"] = format_date(abMgr.getEndSubmissionDate(), format='full')
            vars["announcement"] = abMgr.getAnnouncement()
            vars["disabled"] = ""
            modifDL = abMgr.getModificationDeadline()
            vars["modifDL"] = i18nformat("""--_("not specified")--""")
            if modifDL:
                vars["modifDL"] = format_date(modifDL, format='full')
            vars["notification"] = i18nformat("""
                        <table align="left">
                            <tr>
                                <td align="right"><b> _("To List"):</b></td>
                                <td align="left">%s</td>
                            </tr>
                            <tr>
                                <td align="right"><b> _("Cc List"):</b></td>
                                <td align="left">%s</td>
                            </tr>
                        </table>
                        """) % (", ".join(abMgr.getSubmissionNotification().getToList()) or i18nformat("""--_("no TO list")--"""), ", ".join(abMgr.getSubmissionNotification().getCCList()) or i18nformat("""--_("no CC list")--"""))
        else:
            vars["changeTo"] = "True"
            vars["status"] = _("DISABLED")
            vars["changeStatus"] = _("ENABLE")
            vars["startDate"] = ""
            vars["endDate"] = ""
            vars["announcement"] = ""
            vars["manage"] = ""
            vars["type"] = ""
            vars["disabled"] = "disabled"
            vars["modifDL"] = ""
            vars["submitters"] = ""
            vars["notification"] = ""
        vars["enablePic"] = quoteattr(str(Config.getInstance().getSystemIconURL("enabledSection")))
        vars["disablePic"] = quoteattr(str(Config.getInstance().getSystemIconURL("disabledSection")))
        vars["abstractFields"] = self._getAbstractFieldsHTML(vars)
        vars["addNotifTplURL"] = urlHandlers.UHAbstractModNotifTplNew.getURL(self._conf)
        vars["remNotifTplURL"] = urlHandlers.UHAbstractModNotifTplRem.getURL(self._conf)
        vars["confId"] = self._conf.getId()
        vars["lateAuthUsers"] = fossilize(self._conf.getAbstractMgr().getAuthorizedSubmitterList())
        return vars
class WPConfModifCFAPreview(WPConferenceModifAbstractBase):
    """Preview tab of the call-for-abstracts management area."""

    def _setActiveTab(self):
        self._tabCFAPreview.setActive()

    def _getHeadContent(self):
        mathjax_tags = '\n'.join('<script src="{0}" type="text/javascript"></script>'.format(url)
                                 for url in self._asset_env['mathjax_js'].urls())
        return WPConferenceModifAbstractBase._getHeadContent(self) + render('js/mathjax.config.js.tpl') + mathjax_tags

    def getCSSFiles(self):
        return WPConferenceModifAbstractBase.getCSSFiles(self) + \
            self._asset_env['contributions_sass'].urls()

    def getJSFiles(self):
        return WPConferenceModifAbstractBase.getJSFiles(self) + \
            self._asset_env['abstracts_js'].urls()

    def _getTabContent(self, params):
        import MaKaC.webinterface.pages.abstracts as abstracts
        from MaKaC.webinterface.common.abstractDataWrapper import AbstractData
        component = abstracts.WAbstractDataModification(self._conf)
        # Build a throwaway abstract so the submission form can be previewed
        # without any real data.
        dummy = AbstractData(self._conf.getAbstractMgr(), {}, 9999)
        tab_params = dummy.toDict()
        tab_params["postURL"] = ""
        tab_params["origin"] = "management"
        return component.getHTML(tab_params)
class WPConfModifCFA(WPConferenceModifAbstractBase):
    """Setup tab of the call-for-abstracts management area."""

    def _setActiveTab(self):
        self._tabCFA.setActive()

    def _getTabContent(self, params):
        return WConfModifCFA(self._conf).getHTML()
class WCFADataModification(wcomponents.WTemplated):
    """Form component for editing the call-for-abstracts configuration data."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        abMgr = self._conf.getAbstractMgr()
        start = abMgr.getStartSubmissionDate()
        end = abMgr.getEndSubmissionDate()
        tpl_vars["sDay"], tpl_vars["sMonth"], tpl_vars["sYear"] = start.day, start.month, start.year
        tpl_vars["eDay"], tpl_vars["eMonth"], tpl_vars["eYear"] = end.day, end.month, end.year
        deadline = abMgr.getModificationDeadline()
        if deadline:
            tpl_vars["mDay"] = str(deadline.day)
            tpl_vars["mMonth"] = str(deadline.month)
            tpl_vars["mYear"] = str(deadline.year)
        else:
            # No modification deadline configured: leave the fields blank.
            tpl_vars["mDay"] = tpl_vars["mMonth"] = tpl_vars["mYear"] = ""
        tpl_vars["announcement"] = abMgr.getAnnouncement()
        tpl_vars["toList"] = ", ".join(abMgr.getSubmissionNotification().getToList())
        tpl_vars["ccList"] = ", ".join(abMgr.getSubmissionNotification().getCCList())
        tpl_vars["postURL"] = urlHandlers.UHCFAPerformDataModification.getURL(self._conf)
        return tpl_vars
class WPCFADataModification(WPConferenceModifAbstractBase):
    """Page hosting the CFA data-modification form."""

    def _setActiveTab(self):
        self._tabCFA.setActive()

    def _getTabContent(self, params):
        return WCFADataModification(self._conf).getHTML()
class WConfModifProgram(wcomponents.WTemplated):
    """Component showing the conference programme (track list) management view."""

    def __init__(self, conference):
        self._conf = conference

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars["deleteItemsURL"] = urlHandlers.UHConfDelTracks.getURL(self._conf)
        tpl_vars["addTrackURL"] = urlHandlers.UHConfAddTrack.getURL(self._conf)
        tpl_vars["conf"] = self._conf
        return tpl_vars
class WPConfModifProgram(WPConferenceModifBase):
    """Management page for the conference programme."""

    sidemenu_option = 'program'

    def _getPageContent(self, params):
        return WConfModifProgram(self._conf).getHTML()
class WTrackCreation(wcomponents.WTemplated):
    """Empty form component used when creating a new track."""

    def __init__(self, targetConf):
        self.__conf = targetConf

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        # New track: both fields start out blank.
        tpl_vars.update({'title': '', 'description': ''})
        return tpl_vars
class WPConfAddTrack(WPConfModifProgram):
    """Page containing the 'add track' creation form."""

    def _getPageContent(self, params):
        form = WTrackCreation(self._conf)
        post_params = {"postURL": urlHandlers.UHConfPerformAddTrack.getURL(self._conf)}
        return form.getHTML(post_params)
class WFilterCriteriaAbstracts(wcomponents.WFilterCriteria):
    """
    Renders the filter-criteria table for the abstract list: the HTML for
    every criterion, each drawn through WFilterCriterionOptionsAbstracts.
    """

    def __init__(self, options, filterCrit, extraInfo=""):
        wcomponents.WFilterCriteria.__init__(self, options, filterCrit, extraInfo)

    def _drawFieldOptions(self, id, data):
        # TODO: remove when we have a better template system
        return WFilterCriterionOptionsAbstracts(id, data).getHTML()
class WFilterCriterionOptionsAbstracts(wcomponents.WTemplated):
    """Template component for a single filter criterion (title plus options)."""

    def __init__(self, id, data):
        self._id = id
        self._data = data

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars["id"] = self._id
        tpl_vars["title"] = self._data["title"]
        tpl_vars["options"] = self._data["options"]
        # "selectFunc" toggles the select-all/none helpers; defaults to True.
        tpl_vars["selectFunc"] = self._data.get("selectFunc", True)
        return tpl_vars
class WAbstracts( wcomponents.WTemplated ):
    """Management component rendering the filterable/sortable abstract list."""
    # available columns
    COLUMNS = ["ID", "Title", "PrimaryAuthor", "Tracks", "Type", "Status", "Rating", "AccTrack", "AccType", "SubmissionDate", "ModificationDate"]
    def __init__( self, conference, filterCrit, sortingCrit, order, display, filterUsed):
        self._conf = conference
        self._filterCrit = filterCrit
        self._sortingCrit = sortingCrit
        self._order = order  # "up" or "down": current sorting direction
        self._display = display  # columns chosen by the user (may be empty)
        self._filterUsed = filterUsed
    def _getURL( self, sortingField, column ):
        # URL sorting the list by ``column``; toggles the direction when the
        # list is already sorted by that column.
        url = urlHandlers.UHConfAbstractManagment.getURL(self._conf)
        url.addParam("sortBy", column)
        if sortingField and sortingField.getId() == column:
            if self._order == "down":
                url.addParam("order","up")
            elif self._order == "up":
                url.addParam("order","down")
        return url
    def _getTrackFilterItemList( self ):
        # Checkbox list for the "track" filter criterion.
        checked = ""
        field=self._filterCrit.getField("track")
        if field is not None and field.getShowNoValue():
            checked = " checked"
        l = [ i18nformat("""<input type="checkbox" name="trackShowNoValue"%s> --_("not specified")--""")%checked]
        for t in self._conf.getTrackList():
            checked = ""
            if field is not None and t.getId() in field.getValues():
                checked = " checked"
            l.append( """<input type="checkbox" name="track" value=%s%s> (%s) %s\n"""%(quoteattr(t.getId()),checked,self.htmlText(t.getCode()),self.htmlText(t.getTitle())))
        return l
    def _getContribTypeFilterItemList( self ):
        # Checkbox list for the "contribution type" filter criterion.
        checked = ""
        field=self._filterCrit.getField("type")
        if field is not None and field.getShowNoValue():
            checked = " checked"
        l = [ i18nformat("""<input type="checkbox" name="typeShowNoValue"%s> --_("not specified")--""")%checked]
        for contribType in self._conf.getContribTypeList():
            checked = ""
            if field is not None and contribType.getId() in field.getValues():
                checked = " checked"
            l.append( """<input type="checkbox" name="type" value=%s%s> %s"""%(quoteattr(contribType.getId()), checked, self.htmlText(contribType.getName())) )
        return l
    def _getAccTrackFilterItemList( self ):
        # Checkbox list for the "(proposed to be) accepted for track" filter.
        checked = ""
        field=self._filterCrit.getField("acc_track")
        if field is not None and field.getShowNoValue():
            checked = " checked"
        l = [ i18nformat("""<input type="checkbox" name="accTrackShowNoValue"%s> --_("not specified")--""")%checked]
        for t in self._conf.getTrackList():
            checked = ""
            if field is not None and t.getId() in field.getValues():
                checked=" checked"
            l.append("""<input type="checkbox" name="acc_track" value=%s%s> (%s) %s"""%(quoteattr(t.getId()),checked,self.htmlText(t.getCode()),self.htmlText(t.getTitle())))
        return l
    def _getAccContribTypeFilterItemList( self ):
        # Checkbox list for the "(proposed to be) accepted for type" filter.
        checked = ""
        field=self._filterCrit.getField("acc_type")
        if field is not None and field.getShowNoValue():
            checked = " checked"
        l = [ i18nformat("""<input type="checkbox" name="accTypeShowNoValue"%s> --_("not specified")--""")%checked]
        for contribType in self._conf.getContribTypeList():
            checked = ""
            if field is not None and contribType.getId() in field.getValues():
                checked = " checked"
            l.append( """<input type="checkbox" name="acc_type" value=%s%s> %s"""%(quoteattr(contribType.getId()),checked,self.htmlText(contribType.getName())))
        return l
    def _getStatusFilterItemList( self ):
        # Checkbox list for the abstract status filter (with status icons).
        l = []
        for status in AbstractStatusList.getInstance().getStatusList():
            checked = ""
            statusId = AbstractStatusList.getInstance().getId( status )
            statusCaption = AbstractStatusList.getInstance().getCaption( status )
            statusCode=AbstractStatusList.getInstance().getCode(status)
            statusIconURL= AbstractStatusList.getInstance().getIconURL( status )
            field=self._filterCrit.getField("status")
            if field is not None and statusId in field.getValues():
                checked = "checked"
            imgHTML = """<img src=%s border="0" alt="">"""%(quoteattr(str(statusIconURL)))
            l.append( """<input type="checkbox" name="status" value=%s%s>%s (%s) %s"""%(quoteattr(statusId),checked,imgHTML,self.htmlText(statusCode),self.htmlText(statusCaption)))
        return l
    def _getOthersFilterItemList( self ):
        # Miscellaneous filter toggles (multiple tracks / has comments).
        checkedShowMultiple, checkedShowComments = "", ""
        track_field=self._filterCrit.getField("track")
        if track_field is not None and track_field.onlyMultiple():
            checkedShowMultiple = " checked"
        if self._filterCrit.getField("comment") is not None:
            checkedShowComments = " checked"
        l = [ i18nformat("""<input type="checkbox" name="trackShowMultiple"%s> _("only multiple tracks")""")%checkedShowMultiple,
                i18nformat("""<input type="checkbox" name="comment"%s> _("only with comments")""")%checkedShowComments]
        return l
    def _getFilterMenu(self):
        # Assemble all criterion sections into the filter menu.
        options = [
            ('Tracks', {"title": _("tracks"),
                        "options": self._getTrackFilterItemList()}),
            ('Types', {"title": _("types"),
                       "options": self._getContribTypeFilterItemList()}),
            ('Status', {"title": _("status"),
                       "options": self._getStatusFilterItemList()}),
            ('AccTracks', {"title": _("(proposed to be) accepted for tracks"),
                       "options": self._getAccTrackFilterItemList()}),
            ('AccTypes', {"title": _("(proposed to be) accepted for types"),
                       "options": self._getAccContribTypeFilterItemList()}),
            ('Others', {"title": _("others"),
                       "selectFunc": False,
                       "options": self._getOthersFilterItemList()})
            ]
        extraInfo = ""
        # NOTE(review): the author-search box is only shown when the
        # registration form has statuses -- looks unrelated; confirm.
        if self._conf.getRegistrationForm().getStatusesList():
            extraInfo = i18nformat("""<table align="center" cellspacing="10" width="100%%">
                                <tr>
                                    <td colspan="5" class="titleCellFormat"> _("Author search") <input type="text" name="authSearch" value=%s></td>
                                </tr>
                            </table>
                        """)%(quoteattr(str(self._authSearch)))
        p = WFilterCriteriaAbstracts(options, None, extraInfo)
        return p.getHTML()
    def _getColumnTitlesDict(self):
        """
        Dictionary with the translation from "ids" to "name to display" for each of the options you can choose for the display.
        This method complements the method "_setDispOpts" in which we get a dictonary with "ids".
        """
        if not hasattr(self, "_columns"):
            self._columns = {"ID": "ID","Title": "Title", "PrimaryAuthor": "Primary Author", "Tracks": "Tracks", "Type": "Type", "Status":"Status", \
                      "Rating":" Rating", "AccTrack": "Acc. Track", "AccType": "Acc. Type", "SubmissionDate": "Submission Date", "ModificationDate": "Modification Date"}
        return self._columns
    def _getDisplay(self):
        """
        These are the 'display' options selected by the user. In case no options were selected we add some of them by default.
        """
        display = self._display[:]
        if display == []:
            display = self.COLUMNS
        return display
    def _getAccType(self, abstract):
        # Name of the (proposed) accepted contribution type, or "".
        status = abstract.getCurrentStatus()
        if isinstance(status,(review.AbstractStatusAccepted, review.AbstractStatusProposedToAccept)) and status.getType() is not None:
            return self.htmlText(status.getType().getName())
        return ""
    def _getAccTrack(self, abstract):
        # Code of the accepted track, or "" when none.
        acc_track = abstract.getAcceptedTrack()
        if not acc_track:
            return ""
        return self.htmlText(acc_track.getCode())
    def getVars( self ):
        # Template variables: filter menu, sorting URLs/arrows, and the
        # filtered+sorted abstract list itself.
        vars = wcomponents.WTemplated.getVars(self)
        vars["abstractSelectionAction"]=quoteattr(str(urlHandlers.UHAbstractConfSelectionAction.getURL(self._conf)))
        vars["confId"] = self._conf.getId()
        self._authSearch=vars.get("authSearch","")
        vars["filterMenu"] = self._getFilterMenu()
        sortingField=None
        if self._sortingCrit is not None:
            sortingField=self._sortingCrit.getField()
            vars["sortingField"] = sortingField.getId()
        vars["order"] = self._order
        vars["downArrow"] = Config.getInstance().getSystemIconURL("downArrow")
        vars["upArrow"] = Config.getInstance().getSystemIconURL("upArrow")
        vars["getSortingURL"] = lambda column: self._getURL(sortingField, column)
        vars["getAccType"] = lambda abstract: self._getAccType(abstract)
        vars["getAccTrack"] = lambda abstract: self._getAccTrack(abstract)
        f = filters.SimpleFilter( self._filterCrit, self._sortingCrit )
        abstractList=f.apply(self._conf.getAbstractMgr().getAbstractsMatchingAuth(self._authSearch))
        # The filter sorts "down"; reverse for ascending order.
        if self._order =="up":
            abstractList.reverse()
        vars["abstracts"] = abstractList
        vars["totalNumberAbstracts"] = str(len(self._conf.getAbstractMgr().getAbstractList()))
        vars["filteredNumberAbstracts"] = str(len(abstractList))
        vars["filterUsed"] = self._filterUsed
        vars["accessAbstract"] = quoteattr(str(urlHandlers.UHAbstractDirectAccess.getURL(self._conf)))
        url = urlHandlers.UHConfAbstractManagment.getURL(self._conf)
        url.setSegment( "results" )
        vars["filterPostURL"] = quoteattr(str(url))
        vars["excelIconURL"]=quoteattr(str(Config.getInstance().getSystemIconURL("excel")))
        vars["pdfIconURL"]=quoteattr(str(Config.getInstance().getSystemIconURL("pdf")))
        vars["xmlIconURL"]=quoteattr(str(Config.getInstance().getSystemIconURL("xml")))
        vars["displayColumns"] = self._getDisplay()
        vars["columnsDict"] = self._getColumnTitlesDict()
        vars["columns"] = self.COLUMNS
        return vars
class WPConfAbstractList(WPConferenceModifAbstractBase):
    """Management page listing the conference abstracts."""

    def __init__(self, rh, conf, msg, filterUsed=False):
        self._msg = msg
        self._filterUsed = filterUsed
        WPConferenceModifAbstractBase.__init__(self, rh, conf)

    def _getTabContent(self, params):
        component = WAbstracts(self._conf,
                               params.get("filterCrit", None),
                               params.get("sortingCrit", None),
                               params.get("order", "down"),
                               params.get("display", None),
                               self._filterUsed)
        return component.getHTML({"authSearch": params.get("authSearch", "")})

    def _setActiveTab(self):
        self._tabAbstractList.setActive()
class WPModNewAbstract(WPConfAbstractList):
    """Page with the form for creating a new abstract from the management area."""

    def __init__(self, rh, conf, abstractData):
        # ``abstractData`` is accepted for interface compatibility but unused.
        WPConfAbstractList.__init__(self, rh, conf, "")

    def _getTabContent(self, params):
        from MaKaC.webinterface.pages.abstracts import WAbstractDataModification
        params["postURL"] = urlHandlers.UHConfModNewAbstract.getURL(self._conf)
        params["origin"] = "management"
        return WAbstractDataModification(self._conf).getHTML(params)

    def getCSSFiles(self):
        return WPConfAbstractList.getCSSFiles(self) + \
            self._asset_env['contributions_sass'].urls()

    def getJSFiles(self):
        return WPConfAbstractList.getJSFiles(self) + \
            self._includeJSPackage('Management') + \
            self._asset_env['abstracts_js'].urls()

    def _getHeadContent(self):
        mathjax_tags = '\n'.join('<script src="{0}" type="text/javascript"></script>'.format(url)
                                 for url in self._asset_env['mathjax_js'].urls())
        return WPConfAbstractList._getHeadContent(self) + render('js/mathjax.config.js.tpl') + mathjax_tags
class WConfModAbstractsMerge(wcomponents.WTemplated):
    """Form component for merging several abstracts into a target one."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        tpl_vars = wcomponents.WTemplated.getVars(self)
        tpl_vars["postURL"] = quoteattr(str(urlHandlers.UHConfModAbstractsMerge.getURL(self._conf)))
        tpl_vars["selAbstracts"] = ",".join(tpl_vars.get("absIdList", []))
        tpl_vars["targetAbs"] = quoteattr(str(tpl_vars.get("targetAbsId", "")))
        tpl_vars["inclAuthChecked"] = " checked" if tpl_vars.get("inclAuth", False) else ""
        tpl_vars["comments"] = self.htmlText(tpl_vars.get("comments", ""))
        tpl_vars["notifyChecked"] = " checked" if tpl_vars.get("notify", False) else ""
        return tpl_vars
class WPModMergeAbstracts(WPConfAbstractList):
    """Page hosting the abstract-merge form."""

    def __init__(self, rh, conf):
        WPConfAbstractList.__init__(self, rh, conf, "")

    def _getTabContent(self, params):
        form_params = {
            "absIdList": params.get("absIdList", []),
            "targetAbsId": params.get("targetAbsId", ""),
            "inclAuth": params.get("inclAuth", False),
            "comments": params.get("comments", ""),
            "notify": params.get("notify", True),
        }
        return WConfModAbstractsMerge(self._conf).getHTML(form_params)
class WPConfParticipantList(WPConfAbstractList):
    """Page listing the participants (authors/speakers) of selected abstracts."""

    def __init__(self, rh, conf, emailList, displayedGroups, abstracts):
        WPConfAbstractList.__init__(self, rh, conf, None)
        self._emailList = emailList
        self._displayedGroups = displayedGroups
        self._abstracts = abstracts

    def _getTabContent(self, params):
        component = WAbstractsParticipantList(self._conf, self._emailList, self._displayedGroups, self._abstracts)
        return component.getHTML()
class WPConfModifParticipantList( WPConferenceBase ):
    """Page listing the participants of the selected contributions."""
    def __init__(self, rh, conf, emailList, displayedGroups, contribs):
        WPConferenceBase.__init__(self, rh, conf)
        self._emailList = emailList
        self._displayedGroups = displayedGroups
        self._contribs = contribs
    def _getBody( self, params ):
        # NOTE(review): the return value of the base-class _getBody call is
        # discarded -- presumably invoked only for side effects; confirm.
        WPConferenceBase._getBody(self, params)
        wc = WContribParticipantList(self._conf, self._emailList, self._displayedGroups, self._contribs)
        params = {"urlDisplayGroup":urlHandlers.UHContribsConfManagerDisplayParticipantList.getURL(self._conf)}
        return wc.getHTML(params)
class WConfModifContribList(wcomponents.WTemplated):
    def __init__(self,conf,filterCrit, sortingCrit, order, filterUsed=False, filterUrl=None):
        # Contribution-list component for the management area.
        self._conf=conf
        self._filterCrit=filterCrit
        self._sortingCrit=sortingCrit
        self._order = order  # "up" or "down": current sorting direction
        self._totaldur =timedelta(0)  # accumulated duration of the rendered rows
        self._filterUsed = filterUsed  # whether a filter is currently applied
        self._filterUrl = filterUrl
    def _getURL( self ):
        # Builds the URL to the contribution list page, preserving the current
        # filter and sorting status by storing them in the websession.
        url = urlHandlers.UHConfModifContribList.getURL(self._conf)
        #save params in websession
        dict = session.setdefault('ContributionFilterConf%s' % self._conf.getId(), {})
        if self._filterCrit.getField("type"):
            # Keep only non-empty type ids.
            l=[]
            for t in self._filterCrit.getField("type").getValues():
                if t!="":
                    l.append(t)
            dict["types"] = l
            if self._filterCrit.getField("type").getShowNoValue():
                dict["typeShowNoValue"] = "1"
        if self._filterCrit.getField("track"):
            dict["tracks"] = self._filterCrit.getField("track").getValues()
            if self._filterCrit.getField("track").getShowNoValue():
                dict["trackShowNoValue"] = "1"
        if self._filterCrit.getField("session"):
            dict["sessions"] = self._filterCrit.getField("session").getValues()
            if self._filterCrit.getField("session").getShowNoValue():
                dict["sessionShowNoValue"] = "1"
        if self._filterCrit.getField("status"):
            dict["status"] = self._filterCrit.getField("status").getValues()
        if self._sortingCrit.getField():
            dict["sortBy"] = self._sortingCrit.getField().getId()
            dict["order"] = "down"
        dict["OK"] = "1"
        # Mark the session dirty so the in-place dict mutation is persisted.
        session.modified = True
        return url
def _getMaterialsHTML(self, contrib):
attached_items = contrib.attached_items
if attached_items:
num_files = len(attached_items['files']) + sum(len(f.attachments) for f in attached_items['folders'])
return '<a href="{}">{}</a>'.format(
url_for('attachments.management', contrib),
ngettext('1 file', '{num} files', num_files).format(num=num_files)
)
    def _getContribHTML( self, contrib ):
        # Render one <tr> of the contribution table for ``contrib``.
        try:
            sdate=contrib.getAdjustedStartDate().strftime("%d-%b-%Y %H:%M" )
        except AttributeError:
            # Unscheduled contribution: no start date to display.
            sdate = ""
        title = """<a href=%s>%s</a>"""%( quoteattr( str( urlHandlers.UHContributionModification.getURL( contrib ) ) ), self.htmlText( contrib.getTitle() ))
        strdur = ""
        if contrib.getDuration() is not None and contrib.getDuration().seconds != 0:
            strdur = (datetime(1900,1,1)+ contrib.getDuration()).strftime("%Hh%M'")
            dur = contrib.getDuration()
            # Accumulate the total duration shown elsewhere in the list view.
            self._totaldur = self._totaldur + dur
        l = [self.htmlText( spk.getFullName() ) for spk in contrib.getSpeakerList()]
        speaker = "<br>".join( l )
        session = ""
        if contrib.getSession() is not None:
            # Prefer the session code; fall back to its id.
            if contrib.getSession().getCode() != "no code":
                session=self.htmlText(contrib.getSession().getCode())
            else:
                session=self.htmlText(contrib.getSession().getId())
        track = ""
        if contrib.getTrack() is not None:
            # Prefer the track code; fall back to its id.
            if contrib.getTrack().getCode() is not None:
                track = self.htmlText( contrib.getTrack().getCode() )
            else:
                track = self.htmlText( contrib.getTrack().getId() )
        cType=""
        if contrib.getType() is not None:
            cType=self.htmlText(contrib.getType().getName())
        status=contrib.getCurrentStatus()
        statusCaption=ContribStatusList().getCode(status.__class__)
        html = """
            <tr id="contributions%s" style="background-color: transparent;" onmouseout="javascript:onMouseOut('contributions%s')" onmouseover="javascript:onMouseOver('contributions%s')">
                <td valign="top" align="right" nowrap><input onchange="javascript:isSelected('contributions%s')" type="checkbox" name="contributions" value=%s></td>
                <td valign="top" nowrap class="CRLabstractDataCell">%s</td>
                <td valign="top" nowrap class="CRLabstractDataCell">%s</td>
                <td valign="top" nowrap class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell">%s</td>
                <td valign="top" class="CRLabstractDataCell" nowrap>%s</td>
            </tr>
                """%(contrib.getId(), contrib.getId(), contrib.getId(),
                    contrib.getId(), contrib.getId(),
                    self.htmlText(contrib.getId()),
                    sdate or "&nbsp;",strdur or "&nbsp;",cType or "&nbsp;",
                    title or "&nbsp;",
                    speaker or "&nbsp;",session or "&nbsp;",
                    track or "&nbsp;",statusCaption or "&nbsp;",
                    self._getMaterialsHTML(contrib) or "&nbsp;")
        return html
def _getTypeItemsHTML(self):
checked=""
if self._filterCrit.getField("type").getShowNoValue():
checked=" checked"
res=[ i18nformat("""<input type="checkbox" name="typeShowNoValue" value="--none--"%s> --_("not specified")--""")%checked]
for t in self._conf.getContribTypeList():
checked=""
if t.getId() in self._filterCrit.getField("type").getValues():
checked=" checked"
res.append("""<input type="checkbox" name="types" value=%s%s> %s"""%(quoteattr(str(t.getId())),checked,self.htmlText(t.getName())))
return res
def _getSessionItemsHTML(self):
checked=""
if self._filterCrit.getField("session").getShowNoValue():
checked=" checked"
res=[ i18nformat("""<input type="checkbox" name="sessionShowNoValue" value="--none--"%s> --_("not specified")--""")%checked]
for s in self._conf.getSessionListSorted():
checked=""
l = self._filterCrit.getField("session").getValues()
if not isinstance(l, list):
l = [l]
if s.getId() in l:
checked=" checked"
res.append("""<input type="checkbox" name="sessions" value=%s%s> (%s) %s"""%(quoteattr(str(s.getId())),checked,self.htmlText(s.getCode()),self.htmlText(s.getTitle())))
return res
def _getTrackItemsHTML(self):
checked=""
if self._filterCrit.getField("track").getShowNoValue():
checked=" checked"
res=[ i18nformat("""<input type="checkbox" name="trackShowNoValue" value="--none--"%s> --_("not specified")--""")%checked]
for t in self._conf.getTrackList():
checked=""
if t.getId() in self._filterCrit.getField("track").getValues():
checked=" checked"
res.append("""<input type="checkbox" name="tracks" value=%s%s> (%s) %s"""%(quoteattr(str(t.getId())),checked,self.htmlText(t.getCode()),self.htmlText(t.getTitle())))
return res
def _getStatusItemsHTML(self):
res=[]
for st in ContribStatusList().getList():
id=ContribStatusList().getId(st)
checked=""
if id in self._filterCrit.getField("status").getValues():
checked=" checked"
code=ContribStatusList().getCode(st)
caption=ContribStatusList().getCaption(st)
res.append("""<input type="checkbox" name="status" value=%s%s> (%s) %s"""%(quoteattr(str(id)),checked,self.htmlText(code),self.htmlText(caption)))
return res
    def _getFilterMenu(self):
        # Assemble the filter menu: one section per criterion, plus the
        # author-search box appended as extra info.
        # NOTE: relies on getVars() having set self._authSearch beforehand.
        options = [
            ('Types', {"title": _("Types"),
                       "options": self._getTypeItemsHTML()}),
            ('Sessions', {"title": _("Sessions"),
                        "options": self._getSessionItemsHTML()}),
            ('Tracks', {"title": _("Tracks"),
                        "options": self._getTrackItemsHTML()}),
            ('Status', {"title": _("Status"),
                        "options": self._getStatusItemsHTML()})
        ]
        extraInfo = i18nformat("""<table align="center" cellspacing="10" width="100%%">
                    <tr>
                        <td colspan="5" class="titleCellFormat"> _("Author search") <input type="text" name="authSearch" value=%s></td>
                    </tr>
                </table>
                    """)%(quoteattr(str(self._authSearch)))
        p = WFilterCriteriaContribs(options, None, extraInfo)
        return p.getHTML()
def getVars( self ):
vars = wcomponents.WTemplated.getVars( self )
vars["filterUrl"] = str(self._filterUrl).replace('%', '%%')
vars["quickSearchURL"]=quoteattr(str(urlHandlers.UHConfModContribQuickAccess.getURL(self._conf)))
vars["filterPostURL"]=quoteattr(str(urlHandlers.UHConfModifContribList.getURL(self._conf)))
self._authSearch=vars.get("authSearch","").strip()
cl=self._conf.getContribsMatchingAuth(self._authSearch)
sortingField = self._sortingCrit.getField()
self._currentSorting=""
if sortingField is not None:
self._currentSorting=sortingField.getId()
vars["currentSorting"]=""
url=self._getURL()
url.addParam("sortBy","number")
vars["numberImg"]=""
if self._currentSorting == "number":
vars["currentSorting"] = i18nformat("""<input type="hidden" name="sortBy" value="_("number")">""")
if self._order == "down":
vars["numberImg"] = """<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["numberImg"] = """<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["numberSortingURL"]=quoteattr(str(url))
url = self._getURL()
url.addParam("sortBy", "date")
vars["dateImg"] = ""
if self._currentSorting == "date":
vars["currentSorting"]= i18nformat("""<input type="hidden" name="sortBy" value="_("date")">""")
if self._order == "down":
vars["dateImg"]="""<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["dateImg"]="""<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["dateSortingURL"]=quoteattr(str(url))
url = self._getURL()
url.addParam("sortBy", "name")
vars["titleImg"] = ""
if self._currentSorting == "name":
vars["currentSorting"]= i18nformat("""<input type="hidden" name="sortBy" value="_("name")">""")
if self._order == "down":
vars["titleImg"]="""<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["titleImg"]="""<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["titleSortingURL"]=quoteattr(str(url))
url = self._getURL()
url.addParam("sortBy", "type")
vars["typeImg"] = ""
if self._currentSorting == "type":
vars["currentSorting"]= i18nformat("""<input type="hidden" name="sortBy" value="_("type")">""")
if self._order == "down":
vars["typeImg"]="""<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["typeImg"]="""<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["typeSortingURL"] = quoteattr( str( url ) )
url = self._getURL()
url.addParam("sortBy", "session")
vars["sessionImg"] = ""
if self._currentSorting == "session":
vars["currentSorting"] = i18nformat("""<input type="hidden" name="sortBy" value='_("session")'>""")
if self._order == "down":
vars["sessionImg"] = """<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["sessionImg"] = """<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["sessionSortingURL"] = quoteattr( str( url ) )
url = self._getURL()
url.addParam("sortBy", "speaker")
vars["speakerImg"]=""
if self._currentSorting=="speaker":
vars["currentSorting"] = i18nformat("""<input type="hidden" name="sortBy" value="_("speaker")">""")
if self._order == "down":
vars["speakerImg"] = """<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["speakerImg"] = """<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["speakerSortingURL"]=quoteattr( str( url ) )
url = self._getURL()
url.addParam("sortBy","track")
vars["trackImg"] = ""
if self._currentSorting == "track":
vars["currentSorting"] = i18nformat("""<input type="hidden" name="sortBy" value="_("track")">""")
if self._order == "down":
vars["trackImg"] = """<img src=%s alt="down">"""%(quoteattr(Config.getInstance().getSystemIconURL("downArrow")))
url.addParam("order","up")
elif self._order == "up":
vars["trackImg"] = """<img src=%s alt="up">"""%(quoteattr(Config.getInstance().getSystemIconURL("upArrow")))
url.addParam("order","down")
vars["trackSortingURL"] = quoteattr( str( url ) )
f=filters.SimpleFilter(self._filterCrit,self._sortingCrit)
filteredContribs = f.apply(cl)
l = [self._getContribHTML(contrib) for contrib in filteredContribs]
contribsToPrint = ["""<input type="hidden" name="contributions" value="%s">"""%contrib.getId() for contrib in filteredContribs]
numContribs = len(filteredContribs)
if self._order =="up":
l.reverse()
vars["contribsToPrint"] = "\n".join(contribsToPrint)
vars["contributions"] = "".join(l)
orginURL = urlHandlers.UHConfModifContribList.getURL(self._conf)
vars["numContribs"]=str(numContribs)
vars["totalNumContribs"] = str(len(self._conf.getContributionList()))
vars["filterUsed"] = self._filterUsed
vars["contributionsPDFURL"]=quoteattr(str(urlHandlers.UHContribsConfManagerDisplayMenuPDF.getURL(self._conf)))
vars["contribSelectionAction"]=quoteattr(str(urlHandlers.UHContribConfSelectionAction.getURL(self._conf)))
totaldur = self._totaldur
days = totaldur.days
hours = (totaldur.seconds)/3600
dayhours = (days * 24)+hours
mins = ((totaldur.seconds)/60)-(hours*60)
vars["totaldur"] = """%sh%sm""" % (dayhours, mins)
vars['rbActive'] = Config.getInstance().getIsRoomBookingActive()
vars["bookings"] = Conversion.reservationsList(self._conf.getRoomBookingList())
vars["filterMenu"] = self._getFilterMenu()
vars["sortingOptions"]="""<input type="hidden" name="sortBy" value="%s">
<input type="hidden" name="order" value="%s">"""%(self._sortingCrit.getField().getId(), self._order)
vars["pdfIconURL"]=quoteattr(str(Config.getInstance().getSystemIconURL("pdf")))
vars["excelIconURL"] = quoteattr(str(Config.getInstance().getSystemIconURL("excel")))
vars["xmlIconURL"]=quoteattr(str(Config.getInstance().getSystemIconURL("xml")))
return vars
class WFilterCriteriaContribs(wcomponents.WFilterCriteria):
    """
    Renders the filter-criteria table for the contribution list, delegating
    each criterion's option list to WFilterCriterionOptionsContribs.
    """

    def __init__(self, options, filterCrit, extraInfo=""):
        wcomponents.WFilterCriteria.__init__(self, options, filterCrit, extraInfo)

    def _drawFieldOptions(self, id, data):
        rendered = WFilterCriterionOptionsContribs(id, data).getHTML()
        # TODO: remove when we have a better template system
        return rendered.replace('%', '%%')
class WFilterCriterionOptionsContribs(wcomponents.WTemplated):
    """Template wrapper rendering a single filter criterion's options."""

    def __init__(self, id, data):
        self._id = id
        self._data = data

    def getVars(self):
        templateVars = wcomponents.WTemplated.getVars(self)
        templateVars["id"] = self._id
        templateVars["title"] = self._data["title"]
        templateVars["options"] = self._data["options"]
        templateVars["selectFunc"] = self._data.get("selectFunc", True)
        return templateVars
class WPModifContribList( WPConferenceModifBase ):
    """Management page listing a conference's contributions with filtering."""

    sidemenu_option = 'contributions'
    _userData = ['favorite-user-list', 'favorite-user-ids']

    def __init__(self, rh, conference, filterUsed=False):
        WPConferenceModifBase.__init__(self, rh, conference)
        self._filterUsed = filterUsed

    def _getPageContent( self, params ):
        filterCrit = params.get("filterCrit", None)
        sortingCrit = params.get("sortingCrit", None)
        order = params.get("order", "down")

        # Flatten the filter criteria into request parameters so the current
        # view can be bookmarked.
        filterParams = {}
        for field in getattr(filterCrit, '_fields').values():
            fieldId = field.getId()
            if field.getShowNoValue():
                filterParams['%sShowNoValue' % fieldId] = '--none--'
            filterParams[fieldId] = field.getValues()

        requestParams = self._rh.getRequestParams()
        operationType = requestParams.get('operationType')
        if operationType != 'resetFilters':
            operationType = 'filter'
        urlParams = dict(isBookmark='y', operationType=operationType)
        urlParams.update(requestParams)
        urlParams.update(filterParams)
        filterUrl = self._rh._uh.getURL(None, **urlParams)

        wc = WConfModifContribList(self._conf, filterCrit, sortingCrit, order,
                                   self._filterUsed, filterUrl)
        return wc.getHTML({"authSearch": params.get("authSearch", "")})
class WPConfModifContribToPDFMenu( WPModifContribList ):
    """Page offering PDF-generation options for a set of contributions."""

    def __init__(self, rh, conf, contribIds):
        WPModifContribList.__init__(self, rh, conf)
        self._contribIds = contribIds

    def _getPageContent(self, params):
        return WConfModifContribToPDFMenu(self._conf, self._contribIds).getHTML(params)
class WConfModifContribToPDFMenu(wcomponents.WTemplated):
    """PDF-options form carrying the selected contribution ids as hidden inputs."""

    def __init__(self, conf, contribIds):
        self._conf = conf
        self.contribIds = contribIds

    def getVars( self ):
        vars = wcomponents.WTemplated.getVars( self )
        vars["createPDFURL"] = urlHandlers.UHContribsConfManagerDisplayMenuPDF.getURL(self._conf)
        hidden = ["""<input type="hidden" name="contributions" value="%s">""" % cid
                  for cid in self.contribIds]
        vars["contribIdsList"] = "\n".join(hidden)
        return vars
class WConfModMoveContribsToSession(wcomponents.WTemplated):
    """Form for moving a set of contributions into one of the event's sessions.

    Only open (non-closed) sessions are offered as targets.
    """

    def __init__(self, conf, contribIdList=None):
        # FIX: the previous default was a mutable list ([]), which is shared
        # across calls; use None as the sentinel instead (backward-compatible).
        self._conf = conf
        self._contribIdList = contribIdList if contribIdList is not None else []

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        vars["postURL"] = quoteattr(str(urlHandlers.UHConfModMoveContribsToSession.getURL(self._conf)))
        vars["contribs"] = ",".join(self._contribIdList)
        options = ["""<option value="--none--">--none--</option>"""]
        for session in self._conf.getSessionListSorted():
            if not session.isClosed():
                options.append("""<option value=%s>%s</option>""" % (
                    quoteattr(str(session.getId())),
                    self.htmlText(session.getTitle())))
        vars["sessions"] = "".join(options)
        return vars
class WPModMoveContribsToSession(WPModifContribList):
    """Page showing the move-contributions-to-session form."""

    def _getPageContent(self, params):
        component = WConfModMoveContribsToSession(self._conf, params.get("contribIds", []))
        return component.getHTML()
class WPModMoveContribsToSessionConfirmation(WPModifContribList):
    """Confirmation step before moving contributions into a target session."""

    def _getPageContent(self, params):
        component = wcomponents.WConfModMoveContribsToSessionConfirmation(
            self._conf, params.get("contribIds", []), params.get("targetSession", None))
        return component.getHTML({
            "postURL": urlHandlers.UHConfModMoveContribsToSession.getURL(self._conf),
        })
class WPConfEditContribType(WPConferenceModifBase):
    """Page for editing an existing contribution type."""

    sidemenu_option = 'general'

    def __init__(self, rh, ct):
        self._conf = ct.getConference()
        self._contribType = ct
        WPConferenceModifBase.__init__(self, rh, self._conf)

    def _getPageContent( self, params ):
        params["saveURL"] = quoteattr(str(urlHandlers.UHConfEditContribType.getURL(self._contribType)))
        return WConfEditContribType(self._contribType).getHTML(params)
class WConfEditContribType(wcomponents.WTemplated):
    """Edit form exposing a contribution type's name and description."""

    def __init__(self, contribType):
        self._contribType = contribType

    def getVars(self):
        templateVars = wcomponents.WTemplated.getVars(self)
        templateVars["ctName"] = self._contribType.getName()
        templateVars["ctDescription"] = self._contribType.getDescription()
        return templateVars
class WPConfAddContribType(WPConferenceModifBase):
    """Page for creating a new contribution type."""

    sidemenu_option = 'general'

    def _getPageContent( self, params ):
        params["saveURL"] = quoteattr(str(urlHandlers.UHConfAddContribType.getURL(self._conf)))
        return WConfAddContribType().getHTML(params)
class WConfAddContribType(wcomponents.WTemplated):
    """Creation form for a contribution type; adds no extra template variables."""

    def getVars(self):
        return wcomponents.WTemplated.getVars(self)
class WAbstractsParticipantList(wcomponents.WTemplated):
    """Participant list for a set of abstracts, split into three groups
    (submitters, primary authors, co-authors), each expandable/collapsible
    via a form post that re-renders the page."""

    def __init__(self, conf, emailList, displayedGroups, abstracts):
        # emailList: per-group dict with "emails" (flat address list) and
        # "tree" (participant objects for display).
        self._emailList = emailList
        # Names of the groups currently expanded in the UI.
        self._displayedGroups = displayedGroups
        self._conf = conf
        self._abstracts = abstracts

    def getVars(self):
        """Build the template variables: per-group e-mail strings, hidden form
        state (abstract ids, expanded groups) and the show/close forms."""
        vars = wcomponents.WTemplated.getVars(self)
        vars["submitterEmails"] = ",".join(self._emailList["submitters"]["emails"])
        vars["primaryAuthorEmails"] = ",".join(self._emailList["primaryAuthors"]["emails"])
        vars["coAuthorEmails"] = ",".join(self._emailList["coAuthors"]["emails"])
        urlDisplayGroup = urlHandlers.UHAbstractsConfManagerDisplayParticipantList.getURL(self._conf)
        # Hidden inputs preserving the selected abstracts across the re-post.
        abstractsToPrint = []
        for abst in self._abstracts:
            abstractsToPrint.append("""<input type="hidden" name="abstracts" value="%s">"""%abst)
        abstractsList = "".join(abstractsToPrint)
        # Hidden inputs preserving which groups are currently expanded.
        displayedGroups = []
        for dg in self._displayedGroups:
            displayedGroups.append("""<input type="hidden" name="displayedGroups" value="%s">"""%dg)
        groupsList = "".join(displayedGroups)
        # Submitters
        text = _("show list")
        vars["submitters"] = "<tr colspan=\"2\"><td> </td></tr>"
        if "submitters" in self._displayedGroups:
            l = []
            color = "white"
            text = _("close list")
            # Alternate row background between white and light grey.
            for subm in self._emailList["submitters"]["tree"].values():
                if color=="white":
                    color="#F6F6F6"
                else:
                    color="white"
                participant = "%s %s %s <%s>"%(subm.getTitle(), subm.getFirstName(), safe_upper(subm.getFamilyName()), subm.getEmail())
                l.append("<tr>\
                        <td colspan=\"2\" nowrap bgcolor=\"%s\" class=\"blacktext\">\
                        %s</td></tr>"%(color, self.htmlText(participant)))
            vars["submitters"] = "".join(l)
            urlDisplayGroup.addParam("clickedGroup", "submitters")
        vars["showSubmitters"] = """<form action="%s" method="post">\
                %s
                %s
                <input type="submit" class="btn" value="%s">
                </form>"""%(str(urlDisplayGroup), abstractsList,groupsList, text)
        # Primary authors
        text = _("show list")
        vars["primaryAuthors"] = "<tr colspan=\"2\"><td> </td></tr>"
        if "primaryAuthors" in self._displayedGroups:
            l = []
            color = "white"
            text = _("close list")
            for pAuth in self._emailList["primaryAuthors"]["tree"].values():
                if color=="white":
                    color="#F6F6F6"
                else:
                    color="white"
                participant = "%s <%s>"%(pAuth.getFullName(), pAuth.getEmail())
                l.append("<tr><td colspan=\"2\" nowrap bgcolor=\"%s\" \
                        class=\"blacktext\"> %s</td></tr>"%(color, self.htmlText(participant)))
            vars["primaryAuthors"] = "".join(l)
            urlDisplayGroup.addParam("clickedGroup", "primaryAuthors")
        vars["showPrimaryAuthors"] = """<form action="%s" method="post">\
                %s
                %s
                <input type="submit" class="btn" value="%s">
                </form>"""%(str(urlDisplayGroup), abstractsList,groupsList, text)
        # Co-Authors
        text = _("show list")
        vars["coAuthors"] = "<tr colspan=\"2\"><td> </td></tr>"
        if "coAuthors" in self._displayedGroups:
            l = []
            color = "white"
            text = _("close list")
            for cAuth in self._emailList["coAuthors"]["tree"].values():
                if color=="white":
                    color="#F6F6F6"
                else:
                    color="white"
                # Co-authors may lack an e-mail address; omit the <...> part then.
                cAuthEmail = cAuth.getEmail()
                if cAuthEmail.strip() == "":
                    participant = "%s"%cAuth.getFullName()
                else:
                    participant = "%s <%s>"%(cAuth.getFullName(), cAuthEmail)
                l.append("<tr><td colspan=\"2\" nowrap bgcolor=\"%s\" class=\"blacktext\">\
                        %s</td></tr>"%(color, self.htmlText(participant)))
            vars["coAuthors"] = "".join(l)
            urlDisplayGroup.addParam("clickedGroup", "coAuthors")
        vars["showCoAuthors"] = """<form action="%s" method="post">\
                %s
                %s
                <input type="submit" class="btn" value="%s">
                </form>"""%(str(urlDisplayGroup), abstractsList,groupsList, text)
        return vars
class WContribParticipantList(wcomponents.WTemplated):
    """Participant list for a set of contributions, split into three groups
    (speakers, primary authors, co-authors), each expandable/collapsible via
    a form post that re-renders the page."""

    def __init__(self, conf, emailList, displayedGroups, contribs):
        # emailList: per-group dict with "emails" (flat address list) and
        # "tree" (participant objects for display).
        self._emailList = emailList
        # Names of the groups currently expanded in the UI.
        self._displayedGroups = displayedGroups
        self._conf = conf
        self._contribs = contribs

    def getVars(self):
        """Build the template variables: per-group e-mail strings, hidden form
        state (contribution ids, expanded groups) and the show/close forms.

        NOTE(review): ``vars["urlDisplayGroup"]`` must be supplied by the
        caller via the template parameters — confirm at call sites.
        """
        vars = wcomponents.WTemplated.getVars(self)
        vars["speakerEmails"] = ", ".join(self._emailList["speakers"]["emails"])
        vars["primaryAuthorEmails"] = ", ".join(self._emailList["primaryAuthors"]["emails"])
        vars["coAuthorEmails"] = ", ".join(self._emailList["coAuthors"]["emails"])
        urlDisplayGroup = vars["urlDisplayGroup"]
        # Hidden inputs preserving the selected contributions across the re-post.
        contribsToPrint = []
        for contrib in self._contribs:
            contribsToPrint.append("""<input type="hidden" name="contributions" value="%s">"""%contrib)
        contribsList = "".join(contribsToPrint)
        # Hidden inputs preserving which groups are currently expanded.
        displayedGroups = []
        for dg in self._displayedGroups:
            displayedGroups.append("""<input type="hidden" name="displayedGroups" value="%s">"""%dg)
        groupsList = "".join(displayedGroups)
        # Speakers
        text = _("show list")
        vars["speakers"] = "<tr colspan=\"2\"><td> </td></tr>"
        if "speakers" in self._displayedGroups:
            l = []
            color = "white"
            text = _("close list")
            # Alternate row background between white and light grey.
            for speaker in self._emailList["speakers"]["tree"].values():
                if color=="white":
                    color="#F6F6F6"
                else:
                    color="white"
                participant = "%s <%s>"%(speaker.getFullName(), speaker.getEmail())
                l.append("<tr>\
                        <td colspan=\"2\" nowrap bgcolor=\"%s\" class=\"blacktext\">\
                        %s</td></tr>"%(color, self.htmlText(participant)))
            vars["speakers"] = "".join(l)
            urlDisplayGroup.addParam("clickedGroup", "speakers")
        vars["showSpeakers"] = """<form action="%s" method="post">\
                %s
                %s
                <input type="submit" class="btn" value="%s">
                </form>"""%(str(urlDisplayGroup), contribsList,groupsList, text)
        # Primary authors
        text = _("show list")
        vars["primaryAuthors"] = "<tr colspan=\"2\"><td> </td></tr>"
        if "primaryAuthors" in self._displayedGroups:
            l = []
            color = "white"
            text = _("close list")
            for pAuth in self._emailList["primaryAuthors"]["tree"].values():
                if color=="white":
                    color="#F6F6F6"
                else:
                    color="white"
                participant = "%s %s %s <%s>"%(pAuth.getTitle(), pAuth.getFirstName(), safe_upper(pAuth.getFamilyName()), pAuth.getEmail())
                l.append("<tr><td colspan=\"2\" nowrap bgcolor=\"%s\" \
                        class=\"blacktext\"> %s</td></tr>"%(color, self.htmlText(participant)))
            vars["primaryAuthors"] = "".join(l)
            urlDisplayGroup.addParam("clickedGroup", "primaryAuthors")
        vars["showPrimaryAuthors"] = """<form action="%s" method="post">\
                %s
                %s
                <input type="submit" class="btn" value="%s">
                </form>"""%(str(urlDisplayGroup), contribsList,groupsList, text)
        # Co-Authors
        text = _("show list")
        vars["coAuthors"] = "<tr colspan=\"2\"><td> </td></tr>"
        if "coAuthors" in self._displayedGroups:
            l = []
            color = "white"
            text = _("close list")
            for cAuth in self._emailList["coAuthors"]["tree"].values():
                if color=="white":
                    color="#F6F6F6"
                else:
                    color="white"
                # Co-authors may lack an e-mail address; omit the <...> part then.
                cAuthEmail = cAuth.getEmail()
                if cAuthEmail.strip() == "":
                    participant = "%s %s %s"%(cAuth.getTitle(), cAuth.getFirstName(), safe_upper(cAuth.getFamilyName()))
                else:
                    participant = "%s %s %s <%s>"%(cAuth.getTitle(), cAuth.getFirstName(), safe_upper(cAuth.getFamilyName()), cAuthEmail)
                l.append("<tr><td colspan=\"2\" nowrap bgcolor=\"%s\" class=\"blacktext\">\
                        %s</td></tr>"%(color, self.htmlText(participant)))
            vars["coAuthors"] = "".join(l)
            urlDisplayGroup.addParam("clickedGroup", "coAuthors")
        vars["showCoAuthors"] = """<form action="%s" method="post">\
                %s
                %s
                <input type="submit" class="btn" value="%s">
                </form>"""%(str(urlDisplayGroup), contribsList,groupsList, text)
        return vars
class WPAbstractSendNotificationMail(WPConferenceBase):
    """Confirmation page shown after abstract notification mails were queued."""

    def __init__(self, rh, conf, count):
        WPConferenceBase.__init__(self, rh, conf)
        # Number of notifications sent; currently unused by _getBody.
        self._count = count

    def _getBody( self, params ):
        # NOTE(review): "will nearly recieve" reads like a typo for
        # "will now receive". The string is an i18n translation key, so fixing
        # it requires updating the translation catalogues as well — left
        # unchanged here.
        return i18nformat("""
                <table align="center"><tr><td align="center">
                <b> _("The submitters of the selected abstracts will nearly recieve the notification mail").<br>
                <br>
                _("You can now close this window.")</b>
                </td></tr></table>
                """)
class WPContributionList( WPConferenceDefaultDisplayBase ):
    """Public display page listing the conference's contributions."""

    navigationEntry = navigation.NEContributionList
    menu_entry_name = 'contributions'

    def _getBody( self, params ):
        component = WConfContributionList(self._getAW(), self._conf,
                                          params["filterCrit"],
                                          params.get("filterText", ""))
        return component.getHTML()
class WConfContributionList (WConfDisplayBodyBase):
    """Body of the public contribution list with filter support."""

    _linkname = 'contributions'

    def __init__(self, aw, conf, filterCrit, filterText):
        self._aw = aw
        self._conf = conf
        self._filterCrit = filterCrit
        self._filterText = filterText

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        wvars["body_title"] = self._getTitle()
        wvars["conf"] = self._conf
        wvars["accessWrapper"] = self._aw
        wvars["filterCriteria"] = self._filterCrit
        wvars["filterText"] = self._filterText
        wvars["contributions"] = self._conf.getContributionListSorted(includeWithdrawn=False, key="title")
        wvars["showAttachedFiles"] = self._conf.getAbstractMgr().showAttachedFilesContribList()
        # Formatting callbacks evaluated lazily by the template (the display
        # timezone is resolved at call time, as before).
        wvars["formatDate"] = lambda date: format_date(date, "d MMM yyyy")
        wvars["formatTime"] = lambda time: format_time(time, format="short", timezone=timezone(DisplayTZ(self._aw, self._conf).getDisplayTZ()))
        return wvars
class WConfAuthorIndex(WConfDisplayBodyBase):
    """Body of the author index: one entry per indexed author with links to
    the author display page and to each of the author's contributions."""

    _linkname = 'author_index'

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        wvars["body_title"] = self._getTitle()
        wvars["items"] = dict(enumerate(self._getItems()))
        return wvars

    def _getItems(self):
        """Return a list of dicts (fullName, affiliation, authorURL,
        contributions) for every author that still has a live contribution.

        FIX: dropped the redundant ``len(authors) == 0`` pre-check — for an
        empty list ``next(..., None)`` already yields None and we skip — and
        flattened the needless ``else`` branch.
        """
        res = []
        for key, authors in self._conf.getAuthorIndex().iteritems():
            # Pick the first identity attached to a contribution that still
            # belongs to a conference; skip authors without one.
            auth = next((x for x in authors
                         if x.getContribution() and x.getContribution().getConference()), None)
            if auth is None:
                continue
            authorURL = urlHandlers.UHContribAuthorDisplay.getURL(auth.getContribution(), authorId=auth.getId())
            contribs = []
            res.append({'fullName': auth.getFullNameNoTitle(),
                        'affiliation': auth.getAffiliation(),
                        'authorURL': authorURL,
                        'contributions': contribs})
            for auth in authors:
                contrib = auth.getContribution()
                if contrib is not None and contrib.getConference() is not None:
                    contribs.append({
                        'title': contrib.getTitle(),
                        'url': str(urlHandlers.UHContributionDisplay.getURL(auth.getContribution())),
                        'attached_items': contrib.attached_items
                    })
        return res
class WPAuthorIndex(WPConferenceDefaultDisplayBase):
    """Display page wrapping the author index body."""

    navigationEntry = navigation.NEAuthorIndex
    menu_entry_name = 'author_index'

    def getJSFiles(self):
        base = WPConferenceDefaultDisplayBase.getJSFiles(self)
        return base + self._asset_env['indico_authors'].urls()

    def _getBody(self, params):
        return WConfAuthorIndex(self._conf).getHTML()
class WConfSpeakerIndex(WConfDisplayBodyBase):
    """Body of the speaker index: one entry per speaker, followed by the
    (sub)contributions the speaker participates in."""

    _linkname = 'speaker_index'

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        """Build ``items``: index -> [speaker dict, participation dict, ...]."""
        wvars = wcomponents.WTemplated.getVars(self)
        res = collections.defaultdict(list)
        for index, key in enumerate(self._conf.getSpeakerIndex().getParticipationKeys()):
            pl = self._conf.getSpeakerIndex().getById(key)
            try:
                speaker = pl[0]
            except IndexError:
                # Empty participation list for this key — nothing to show.
                continue
            res[index].append({'fullName': speaker.getFullNameNoTitle(), 'affiliation': speaker.getAffiliation()})
            for speaker in pl:
                # A participation may hang off a sub-contribution or a
                # contribution; pick the matching display URL.
                if isinstance(speaker, conference.SubContribParticipation):
                    participation = speaker.getSubContrib()
                    if participation is None:
                        continue
                    url = urlHandlers.UHSubContributionDisplay.getURL(participation)
                else:
                    participation = speaker.getContribution()
                    if participation is None:
                        continue
                    url = urlHandlers.UHContributionDisplay.getURL(participation)
                if participation.getConference() is not None:
                    # NOTE(review): in the contribution branch `participation`
                    # already is the contribution, yet getContribution() is
                    # called on it — presumably it returns itself there; confirm.
                    res[index].append({'title': participation.getTitle(),
                                       'url': str(url),
                                       'attached_items': participation.getContribution().attached_items})
        wvars["body_title"] = self._getTitle()
        wvars["items"] = res
        return wvars
class WPSpeakerIndex(WPConferenceDefaultDisplayBase):
    """Display page wrapping the speaker index body."""

    navigationEntry = navigation.NESpeakerIndex
    menu_entry_name = 'speaker_index'

    def _getBody(self, params):
        return WConfSpeakerIndex(self._conf).getHTML()

    def getJSFiles(self):
        base = WPConferenceDefaultDisplayBase.getJSFiles(self)
        return base + self._asset_env['indico_authors'].urls()
class WConfMyContributions(wcomponents.WTemplated):
    """Template showing the current user's contributions for a conference."""

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def getHTML(self, params):
        return wcomponents.WTemplated.getHTML(self, params)

    def getVars(self):
        templateVars = wcomponents.WTemplated.getVars(self)
        templateVars["User"] = self._aw.getUser()
        templateVars["Conference"] = self._conf
        templateVars["ConfReviewingChoice"] = self._conf.getConfPaperReview().getChoice()
        return templateVars
class WConfMyStuffMySessions(WConfDisplayBodyBase):
    """Body listing the sessions the current user manages or coordinates."""

    _linkname = 'my_sessions'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def _getSessionsHTML(self):
        """Return an HTML table of the user's sessions, or "" if there are none
        (or no user is logged in).

        FIX: removed the unused ``iconURL`` lookup and replaced the
        ``len(ls) <= 0`` test with the idiomatic emptiness check.
        """
        user = self._aw.getUser()
        if user is None:
            return ""
        # Union of coordinated and managed sessions, without duplicates.
        sessions = set(self._conf.getCoordinatedSessions(user))
        sessions |= set(self._conf.getManagedSession(user))
        if not sessions:
            return ""
        res = []
        for s in sessions:
            modURL = urlHandlers.UHSessionModification.getURL(s)
            dispURL = urlHandlers.UHSessionDisplay.getURL(s)
            res.append("""
                <tr class="infoTR">
                    <td class="infoTD" width="100%%">%s</td>
                    <td nowrap class="infoTD"><a href=%s>%s</a><span class="horizontalSeparator">|</span><a href=%s>%s</a></td>
                </tr>""" % (self.htmlText(s.getTitle()),
                            quoteattr(str(modURL)),
                            _("Edit"),
                            quoteattr(str(dispURL)),
                            _("View")))
        return """
            <table class="infoTable" cellspacing="0" width="100%%">
                <tr>
                    <td nowrap class="tableHeader"> %s </td>
                    <td nowrap class="tableHeader" style="text-align:right;"> %s </td>
                </tr>
                <tr>
                    <td>%s</td>
                </tr>
            </table>
            """ % (_("Session"),
                   _("Actions"),
                   "".join(res))

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        wvars["body_title"] = self._getTitle()
        wvars["items"] = self._getSessionsHTML()
        return wvars
class WPConfMyStuffMySessions(WPConferenceDefaultDisplayBase):
    """Display page for the user's own sessions ("My conference" area)."""

    navigationEntry = navigation.NEMyStuff
    menu_entry_name = 'my_conference'

    def _getBody(self, params):
        return WConfMyStuffMySessions(self._getAW(), self._conf).getHTML()
class WConfMyStuffMyContributions(WConfDisplayBodyBase):
    """Body listing the current user's own contributions."""

    _linkname = 'my_contributions'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def _getContribsHTML(self):
        # Delegate the actual rendering to the shared template.
        return WConfMyContributions(self._aw, self._conf).getHTML({})

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        wvars["body_title"] = self._getTitle()
        wvars["items"] = self._getContribsHTML()
        return wvars
class WPConfMyStuffMyContributions(WPConferenceDefaultDisplayBase):
    """Display page for the user's own contributions ("My conference" area)."""

    navigationEntry = navigation.NEMyStuff
    menu_entry_name = 'my_contributions'

    def _getBody(self, params):
        return WConfMyStuffMyContributions(self._getAW(), self._conf).getHTML()
class WConfMyStuffMyTracks(WConfDisplayBodyBase):
    """Body listing the tracks the current user coordinates."""

    _linkname = 'my_tracks'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def _getTracksHTML(self):
        """Return an HTML table of the user's coordinated tracks, or "".

        Empty when no user is logged in, the call-for-abstracts is inactive,
        or the "cfa" section is disabled.

        FIX: removed the unused ``iconURL`` lookup and replaced the
        ``len(lt) <= 0`` test with the idiomatic emptiness check.
        """
        if self._aw.getUser() is None or not self._conf.getAbstractMgr().isActive() or not self._conf.hasEnabledSection("cfa"):
            return ""
        lt = self._conf.getCoordinatedTracks(self._aw.getUser())
        if not lt:
            return ""
        res = []
        for t in lt:
            modURL = urlHandlers.UHTrackModifAbstracts.getURL(t)
            res.append("""
                <tr class="infoTR">
                    <td class="infoTD" width="100%%">%s</td>
                    <td nowrap class="infoTD"><a href=%s>%s</a></td>
                </tr>""" % (self.htmlText(t.getTitle()),
                            quoteattr(str(modURL)),
                            _("Edit")))
        return """
            <table class="infoTable" cellspacing="0" width="100%%">
                <tr>
                    <td nowrap class="tableHeader"> %s </td>
                    <td nowrap class="tableHeader" style="text-align:right;"> %s </td>
                </tr>
                <tr>
                    <td>%s</td>
                </tr>
            </table>
            """ % (_("Track"),
                   _("Actions"),
                   "".join(res))

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        wvars["body_title"] = self._getTitle()
        wvars["items"] = self._getTracksHTML()
        return wvars
class WPConfMyStuffMyTracks(WPConferenceDefaultDisplayBase):
    """Display page for the user's coordinated tracks ("My conference" area)."""

    navigationEntry = navigation.NEMyStuff
    menu_entry_name = 'my_tracks'

    def _getBody(self, params):
        return WConfMyStuffMyTracks(self._getAW(), self._conf).getHTML()
class WConfMyStuff(WConfDisplayBodyBase):
    """Landing body of the "My conference" area."""

    _linkname = 'my_conference'

    def __init__(self, aw, conf):
        self._aw = aw
        self._conf = conf

    def getVars(self):
        wvars = wcomponents.WTemplated.getVars(self)
        wvars["body_title"] = self._getTitle()
        return wvars
class WPMyStuff(WPConferenceDefaultDisplayBase):
    """Display page wrapping the "My conference" landing body."""

    navigationEntry = navigation.NEMyStuff
    menu_entry_name = 'my_conference'

    def _getBody(self, params):
        return WConfMyStuff(self._getAW(), self._conf).getHTML()
class WConfModAbstractBook(wcomponents.WTemplated):
    """Settings form for the book of abstracts (BOA)."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        templateVars = wcomponents.WTemplated.getVars(self)
        boaConfig = self._conf.getBOAConfig()
        templateVars["conf"] = self._conf
        templateVars["boaConfig"] = boaConfig
        templateVars["sortByList"] = boaConfig.getSortByTypes()
        templateVars["sortBy"] = boaConfig.getSortBy()
        templateVars["correspondingAuthorList"] = boaConfig.getCorrespondingAuthorTypes()
        templateVars["correspondingAuthor"] = boaConfig.getCorrespondingAuthor()
        templateVars["modURL"] = quoteattr(str(urlHandlers.UHConfModAbstractBook.getURL(self._conf)))
        templateVars["previewURL"] = quoteattr(str(urlHandlers.UHConfAbstractBook.getURL(self._conf)))
        templateVars["urlToogleShowIds"] = str(urlHandlers.UHConfModAbstractBookToogleShowIds.getURL(self._conf))
        templateVars["bookOfAbstractsActive"] = self._conf.getAbstractMgr().getCFAStatus()
        templateVars["bookOfAbstractsMenuActive"] = get_menu_entry_by_name('abstracts_book', self._conf).is_enabled
        return templateVars
class WPModAbstractBook(WPConferenceModifAbstractBase):
    """Management tab for configuring the book of abstracts."""

    def _setActiveTab(self):
        self._tabBOA.setActive()

    def _getTabContent(self, params):
        return WConfModAbstractBook(self._conf).getHTML()

    def getCSSFiles(self):
        base = WPConferenceModifAbstractBase.getCSSFiles(self)
        return base + self._asset_env['contributions_sass'].urls()

    def getJSFiles(self):
        base = WPConferenceModifAbstractBase.getJSFiles(self)
        return base + self._includeJSPackage('Management') + \
            self._asset_env['abstracts_js'].urls()

    def _getHeadContent(self):
        # MathJax config plus one <script> tag per MathJax asset URL.
        scripts = '\n'.join('<script src="{0}" type="text/javascript"></script>'.format(url)
                            for url in self._asset_env['mathjax_js'].urls())
        return (WPConferenceModifAbstractBase._getHeadContent(self)
                + render('js/mathjax.config.js.tpl')
                + scripts)
class WTimeTableCustomizePDF(wcomponents.WTemplated):
    """Options form for customising the timetable PDF export."""

    def __init__(self, conf):
        self._conf = conf

    def getVars(self):
        templateVars = wcomponents.WTemplated.getVars(self)
        pdfURL = urlHandlers.UHConfTimeTablePDF.getURL(self._conf)
        templateVars["getPDFURL"] = quoteattr(str(pdfURL))
        # Default both selectors to "all" unless the caller supplied a value.
        templateVars["showDays"] = templateVars.get("showDays", "all")
        templateVars["showSessions"] = templateVars.get("showSessions", "all")
        templateVars["commonPDFOptions"] = WConfCommonPDFOptions(self._conf).getHTML()
        return templateVars
class WPTimeTableCustomizePDF(WPConferenceDefaultDisplayBase):
    """Display page for the timetable PDF customisation form."""

    navigationEntry = navigation.NETimeTableCustomizePDF
    menu_entry_name = 'timetable'

    def _getBody(self, params):
        return WTimeTableCustomizePDF(self._conf).getHTML(params)
class WConfModifPendingQueuesList(wcomponents.WTemplated):
    """Template for one pending-queue tab (a list of pending principals).

    FIX: the three comparator methods duplicated identical None-ordering
    logic; it is now factored into a single helper (None sorts first).
    """

    def __init__(self, url, title, target, list, pType):
        # NOTE: the parameter name `list` shadows the builtin but is kept for
        # backward compatibility with existing keyword-argument callers.
        self._postURL = url
        self._title = title
        self._target = target
        self._list = list
        self._pType = pType

    @staticmethod
    def _cmpNoneAware(cp1, cp2, keyFunc):
        """Compare two items, ordering None before anything else; otherwise
        compare the values produced by ``keyFunc``."""
        if cp1 is None and cp2 is not None:
            return -1
        elif cp1 is not None and cp2 is None:
            return 1
        elif cp1 is None and cp2 is None:
            return 0
        return cmp(keyFunc(cp1), keyFunc(cp2))

    def _cmpByConfName(self, cp1, cp2):
        # Sort by conference title.
        return self._cmpNoneAware(cp1, cp2, lambda cp: cp.getTitle())

    def _cmpByContribName(self, cp1, cp2):
        # Sort by the title of the associated contribution.
        return self._cmpNoneAware(cp1, cp2, lambda cp: cp.getContribution().getTitle())

    def _cmpBySessionName(self, cp1, cp2):
        # Sort by the title of the associated session.
        return self._cmpNoneAware(cp1, cp2, lambda cp: cp.getSession().getTitle())

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        vars["postURL"] = self._postURL
        vars["title"] = self._title
        vars["target"] = self._target
        vars["list"] = self._list
        vars["pType"] = self._pType
        return vars
class WConfModifPendingQueues(wcomponents.WTemplated):
    def __init__(self, conf, aw, activeTab="submitters"):
        """Build the pending-queues view for ``conf``.

        :param aw: access wrapper of the current user
        :param activeTab: id of the tab to preselect (default "submitters")
        """
        self._conf = conf
        self._aw = aw
        self._activeTab = activeTab
        # Snapshot the three pending queues once at construction time.
        self._pendingSubmitters = self._conf.getPendingQueuesMgr().getPendingSubmitters()
        self._pendingManagers = self._conf.getPendingQueuesMgr().getPendingManagers()
        self._pendingCoordinators = self._conf.getPendingQueuesMgr().getPendingCoordinators()
def _createTabCtrl(self):
self._tabCtrl = wcomponents.TabControl()
url = urlHandlers.UHConfModifPendingQueues.getURL(self._conf)
url.addParam("tab", "conf_submitters")
self._tabConfSubmitters = self._tabCtrl.newTab("conf_submitters", _("Pending Conference Submitters"), str(url))
url.addParam("tab", "conf_managers")
self._tabConfManagers = self._tabCtrl.newTab("conf_managers", _("Pending Conference Managers"), str(url))
url.addParam("tab", "submitters")
self._tabSubmitters = self._tabCtrl.newTab("submitters", _("Pending Contribution Submitters"), str(url))
url.addParam("tab", "managers")
self._tabManagers = self._tabCtrl.newTab("managers", _("Pending Managers"), str(url))
url.addParam("tab", "coordinators")
self._tabCoordinators = self._tabCtrl.newTab("coordinators", _("Pending Coordinators"), str(url))
self._tabSubmitters.setEnabled(True)
tab = self._tabCtrl.getTabById(self._activeTab)
if tab is None:
tab = self._tabCtrl.getTabById("conf_submitters")
tab.setActive()
def getVars(self):
vars = wcomponents.WTemplated.getVars(self)
self._createTabCtrl()
list = []
url = ""
title = ""
if self._tabConfSubmitters.isActive():
# Pending conference submitters
url = urlHandlers.UHConfModifPendingQueuesActionConfSubm.getURL(self._conf)
url.addParam("tab","conf_submitters")
title = _("Pending chairpersons/speakers to become submitters")
target = _("Conference")
pType = "ConfSubmitters"
emails = [x.principal.email for x in self._conf.as_event.acl_entries
if x.type == PrincipalType.email and x.has_management_role('submit', explicit=True)]
chairs = {c.getEmail().strip().lower(): c for c in self._conf.getChairList() if c.getEmail().strip()}
for email in emails:
# XXX: this will fail if we ever have a submitter without a corresponding chairperson.
# i don't think this can happen unless you mess with the DB...
# if it does simply ignore KeyErrors here.. it's legacy code anyway!
list.append((email, [chairs[email]]))
elif self._tabConfManagers.isActive():
url = url_for('event_mgmt.confModifPendingQueues-actionConfManagers', self._conf)
title = _("Pending chairpersons to become managers")
target = _("Conference")
pType = "ConfManagers"
emails = [x.principal.email for x in self._conf.as_event.acl_entries
if x.type == PrincipalType.email and x.has_management_role()]
chairs = {c.getEmail().strip().lower(): c for c in self._conf.getChairList() if c.getEmail().strip()}
for email in emails:
# XXX: this will fail if we ever have a pending manager without a corresponding chairperson.
# i don't think this can happen unless you mess with the DB...
# if it does simply ignore KeyErrors here.. it's legacy code anyway!
list.append((email, [chairs[email]]))
elif self._tabSubmitters.isActive():
# Pending submitters
keys = self._conf.getPendingQueuesMgr().getPendingSubmittersKeys(True)
url = urlHandlers.UHConfModifPendingQueuesActionSubm.getURL(self._conf)
url.addParam("tab", "submitters")
title = _("Pending authors/speakers to become submitters")
target = _("Contribution")
pType = "Submitters"
for key in keys:
list.append((key, self._pendingSubmitters[key][:]))
elif self._tabManagers.isActive():
# Pending managers
keys = self._conf.getPendingQueuesMgr().getPendingManagersKeys(True)
url = urlHandlers.UHConfModifPendingQueuesActionMgr.getURL(self._conf)
url.addParam("tab", "managers")
title = _("Pending conveners to become managers")
target = _("Session")
pType = "Managers"
for key in keys:
list.append((key, self._pendingManagers[key][:]))
#list.sort(conference.SessionChair._cmpFamilyName)
elif self._tabCoordinators.isActive():
# Pending coordinators
keys = self._conf.getPendingQueuesMgr().getPendingCoordinatorsKeys(True)
url = urlHandlers.UHConfModifPendingQueuesActionCoord.getURL(self._conf)
url.addParam("tab", "coordinators")
title = _("Pending conveners to become coordinators")
target = _("Session")
pType = "Coordinators"
for key in keys:
list.append((key, self._pendingCoordinators[key][:]))
list.sort(conference.ConferenceParticipation._cmpFamilyName)
html = WConfModifPendingQueuesList(str(url), title, target, list, pType).getHTML()
vars["pendingQueue"] = wcomponents.WTabControl(self._tabCtrl, self._aw).getHTML(html)
return vars
class WPConfModifPendingQueuesBase(WPConfModifListings):
    """Base page for all pending-queue pages; remembers the requested tab."""

    sidemenu_option = 'lists'

    def __init__(self, rh, conf, activeTab=""):
        WPConfModifListings.__init__(self, rh, conf)
        self._activeTab = activeTab
class WPConfModifPendingQueues(WPConfModifPendingQueuesBase):
    """Page displaying the pending privilege queues of a conference."""

    def _getTabContent(self, params):
        # Delegate all rendering to the pending-queues template component.
        return WConfModifPendingQueues(self._conf, self._getAW(), self._activeTab).getHTML()
class WPConfModifPendingQueuesRemoveConfMgrConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page shown before deleting pending conference managers."""

    def __init__(self, rh, conf, pendingConfMgrs):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingConfMgrs = pendingConfMgrs

    def _getTabContent(self, params):
        # Render each pending user as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingConfMgrs)
        message = {
            'challenge': _("Are you sure you want to delete the following users pending to become conference "
                           "managers?"),
            'target': "<ul>{0}</ul>".format(entries),
            'subtext': _("Please note that they will still remain as user"),
        }
        target_url = url_for('event_mgmt.confModifPendingQueues-actionConfManagers', self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingConfMgrs, "remove": _("remove")})
class WPConfModifPendingQueuesReminderConfMgrConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page before e-mailing account reminders to pending conference managers."""

    def __init__(self, rh, conf, pendingConfMgrs):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingConfMgrs = pendingConfMgrs

    def _getTabContent(self, params):
        # Render each pending user as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingConfMgrs)
        message = {
            'challenge': _("Are you sure that you want to send these users an email with a reminder to create an "
                           "account in Indico?"),
            'target': "<ul>{0}</ul>".format(entries),
        }
        target_url = url_for('event_mgmt.confModifPendingQueues-actionConfManagers', self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingConfMgrs, "reminder": _("reminder")},
                                    severity='accept')
class WPConfModifPendingQueuesRemoveConfSubmConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page shown before deleting pending conference submitters."""

    def __init__(self, rh, conf, pendingConfSubms):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingConfSubms = pendingConfSubms

    def _getTabContent(self, params):
        # Render each pending user as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingConfSubms)
        message = {
            'challenge': _("Are you sure you want to delete the following users pending to become submitters?"),
            'target': "<ul>{0}</ul>".format(entries),
            'subtext': _("Please note that they will still remain as user"),
        }
        target_url = urlHandlers.UHConfModifPendingQueuesActionConfSubm.getURL(self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingConfSubms, "remove": _("remove")})
class WPConfModifPendingQueuesReminderConfSubmConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page before e-mailing account reminders to pending conference submitters."""

    def __init__(self, rh, conf, pendingConfSubms):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingConfSubms = pendingConfSubms

    def _getTabContent(self, params):
        # Render each pending user as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingConfSubms)
        message = {
            'challenge': _("Are you sure that you want to send these users an email with a reminder to create an account in Indico?"),
            'target': "<ul>{0}</ul>".format(entries),
        }
        target_url = urlHandlers.UHConfModifPendingQueuesActionConfSubm.getURL(self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingConfSubms, "reminder": _("reminder")},
                                    severity='accept')
class WPConfModifPendingQueuesRemoveSubmConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page shown before deleting pending contribution submitters."""

    def __init__(self, rh, conf, pendingSubms):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingSubms = pendingSubms

    def _getTabContent(self, params):
        # Render each pending participant as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingSubms)
        message = {
            'challenge': _("Are you sure you want to delete the following participants pending to become submitters?"),
            'target': "<ul>{0}</ul>".format(entries),
            'subtext': _("Please note that they will still remain as participants"),
        }
        target_url = urlHandlers.UHConfModifPendingQueuesActionSubm.getURL(self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingSubms, "remove": _("remove")})
class WPConfModifPendingQueuesReminderSubmConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page before e-mailing account reminders to pending submitters."""

    def __init__(self, rh, conf, pendingSubms):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingSubms = pendingSubms

    def _getTabContent(self, params):
        # Render each pending user as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingSubms)
        message = {
            'challenge': _("Are you sure that you want to send these users an email with a reminder to create an account in Indico?"),
            'target': "<ul>{0}</ul>".format(entries),
        }
        target_url = urlHandlers.UHConfModifPendingQueuesActionSubm.getURL(self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingSubms, "reminder": _("reminder")},
                                    severity='accept')
class WPConfModifPendingQueuesRemoveMgrConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page shown before deleting pending session managers."""

    def __init__(self, rh, conf, pendingMgrs):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingMgrs = pendingMgrs

    def _getTabContent(self, params):
        # Render each pending convener as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingMgrs)
        message = {
            'challenge': _("Are you sure you want to delete the following conveners pending to become managers?"),
            'target': "<ul>{0}</ul>".format(entries),
            'subtext': _("Please note that they will still remain as conveners"),
        }
        target_url = urlHandlers.UHConfModifPendingQueuesActionMgr.getURL(self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingMgrs, "remove": _("remove")})
class WPConfModifPendingQueuesReminderMgrConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page before e-mailing account reminders to pending session managers."""

    def __init__(self, rh, conf, pendingMgrs):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingMgrs = pendingMgrs

    def _getTabContent(self, params):
        # Render each pending convener as an HTML list item.
        entries = ''.join("<li>{0}</li>".format(u) for u in self._pendingMgrs)
        message = {
            'challenge': _("Are you sure that you want to send these users an email with a reminder to create an account in Indico?"),
            'target': "<ul>{0}</ul>".format(entries),
        }
        target_url = urlHandlers.UHConfModifPendingQueuesActionMgr.getURL(self._conf)
        confirmation = wcomponents.WConfirmation()
        return confirmation.getHTML(message, target_url,
                                    {"pendingUsers": self._pendingMgrs, "reminder": _("reminder")})
class WPConfModifPendingQueuesRemoveCoordConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page shown before deleting pending session coordinators."""

    def __init__(self, rh, conf, pendingCoords):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingCoords = pendingCoords

    def _getTabContent(self, params):
        wc = wcomponents.WConfirmation()
        # Bug fix: the list must be built from self._pendingCoords; the
        # original iterated self._pendingMgrs, an attribute this class never
        # sets, which raised AttributeError at render time (copy-paste from
        # the managers page).
        pcoords = ''.join("<li>{0}</li>".format(s) for s in self._pendingCoords)
        msg = {'challenge': _("Are you sure you want to delete the following conveners pending to become coordinators?"),
               'target': "<ul>{0}</ul>".format(pcoords),
               'subtext': _("Please note that they will still remain as conveners")
               }
        url = urlHandlers.UHConfModifPendingQueuesActionCoord.getURL(self._conf)
        return wc.getHTML(msg, url, {
            "pendingUsers": self._pendingCoords,
            "remove": _("remove")
        })
class WPConfModifPendingQueuesReminderCoordConfirm(WPConfModifPendingQueuesBase):
    """Confirmation page before e-mailing account reminders to pending coordinators."""

    def __init__(self, rh, conf, pendingCoords):
        WPConfModifPendingQueuesBase.__init__(self, rh, conf)
        self._pendingCoords = pendingCoords

    def _getTabContent(self, params):
        wc = wcomponents.WConfirmation()
        # Bug fix: the list must be built from self._pendingCoords; the
        # original iterated self._pendingMgrs, an attribute this class never
        # sets, which raised AttributeError at render time (copy-paste from
        # the managers page).
        pcoords = ''.join("<li>{0}</li>".format(s) for s in self._pendingCoords)
        msg = {'challenge': _("Are you sure that you want to send these users an email with a reminder to create an account in Indico?"),
               'target': "<ul>{0}</ul>".format(pcoords)
               }
        url = urlHandlers.UHConfModifPendingQueuesActionCoord.getURL(self._conf)
        return wc.getHTML(
            msg, url, {
                "pendingUsers": self._pendingCoords,
                "reminder": _("reminder")
            })
class WConfModifReschedule(wcomponents.WTemplated):
    """Template asking confirmation of a reschedule to a given target day."""

    def __init__(self, targetDay):
        self._targetDay = targetDay

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        # Quote the target day so it can be embedded in an HTML attribute.
        day_attr = quoteattr(str(self._targetDay))
        vars["targetDay"] = day_attr
        return vars
class WPConfModifReschedule(WPConferenceModifBase):
    """Page wrapping the reschedule confirmation template."""

    def __init__(self, rh, conf, targetDay):
        WPConferenceModifBase.__init__(self, rh, conf)
        self._targetDay = targetDay

    def _getPageContent(self, params):
        # The form posts back to the reschedule URL of this conference.
        post_url = quoteattr(str(urlHandlers.UHConfModifReschedule.getURL(self._conf)))
        component = WConfModifReschedule(self._targetDay)
        return component.getHTML({"postURL": post_url})
# ============================================================================
# === Badges related =========================================================
# ============================================================================
##------------------------------------------------------------------------------------------------------------
"""
Badge Printing classes
"""
class WConfModifBadgePrinting(wcomponents.WTemplated):
    """ This class corresponds to the screen where badge templates are
        listed and can be created, edited, deleted, and tried.
    """

    def __init__(self, conference, user=None):
        self.__conf = conference
        self._user = user

    def _getBaseTemplateOptions(self):
        """Return selectable base templates: 'blank' plus the global ones."""
        # Templates of the 'default conference' act as global base templates.
        dconf = conference.CategoryManager().getDefaultConference()
        templates = dconf.getBadgeTemplateManager().getTemplates()
        options = [{'value': 'blank', 'label': _('Blank Page')}]
        for id, template in templates.iteritems():
            options.append({'value': id, 'label': template.getName()})
        return options

    def getVars(self):
        """Collect the template list, PDF options and action URLs for the page."""
        uh = urlHandlers
        templates = []
        sortedTemplates = self.__conf.getBadgeTemplateManager().getTemplates().items()
        # Sort templates alphabetically by display name (Python 2 cmp-style).
        sortedTemplates.sort(lambda x, y: cmp(x[1].getName(), y[1].getName()))
        for templateId, template in sortedTemplates:
            data = {
                'id': templateId,
                'name': template.getName(),
                'urlEdit': str(uh.UHConfModifBadgeDesign.getURL(self.__conf, templateId)),
                'urlDelete': str(uh.UHConfModifBadgePrinting.getURL(self.__conf, deleteTemplateId=templateId)),
                'urlCopy': str(uh.UHConfModifBadgePrinting.getURL(self.__conf, copyTemplateId=templateId))
            }
            templates.append(data)
        wcPDFOptions = WConfModifBadgePDFOptions(self.__conf)
        vars = wcomponents.WTemplated.getVars(self)
        vars['NewTemplateURL'] = str(uh.UHConfModifBadgeDesign.getURL(self.__conf,
                                     self.__conf.getBadgeTemplateManager().getNewTemplateId(),new = True))
        vars['CreatePDFURL'] = str(uh.UHConfModifBadgePrintingPDF.getURL(self.__conf))
        vars['templateList'] = templates
        vars['PDFOptions'] = wcPDFOptions.getHTML()
        vars['baseTemplates'] = self._getBaseTemplateOptions()
        return vars
class WConfModifBadgePDFOptions(wcomponents.WTemplated):
    """Template section with the PDF options used when printing badges."""

    def __init__(self, conference, showKeepValues=True, showTip=True):
        self.__conf = conference
        self.__showKeepValues = showKeepValues
        self.__showTip = showTip

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        # Page size names are presented alphabetically in the template.
        vars['PagesizeNames'] = sorted(PDFSizes().PDFpagesizes.keys())
        vars['PDFOptions'] = self.__conf.getBadgeTemplateManager().getPDFOptions()
        vars['ShowKeepValues'] = self.__showKeepValues
        vars['ShowTip'] = self.__showTip
        return vars
class WPBadgeBase(WPConfModifToolsBase):
    """Base page for badge/poster tools; injects the badge CSS and JS assets."""

    def getCSSFiles(self):
        return WPConfModifToolsBase.getCSSFiles(self) + self._asset_env['indico_badges_css'].urls()

    def getJSFiles(self):
        return WPConfModifToolsBase.getJSFiles(self) + self._includeJSPackage('badges_js')
class WPConfModifBadgePrinting(WPBadgeBase):
    """Badge printing management page."""

    def _setActiveTab(self):
        self._tabBadges.setActive()

    def _getTabContent(self, params):
        # Delegate rendering to the badge-printing template component.
        return WConfModifBadgePrinting(self._conf).getHTML()
##------------------------------------------------------------------------------------------------------------
"""
Badge Design classes
"""
class WConfModifBadgeDesign(wcomponents.WTemplated):
    """ This class corresponds to the screen where a template
        is designed inserting, dragging and editing items.
    """

    def __init__(self, conference, templateId, new=False, user=None):
        self.__conf = conference
        self.__templateId = templateId
        self.__new = new
        self._user = user

    def getVars( self ):
        """Prepare all variables the JS badge designer template needs."""
        vars = wcomponents.WTemplated.getVars( self )
        vars["baseURL"] = Config.getInstance().getBaseURL() ##base url of the application, used for the ruler images
        vars["cancelURL"] = urlHandlers.UHConfModifBadgePrinting.getURL(self.__conf, templateId = self.__templateId, cancel = True)
        vars["saveBackgroundURL"] = urlHandlers.UHConfModifBadgeSaveBackground.getURL(self.__conf, self.__templateId)
        vars["loadingIconURL"] = quoteattr(str(Config.getInstance().getSystemIconURL("loading")))
        vars["templateId"] = self.__templateId
        badgeDesignConfiguration = BadgeDesignConfiguration()
        from MaKaC.services.interface.rpc.json import encode as jsonEncode
        # Map item keys to their human-readable names for the JS designer.
        vars["translateName"]= jsonEncode(dict([(key, value[0]) for key, value in badgeDesignConfiguration.items_actions.iteritems()]))
        # Generate the JavaScript switch cases that instantiate designer items.
        cases = []
        for itemKey in badgeDesignConfiguration.items_actions.keys():
            case = []
            case.append('case "')
            case.append(itemKey)
            case.append('":')
            case.append('\n')
            case.append('items[itemId] = new Item(itemId, "')
            case.append(itemKey)
            case.append('");')
            case.append('\n')
            case.append('newDiv.html(items[itemId].toHTML());')
            case.append('\n')
            case.append('break;')
            cases.append("".join(case))
        vars['switchCases'] = "\n".join(cases)
        # Build the grouped <select> options for the insertable items.
        optgroups = []
        for optgroupName, options in badgeDesignConfiguration.groups:
            optgroup = []
            optgroup.append('<optgroup label="')
            optgroup.append(optgroupName)
            optgroup.append('">')
            optgroup.append('\n')
            for optionName in options:
                optgroup.append('<option value="%s">'%optionName)
                optgroup.append(badgeDesignConfiguration.items_actions[optionName][0])
                optgroup.append('</option>')
                optgroup.append('\n')
            optgroup.append('</optgroup>')
            optgroups.append("".join(optgroup))
        vars['selectOptions'] = "\n".join(optgroups)
        vars["backgroundPos"] = "Stretch"
        if self.__new:
            # Brand new template: start from an empty design.
            vars["saveTemplateURL"]=urlHandlers.UHConfModifBadgePrinting.getURL(self.__conf, new=True)
            vars["titleMessage"]= _("Creating new badge template")
            vars["editingTemplate"]="false"
            vars["templateData"]="[]"
            vars["hasBackground"]="false"
            vars["backgroundURL"]="false"
            vars["backgroundId"]=-1
        elif self.__templateId is None:
            # No template id: render an empty designer with an error title.
            vars["saveTemplateURL"]=urlHandlers.UHConfModifBadgePrinting.getURL(self.__conf)
            vars["titleMessage"]= _("No template id given")
            vars["editingTemplate"]="false"
            vars["templateData"]="[]"
            vars["hasBackground"]="false"
            vars["backgroundURL"]="false"
            vars["backgroundId"]=-1
        else:
            # Editing an existing template: load its stored data and background.
            vars["saveTemplateURL"]=urlHandlers.UHConfModifBadgePrinting.getURL(self.__conf)
            vars["titleMessage"]= _("Editing badge template")
            vars["editingTemplate"]="true"
            templateDataString = jsonEncode(self.__conf.getBadgeTemplateManager().getTemplateData(self.__templateId))
            vars["templateData"]= templateDataString
            usedBackgroundId = self.__conf.getBadgeTemplateManager().getTemplateById(self.__templateId).getUsedBackgroundId()
            vars["backgroundId"] = usedBackgroundId
            # -1 means the template has no background image attached.
            if usedBackgroundId != -1:
                vars["hasBackground"]="true"
                vars["backgroundURL"]=str(urlHandlers.UHConfModifBadgeGetBackground.getURL(self.__conf, self.__templateId, usedBackgroundId))
            else:
                vars["hasBackground"]="false"
                vars["backgroundURL"]="false"
        return vars
class WPConfModifBadgeDesign(WPBadgeBase):
    """Badge template design page."""

    def __init__(self, rh, conf, templateId=None, new=False, baseTemplateId="blank"):
        WPBadgeBase.__init__(self, rh, conf)
        self.__templateId = templateId
        self.__new = new
        self.__baseTemplate = baseTemplateId
        # When basing the new template on an existing global one, clone it
        # into this conference's template manager up-front and then treat it
        # as a pre-existing template for the rest of the request.
        if baseTemplateId != 'blank':
            dconf = conference.CategoryManager().getDefaultConference()
            templMan = conf.getBadgeTemplateManager()
            newId = templateId
            dconf.getBadgeTemplateManager().getTemplateById(baseTemplateId).clone(templMan, newId)
            # now, let's pretend nothing happened, and let the code
            # handle the template as if it existed before
            self.__new = False

    def _setActiveTab(self):
        self._tabBadges.setActive()

    def _getTabContent(self, params):
        wc = WConfModifBadgeDesign(self._conf, self.__templateId, self.__new)
        return wc.getHTML()
##------------------------------------------------------------------------------------------------------------
"""
Common PDF Options classes
"""
class WConfCommonPDFOptions(wcomponents.WTemplated):
    """ This class corresponds to a section of options
        that are common to each PDF in Indico.
    """

    def __init__(self, conference, user=None):
        self.__conf = conference
        self._user = user

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)

        def render_options(names, default):
            # Build the <option> tags, pre-selecting the default entry.
            pieces = []
            for name in names:
                marker = 'selected="selected"' if name == default else ''
                pieces.append('<option {0}>{1}</option>'.format(marker, name))
            return ''.join(pieces)

        vars['pagesizes'] = render_options(sorted(PDFSizes().PDFpagesizes.keys()), 'A4')
        vars['fontsizes'] = render_options(PDFSizes().PDFfontsizes, 'normal')
        return vars
# ============================================================================
# === Posters related ========================================================
# ============================================================================
##------------------------------------------------------------------------------------------------------------
"""
Poster Printing classes
"""
class WConfModifPosterPrinting(wcomponents.WTemplated):
    """ This class corresponds to the screen where poster templates are
        listed and can be created, edited, deleted, and tried.
    """

    def __init__(self, conference, user=None):
        self.__conf = conference
        self._user = user

    def _getFullTemplateListOptions(self):
        """Return global (default conference) plus local templates as options."""
        templates = {}
        templates['global'] = conference.CategoryManager().getDefaultConference().getPosterTemplateManager().getTemplates()
        templates['local'] = self.__conf.getPosterTemplateManager().getTemplates()
        options = []
        def _iterTemplatesToObjectList(key, templates):
            newList = []
            for id, template in templates.iteritems():
                pKey = ' (' + key + ')'
                # Only if the template is 'global' should it have the word prefixed.
                value = key + str(id) if key == 'global' else str(id)
                newList.append({'value': value,
                                'label': template.getName() + pKey})
            return newList
        for k, v in templates.iteritems():
            options.extend(_iterTemplatesToObjectList(k, v))
        return options

    def _getBaseTemplateListOptions(self):
        """Return templates usable as a starting point, plus a blank page."""
        templates = conference.CategoryManager().getDefaultConference().getPosterTemplateManager().getTemplates()
        options = [{'value': 'blank', 'label': _('Blank Page')}]
        for id, template in templates.iteritems():
            options.append({'value': id, 'label': template.getName()})
        return options

    def getVars(self):
        """Collect the poster template list, PDF options and action URLs."""
        uh = urlHandlers
        templates = []
        wcPDFOptions = WConfModifPosterPDFOptions(self.__conf)
        sortedTemplates = self.__conf.getPosterTemplateManager().getTemplates().items()
        # Sort by display name (Python 2 cmp-style comparator).
        sortedTemplates.sort(lambda item1, item2: cmp(item1[1].getName(), item2[1].getName()))
        for templateId, template in sortedTemplates:
            data = {
                'id': templateId,
                'name': template.getName(),
                'urlEdit': str(uh.UHConfModifPosterDesign.getURL(self.__conf, templateId)),
                'urlDelete': str(uh.UHConfModifPosterPrinting.getURL(self.__conf, deleteTemplateId=templateId)),
                'urlCopy': str(uh.UHConfModifPosterPrinting.getURL(self.__conf, copyTemplateId=templateId))
            }
            templates.append(data)
        vars = wcomponents.WTemplated.getVars(self)
        vars["NewTemplateURL"] = str(uh.UHConfModifPosterDesign.getURL(self.__conf, self.__conf.getPosterTemplateManager().getNewTemplateId(),new=True))
        vars["CreatePDFURL"]= str(uh.UHConfModifPosterPrintingPDF.getURL(self.__conf))
        vars["templateList"] = templates
        vars['PDFOptions'] = wcPDFOptions.getHTML()
        vars['baseTemplates'] = self._getBaseTemplateListOptions()
        vars['fullTemplateList'] = self._getFullTemplateListOptions()
        return vars
class WConfModifPosterPDFOptions(wcomponents.WTemplated):
    """Template section with the PDF options used when printing posters."""

    def __init__(self, conference, user=None):
        self.__conf = conference
        self._user = user

    def getVars(self):
        vars = wcomponents.WTemplated.getVars(self)
        # Build the <option> tags alphabetically, pre-selecting A4.
        pieces = []
        for name in sorted(PDFSizes().PDFpagesizes.keys()):
            marker = 'selected="selected"' if name == 'A4' else ''
            pieces.append('<option {0}>{1}</option>'.format(marker, name))
        vars['pagesizes'] = ''.join(pieces)
        return vars
class WPConfModifPosterPrinting(WPBadgeBase):
    """Poster printing management page."""

    def _setActiveTab(self):
        self._tabPosters.setActive()

    def _getTabContent(self, params):
        # Delegate rendering to the poster-printing template component.
        return WConfModifPosterPrinting(self._conf).getHTML()
##------------------------------------------------------------------------------------------------------------
"""
Poster Design classes
"""
class WConfModifPosterDesign( wcomponents.WTemplated ):
    """ This class corresponds to the screen where a template
        is designed inserting, dragging and editing items.
    """

    def __init__(self, conference, templateId, new=False, user=None):
        self.__conf = conference
        self.__templateId = templateId
        self.__new = new
        self._user = user

    def getVars(self):
        """Prepare all variables the JS poster designer template needs."""
        vars = wcomponents.WTemplated.getVars( self )
        vars["baseURL"] = Config.getInstance().getBaseURL()  # base url of the application, used for the ruler images
        vars["cancelURL"] = urlHandlers.UHConfModifPosterPrinting.getURL(self.__conf, templateId = self.__templateId, cancel = True)
        vars["saveBackgroundURL"] = urlHandlers.UHConfModifPosterSaveBackground.getURL(self.__conf, self.__templateId)
        vars["loadingIconURL"] = quoteattr(str(Config.getInstance().getSystemIconURL("loading")))
        vars["templateId"] = self.__templateId
        posterDesignConfiguration = PosterDesignConfiguration()
        from MaKaC.services.interface.rpc.json import encode as jsonEncode
        # Map item keys to their human-readable names for the JS designer.
        vars["translateName"]= jsonEncode(dict([(key, value[0]) for key, value in posterDesignConfiguration.items_actions.iteritems()]))
        # Generate the JavaScript switch cases that instantiate designer items.
        cases = []
        for itemKey in posterDesignConfiguration.items_actions.keys():
            case = []
            case.append('case "')
            case.append(itemKey)
            case.append('":')
            case.append('\n')
            case.append('items[itemId] = new Item(itemId, "')
            case.append(itemKey)
            case.append('");')
            case.append('\n')
            case.append('newDiv.html(items[itemId].toHTML());')
            case.append('\n')
            case.append('break;')
            cases.append("".join(case))
        vars['switchCases'] = "\n".join(cases)
        # Build the grouped <select> options for the insertable items.
        optgroups = []
        for optgroupName, options in posterDesignConfiguration.groups:
            optgroup = []
            optgroup.append('<optgroup label="')
            optgroup.append(optgroupName)
            optgroup.append('">')
            optgroup.append('\n')
            for optionName in options:
                optgroup.append('<option value="%s">'%optionName)
                optgroup.append(posterDesignConfiguration.items_actions[optionName][0])
                optgroup.append('</option>')
                optgroup.append('\n')
            optgroup.append('</optgroup>')
            optgroups.append("".join(optgroup))
        vars['selectOptions'] = "\n".join(optgroups)
        if self.__new:
            # Brand new template: start from an empty design.
            vars["saveTemplateURL"]=urlHandlers.UHConfModifPosterPrinting.getURL(self.__conf, new=True)
            vars["titleMessage"]= _("Creating new poster template")
            vars["hasBackground"]="false"
            vars["backgroundURL"]="false"
            vars["backgroundId"]=-1
            vars["backgroundPos"]="Stretch"
            vars["templateData"]="[]"
            vars["editingTemplate"]="false"
        elif self.__templateId is None:
            # No template id: render an empty designer with an error title.
            vars["saveTemplateURL"]=urlHandlers.UHConfModifPosterPrinting.getURL(self.__conf)
            vars["titleMessage"]= _("No template id given")
            vars["hasBackground"]="false"
            vars["backgroundURL"]="false"
            vars["backgroundId"]=-1
            vars["backgroundPos"]="Stretch"
            vars["templateData"] = "[]"
            vars["editingTemplate"]="false"
        else:
            # Editing an existing template: load its stored data and background.
            vars["saveTemplateURL"]=urlHandlers.UHConfModifPosterPrinting.getURL(self.__conf)
            vars["titleMessage"]= _("Editing poster template")
            vars["editingTemplate"]="true"
            templateDataString = jsonEncode(self.__conf.getPosterTemplateManager().getTemplateData(self.__templateId))
            vars["templateData"]= templateDataString
            usedBackgroundId = self.__conf.getPosterTemplateManager().getTemplateById(self.__templateId).getUsedBackgroundId()
            vars["backgroundId"] = usedBackgroundId
            # -1 means the template has no background image attached.
            if usedBackgroundId != -1:
                vars["hasBackground"]="true"
                vars["backgroundURL"]=str(urlHandlers.UHConfModifPosterGetBackground.getURL(self.__conf, self.__templateId, usedBackgroundId))
                vars["backgroundPos"]=self.__conf.getPosterTemplateManager().getTemplateById(self.__templateId).getBackgroundPosition(usedBackgroundId)
            else:
                vars["hasBackground"]="false"
                vars["backgroundURL"]="false"
                vars["backgroundPos"]="Stretch"
        return vars
class WPConfModifPosterDesign(WPBadgeBase):
    """Poster template design page."""

    def __init__(self, rh, conf, templateId=None, new=False, baseTemplateId="blank"):
        WPBadgeBase.__init__(self, rh, conf)
        self.__templateId = templateId
        self.__new = new
        self.__baseTemplate = baseTemplateId

    def _setActiveTab(self):
        self._tabPosters.setActive()

    def _getTabContent(self, params):
        # Delegate rendering to the poster designer template component.
        return WConfModifPosterDesign(self._conf, self.__templateId, self.__new).getHTML()
def sortByName(x,y):
    # Legacy Python 2 comparator: orders two persons by family name.
    return cmp(x.getFamilyName(),y.getFamilyName())
class WPConfModifPreviewCSS( WPConferenceDefaultDisplayBase ):
    """Page previewing the conference display page with a candidate CSS applied."""

    def __init__(self, rh, conf, **kwargs):
        WPConferenceDefaultDisplayBase.__init__(self, rh, conf, **kwargs)
        self._conf = conf
        self._cssTplsModule = ModuleHolder().getById("cssTpls")

    def _applyDecoration( self, body ):
        """Wrap *body* with just the page header and footer."""
        return "%s%s%s"%( self._getHeader(), body, self._getFooter() )

    def _getBody( self, params ):
        params['confId'] = self._conf.getId()
        params['conf'] = self._conf
        ###############################
        # injecting ConferenceDisplay #
        ###############################
        # Build a real conference display page and embed its decorated body
        # inside the preview page template.
        p = WPConferenceDisplay( self._rh, self._conf )
        p.event = self._conf.as_event
        p.logo_url = p.event.logo_url if p.event.has_logo else None
        params["bodyConf"] = p._applyConfDisplayDecoration(p._getBody(params))
        ###############################
        ###############################
        wc = WPreviewPage()
        return wc.getHTML(params)

    def _getHeadContent(self):
        path = Config.getInstance().getCssBaseURL()
        try:
            # Use this module file's mtime as a cache-busting query parameter.
            timestamp = os.stat(__file__).st_mtime
        except OSError:
            timestamp = 0
        printCSS = '<link rel="stylesheet" type="text/css" href="{}/Conf_Basic.css?{}">\n'.format(path, timestamp)
        if self._kwargs['css_url']:
            printCSS += '<link rel="stylesheet" type="text/css" href="{url}">'.format(url=self._kwargs['css_url'])
        return printCSS
class WPreviewPage( wcomponents.WTemplated ):
    """Template wrapper for the CSS preview page; all markup lives in the .tpl file."""
    pass
|
iemejia/coursera-dl
|
refs/heads/master
|
coursera/test/test_utils.py
|
2
|
# -*- coding: utf-8 -*-
"""
Test the utility functions.
"""
import datetime
import os
import pytest
import random
import json
from time import time
import requests
import six
from mock import Mock
from coursera import utils
from coursera import coursera_dl
from coursera import api
from coursera.test.utils import slurp_fixture
@pytest.mark.parametrize(
    "unclean,clean", [
        ('(23:90)', '23-90'),
        ('(:', '-'),
        ('a téest &and a@noòtheèr', 'a_test_and_another'),
        ('Lecture 2.7 - Evaluation and Operators (16:25)',
         'Lecture_2.7_-_Evaluation_and_Operators_16-25'),
        ('Week 3: Data and Abstraction', 'Week_3-_Data_and_Abstraction'),
        (' (Week 1) BRANDING: Marketing Strategy and Brand Positioning',
         'Week_1_BRANDING-__Marketing_Strategy_and_Brand_Positioning'),
        ('test & " adfas', 'test___adfas'),
        (' ', ''),
        ('☂℮﹩т ω☤☂ℌ Ṳᾔ☤ḉ◎ⅾε', '__')
    ]
)
def test_clean_filename(unclean, clean):
    """Default (aggressive) cleaning maps names to filesystem-safe slugs."""
    assert utils.clean_filename(unclean) == clean
@pytest.mark.parametrize(
    "unclean,clean", [
        ('(23:90)', '(23-90)'),
        ('(:', '(-'),
        ('a téest &and a@noòtheèr', 'a téest &and a@noòtheèr'),
        ('Lecture 2.7 - Evaluation and Operators (16:25)',
         'Lecture 2.7 - Evaluation and Operators (16-25)'),
        ('Week 3: Data and Abstraction',
         'Week 3- Data and Abstraction'),
        (' (Week 1) BRANDING: Marketing Strategy and Brand Positioning',
         ' (Week 1) BRANDING- Marketing Strategy and Brand Positioning'),
        ('test & " adfas', 'test & " adfas'),
        (' ', u'\xa0'),
        ('☂℮﹩т ω☤☂ℌ Ṳᾔ☤ḉ◎ⅾε', '☂℮﹩т ω☤☂ℌ Ṳᾔ☤ḉ◎ⅾε')
    ]
)
def test_clean_filename_minimal_change(unclean, clean):
    """Minimal cleaning only replaces characters unsafe for filenames (e.g. ':')."""
    assert utils.clean_filename(unclean, minimal_change=True) == clean
@pytest.mark.parametrize(
    "url,format", [
        ('https://class.coursera.org/sub?q=123_en&format=txt', 'txt'),
        ('https://class.coursera.org/sub?q=123_en&format=srt', 'srt'),
        ('https://d396qusza40orc.cloudfront.net/week7-4.pdf', 'pdf'),
        ('https://class.coursera.org/download.mp4?lecture_id=123', 'mp4'),
    ]
)
def test_get_anchor_format(url, format):
    """The anchor format is derived from the URL's extension or query string."""
    assert utils.get_anchor_format(url) == format
def test_random_string():
    """random_string returns a fixed-length string, reproducible per PRNG seed."""
    random.seed(0)  # set seed for reproducible tests

    res = utils.random_string(8)
    assert len(res) == 8

    # Python 2 and Python 3 use different strategies for generation of
    # PRNG, according to the documentation available at
    # https://docs.python.org/3.4/library/random.html#random.seed
    if six.PY2:
        assert res == '0UAqFzWs'
    else:
        assert res == '2yW4Acq9'
def test_fix_url_adds_scheme():
    # A bare hostname should gain an http:// scheme.
    assert utils.fix_url("www.coursera.org") == 'http://www.coursera.org'


def test_fix_url_removes_spaces():
    # Surrounding whitespace should be stripped before fixing.
    assert utils.fix_url(" www.coursera.org ") == 'http://www.coursera.org'
def test_format_combine_resource_works_correctly():
    # Section and lecture numbers are zero-padded and joined with the titles.
    name = coursera_dl.format_combine_number_resource(5, 4, "Moving_the_furniture", 'The_Basics', "mp4")
    assert name == '05_04_Moving_the_furniture_The_Basics.mp4'


def test_format_combine_resource_works_correctly_without_title():
    # An empty title must not leave a trailing separator.
    name = coursera_dl.format_combine_number_resource(5, 1, "Introduction", '', "mp4")
    assert name == '05_01_Introduction.mp4'
def test_format_resource_works_correctly():
rv = coursera_dl.format_resource(2, "Washing", "Dishes", "mp9")
assert '02_Washing_Dishes.mp9' == rv
def test_format_resource_works_correctly_without_title():
rv = coursera_dl.format_resource(1, "Introduction", '', "mp2")
assert '01_Introduction.mp2' == rv
def test_format_section_works_correctly():
rv = coursera_dl.format_section(9, 'bob', 'WEAVING', False)
assert '09_bob' == rv
def test_format_section_works_correctly_with_verbose():
rv = coursera_dl.format_section(9, 'bill', 'WEAVING', True)
assert 'WEAVING_09_bill' == rv
def test_fix_url_doesnt_alters_empty_url():
    """fix_url passes None and the empty string through untouched."""
    assert utils.fix_url(None) is None
    assert utils.fix_url("") == ""
def test_decode_input():
    """decode_input always yields a text (unicode) object, even for
    non-ASCII byte input such as Cyrillic or CJK characters."""
    for raw in (str("/home/user/темп"), str("22少女時代22")):
        decoded = utils.decode_input(raw)
        assert isinstance(decoded, six.text_type), "Decoded input is not a text type."
def test_total_seconds():
    """total_seconds converts a timedelta into whole seconds."""
    # 30 days * 24h * 60m * 60s == 2592000
    assert coursera_dl.total_seconds(datetime.timedelta(days=30)) == 2592000
def test_is_course_complete_should_give_false_if_there_was_recent_update():
    """A course touched less than 30 days ago is not considered complete."""
    recent = time() - coursera_dl.total_seconds(datetime.timedelta(days=29))
    assert coursera_dl.is_course_complete(recent) is False


def test_is_course_complete_should_give_true_if_there_was_no_recent_update():
    """A course untouched for more than 30 days is considered complete."""
    stale = time() - coursera_dl.total_seconds(datetime.timedelta(days=31))
    assert coursera_dl.is_course_complete(stale) is True
def test_correct_formatting_of_class_URL():
    """get_syllabus_url builds the lecture *index* URL for a class."""
    assert coursera_dl.get_syllabus_url('bob', False) == \
        'https://class.coursera.org/bob/lecture/index'


def test_correct_formatting_of_class_with_preview_URL():
    """With preview=True the *preview* lecture URL is built instead."""
    assert coursera_dl.get_syllabus_url('bill', True) == \
        'https://class.coursera.org/bill/lecture/preview'
def test_parse_args():
    """parse_args extracts credentials and class names from the CLI."""
    parsed = coursera_dl.parse_args(['-u', 'bob', '-p', 'bill', 'posa-001'])
    assert parsed.about is False
    assert parsed.class_names == ['posa-001']
    assert parsed.username == 'bob'
    assert parsed.password == 'bill'
def get_mock_session(page_text):
    """Return a (mock page, session) pair whose GET always yields
    *page_text* and whose ``raise_for_status`` is a no-op mock."""
    page = Mock()
    page.text = page_text
    page.raise_for_status = Mock()
    session = requests.Session()
    session.get = Mock(return_value=page)
    return page, session
def test_get_page():
    """get_page fetches the URL once, checks the status, returns the body."""
    page, session = get_mock_session('<page/>')
    body = coursera_dl.get_page(session, 'http://www.not.here')
    session.get.assert_called_once_with('http://www.not.here')
    page.raise_for_status.assert_called_once_with()
    assert body == '<page/>'
def test_grab_hidden_video_url():
    """grab_hidden_video_url extracts the video URL from a fixture page.

    The fixture is read through a context manager so the file handle is
    closed deterministically (the original ``open(...).read()`` leaked the
    handle until garbage collection).
    """
    filename = os.path.join(
        os.path.dirname(__file__), "fixtures", "html",
        "hidden-videos_2.html")
    with open(filename) as fixture:
        page_text = fixture.read()
    page_obj, session = get_mock_session(page_text)
    p = coursera_dl.grab_hidden_video_url(session,
                                          'http://www.hidden.video')
    assert 'video1.mp4' == p
# Each case pairs an HTML fixture with the JSON fixture holding the links
# that _extract_links_from_text is expected to produce from it.
@pytest.mark.parametrize(
    "input_file,expected_file", [
        ('html/supplement-deduplication.html', 'json/supplement-deduplication.json'),
        ('html/supplement-skip-sites.html', 'json/supplement-skip-sites.json'),
        ('html/supplement-two-zips.html', 'json/supplement-two-zips.json'),
    ]
)
def test_extract_supplement_links(input_file, expected_file):
    """_extract_links_from_text output matches the stored JSON fixture.

    Parameters renamed from ``input``/``output``: both shadowed builtins.
    """
    page_text = slurp_fixture(input_file)
    expected_output = json.loads(slurp_fixture(expected_file))
    course = api.CourseraOnDemand(session=None, course_id='0')
    links = course._extract_links_from_text(page_text)
    # Round-trip through JSON: the easiest way to convert nested tuples
    # to lists before comparing with the fixture.
    links = json.loads(json.dumps(links))
    assert expected_output == links
|
levkar/odoo
|
refs/heads/10.0
|
addons/delivery/models/delivery_price_rule.py
|
24
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, api
import odoo.addons.decimal_precision as dp
class PriceRule(models.Model):
    # One line of a delivery carrier's pricing grid.  When the chosen
    # ``variable`` compares true against ``max_value`` (per ``operator``),
    # the rule's price — a fixed part plus a variable part — applies.
    _name = "delivery.price.rule"
    _description = "Delivery Price Rules"
    _order = 'sequence, list_price'

    @api.depends('variable', 'operator', 'max_value', 'list_base_price', 'list_price', 'variable_factor')
    def _get_name(self):
        # Build a human-readable summary of the rule, e.g.
        # "if weight <= 5.0 then fixed price 10.0".  Shows the fixed
        # component, the variable component, or both, depending on which
        # price fields are non-zero.
        for rule in self:
            name = 'if %s %s %s then' % (rule.variable, rule.operator, rule.max_value)
            if rule.list_base_price and not rule.list_price:
                name = '%s fixed price %s' % (name, rule.list_base_price)
            elif rule.list_price and not rule.list_base_price:
                name = '%s %s times %s' % (name, rule.list_price, rule.variable_factor)
            else:
                name = '%s fixed price %s and %s times %s Extra' % (name, rule.list_base_price, rule.list_price, rule.variable_factor)
            rule.name = name

    # Human-readable rule summary, recomputed from the fields above.
    name = fields.Char(compute='_get_name')
    # Evaluation order among a carrier's rules (lower values first).
    sequence = fields.Integer(required=True, help="Gives the sequence order when calculating delivery carrier.", default=10)
    carrier_id = fields.Many2one('delivery.carrier', 'Carrier', required=True, ondelete='cascade')
    # Quantity the rule's condition is checked against.
    variable = fields.Selection([('weight', 'Weight'), ('volume', 'Volume'), ('wv', 'Weight * Volume'), ('price', 'Price'), ('quantity', 'Quantity')], 'Variable', required=True, default='weight')
    operator = fields.Selection([('==', '='), ('<=', '<='), ('<', '<'), ('>=', '>='), ('>', '>')], 'Operator', required=True, default='<=')
    max_value = fields.Float('Maximum Value', required=True)
    # Quantity the variable price component is multiplied by.
    variable_factor = fields.Selection([('weight', 'Weight'), ('volume', 'Volume'), ('wv', 'Weight * Volume'), ('price', 'Price'), ('quantity', 'Quantity')], 'Variable Factor', required=True, default='weight')
    # Fixed component of the sale price.
    list_base_price = fields.Float(string='Sale Base Price', digits=dp.get_precision('Product Price'), required=True, default=0.0)
    # Variable component of the sale price (times variable_factor).
    list_price = fields.Float('Sale Price', digits=dp.get_precision('Product Price'), required=True, default=0.0)
    standard_price = fields.Float('Cost Price', digits=dp.get_precision('Product Price'), required=True, default=0.0)
|
bartosh/zipline
|
refs/heads/master
|
zipline/finance/performance/period.py
|
5
|
#
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Performance Period
==================
Performance Periods are updated with every trade. When calling
code needs a portfolio object that fulfills the algorithm
protocol, use the PerformancePeriod.as_portfolio method. See that
method for comments on the specific fields provided (and
omitted).
+---------------+------------------------------------------------------+
| key | value |
+===============+======================================================+
| ending_value | the total market value of the positions held at the |
| | end of the period |
+---------------+------------------------------------------------------+
| cash_flow | the cash flow in the period (negative means spent) |
| | from buying and selling assets in the period. |
| | Includes dividend payments in the period as well. |
+---------------+------------------------------------------------------+
| starting_value| the total market value of the positions held at the |
| | start of the period |
+---------------+------------------------------------------------------+
| starting_cash | cash on hand at the beginning of the period |
+---------------+------------------------------------------------------+
| ending_cash | cash on hand at the end of the period |
+---------------+------------------------------------------------------+
| positions | a list of dicts representing positions, see |
| | :py:meth:`Position.to_dict()` |
| | for details on the contents of the dict |
+---------------+------------------------------------------------------+
| pnl | Dollar value profit and loss, for both realized and |
| | unrealized gains. |
+---------------+------------------------------------------------------+
| returns | percentage returns for the entire portfolio over the |
| | period |
+---------------+------------------------------------------------------+
| cumulative\ | The net capital used (positive is spent) during |
| _capital_used | the period |
+---------------+------------------------------------------------------+
| max_capital\ | The maximum amount of capital deployed during the |
| _used | period. |
+---------------+------------------------------------------------------+
| period_close | The last close of the market in period. datetime in |
| | pytz.utc timezone. |
+---------------+------------------------------------------------------+
| period_open | The first open of the market in period. datetime in |
| | pytz.utc timezone. |
+---------------+------------------------------------------------------+
| transactions | all the transactions that were acrued during this |
| | period. Unset/missing for cumulative periods. |
+---------------+------------------------------------------------------+
"""
from __future__ import division
import logbook
import numpy as np
from collections import namedtuple
from zipline.assets import Future
try:
# optional cython based OrderedDict
from cyordereddict import OrderedDict
except ImportError:
from collections import OrderedDict
from six import itervalues, iteritems
import zipline.protocol as zp
# Module-level logger for performance-tracking diagnostics.
log = logbook.Logger('Performance')

# Datasource event type marking trade events.
TRADE_TYPE = zp.DATASOURCE_TYPE.TRADE

# Leverage/liquidation stats computed for a (sub)period's end state.
PeriodStats = namedtuple('PeriodStats',
                         ['net_liquidation',
                          'gross_leverage',
                          'net_leverage'])

# Performance accumulated before the latest intraperiod capital change.
PrevSubPeriodStats = namedtuple(
    'PrevSubPeriodStats', ['returns', 'pnl', 'cash_flow']
)

# Starting values of the subperiod following the latest capital change.
CurrSubPeriodStats = namedtuple(
    'CurrSubPeriodStats', ['starting_value', 'starting_cash']
)
def calc_net_liquidation(ending_cash, long_value, short_value):
    """Net liquidation value: cash plus the signed value of all positions.

    ``short_value`` is expected to be negative, so this is a plain sum.
    """
    net_liq = ending_cash + long_value + short_value
    return net_liq
def calc_leverage(exposure, net_liq):
    """Return exposure divided by net liquidation value.

    A zero net liquidation value means leverage is undefined; report it
    as infinite rather than dividing by zero.
    """
    if net_liq == 0:
        return np.inf
    return exposure / net_liq
def calc_period_stats(pos_stats, ending_cash):
    """Bundle net liquidation and leverage stats for a period's end state.

    ``pos_stats`` must expose long_value, short_value, gross_exposure and
    net_exposure attributes (a PositionStats-like object).
    """
    net_liq = calc_net_liquidation(ending_cash,
                                   pos_stats.long_value,
                                   pos_stats.short_value)
    return PeriodStats(
        net_liquidation=net_liq,
        gross_leverage=calc_leverage(pos_stats.gross_exposure, net_liq),
        net_leverage=calc_leverage(pos_stats.net_exposure, net_liq))
def calc_payout(multiplier, amount, old_price, price):
    """Mark-to-market payout of ``amount`` contracts on a price move from
    ``old_price`` to ``price``, scaled by the contract multiplier."""
    price_change = price - old_price
    return price_change * multiplier * amount
class PerformancePeriod(object):
    """Tracks pnl, returns, cash and exposure for one reporting period.

    The period is updated as transactions, commissions and dividends
    arrive.  Use :meth:`as_portfolio` / :meth:`as_account` to expose the
    state to algorithms, and :meth:`to_dict` for serialization (see the
    module docstring for the field layout).
    """

    def __init__(
            self,
            starting_cash,
            data_frequency,
            period_open=None,
            period_close=None,
            keep_transactions=True,
            keep_orders=False,
            serialize_positions=True,
            name=None):

        self.data_frequency = data_frequency

        # Start and end of the entire period
        self.period_open = period_open
        self.period_close = period_close

        self.initialize(starting_cash=starting_cash,
                        starting_value=0.0,
                        starting_exposure=0.0)

        self.ending_value = 0.0
        self.ending_exposure = 0.0
        self.ending_cash = starting_cash

        self.subperiod_divider = None

        # Keyed by asset, the previous last sale price of positions with
        # payouts on price differences, e.g. Futures.
        #
        # This dt is not the previous minute to the minute for which the
        # calculation is done, but the last sale price either before the period
        # start, or when the price at execution.
        self._payout_last_sale_prices = {}

        self.keep_transactions = keep_transactions
        self.keep_orders = keep_orders

        self.name = name

        # An object to recycle via assigning new values
        # when returning portfolio information.
        # So as not to avoid creating a new object for each event
        self._portfolio_store = zp.Portfolio()
        self._account_store = zp.Account()
        self.serialize_positions = serialize_positions

    # Set via the ``position_tracker`` property; None until injected.
    _position_tracker = None

    def initialize(self, starting_cash, starting_value, starting_exposure):
        """Reset the period's performance stats to their starting values."""
        # Performance stats for the entire period, returned externally
        self.pnl = 0.0
        self.returns = 0.0
        self.cash_flow = 0.0
        self.starting_value = starting_value
        self.starting_exposure = starting_exposure
        self.starting_cash = starting_cash

        # The cumulative capital change occurred within the period
        self._total_intraperiod_capital_change = 0.0

        self.processed_transactions = {}
        self.orders_by_modified = {}
        self.orders_by_id = OrderedDict()

    @property
    def position_tracker(self):
        return self._position_tracker

    @position_tracker.setter
    def position_tracker(self, obj):
        if obj is None:
            raise ValueError("position_tracker can not be None")
        self._position_tracker = obj
        # we only calculate perf once we inject PositionTracker
        self.calculate_performance()

    def adjust_period_starting_capital(self, capital_change):
        """Apply a capital change to both starting and ending cash."""
        self.ending_cash += capital_change
        self.starting_cash += capital_change

    def rollover(self):
        """Start a new period, carrying over the previous ending values."""
        self.initialize(starting_cash=self.ending_cash,
                        starting_value=self.ending_value,
                        starting_exposure=self.ending_exposure)
        self.subperiod_divider = None

        # Refresh the payout reference prices: positions that are still
        # open start the new period from their latest sale price; entries
        # for positions no longer tracked are dropped.  Iterate over a
        # list copy because entries may be deleted during iteration.
        #
        # NOTE(review): the original tested membership against
        # ``self._payout_last_sale_prices`` itself, which is always true
        # for its own keys, making the deletion branch unreachable; the
        # membership test belongs against the tracker's positions.
        payout_assets = list(self._payout_last_sale_prices)
        for asset in payout_assets:
            if asset in self.position_tracker.positions:
                self._payout_last_sale_prices[asset] = \
                    self.position_tracker.positions[asset].last_sale_price
            else:
                del self._payout_last_sale_prices[asset]

    def initialize_subperiod_divider(self):
        """Snapshot current performance and open a new subperiod.

        Called when an intraperiod capital change occurs, so subsequent
        performance is computed relative to the post-change values.
        """
        self.calculate_performance()

        # Initialize a subperiod divider to stash the current performance
        # values. Current period starting values are set to equal ending values
        # of the previous subperiod
        self.subperiod_divider = SubPeriodDivider(
            prev_returns=self.returns,
            prev_pnl=self.pnl,
            prev_cash_flow=self.cash_flow,
            curr_starting_value=self.ending_value,
            curr_starting_cash=self.ending_cash
        )

    def set_current_subperiod_starting_values(self, capital_change):
        """Fold an intraperiod capital change into the current subperiod."""
        # Apply the capital change to the ending cash
        self.ending_cash += capital_change
        # Increment the total capital change occurred within the period
        self._total_intraperiod_capital_change += capital_change

        # Update the current subperiod starting cash to reflect the capital
        # change
        starting_value = self.subperiod_divider.curr_subperiod.starting_value
        self.subperiod_divider.curr_subperiod = CurrSubPeriodStats(
            starting_value=starting_value,
            starting_cash=self.ending_cash)

    def handle_dividends_paid(self, net_cash_payment):
        """Book a dividend cash payment and refresh performance."""
        if net_cash_payment:
            self.handle_cash_payment(net_cash_payment)
        self.calculate_performance()

    def handle_cash_payment(self, payment_amount):
        self.adjust_cash(payment_amount)

    def handle_commission(self, cost):
        # Deduct from our total cash pool.
        self.adjust_cash(-cost)

    def adjust_cash(self, amount):
        """Record a cash in/outflow (negative = cash spent)."""
        self.cash_flow += amount

    def adjust_field(self, field, value):
        """Overwrite an arbitrary attribute (e.g. broker-provided values)."""
        setattr(self, field, value)

    def _get_payout_total(self, positions):
        # Sum the mark-to-market payouts of all price-payout positions
        # (e.g. futures) relative to their stored reference prices.
        payouts = []
        for asset, old_price in iteritems(self._payout_last_sale_prices):
            pos = positions[asset]
            amount = pos.amount
            payout = calc_payout(
                asset.multiplier,
                amount,
                old_price,
                pos.last_sale_price)
            payouts.append(payout)

        return sum(payouts)

    def calculate_performance(self):
        """Recompute ending value/exposure/cash, pnl and returns."""
        pt = self.position_tracker
        pos_stats = pt.stats()
        self.ending_value = pos_stats.net_value
        self.ending_exposure = pos_stats.net_exposure

        payout = self._get_payout_total(pt.positions)

        self.ending_cash = self.starting_cash + self.cash_flow + \
            self._total_intraperiod_capital_change + payout
        total_at_end = self.ending_cash + self.ending_value

        # If there is a previous subperiod, the performance is calculated
        # from the previous and current subperiods. Otherwise, the performance
        # is calculated based on the start and end values of the whole period
        if self.subperiod_divider:
            starting_cash = self.subperiod_divider.curr_subperiod.starting_cash
            total_at_start = starting_cash + \
                self.subperiod_divider.curr_subperiod.starting_value

            # Performance for this subperiod
            pnl = total_at_end - total_at_start
            if total_at_start != 0:
                returns = pnl / total_at_start
            else:
                returns = 0.0

            # Performance for this whole period: compound the subperiod
            # returns onto the pre-change returns.
            self.pnl = self.subperiod_divider.prev_subperiod.pnl + pnl
            self.returns = \
                (1 + self.subperiod_divider.prev_subperiod.returns) * \
                (1 + returns) - 1
        else:
            total_at_start = self.starting_cash + self.starting_value
            self.pnl = total_at_end - total_at_start

            if total_at_start != 0:
                self.returns = self.pnl / total_at_start
            else:
                self.returns = 0.0

    def record_order(self, order):
        """Track an order, keyed both by modification dt and by id."""
        if self.keep_orders:
            try:
                dt_orders = self.orders_by_modified[order.dt]
                if order.id in dt_orders:
                    del dt_orders[order.id]
            except KeyError:
                self.orders_by_modified[order.dt] = dt_orders = OrderedDict()
            dt_orders[order.id] = order
            # to preserve the order of the orders by modified date
            # we delete and add back. (ordered dictionary is sorted by
            # first insertion date).
            if order.id in self.orders_by_id:
                del self.orders_by_id[order.id]
            self.orders_by_id[order.id] = order

    def handle_execution(self, txn):
        """Book a transaction's cash flow and, for futures, its payout."""
        self.cash_flow += self._calculate_execution_cash_flow(txn)

        asset = txn.asset
        if isinstance(asset, Future):
            try:
                old_price = self._payout_last_sale_prices[asset]
                pos = self.position_tracker.positions[asset]
                amount = pos.amount
                price = txn.price

                # Settle the payout accrued since the stored reference
                # price, then either drop the entry (position closed) or
                # move the reference to the execution price.
                cash_adj = calc_payout(
                    asset.multiplier, amount, old_price, price)
                self.adjust_cash(cash_adj)
                if amount + txn.amount == 0:
                    del self._payout_last_sale_prices[asset]
                else:
                    self._payout_last_sale_prices[asset] = price
            except KeyError:
                self._payout_last_sale_prices[asset] = txn.price

        if self.keep_transactions:
            try:
                self.processed_transactions[txn.dt].append(txn)
            except KeyError:
                self.processed_transactions[txn.dt] = [txn]

    @staticmethod
    def _calculate_execution_cash_flow(txn):
        """
        Calculates the cash flow from executing the given transaction
        """
        # Futures are marked-to-market via payouts; no cash changes hands
        # at execution.
        if isinstance(txn.asset, Future):
            return 0.0
        # Buying costs cash (negative flow), selling earns it.
        return -1 * txn.price * txn.amount

    # backwards compat. TODO: remove?
    @property
    def positions(self):
        return self.position_tracker.positions

    @property
    def position_amounts(self):
        return self.position_tracker.position_amounts

    def __core_dict(self):
        # Common serialized fields shared by every to_dict() variant.
        pos_stats = self.position_tracker.stats()
        period_stats = calc_period_stats(pos_stats, self.ending_cash)

        rval = {
            'ending_value': self.ending_value,
            'ending_exposure': self.ending_exposure,
            # this field is renamed to capital_used for backward
            # compatibility.
            'capital_used': self.cash_flow,
            'starting_value': self.starting_value,
            'starting_exposure': self.starting_exposure,
            'starting_cash': self.starting_cash,
            'ending_cash': self.ending_cash,
            'portfolio_value': self.ending_cash + self.ending_value,
            'pnl': self.pnl,
            'returns': self.returns,
            'period_open': self.period_open,
            'period_close': self.period_close,
            'gross_leverage': period_stats.gross_leverage,
            'net_leverage': period_stats.net_leverage,
            'short_exposure': pos_stats.short_exposure,
            'long_exposure': pos_stats.long_exposure,
            'short_value': pos_stats.short_value,
            'long_value': pos_stats.long_value,
            'longs_count': pos_stats.longs_count,
            'shorts_count': pos_stats.shorts_count,
        }

        return rval

    def to_dict(self, dt=None):
        """
        Creates a dictionary representing the state of this performance
        period. See header comments for a detailed description.

        Kwargs:
            dt (datetime): If present, only return transactions for the dt.
        """
        rval = self.__core_dict()

        if self.serialize_positions:
            positions = self.position_tracker.get_positions_list()
            rval['positions'] = positions

        # we want the key to be absent, not just empty
        if self.keep_transactions:
            if dt:
                # Only include transactions for given dt
                try:
                    transactions = [x.to_dict()
                                    for x in self.processed_transactions[dt]]
                except KeyError:
                    transactions = []
            else:
                transactions = \
                    [y.to_dict()
                     for x in itervalues(self.processed_transactions)
                     for y in x]
            rval['transactions'] = transactions

        if self.keep_orders:
            if dt:
                # only include orders modified as of the given dt.
                try:
                    orders = [x.to_dict()
                              for x in itervalues(self.orders_by_modified[dt])]
                except KeyError:
                    orders = []
            else:
                orders = [x.to_dict() for x in itervalues(self.orders_by_id)]
            rval['orders'] = orders

        return rval

    def as_portfolio(self):
        """
        The purpose of this method is to provide a portfolio
        object to algorithms running inside the same trading
        client. The data needed is captured raw in a
        PerformancePeriod, and in this method we rename some
        fields for usability and remove extraneous fields.
        """
        # Recycles containing objects' Portfolio object
        # which is used for returning values.
        # as_portfolio is called in an inner loop,
        # so repeated object creation becomes too expensive
        portfolio = self._portfolio_store
        # maintaining the old name for the portfolio field for
        # backward compatibility
        portfolio.capital_used = self.cash_flow
        portfolio.starting_cash = self.starting_cash
        portfolio.portfolio_value = self.ending_cash + self.ending_value
        portfolio.pnl = self.pnl
        portfolio.returns = self.returns
        portfolio.cash = self.ending_cash
        portfolio.start_date = self.period_open
        portfolio.positions = self.position_tracker.get_positions()
        portfolio.positions_value = self.ending_value
        portfolio.positions_exposure = self.ending_exposure
        return portfolio

    def as_account(self):
        """Return the recycled Account object populated from this period."""
        account = self._account_store

        pt = self.position_tracker
        pos_stats = pt.stats()
        period_stats = calc_period_stats(pos_stats, self.ending_cash)

        # If no attribute is found on the PerformancePeriod resort to the
        # following default values. If an attribute is found use the existing
        # value. For instance, a broker may provide updates to these
        # attributes. In this case we do not want to over write the broker
        # values with the default values.
        account.settled_cash = \
            getattr(self, 'settled_cash', self.ending_cash)
        account.accrued_interest = \
            getattr(self, 'accrued_interest', 0.0)
        account.buying_power = \
            getattr(self, 'buying_power', float('inf'))
        account.equity_with_loan = \
            getattr(self, 'equity_with_loan',
                    self.ending_cash + self.ending_value)
        account.total_positions_value = \
            getattr(self, 'total_positions_value', self.ending_value)
        account.total_positions_exposure = \
            getattr(self, 'total_positions_exposure', self.ending_exposure)
        account.regt_equity = \
            getattr(self, 'regt_equity', self.ending_cash)
        account.regt_margin = \
            getattr(self, 'regt_margin', float('inf'))
        account.initial_margin_requirement = \
            getattr(self, 'initial_margin_requirement', 0.0)
        account.maintenance_margin_requirement = \
            getattr(self, 'maintenance_margin_requirement', 0.0)
        account.available_funds = \
            getattr(self, 'available_funds', self.ending_cash)
        account.excess_liquidity = \
            getattr(self, 'excess_liquidity', self.ending_cash)
        account.cushion = \
            getattr(self, 'cushion',
                    self.ending_cash / (self.ending_cash + self.ending_value))
        account.day_trades_remaining = \
            getattr(self, 'day_trades_remaining', float('inf'))
        account.leverage = getattr(self, 'leverage',
                                   period_stats.gross_leverage)
        account.net_leverage = getattr(self, 'net_leverage',
                                       period_stats.net_leverage)
        account.net_liquidation = getattr(self, 'net_liquidation',
                                          period_stats.net_liquidation)
        return account
class SubPeriodDivider(object):
    """Marks the split point introduced by the latest intraperiod capital
    change.

    ``prev_subperiod`` holds the performance accumulated before the split;
    ``curr_subperiod`` holds the starting values of the subperiod that
    follows it.
    """

    def __init__(self, prev_returns, prev_pnl, prev_cash_flow,
                 curr_starting_value, curr_starting_cash):
        self.prev_subperiod = PrevSubPeriodStats(returns=prev_returns,
                                                 pnl=prev_pnl,
                                                 cash_flow=prev_cash_flow)
        self.curr_subperiod = CurrSubPeriodStats(
            starting_value=curr_starting_value,
            starting_cash=curr_starting_cash)
|
aptivate/django-filer
|
refs/heads/develop
|
filer/fields/__init__.py
|
12133432
| |
xuxiao19910803/edx-platform
|
refs/heads/master
|
lms/djangoapps/django_comment_client/tests/mock_cs_server/mock_cs_server.py
|
47
|
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import json
from logging import getLogger
logger = getLogger(__name__)
class MockCommentServiceRequestHandler(BaseHTTPRequestHandler):
    '''
    A handler for Comment Service POST and PUT requests.

    Both verbs are handled identically, so they share one implementation
    (the original duplicated the whole body in do_POST and do_PUT).
    '''
    protocol = "HTTP/1.0"

    def _handle_request(self):
        '''
        Shared implementation for POST and PUT requests.

        Used by the APIs for comment threads, commentables, comments,
        subscriptions, commentables, users.

        Echoes the server's canned JSON response when the request carries
        an API key header; responds with a 500 error (and returns False)
        otherwise.
        '''
        # Retrieve the request body into a dict.
        # It should have been sent in json format
        length = int(self.headers.getheader('content-length'))
        data_string = self.rfile.read(length)
        post_dict = json.loads(data_string)

        # Log the request; self.command is the HTTP verb ("POST"/"PUT"),
        # so the rendered message matches the original per-verb logging.
        logger.debug(
            "Comment Service received {0} request {1} to path {2}"
            .format(self.command, json.dumps(post_dict), self.path)
        )

        # Every good request has at least an API key
        if 'X-Edx-Api-Key' in self.headers:
            response = self.server._response_str

            # Log the response
            logger.debug("Comment Service: sending response %s" % json.dumps(response))

            # Send a response back to the client
            self.send_response(200)
            self.send_header('Content-type', 'application/json')
            self.end_headers()
            self.wfile.write(response)
        else:
            # Respond with failure
            self.send_response(500, 'Bad Request: does not contain API key')
            self.send_header('Content-type', 'text/plain')
            self.end_headers()
            return False

    def do_POST(self):
        '''
        Handle a POST request from the client.
        '''
        return self._handle_request()

    def do_PUT(self):
        '''
        Handle a PUT request from the client.
        '''
        return self._handle_request()
class MockCommentServiceServer(HTTPServer):
    '''
    A mock Comment Service server that responds
    to POST/PUT requests to localhost.
    '''
    def __init__(self, port_num, response=None):
        '''
        Initialize the mock Comment Service server instance.

        *port_num* is the localhost port to listen to

        *response* is a dictionary that will be JSON-serialized
        and sent in response to comment service requests.  Defaults to
        ``{'username': 'new', 'external_id': 1}``; the default is built
        per-instance (the original used a mutable dict as the default
        argument, the classic shared-default pitfall).
        '''
        if response is None:
            response = {'username': 'new', 'external_id': 1}
        self._response_str = json.dumps(response)

        handler = MockCommentServiceRequestHandler
        address = ('', port_num)
        HTTPServer.__init__(self, address, handler)

    def shutdown(self):
        '''
        Stop the server and free up the port
        '''
        # First call superclass shutdown()
        HTTPServer.shutdown(self)
        # We also need to manually close the socket
        self.socket.close()
|
paolodedios/tensorflow
|
refs/heads/master
|
tensorflow/python/kernel_tests/identity_op_py_test.py
|
13
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for IdentityOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class IdentityOpTest(test.TestCase):
  """Unit tests for array_ops.identity on dense and composite inputs."""

  def testInt32_6(self):
    # A flat Python list round-trips through identity unchanged.
    result = self.evaluate(array_ops.identity([1, 2, 3, 4, 5, 6]))
    self.assertAllEqual(np.array([1, 2, 3, 4, 5, 6]), result)

  def testInt32_2_3(self):
    # A 2x3 constant tensor keeps both its values and its shape.
    tensor_2x3 = constant_op.constant([10, 20, 30, 40, 50, 60], shape=[2, 3])
    result = self.evaluate(array_ops.identity(tensor_2x3))
    self.assertAllEqual(np.array([[10, 20, 30], [40, 50, 60]]), result)

  def testString(self):
    # Byte strings are passed through verbatim.
    strings = [b"A", b"b", b"C", b"d", b"E", b"f"]
    result = self.evaluate(array_ops.identity(strings))
    self.assertAllEqual(strings, result)

  def testIdentityShape(self):
    # The static shape survives identity for tensors, nested lists and
    # numpy arrays alike.
    with self.cached_session():
      expected_shape = [2, 3]
      values_2x3 = [[1, 2, 3], [6, 5, 4]]
      tensor = constant_op.constant(values_2x3)
      self.assertEqual(expected_shape, tensor.get_shape())
      self.assertEqual(expected_shape, array_ops.identity(tensor).get_shape())
      self.assertEqual(expected_shape,
                       array_ops.identity(values_2x3).get_shape())
      self.assertEqual(expected_shape,
                       array_ops.identity(np.array(values_2x3)).get_shape())

  def testCompositeTensor(self):
    # identity on a SparseTensor copies all three component tensors.
    original = sparse_tensor.SparseTensor([[3]], [1.0], [100])
    copied = array_ops.identity(original)
    self.assertAllEqual(original.indices, copied.indices)
    self.assertAllEqual(original.values, copied.values)
    self.assertAllEqual(original.dense_shape, copied.dense_shape)
# Run the tests via the TensorFlow test runner when invoked directly.
if __name__ == "__main__":
  test.main()
|
hackaugusto/contracts.py
|
refs/heads/master
|
contracts.py
|
1
|
'''
'''
import codecs
import encodings
import re
import sys
import tokenize
import unittest
from collections import namedtuple
try:
from cStringIO import StringIO
except:
# py3
from io import StringIO
__all__ = ('contract', )

# Contract block names recognized inside a decorated function body.
KEYWORDS = ('require', 'ensure', 'body')
ENDMARKER = (tokenize.ENDMARKER, '')

# Indices into a tokenize 5-tuple:
# (type, string, (start row, col), (end row, col), line)
ROWCOL_START = 2
ROWCOL_END = 3
LINE = 4

# Matches codec names of the form "contract-<encoding>".
ENCODING_REGEX = re.compile('contract-([a-z0-9]+)')

# Named views over the tokenize 5-tuple and over positions of interest
# inside a contract block.
Token = namedtuple('Token', ('type', 'string', 'tokenstart', 'tokenend', 'line'))
TokenPos = namedtuple('TokenPos', ('name_pos', 'block_indent', 'block_dedent'))
def contract_from_string(code):
    '''Run :func:`contract` over source code held in a plain string.'''
    readline = StringIO(code).readline
    return contract(readline)
def keyword(name):
    '''Build a synthetic NAME token 5-tuple placed at line 1, column 0.'''
    end_col = len(name)
    return (1, name, (1, 0), (1, end_col), name)
def op(name):
    '''Build a synthetic OP token 5-tuple placed at line 1, column 0.'''
    end_col = len(name)
    return (51, name, (1, 0), (1, end_col), name)
# To simplify token stream handling, all Operators and Delimiters tokens are returned using the generic token.OP token type.
# Pre-built single tokens, reused when synthesizing decorator source code.
CLOSE_PARENTESIS = op(')')
COLON = op(':')
COMMA = op(',')
DEF = keyword('def')
EQUAL = op('=')
# A NEWLINE token (type 4) terminating a logical source line.
NEWLINE = (4, '\n', (1, 0), (1, 1), '\n')
OPEN_PARENTESIS = op('(')
RETURN = keyword('return')
def contract(code):
    """Token-stream preprocessor implementing require/ensure/body contracts.

    Reads source lines from *code*, and for every function written as named
    blocks (names in KEYWORDS, e.g. require/ensure/body) synthesizes a
    decorator that runs the require block before and the ensure block after
    the original body.  Yields (token type, token string) pairs suitable for
    tokenize.untokenize().
    """
    tokens_peek = []    # look-ahead buffer of raw 5-tuple tokens
    tokens_buffer = []  # tokens already committed to the output
    decorators = []     # synthesized decorator definitions, emitted first
    # Normalize trailing whitespace/tabs before tokenizing.
    # NOTE(review): `.next` is Python 2 only.
    readline = (line.rstrip('\n \t').expandtabs() + "\n" for line in code).next
    token_generator = tokenize.generate_tokens(readline)

    def peek(iterations):
        # Return the token at look-ahead position `iterations`, pulling more
        # tokens from the generator on demand; None once the stream ends.
        missing = (iterations + 1) - len(tokens_peek)
        if missing > 0:
            for _ in range(missing):
                try:
                    tokens_peek.append(next(token_generator))
                except StopIteration:
                    return None
        if iterations < len(tokens_peek):
            return tokens_peek[iterations]
        return None

    def peek_name(iterations):
        # Token string at a look-ahead position (None at end of stream).
        value = peek(iterations)
        if value:
            return value[1]

    def is_token(position, type_, name):
        # True when the token at `position` matches both type and string.
        token_type, token_name, _, _, _ = peek(position)
        if token_type == type_ and token_name == name:
            return True
        return False

    def is_type(position, type_):
        # True when the token at `position` has the given type.
        token_type, _, _, _, _ = peek(position)
        if token_type == type_:
            return True
        return False

    def closing(position, open_, close):
        # Scan forward until the close matching the first open is found;
        # returns the close's look-ahead position.
        missing = 0
        while True:
            if open_(position):
                missing += 1
            if close(position):
                missing -= 1
            if missing:
                position += 1
            else:
                break
        return position

    def tokens_for(code):
        # Tokenize a standalone snippet.
        # NOTE(review): appears to be unused in this module.
        return list(tokenize.generate_tokens(StringIO(code).readline))

    def reformat(tokens, old_indent, new_indent):
        # (token type, token string, (tokenstart row, tokenstart col), (tokenend row, tokenend col), line)
        # NOTE(review): appears unused, and the loop unpacks only four of the
        # five tuple fields while referencing `erow_ecol`/`line` -- this would
        # fail if ever called; confirm before relying on it.
        return (
            (type, string, srow_scol, erow_ecol, line.replace(old_indent, new_indent))
            for type, string, srow_scol, trow_tcol in tokens
        )

    def create_contract(name, arguments_tokens, require, ensure):
        # Build the decorator `_<name>_contract<n>` that runs the require
        # tokens, calls the wrapped function, runs the ensure tokens, and
        # returns the result.
        # Details:
        # - tokenize.generate_tokens() return ENDMARKER that we need to remove
        # - we are using the INDET/DEDENT pair from the original source code (it is included in the boundaries)
        first = require or ensure
        last = ensure or require
        indent = tokens_peek[first[1]]
        dedent = tokens_peek[last[2]]
        name = '_{}_contract{}'.format(name, len(decorators))
        # def <decorator>(function):\n
        tokens = [
            DEF,
            keyword(name),
            OPEN_PARENTESIS,
            keyword('_function'),
            CLOSE_PARENTESIS,
            COLON,
            NEWLINE,
            indent,
        ]
        # def wrap(<args>):\n
        tokens.extend([
            DEF,
            keyword('wrap'),
            OPEN_PARENTESIS,
        ] + arguments_tokens + [
            CLOSE_PARENTESIS,
            COLON,
            NEWLINE,
            indent
        ])
        if require:
            # the slice is non-inclusive, the [2] is the actual index of the
            # dedent, so with this slice we do *not* include the dedent
            tokens.extend(tokens_peek[require[1]+1:require[2]])
        # result = _function(<args>)\n
        tokens.extend([
            keyword('result'),
            EQUAL,
            keyword('_function'),
            OPEN_PARENTESIS,
        ] + arguments_tokens + [
            CLOSE_PARENTESIS,
            NEWLINE,
        ])
        if ensure:
            tokens.extend(tokens_peek[ensure[1]+1:ensure[2]])
        tokens.extend([
            RETURN,
            keyword('result'),
            NEWLINE,
            dedent,
            RETURN,
            keyword('wrap'),
            NEWLINE,
            dedent,
        ])
        decorators.extend(tokens)
        return name

    open_parentheses = lambda pos: is_token(pos, tokenize.OP, '(')
    close_parentheses = lambda pos: is_token(pos, tokenize.OP, ')')
    open_ident = lambda pos: is_type(pos, tokenize.INDENT)
    close_ident = lambda pos: is_type(pos, tokenize.DEDENT)

    # 0    1   2
    # def name(...):
    #     body:
    #
    # After the colon we have a NEWLINE and then a INDENT
    while peek(0):
        if is_token(0, tokenize.NAME, 'def'):
            name = peek_name(1)
            colon_pos = closing(2, open_parentheses, close_parentheses) + 1
            arguments_tokens = tokens_peek[3:colon_pos-1]
            if is_token(colon_pos, tokenize.OP, ':'):
                function_start = colon_pos + 1 + 1  # + NEWLINE + INDENT
                maybe_next_pos = function_start + 1  # + NAME
                block_positions = {}
                tokens = []
                indent = peek(colon_pos + 1 + 1)
                # Collect every `name:` block directly inside the function.
                while peek_name(maybe_next_pos) in KEYWORDS and is_token(maybe_next_pos + 1, tokenize.OP, ':'):
                    # we have a new block
                    name_pos = maybe_next_pos
                    if block_positions.get(peek_name(name_pos)) is not None:
                        raise SyntaxError('{} block defined more than once for the function {}'.format(peek_name(name_pos), name))
                    block_start = name_pos + 1 + 1 + 1  # colon + NEWLINE + INDENT
                    block_end = closing(block_start, open_ident, close_ident)
                    block_positions[peek_name(name_pos)] = TokenPos(name_pos, block_start, block_end)
                    maybe_next_pos = block_end + 1  # NAME
                if len(block_positions):
                    if not is_type(maybe_next_pos, tokenize.DEDENT):
                        raise SyntaxError('The function {} has code outside one of the blocks {}'.format(name, ', '.join(KEYWORDS)))
                    if 'body' not in block_positions:
                        raise SyntaxError('Missing body block for the function {}'.format(name))
                    if 'require' in block_positions or 'ensure' in block_positions:
                        decorator = create_contract(name, arguments_tokens, block_positions.get('require'), block_positions.get('ensure'))
                        # Prepend `@<decorator>` above the function header.
                        tokens.append((tokenize.OP, '@', None, None, None))
                        tokens.append((tokenize.NAME, decorator, None, None, None))
                        tokens.append((tokenize.NL, '\n', None, None, None))
                    tokens.extend(tokens_peek[:function_start])
                    # using INDENT/DEDENT from the body block
                    body = block_positions['body']
                    tokens.extend(tokens_peek[body[1]:body[2]+1])
                    # replace old token with the new ones
                    tokens_peek = tokens
        tokens_buffer.extend(tokens_peek)
        tokens_peek = []
    # Emit the synthesized decorators first, then the rewritten module body.
    for type_, name in reindent(decorators):
        yield type_, name
    for type_, name in reindent(tokens_buffer):
        yield type_, name
def contract_decoder(codec_decode):
    """Wrap a codec decode function so decoded source runs through contract()."""
    def decode(data):
        decoded = codec_decode(data)[0]
        preprocessed = contract(decoded)
        return tokenize.untokenize(preprocessed)
    return decode
def reindent(tokens):
    """Collapse token 5-tuples into (type, string) pairs, rewriting INDENT text.

    Every INDENT token bumps the nesting level and is emitted as that many
    spaces; all other tokens pass through with their original string.

    NOTE(review): DEDENT never decreases *level*, so indentation only grows
    across the stream -- confirm this is intentional.
    """
    level = 0
    for type_, name, _, _, _ in tokens:
        if type_ == tokenize.INDENT:  # was the magic number 5
            level += 1
            yield type_, ' ' * level
        else:
            yield type_, name
def contract_codec(name, codec):
    '''Wrapper for a given encoder that will add the contract pre-processing'''
    decoder = contract_decoder(codec.decode)
    attrs = {'decode': decoder}
    incrementaldecoder = type(
        'IncrementalDecoder', (codecs.BufferedIncrementalDecoder,), attrs)
    streamreader = type(
        'StreamReader', (codecs.StreamReader, incrementaldecoder), attrs)
    return codecs.CodecInfo(
        name=name,
        decode=decoder,
        incrementaldecoder=incrementaldecoder,
        streamreader=streamreader,
        # The encode side passes through untouched.
        encode=codec.encode,
        incrementalencoder=codec.incrementalencoder,
        streamwriter=codec.streamwriter,
    )
# This function is not exposed (codecs.register returns None, so the
# module-level name ends up None).
@codecs.register
def contract_search(codec):
    """Codec search function: serve any name matching ENCODING_REGEX."""
    match = ENCODING_REGEX.match(codec)
    if not match:
        return None
    base = encodings.search_function(match.group(1))
    return contract_codec(codec, base)
class ContractTestCase(unittest.TestCase):
    """Sanity checks for the contract() token stream."""

    def test_empty(self):
        tokens = list(contract(''))
        self.assertEqual(tokens, [ENDMARKER])

    def test_one(self):
        tokens = list(contract('1'))
        self.assertEqual(tokens, [(tokenize.NUMBER, '1'), ENDMARKER])

    def test_normal_function(self):
        expected = [
            (tokenize.NAME, 'def'),
            (tokenize.NAME, 'a'),
            (tokenize.OP, '('),
            (tokenize.OP, ')'),
            (tokenize.OP, ':'),
            (tokenize.NAME, 'pass'),
            ENDMARKER,
        ]
        self.assertEqual(list(contract('def a(): pass')), expected)
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--test', action='store_true', default=False,
                        help='flag to run the tests')
    parser.add_argument('--failfast', action='store_true', default=False,
                        help='unittest failfast')
    parser.add_argument('file', help='File to be preprocessed', nargs='?')
    args = parser.parse_args()

    if args.test:
        import doctest

        # Doctests first; abort before the unittest run if any fail.
        failures, total = doctest.testmod()
        if failures:
            sys.exit(failures)
        suite = unittest.defaultTestLoader.loadTestsFromTestCase(ContractTestCase)
        runner = unittest.TextTestRunner(failfast=args.failfast)
        result = runner.run(suite)
        problem_count = len(result.errors) + len(result.failures)
        if problem_count:
            sys.exit(problem_count)
    elif args.file:
        with open(args.file) as handler:
            print(tokenize.untokenize(contract(handler)))
    else:
        parser.print_help()
|
BRupholdt/KissTodo
|
refs/heads/master
|
todo/views.py
|
1
|
# KissTodo - a simple, Django based todo management tool.
# Copyright (C) 2011 Massimo Barbieri - http://www.massimobarbieri.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.template.loader import get_template
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.conf import settings
from django import forms
from random import choice
from datetime import datetime
import os
if settings.KISSTODO_USE_GAE:
from google.appengine.api import users
from google.appengine.api import mail
from models import *
def my_login_required(function):
    """Authentication decorator working both on GAE and plain Django.

    On GAE the real access check is done by app.yaml, so the view is called
    as-is; otherwise Django's login_required wraps it at call time.
    """
    import functools

    # functools.wraps preserves the view's __name__/__doc__, which Django's
    # URL reversing and debugging rely on.
    @functools.wraps(function)
    def decorated_view(*args, **kwargs):
        if settings.KISSTODO_USE_GAE:
            f = function  # GAE authentication, nothing to do (see app.yaml)
        else:
            f = login_required(function)  # Django authentication
        return f(*args, **kwargs)
    return decorated_view
def test_page(request):
    """Diagnostic page; POSTing op="simulate error" raises on purpose."""
    if request.POST.get('op') == 'simulate error':
        raise Exception("Simulated Error!")
    context = RequestContext(request, {'media_root': settings.MEDIA_ROOT})
    return render_to_response('todo/test_page.html', context)
@my_login_required
def board(request, mobile=False, selected_list_id=''):
    """Render the main todo board for the current user."""
    inbox = List.objects.get_or_create_inbox(_get_current_user(request))
    if settings.KISSTODO_USE_GAE:
        logout_url = users.create_logout_url(settings.KISSTODO_SITE_URL)
    else:
        logout_url = reverse('logout')
    context = RequestContext(request, {
        'inbox_list_id': inbox.id,
        'logout_url': logout_url,
        'mobile': mobile,
        'selected_list_id': selected_list_id,
    })
    return render_to_response('todo/board.html', context)
def _do_send_mail(t, request):
    """Send a GAE reminder e-mail for todo *t* to its list owner."""
    app_id = str(os.environ['APPLICATION_ID'])
    address_from = "todo_reminder@" + app_id + ".appspotmail.com"
    address_to = t.list.owner
    if '@' not in address_to:
        # Bare usernames default to Google accounts.
        address_to += "@gmail.com"
    template = get_template('todo/todo_notification_email.txt')
    body = template.render(RequestContext(request, {'todo': t}))
    mail.send_mail(sender=address_from, to=address_to,
                   subject="KissTodo notification", body=body)
def todo_send_mail(request):
    """Cron endpoint: e-mail reminders for due, uncompleted todos."""
    todos = Todo.objects.filter(notify_todo=True, complete=False,
                                due_date__isnull=False).order_by('due_date')
    res = "\nres:\n"
    now = datetime.now()
    for t in todos:
        # NOTE(review): `timedelta` is not in this module's visible imports --
        # presumably re-exported via `from models import *`; verify.
        notify_at = (t.due_date - timedelta(minutes=t.notify_minutes)
                     + timedelta(minutes=t.time_offset))
        if notify_at < now:
            res += "\nTODO:" + t.description + "\n"
            _do_send_mail(t, request)
            # Clear the flag so the reminder is sent only once.
            t.notify_todo = False
            t.save()
    return HttpResponse(str(now) + res, mimetype="text/plain")
@my_login_required
def todo_empty_trash(request):
    """Permanently delete every soft-deleted todo of the current user."""
    user = _get_current_user(request)
    for t in Todo.objects.deleted(user):
        t.delete_raw()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_clear_completed_items(request, list_id):
    """Hard-delete every completed todo of the given list."""
    l = List.objects.get(id=int(list_id))
    _check_permission(request, l)
    completed = [t for t in Todo.objects.filter(list__id=list_id) if t.complete]
    for t in completed:
        t.delete_raw()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_search(request, search_string, sort_mode, show_complete='F'):
    """Full-text search across the current user's todos."""
    todos = Todo.objects.search(_get_current_user(request), search_string)
    if show_complete == 'F':
        # Hide completed items unless explicitly requested.
        todos = [t for t in todos if not t.complete]
    context = RequestContext(request, {
        'todos': Todo.todo_sort(todos, sort_mode),
        'show_list': True,
    })
    return render_to_response('todo/todo_list.html', context)
@my_login_required
def todo_list(request, list_id, sort_mode, show_complete='F', mobile=False):
    """Render the items of one list (or of a virtual list).

    Virtual ids: -2 = hot items, -3 = trash, -4 = everything.
    """
    list_id_int = int(list_id)
    if list_id_int > 0:
        l = List.objects.get(id=list_id_int)
        _check_permission(request, l)
    show_list = False
    show_empty_trash = False
    show_clear_completed_items = False
    user = _get_current_user(request)
    if list_id_int == -2:
        todos = Todo.objects.hot(user)
        show_list = True
    elif list_id_int == -3:
        todos = Todo.objects.deleted(user)
        show_list = True
        show_empty_trash = len(todos) > 0
    elif list_id_int == -4:
        todos = Todo.objects.all_by_user(user)
        show_list = True
    else:
        todos = Todo.objects.filter(list__id=list_id)
    if list_id_int > 0:
        # Offer the cleanup action only when there is something to clear.
        show_clear_completed_items = any(t.complete for t in todos)
    context = RequestContext(request, {
        'list_id': list_id,
        'todos': Todo.todo_sort(todos, sort_mode),
        'show_list': show_list,
        'show_empty_trash': show_empty_trash,
        'show_clear_completed_items': show_clear_completed_items,
        'mobile': mobile,
    })
    return render_to_response('todo/todo_list.html', context)
@my_login_required
def list_list(request, selected_list_id, mobile=False):
    """Render the sidebar with all of the current user's lists."""
    user = _get_current_user(request)
    inbox = List.objects.get_or_create_inbox(user)
    context = RequestContext(request, {
        'lists': List.objects.filter(owner=user),
        'inbox_list': inbox,
        'selected_list_id': str(selected_list_id),
        'mobile': mobile,
    })
    return render_to_response('todo/list_list.html', context)
@my_login_required
def list_add(request):
    """Create a new list; answer -1 when the name is a reserved special list."""
    l = List()
    l.name = request.POST['name']
    l.owner = _get_current_user(request)
    if l.is_special():
        out = -1
    else:
        l.save()
        out = l.id
    return HttpResponse(out, mimetype="text/plain")
@my_login_required
def list_delete(request):
    """Delete the list given by POST list_id (owner only)."""
    target = List.objects.get(id=int(request.POST['list_id']))
    _check_permission(request, target)
    target.delete()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_delete(request):
    """Soft-delete the todo given by POST todo_id."""
    todo = Todo.objects_raw.get(id=int(request.POST['todo_id']))
    _check_permission(request, todo.list)
    todo.delete()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_undelete(request):
    """Restore a soft-deleted todo given by POST todo_id."""
    todo = Todo.objects_raw.get(id=int(request.POST['todo_id']))
    _check_permission(request, todo.list)
    todo.undelete()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_complete(request):
    """Toggle the completed state of the todo given by POST todo_id."""
    todo = Todo.objects_raw.get(id=int(request.POST['todo_id']))
    _check_permission(request, todo.list)
    todo.toggle_complete()
    todo.save()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_postpone(request):
    """Postpone the due date of the todo given by POST todo_id."""
    todo = Todo.objects_raw.get(id=int(request.POST['todo_id']))
    _check_permission(request, todo.list)
    todo.postpone()
    todo.save()
    return HttpResponse("", mimetype="text/plain")
def _parse_due_date(raw):
    """Parse a submitted due-date string; None for empty or unparsable input."""
    if not raw:
        return None
    formats = (
        '%Y/%m/%d %H:%M',  # 2012/12/21 15:42
        '%Y/%m/%d',        # 2012/12/21
        '%Y-%m-%dT%H:%M',  # 2012-12-21T15:42 - for html5 input type
        '%Y-%m-%d',        # 2012-12-21 - for html5 input type
    )
    for fmt in formats:
        try:
            return datetime.strptime(raw, fmt)
        except ValueError:  # narrowed from a bare except: only parse errors
            pass
    return None  # wrong format


@my_login_required
def todo_edit(request, todo_id, mobile=False):
    """GET: render the edit form for a todo. POST: apply submitted fields.

    Only the fields present in the POST are touched; after saving, redirect
    to the item's AJAX show view.
    """
    t = Todo.objects_raw.get(id=int(todo_id))
    _check_permission(request, t.list)
    if request.method == 'POST':
        if 'priority' in request.POST:
            t.priority = int(request.POST['priority'])
        if 'description' in request.POST:
            t.description = request.POST['description']
        if 'list_id' in request.POST:
            t.list = List.objects.get(id=int(request.POST['list_id']))
        if 'due_date' in request.POST:
            # Replaces the original four-deep nested bare try/except chain.
            t.due_date = _parse_due_date(request.POST['due_date'])
        if 'repeat_type' in request.POST:
            t.repeat_type = request.POST['repeat_type']
        if 'repeat_every' in request.POST and request.POST['repeat_every']:
            t.repeat_every = int(request.POST['repeat_every'])
        if 'notify_minutes' in request.POST and request.POST['notify_minutes']:
            t.notify_minutes = int(request.POST['notify_minutes'])
        if 'time_offset' in request.POST and request.POST['time_offset']:
            t.time_offset = int(request.POST['time_offset'])
        t.update_notify_todo()
        t.save()
        return HttpResponseRedirect('/todo/ajax/todo/show_item/' + str(t.id))
    else:
        return render_to_response(
            'todo/todo_edit.html',
            RequestContext(request, {
                'todo': t,
                'repeat_type_choiches': Todo.repeat_type_choiches,
                'lists': List.objects.filter(owner=_get_current_user(request)),
                'mobile': mobile,
            }))
@my_login_required
def list_edit(request, list_id):
    """GET: render the rename form. POST: apply the new name."""
    l = List.objects.get(id=int(list_id))
    _check_permission(request, l)
    if request.method != 'POST':
        return render_to_response('todo/list_edit.html',
                                  RequestContext(request, {'list': l}))
    if 'name' in request.POST:
        l.name = request.POST['name']
    l.save()
    return HttpResponse("", mimetype="text/plain")
@my_login_required
def todo_show_item(request, todo_id, mobile=False):
    """Render a single todo item fragment."""
    todo = Todo.objects_raw.get(id=int(todo_id))
    _check_permission(request, todo.list)
    context = RequestContext(request, {'todo': todo, 'mobile': mobile})
    return render_to_response('todo/todo_item.html', context)
@my_login_required
def todo_add(request):
    """Create a todo in the given list; a "!N" prefix (N in 1..3) sets priority."""
    l = List.objects.get(id=request.POST['list_id'])
    _check_permission(request, l)
    t = Todo()
    t.description = request.POST['description']
    for p in (1, 2, 3):
        prefix = "!" + str(p)
        if t.description[0:2] == prefix:
            t.priority = p
            t.description = t.description[2:]
    t.list = l
    t.save()
    return HttpResponse(t.id, mimetype="text/plain")
@my_login_required
def import_rtm(request):
    """Import todos from a Remember The Milk Atom feed.

    GET renders the URL form.  POST with an empty url deletes previously
    imported ("ATOM") todos of the current user; POST with a url fetches the
    feed and creates one Todo per entry, echoing a plain-text debug transcript.
    """
    if request.method == 'POST':
        form = ImportRtmForm(request.POST)
        if not form.is_valid(): return HttpResponse("FORM ERROR", mimetype="text/plain")
        url = form.cleaned_data['url']
        if url == "":
            # Empty feed URL means: remove everything previously imported.
            for t in Todo.objects_raw.filter(external_source="ATOM"):
                if t.list.owner==_get_current_user(request): t.delete_raw()
            return HttpResponse("Empty atom feed received. Cleanup complete.", mimetype="text/plain")
        import urllib2
        text = urllib2.urlopen(url).read()
        #return HttpResponse(text, mimetype="text/plain")
        from xml.dom import minidom
        from datetime import datetime
        #xmldoc = minidom.parseString(text.encode( "utf-8" ))
        xmldoc = minidom.parseString(text)
        entries=xmldoc.getElementsByTagName("entry")
        out=""
        for e in entries:
            t = Todo()
            t.description = e.getElementsByTagName("title")[0].firstChild.nodeValue
            t.deleted = False
            t.completed = False
            t.external_source = "ATOM"
            t.external_id = e.getElementsByTagName("id")[0].firstChild.nodeValue
            out += 'external_id: "'+e.getElementsByTagName("id")[0].firstChild.nodeValue+'"\n'
            out += "title: "+e.getElementsByTagName("title")[0].firstChild.nodeValue+"\n"
            # The entry content is a flat run of <span>s alternating between a
            # field label ("Due:", "Priority:", ...) and its value.
            count=0
            field_name = ""
            for c in e.getElementsByTagName("content")[0].getElementsByTagName("span"):
                #out += ('"'+(c.firstChild.nodeValue or u"*")+'" ')
                if count % 2 == 0:
                    # Label span: strip the trailing colon.
                    field_name = str(c.firstChild.nodeValue).strip()[0:-1]
                else:
                    out += '"%s"=>"%s"' % (field_name, c.firstChild.nodeValue)
                    if field_name == "Due":
                        t.due_date = _parse_date(c.firstChild.nodeValue)
                    elif field_name == "Priority":
                        t.priority = str(_parse_priority(c.firstChild.nodeValue))
                    elif field_name == "List":
                        t.list = _parse_list(c.firstChild.nodeValue, _get_current_user(request))
                    elif field_name == "URL":
                        # Append the linked URL to the description.
                        t.description += " (%s)" % (c.firstChild.firstChild.nodeValue,)
                    elif field_name == "Repeat every":
                        t.repeat_every = int(c.firstChild.nodeValue)
                    elif field_name == "Repeat type":
                        t.repeat_type = _parse_repeat_type(c.firstChild.nodeValue)
                    out += u"\n"
                count+=1
            out += t.__unicode__() +"\n"
            out += "\n"
            t.save()
        return HttpResponse(out, mimetype="text/plain")
    else:
        return render_to_response("todo/import_rtm_form.html",RequestContext(request, {'form': ImportRtmForm()}))
@my_login_required
def export_atom(request):
    """Export the current user's open todos as an Atom feed."""
    user = _get_current_user(request)
    # The accumulator used to shadow the builtin `list`; renamed and built
    # with a comprehension.
    todos = [t for t in Todo.objects.filter(complete=False)
             if t.list.owner == user]
    return render_to_response("todo/export_atom.atom",
                              RequestContext(request, {'todos': todos}))
def cache_manifest(request):
    """Serve the HTML5 offline cache manifest."""
    template = get_template('todo/cache.manifest')
    content = template.render(
        RequestContext(request, {'host': request.META.get('HTTP_HOST')}))
    return HttpResponse(content, mimetype="text/cache-manifest")
def redirect_login(request):
    """Render the client-side login redirect page."""
    context = RequestContext(request, {})
    return render_to_response("todo/redirect_login.html", context)
def do_logout(request):
    """Log the user out, then bounce to the site root."""
    logout(request)
    destination = settings.KISSTODO_SITE_URL
    return HttpResponseRedirect(destination)
def _parse_date(date):
# 'never' or 'Mon 13 Jun 11 18:30' or 'Mon 13 Jun 11'
if date=='never': return None
try:
dt = datetime.strptime(date, '%a %d %b %y %H:%M')
except:
dt = datetime.strptime(date, '%a %d %b %y')
return dt
def _parse_priority(priority):
# 'none', '1', '2', '3'
if priority=='none': return 4
return int(priority)
def _parse_repeat_type(r):
# 'none', 'd', 'w', 'm', 'y'
if r=='none': return ''
return r
def _parse_list(list, user):
    """Resolve an RTM list name to a List, creating it when missing.

    The parameter keeps its original name `list` (shadowing the builtin) so
    the caller-visible keyword interface is unchanged.
    """
    if list == 'Inbox':
        return List.objects.get_or_create_inbox(user)
    result, created = List.objects.get_or_create(name=list, owner=user)
    return result
    # Removed an unreachable `return int(priority)` left over from a
    # copy-paste of _parse_priority (it referenced an undefined name).
def _check_permission(request, list):
    """Raise when the current user does not own *list*."""
    if list.owner != _get_current_user(request):
        raise Exception("Permission denied")
def _get_current_user(request):
    """Return the current user's identifier (GAE nickname or Django username)."""
    if settings.KISSTODO_USE_GAE:
        user = users.get_current_user()
        # NOTE(review): implicitly returns None when no GAE user is
        # logged in -- same as the original behavior.
        if user:
            return user.nickname()
    else:
        return request.user.username
class ImportRtmForm(forms.Form):
    """Form for the Remember The Milk Atom-feed import (empty url = cleanup)."""
    #text = forms.CharField(widget=forms.Textarea(), label='Atom feed', required=False)
    url = forms.CharField(label='url', required=False)
|
uni-peter-zheng/autotest
|
refs/heads/master
|
frontend/tko/csv_encoder.py
|
4
|
import csv
import django.http
try:
import autotest.common as common
except ImportError:
import common
from autotest.frontend.afe import rpc_utils
class CsvEncoder(object):
    """Base class for TKO CSV exporters.

    Subclasses implement encode(), queueing rows via _append_output_row()
    and finishing with _build_response().
    """

    def __init__(self, request, response):
        # request: the original RPC request dict; response: the RPC result.
        self._request = request
        self._response = response
        self._output_rows = []

    def _append_output_row(self, row):
        # Queue one CSV row (a list of cell values).
        self._output_rows.append(row)

    def _build_response(self):
        # Serialize the queued rows into a CSV download response.
        response = django.http.HttpResponse(mimetype='text/csv')
        response['Content-Disposition'] = (
            'attachment; filename=tko_query.csv')
        writer = csv.writer(response)
        writer.writerows(self._output_rows)
        return response

    def encode(self):
        # Subclass responsibility.
        raise NotImplementedError
class UnhandledMethodEncoder(CsvEncoder):
    """Fallback encoder for RPC methods we do not know how to export."""

    def encode(self):
        message = ('Unhandled method %s (this indicates a bug)\r\n'
                   % self._request['method'])
        return rpc_utils.raw_http_response(message)
class SpreadsheetCsvEncoder(CsvEncoder):
    """Render grouped results as a 2-D grid: row/column headers plus cells.

    The response carries ('header_values': (row headers, column headers)) and
    'groups', each group knowing its (row, column) position.  Cells are laid
    out in a flat list indexed row-major.
    """

    def _total_index(self, group, num_columns):
        # Row-major flat index of the group's cell.
        row_index, column_index = group['header_indices']
        return row_index * num_columns + column_index

    def _group_string(self, group):
        # Cell text: "passes / completes", optional incomplete count,
        # optional extra info lines.
        result = '%s / %s' % (group['pass_count'], group['complete_count'])
        if group['incomplete_count'] > 0:
            result += ' (%s incomplete)' % group['incomplete_count']
        if 'extra_info' in group:
            result = '\n'.join([result] + group['extra_info'])
        return result

    def _build_value_table(self):
        # Flat row-major cell table; positions with no group stay ''.
        value_table = [''] * self._num_rows * self._num_columns
        for group in self._response['groups']:
            total_index = self._total_index(group, self._num_columns)
            value_table[total_index] = self._group_string(group)
        return value_table

    def _header_string(self, header_value):
        # A header is a tuple of values joined with '/'.
        return '/'.join(header_value)

    def _process_value_table(self, value_table, row_headers):
        # Emit one CSV row per grid row, prefixed with its row header.
        total_index = 0
        for row_index in xrange(self._num_rows):
            row_header = self._header_string(row_headers[row_index])
            row_end_index = total_index + self._num_columns
            row_values = value_table[total_index:row_end_index]
            self._append_output_row([row_header] + row_values)
            total_index += self._num_columns

    def encode(self):
        header_values = self._response['header_values']
        assert len(header_values) == 2
        row_headers, column_headers = header_values
        self._num_rows, self._num_columns = (len(row_headers),
                                             len(column_headers))
        value_table = self._build_value_table()
        # First CSV line: empty corner cell plus the column headers.
        first_line = [''] + [self._header_string(header_value)
                             for header_value in column_headers]
        self._append_output_row(first_line)
        self._process_value_table(value_table, row_headers)
        return self._build_response()
class TableCsvEncoder(CsvEncoder):
    """Encode a flat list of row dicts using the requested column specs."""

    def __init__(self, request, response):
        super(TableCsvEncoder, self).__init__(request, response)
        # Each spec is a (field, display name) pair.
        self._column_specs = request['columns']

    def _format_row(self, row_object):
        """Extract data from a row object into a list of strings"""
        return [row_object.get(field) for field, name in self._column_specs]

    def _encode_table(self, row_objects):
        header = [column_spec[1] for column_spec in self._column_specs]
        self._append_output_row(header)  # header row
        for row_object in row_objects:
            self._append_output_row(self._format_row(row_object))
        return self._build_response()

    def encode(self):
        return self._encode_table(self._response)
class GroupedTableCsvEncoder(TableCsvEncoder):
    """Like TableCsvEncoder, but the rows live under response['groups']."""

    def encode(self):
        groups = self._response['groups']
        return self._encode_table(groups)
class StatusCountTableCsvEncoder(GroupedTableCsvEncoder):
    """Grouped encoder that synthesizes a "passes / total" column.

    The "Test pass rate" column is rewired to a computed field that is
    injected into each row just before formatting.
    """

    _PASS_RATE_FIELD = '_test_pass_rate'

    def __init__(self, request, response):
        super(StatusCountTableCsvEncoder, self).__init__(request, response)
        # inject a more sensible field name for test pass rate
        for column_spec in self._column_specs:
            field, name = column_spec
            if name == 'Test pass rate':
                # NOTE(review): in-place assignment assumes column specs are
                # lists, not tuples -- confirm against the RPC request format.
                column_spec[0] = self._PASS_RATE_FIELD
                break

    def _format_pass_rate(self, row_object):
        # "passes / completes", plus incomplete count when non-zero.
        result = '%s / %s' % (row_object['pass_count'],
                              row_object['complete_count'])
        incomplete_count = row_object['incomplete_count']
        if incomplete_count:
            result += ' (%s incomplete)' % incomplete_count
        return result

    def _format_row(self, row_object):
        # Inject the computed field so the base class picks it up.
        row_object[self._PASS_RATE_FIELD] = self._format_pass_rate(row_object)
        return super(StatusCountTableCsvEncoder, self)._format_row(row_object)
# RPC method name -> encoder class; anything missing falls through to the
# special cases in _get_encoder_class().
_ENCODER_MAP = {
    'get_latest_tests': SpreadsheetCsvEncoder,
    'get_test_views': TableCsvEncoder,
    'get_group_counts': GroupedTableCsvEncoder,
}
def _get_encoder_class(request):
    """Pick the encoder class for an RPC request dict."""
    method = request['method']
    if method in _ENCODER_MAP:
        return _ENCODER_MAP[method]
    if method == 'get_status_counts':
        # Explicit columns mean a flat table; otherwise a spreadsheet grid.
        if 'columns' in request:
            return StatusCountTableCsvEncoder
        return SpreadsheetCsvEncoder
    return UnhandledMethodEncoder
def encoder(request, response):
    """Instantiate the right CsvEncoder subclass for *request*."""
    encoder_class = _get_encoder_class(request)
    return encoder_class(request, response)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.