repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
vvv1559/intellij-community
|
refs/heads/master
|
python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertOneElementIncompleteListToTuple_after.py
|
37
|
(42,)
|
alanjw/GreenOpenERP-Win-X86
|
refs/heads/7.0
|
python/Lib/site-packages/Werkzeug-0.8.3-py2.7.egg/werkzeug/testsuite/contrib/sessions.py
|
76
|
# -*- coding: utf-8 -*-
"""
werkzeug.testsuite.sessions
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Added tests for the sessions.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
import shutil
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.contrib.sessions import FilesystemSessionStore
from tempfile import mkdtemp, gettempdir
class SessionTestCase(WerkzeugTestCase):
    """Tests for the filesystem-backed session store
    (werkzeug.contrib.sessions.FilesystemSessionStore)."""

    def setup(self):
        # Fresh temp directory per test so stores don't share state.
        self.session_folder = mkdtemp()

    def teardown(self):
        shutil.rmtree(self.session_folder)

    def test_default_tempdir(self):
        # With no explicit path the store falls back to the system tempdir.
        store = FilesystemSessionStore()
        assert store.path == gettempdir()

    def test_basic_fs_sessions(self):
        # Exercise the new/modified flags through a save/load round trip.
        store = FilesystemSessionStore(self.session_folder)
        x = store.new()
        assert x.new
        assert not x.modified
        x['foo'] = [1, 2, 3]
        assert x.modified
        store.save(x)
        x2 = store.get(x.sid)
        # A freshly loaded session is neither new nor modified, and is a
        # distinct object that compares equal to the saved one.
        assert not x2.new
        assert not x2.modified
        assert x2 is not x
        assert x2 == x
        x2['test'] = 3
        assert x2.modified
        assert not x2.new
        store.save(x2)
        x = store.get(x.sid)
        store.delete(x)
        x2 = store.get(x.sid)
        # the session is not new when it was used previously.
        assert not x2.new

    def test_renewing_fs_session(self):
        # With renew_missing=True, fetching a deleted sid yields a *new*
        # session instead of a stale non-new one.
        store = FilesystemSessionStore(self.session_folder, renew_missing=True)
        x = store.new()
        store.save(x)
        store.delete(x)
        x2 = store.get(x.sid)
        assert x2.new

    def test_fs_session_lising(self):
        # NOTE(review): method name has a typo ("lising" -> "listing");
        # kept as-is because renaming would change the collected test id.
        # NOTE(review): xrange is Python-2-only.
        store = FilesystemSessionStore(self.session_folder, renew_missing=True)
        sessions = set()
        for x in xrange(10):
            sess = store.new()
            store.save(sess)
            sessions.add(sess.sid)
        # list() must report exactly the sids that were saved.
        listed_sessions = set(store.list())
        assert sessions == listed_sessions
def suite():
    """Build and return the unittest suite for the session tests."""
    session_suite = unittest.TestSuite()
    session_suite.addTest(unittest.makeSuite(SessionTestCase))
    return session_suite
|
gregomni/swift
|
refs/heads/master
|
utils/build_swift/build_swift/presets.py
|
18
|
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2020 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
"""
Swift preset parsing and handling functionality.
"""
from __future__ import absolute_import, unicode_literals
import functools
import io
from collections import OrderedDict, namedtuple
from six import StringIO
from six.moves import configparser
from . import class_utils
# Public API of this module.
__all__ = [
    'PresetError',
    'DuplicatePresetError',
    'DuplicateOptionError',
    'InterpolationError',
    'PresetNotFoundError',
    'UnparsedFilesError',

    'Preset',
    'PresetParser',
]


# -----------------------------------------------------------------------------
# Constants

# Section-name prefix that marks a config section as a preset definition.
_PRESET_PREFIX = 'preset: '


# -----------------------------------------------------------------------------
# Helpers

# Internal records used while parsing preset files:
# _Mixin       -- a reference to another preset to be expanded inline
# _Option      -- a single name/value option assignment
# _RawPreset   -- a preset whose mixins have not been expanded yet
# _UnparsedFile-- a file that failed to parse, with the triggering exception
_Mixin = namedtuple('_Mixin', ['name'])
_Option = namedtuple('_Option', ['name', 'value'])
_RawPreset = namedtuple('_RawPreset', ['name', 'options'])
_UnparsedFile = namedtuple('_UnparsedFile', ['filename', 'reason'])
def _interpolate_string(string, values):
if string is None:
return string
return string % values
def _remove_prefix(string, prefix):
if string.startswith(prefix):
return string[len(prefix):]
return string
def _catch_duplicate_option_error(func):
"""Decorator used to catch and rethrowing configparser's
DuplicateOptionError.
"""
if not hasattr(configparser, 'DuplicateOptionError'):
return func
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except configparser.DuplicateOptionError as e:
preset_name = _remove_prefix(e.section, _PRESET_PREFIX).strip()
raise DuplicateOptionError(preset_name, e.option)
return wrapper
def _catch_duplicate_section_error(func):
"""Decorator used to catch and rethrowing configparser's
DuplicateSectionError.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except configparser.DuplicateSectionError as e:
preset_name = _remove_prefix(e.section, _PRESET_PREFIX).strip()
raise DuplicatePresetError(preset_name)
return wrapper
# -----------------------------------------------------------------------------
# Errors
class PresetError(Exception):
    """Base class for preset errors."""

    def __init__(self, message=''):
        # BUG FIX: the original called Exception.__init__(self, message)
        # through super, which stored the exception *instance itself* in
        # `args` (args == (self, message)), breaking repr(args) and
        # pickling. Only the message belongs in args.
        super(PresetError, self).__init__(message)

        self.message = message

    def __str__(self):
        return self.message

    __repr__ = __str__
class DuplicatePresetError(PresetError):
    """Raised when parsing would overwrite an already-defined preset."""

    def __init__(self, preset_name):
        message = '{} already exists'.format(preset_name)
        super(DuplicatePresetError, self).__init__(message)

        self.preset_name = preset_name
class DuplicateOptionError(PresetError):
    """Raised when a single preset defines the same option twice."""

    def __init__(self, preset_name, option):
        message = '{} already exists in preset {}'.format(option, preset_name)
        super(DuplicateOptionError, self).__init__(message)

        self.preset_name = preset_name
        self.option = option
class InterpolationError(PresetError):
    """Raised when interpolating user-provided values into preset
    arguments fails.
    """

    def __init__(self, preset_name, option, rawval, reference):
        message = 'no value found for {} in "{}"'.format(reference, rawval)
        super(InterpolationError, self).__init__(message)

        self.preset_name = preset_name
        self.option = option
        self.rawval = rawval
        self.reference = reference
class PresetNotFoundError(PresetError):
    """Raised when a requested preset does not exist."""

    def __init__(self, preset_name):
        message = '{} not found'.format(preset_name)
        super(PresetNotFoundError, self).__init__(message)

        self.preset_name = preset_name
class UnparsedFilesError(PresetError):
    """Raised when one or more preset files could not be parsed."""

    def __init__(self, unparsed_files):
        message = 'unable to parse files: {}'.format(unparsed_files)
        super(UnparsedFilesError, self).__init__(message)

        self.unparsed_files = unparsed_files
# -----------------------------------------------------------------------------
@class_utils.generate_repr('name', 'options')
class Preset(object):
    """Wraps a preset name together with its fully-expanded option list."""

    __slots__ = ('name', 'options')

    def __init__(self, name, options):
        self.name = name
        self.options = options

    def __str__(self):
        return repr(self)

    @property
    def args(self):
        """Render the option list as command-line argument strings.

        Options without a value become bare flags (``--name``); valued
        options become ``--name=value``.
        """
        return [
            '--{}'.format(name) if value is None
            else '--{}={}'.format(name, value)
            for (name, value) in self.options
        ]
# -----------------------------------------------------------------------------
# Preset Parsing
class PresetParser(object):
    """Parser class used to read and manipulate Swift preset files."""

    def __init__(self):
        # allow_no_value lets flag-style options appear without '= value'.
        self._parser = configparser.RawConfigParser(allow_no_value=True)

        # Maps preset name -> _RawPreset (unexpanded) or Preset (resolved);
        # raw entries are lazily resolved and cached by _get_preset.
        self._presets = OrderedDict()

    # -------------------------------------------------------------------------
    # Properties

    @property
    def preset_names(self):
        """Returns a list of all parsed preset names in the order they were
        parsed.
        """
        return self._presets.keys()

    @property
    def presets(self):
        """Returns a list of all parsed presets in the order they were parsed.
        """
        return self._presets.values()

    # -------------------------------------------------------------------------
    # Parsing

    def _parse_raw_preset(self, section):
        """Parse one 'preset: <name>' config section into a _RawPreset."""
        preset_name = _remove_prefix(section, _PRESET_PREFIX)

        try:
            section_items = self._parser.items(section)
        except configparser.InterpolationMissingOptionError as e:
            raise InterpolationError(
                preset_name, e.option, e.rawval, e.reference)

        options = []
        for (name, value) in section_items:
            # Ignore the '--' separator, it's no longer necessary
            if name == 'dash-dash':
                continue

            # Parse out mixin options (one referenced preset per line).
            if name == 'mixin-preset':
                lines = value.strip().splitlines()
                options += [_Mixin(mixin_name.strip()) for mixin_name in lines]
                continue

            options.append(_Option(name, value))

        return _RawPreset(preset_name, options)

    def _parse_raw_presets(self):
        """Convert every preset section of the parser into a _RawPreset."""
        for section in self._parser.sections():
            # Skip all non-preset sections
            if not section.startswith(_PRESET_PREFIX):
                continue

            raw_preset = self._parse_raw_preset(section)
            self._presets[raw_preset.name] = raw_preset

    @_catch_duplicate_option_error
    @_catch_duplicate_section_error
    def read_file(self, filename):
        """Reads and parses a single file."""
        with io.open(filename, 'r') as fp:
            # read_file is the Python 3 name; readfp is the deprecated
            # Python 2 spelling.
            if hasattr(self._parser, 'read_file'):
                self._parser.read_file(fp)
            else:
                self._parser.readfp(fp)

        self._parse_raw_presets()

    def read_files(self, filenames):
        """Reads and parses preset files. Throws an UnparsedFilesError if any
        of the files couldn't be read.
        """
        unparsed_files = []
        for filename in filenames:
            try:
                self.read_file(filename)
            except Exception as e:
                # Collect failures so all files are attempted before raising.
                unparsed_files.append(_UnparsedFile(filename, e))

        if len(unparsed_files) > 0:
            raise UnparsedFilesError(unparsed_files)

        self._parse_raw_presets()

    @_catch_duplicate_option_error
    @_catch_duplicate_section_error
    def read_string(self, string):
        """Reads and parses a string containing preset definitions."""
        fp = StringIO(string)

        # ConfigParser changes drastically from Python 2 to 3
        if hasattr(self._parser, 'read_file'):
            self._parser.read_file(fp)
        else:
            self._parser.readfp(fp)

        self._parse_raw_presets()

    # -------------------------------------------------------------------------
    # Resolving

    def _resolve_preset_mixins(self, raw_preset):
        """Resolve all mixins in a preset, fully expanding the options list.

        Mixin references are replaced inline by the referenced preset's
        (recursively expanded) options.
        """
        assert isinstance(raw_preset, _RawPreset)

        # Expand mixin options.
        options = []
        for option in raw_preset.options:
            if isinstance(option, _Mixin):
                options += self._get_preset(option.name).options
            elif isinstance(option, _Option):
                options.append((option.name, option.value))
            else:
                # Should be unreachable.
                raise ValueError('invalid argument type: {}', option.__class__)

        return Preset(raw_preset.name, options)

    def _get_preset(self, name):
        """Return the resolved preset named `name`, expanding and caching it
        on first access; raises PresetNotFoundError if it doesn't exist.
        """
        preset = self._presets.get(name)
        if preset is None:
            raise PresetNotFoundError(name)

        if isinstance(preset, _RawPreset):
            preset = self._resolve_preset_mixins(preset)

            # Cache resolved preset
            self._presets[name] = preset

        return preset

    def _interpolate_preset_vars(self, preset, vars):
        """Return a copy of `preset` with %-style `vars` substituted into
        every option value; raises InterpolationError on a missing key.
        """
        interpolated_options = []
        for (name, value) in preset.options:
            try:
                value = _interpolate_string(value, vars)
            except KeyError as e:
                raise InterpolationError(
                    preset.name, name, value, e.args[0])

            interpolated_options.append((name, value))

        return Preset(preset.name, interpolated_options)

    def get_preset(self, name, raw=False, vars=None):
        """Returns the preset with the requested name or throws a
        PresetNotFoundError.

        If raw is False vars will be interpolated into the preset arguments.
        Otherwise presets will be returned without interpolation.

        Presets are retrieved using a dynamic caching algorithm that expands
        only the requested preset and its mixins recursively. Every expanded
        preset is then cached. All subsequent expansions or calls to
        `get_preset` for any pre-expanded presets will use the cached results.
        """
        vars = vars or {}

        preset = self._get_preset(name)
        if not raw:
            preset = self._interpolate_preset_vars(preset, vars)

        return preset
|
jjmleiro/hue
|
refs/heads/master
|
desktop/core/src/desktop/management/commands/create_desktop_app.py
|
35
|
# adapted from django-extensions (http://code.google.com/p/django-command-extensions/)
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import shutil
from django.core.management.base import CommandError, BaseCommand
from mako.template import Template
import logging
from django.utils.translation import ugettext as _
LOG = logging.getLogger(__name__)
class Command(BaseCommand):
    """Management command creating a Hue application skeleton.

    Usage: create_desktop_app app_name [app_dir]

    Copies the bundled app_template tree into app_dir/app_name, expanding
    template placeholders via copy_template().
    """
    help = _("Creates a Hue application directory structure.")
    args = "[appname]"
    label = _('application name')

    def handle(self, *args, **options):
        if len(args) > 2 or len(args) == 0:
            raise CommandError(_("Expected arguments: app_name [app_dir]"))
        app_name = args[0]
        if len(args) == 2:
            app_dir = args[1]
        else:
            app_dir = os.getcwd()
        app_template = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..', 'app_template'))
        assert os.path.isdir(app_template), _("App template dir missing: %(template)s.") % {'template': app_template}
        app_dir = os.path.join(app_dir, app_name)
        if not os.path.exists(app_template):
            raise CommandError(_("The template path, %(path)r, does not exist.") % {'path': app_template})
        # App name becomes a Python package name, so restrict the charset.
        if not re.search(r'^\w+$', app_name):
            raise CommandError(_("%(name)r is not a valid application name. Use only numbers, letters and underscores.") % {'name': app_name})
        try:
            os.makedirs(app_dir)
        # BUG FIX: 'except OSError, e' is Python-2-only syntax; 'as e' is
        # valid on Python 2.6+ and Python 3.
        except OSError as e:
            raise CommandError(e)
        copy_template(app_template, app_dir, app_name)
def copy_template(app_template, copy_to, app_name):
    """Copy the app template directory tree to `copy_to`.

    Directory and file names containing 'app_name'/'app_name_camel' are
    rewritten with the new app's name; non-image files are rendered as Mako
    templates with app_name / app_name_camel / app_name_spaces available.
    """
    app_name_spaces = " ".join(word.capitalize() for word in app_name.split("_"))
    app_name_camel = "".join(word.capitalize() for word in app_name.split("_"))

    # walks the template structure and copies it
    for directory, subdirs, files in os.walk(app_template):
        relative_dir = directory[len(app_template) + 1:].replace('app_name_camel', app_name_camel).replace('app_name', app_name)
        if not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        for f in files:
            # Skip compiled and hidden files.
            if f.endswith('.pyc') or f.startswith("."):
                continue
            path_old = os.path.join(directory, f)
            path_new = os.path.join(copy_to, relative_dir,
                                    f.replace('app_name_camel', app_name_camel).replace('app_name', app_name))
            LOG.info("Writing %s" % path_new)
            if path_old.endswith(".png"):
                # BUG FIX: the original used the Python-2-only file()
                # builtin and text-mode handles, which corrupts binary
                # data on Windows and leaks the source handle. Copy
                # byte-for-byte in binary mode with managed handles.
                with open(path_old, 'rb') as fp_old, open(path_new, 'wb') as fp_new:
                    shutil.copyfileobj(fp_old, fp_new)
            else:
                # BUG FIX: 'with' guarantees the output file is closed even
                # if template rendering raises.
                with open(path_new, 'w') as fp_new:
                    fp_new.write(Template(filename=path_old).render(
                        app_name=app_name, app_name_camel=app_name_camel,
                        app_name_spaces=app_name_spaces))
            # Preserve the template file's permission bits.
            shutil.copymode(path_old, path_new)
|
inonit/wagtail
|
refs/heads/master
|
wagtail/wagtailcore/management/commands/move_pages.py
|
5
|
from django.core.management.base import BaseCommand
from wagtail.wagtailcore.models import Page
class Command(BaseCommand):
    """Move every direct child of one page underneath another page."""

    args = "<from id> <to id>"

    def handle(self, _from_id, _to_id, **options):
        # Command-line arguments arrive as strings; coerce to page pks.
        source_id = int(_from_id)
        target_id = int(_to_id)

        # Look up both endpoints of the move.
        source_page = Page.objects.get(pk=source_id)
        target_page = Page.objects.get(pk=target_id)
        children = source_page.get_children()

        self.stdout.write(
            'Moving ' + str(len(children)) + ' pages from "' +
            source_page.title + '" to "' + target_page.title + '"'
        )

        # Re-parent each child, appending it as the last child of the target.
        for child in children:
            child.move(target_page, pos='last-child')

        self.stdout.write('Done')
|
nzlosh/st2
|
refs/heads/master
|
st2auth/setup.py
|
3
|
# -*- coding: utf-8 -*-
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
from setuptools import setup, find_packages
from dist_utils import fetch_requirements
from dist_utils import apply_vagrant_workaround
from st2auth import __version__
# Component name; reused as package name, script prefix and test-suite root.
ST2_COMPONENT = "st2auth"
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
REQUIREMENTS_FILE = os.path.join(BASE_DIR, "requirements.txt")

# fetch_requirements returns (install_requires, dependency_links) parsed
# from the component's requirements.txt.
install_reqs, dep_links = fetch_requirements(REQUIREMENTS_FILE)
apply_vagrant_workaround()

setup(
    name=ST2_COMPONENT,
    version=__version__,
    description="{} StackStorm event-driven automation platform component".format(
        ST2_COMPONENT
    ),
    author="StackStorm",
    author_email="info@stackstorm.com",
    license="Apache License (2.0)",
    url="https://stackstorm.com/",
    install_requires=install_reqs,
    dependency_links=dep_links,
    test_suite=ST2_COMPONENT,
    zip_safe=False,
    include_package_data=True,
    packages=find_packages(exclude=["setuptools", "tests"]),
    scripts=["bin/st2auth"],
    # Pluggable single-sign-on backends are discovered via this entry point.
    entry_points={
        "st2auth.sso.backends": ["noop = st2auth.sso.noop:NoOpSingleSignOnBackend"]
    },
)
|
surhudm/hscTools
|
refs/heads/master
|
bick/bin/showVisit.py
|
2
|
#!/usr/bin/env python
# Original filename: showVisit.py
#
# Author: Steve Bickerton
# Email:
# Date: Thu 2013-12-12 15:01:41
#
# Summary:
#
import sys
import os
import re
import math
import argparse
import datetime
import lsst.daf.persistence as dafPersist
import hsc.pipe.base.butler as hscButler
import numpy
import matplotlib.figure as figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigCanvas
import matplotlib.font_manager as fm
import hsc.tools.bick.utils as hscUtil
import lsst.afw.cameraGeom as camGeom
import lsst.afw.cameraGeom.utils as camGeomUtils
# Maps the --scale option to a callable applied to the image array before
# display: 'linear' is the identity, 'histeq' is histogram equalization.
scaleLookup = {
    'linear' : lambda x : x,
    'histeq' : hscUtil.histeq,
}
def compute(im1, im2, op):
    """Combine two images with the arithmetic operation named by `op`.

    Args:
        im1, im2: array-like (or scalar) operands.
        op: one-letter code -- 'p' (plus), 'm' (minus), 'd' (divide),
            't' (times).

    Returns:
        The combined image.

    Raises:
        ValueError: for an unknown op code. (The original fell through and
        raised UnboundLocalError on `ret` instead.)
    """
    if op == 'p':
        return im1 + im2
    if op == 'm':
        return im1 - im2
    if op == 'd':
        return im1 / im2
    if op == 't':
        return im1 * im2
    raise ValueError("unknown operation code: %r" % (op,))
# Data types that are calibration frames; they are not visit-indexed and
# are queried differently from regular exposures below.
calibTypes = "dark", "bias", "flat"

#############################################################
#
# Main body of code
#
#############################################################
def main(rerun, visit1, visit2, op, ccdno,
         datatype='calexp', scale=None, root=None, invert=False, cmap='gray',
         vmax=None, annotate=None, bins=16, rerun2=None, showCbar=False, vsig=None,
         showAnnotate=False, percent=None, hilite=None):
    """Render a focal-plane mosaic of one visit (optionally combined with a
    second visit via +,-,/,*) and save it as fpa-<key>.png.

    visit2/op select the optional arithmetic combination; vmax selects the
    normalization source ('cNNN' = stats from CCD NNN, 'lo:hi' = explicit
    range, otherwise a plain max value).  Python-2-era code (print
    statements, 'except X, e').
    """
    vsigDefault = 5.0

    visit1 = int(visit1)
    if visit2:
        visit2 = int(visit2)
    ccdno = set([int(x) for x in hscUtil.idSplit(ccdno)])

    butler1 = hscUtil.getButler(rerun, root=root)
    if rerun2:
        butler2 = hscUtil.getButler(rerun2, root=root)
    else:
        butler2 = butler1

    # Calibration frames are not visit-indexed; fabricate dataIds for all
    # 104 CCDs instead of querying the butler.
    if datatype not in calibTypes:
        dataIds1 = butler1.queryMetadata(datatype, "ccd", format=["visit", "ccd"], dataId={'visit':visit1})
        dataIds1 = [{'visit':x[0], 'ccd':x[1]} for x in dataIds1]
    else:
        dataIds1 = [{'visit': visit1, 'ccd':x} for x in range(104)]

    dataRef1 = None
    dataRef2 = None

    if visit2:
        dataIds2 = butler2.queryMetadata(datatype, "ccd", format=["visit", "ccd"], dataId={'visit':visit2})
        dataIds2 = [{'visit':x[0], 'ccd':x[1]} for x in dataIds2]

    # flip the color map
    # NOTE(review): this looks buggy -- re.match('_r$', cmap) only matches
    # the literal string '_r', and the else branch replaces cmap with the
    # literal '_r' instead of appending the '_r' suffix; confirm intent.
    if invert:
        cmap = re.sub("_r$", "", cmap) if re.match('_r$', cmap) else '_r'

    # sleezy, but if vmax isn't set and we're linear, just use ccd in the middle to norm
    if vmax is None and scale != 'histeq':
        vmax = 'c049'

    # handle the gray scale normalization
    vmin = 0.0
    mdRef1 = None
    mdRef2 = None
    if vmax:
        if scale == 'histeq':
            raise ValueError("Cannot specify vmax with histeq scaling.")

        # if it identifies a CCD to use
        if re.match("^c", vmax):
            vmaxCcd = int(re.sub("c", "", vmax))
            try:
                dataRef1 = hscButler.getDataRef(butler1, {'visit':visit1, 'ccd':vmaxCcd})
                imgRef1 = dataRef1.get(datatype).getMaskedImage().getImage().getArray()
                if datatype not in calibTypes:
                    mdRef1 = dataRef1.get(datatype+'_md', immediate=True)
                if visit2:
                    # Normalize on the combined image of the reference CCD.
                    dataRef2 = hscButler.getDataRef(butler2, {'visit':visit2, 'ccd':vmaxCcd})
                    imgRef2 = dataRef2.get(datatype).getMaskedImage().getImage().getArray()
                    mdRef2 = dataRef2.get(datatype+'_md', immediate=True)
                    img_op = hscUtil.rebin(compute(imgRef1, imgRef2, op), bins)
                    med = numpy.median(img_op)
                    std = numpy.std(img_op)
                    if not vsig:
                        vsig = vsigDefault
                    delta = vsig*std
                    if percent:
                        delta = percent*med
                    vmin = med - abs(delta)
                    vmax = med + abs(delta)
                else:
                    if showAnnotate:
                        # Color range centered on the annotated header value.
                        exp1 = dataRef1.get(datatype)
                        aval = float(exp1.getMetadata().get(annotate))
                        med = aval
                        delta = vsig
                        if not vsig:
                            delta = 0.5*aval
                        if percent:
                            delta = percent*aval
                        vmin = med - abs(delta)
                        vmax = med + abs(delta)
                    else:
                        # Poisson-ish default: sigma = sqrt(median counts).
                        img_op = imgRef1
                        med = numpy.median(img_op)
                        sig = numpy.sqrt(med)
                        delta = vsigDefault*sig
                        if vsig:
                            delta = vsig*sig
                        if percent:
                            delta = percent*med
                        vmin = med - abs(delta)
                        vmax = med + abs(delta)
                        if not vsig and not percent:
                            vmin = 0.5*med
                            vmax = med + 5.0*sig
            except Exception, e:
                raise RuntimeError("Could not get stats on vmax CCD" + str(vmax)+ " Exiting." + str(e))
        # explicit 'lo:hi' range given on the command line
        elif re.search(":", vmax):
            vmaxCcd = None
            vmin, vmax = [float(x) for x in vmax.split(":")]
            med = 0.5*(vmax + vmin)
        else:
            vmaxCcd = None
            vmax = float(vmax)
            med = 0.5*(vmax + vmin)

    ###########################
    # Build the focal-plane figure (Agg backend; no display needed).
    fig = figure.Figure((8,8))
    canvas = FigCanvas(fig)
    l, b, w, h = 0.08, 0.12, 0.84, 0.78
    rect = l, b, w, h
    if showCbar:
        # leave room on the right for the color bar
        rect = (0.06, 0.12, 0.76, 0.76)
    fpa_fig = hscUtil.FpaFigure(fig, butler1.get('camera'), rect=rect)

    im_ax = None
    i_show = 0
    for dataId1 in dataIds1:
        # Only render the CCDs the user asked for.
        if dataId1['ccd'] not in ccdno:
            continue
        print dataId1,
        if visit2:
            dataId2 = {'visit':visit2, 'ccd':dataId1['ccd']}
            print dataId2
        else:
            print ""

        try:
            dataRef1 = hscButler.getDataRef(butler1, dataId1)
            exp1 = dataRef1.get(datatype)
            img1 = None
            if not showAnnotate:
                img1 = exp1.getMaskedImage().getImage().getArray()
                img1 = img1
            if visit2:
                dataRef2 = hscButler.getDataRef(butler2, dataId2)
                exp2 = dataRef2.get(datatype)
                img2 = exp2.getMaskedImage().getImage().getArray()
        except Exception, e:
            #raise
            # Best-effort: skip CCDs that fail to load.
            print "getDataRef() failed for ", visit1, visit2, ccdno, str(e)
            continue

        ax = fpa_fig.getAxes(dataId1['ccd'])

        # labels
        labels = [ str(dataId1['ccd']) ]
        aValues = None
        if annotate:
            for a in annotate.split('|'):
                if a == 'NQUARTER':
                    # NOTE(review): `exp` is undefined in this scope --
                    # presumably exp1 was intended; this branch would raise
                    # NameError if taken.
                    labels.append(str(exp.getDetector().getOrientation().getNQuarter()))
                if a in exp1.getMetadata().paramNames():
                    aval = exp1.getMetadata().get(annotate)
                    labels.append(aval)
                    if not aValues:
                        aValues = aval
        fpa_fig.addLabel(dataId1['ccd'], labels)
        if hilite is not None:
            fpa_fig.highlightAmp(dataId1['ccd'], hilite)

        imshow_kwargs = {}
        if scale == 'linear':
            imshow_kwargs = {'vmin': vmin, 'vmax': vmax}

        if showAnnotate:
            # Paint the whole CCD a constant color from the header value;
            # shape follows the detector's rotation.
            ny, nx = 4, 2
            if exp1.getDetector().getOrientation().getNQuarter() % 2:
                ny, nx = 2, 4
            imtmp = numpy.ones((ny, nx))*float(aval)
        else:
            if visit2:
                img_op = compute(img1, img2, op)
            else:
                img_op = img1
            # scale as requested
            img_op = scaleLookup[scale](img_op)
            # rebin for speed; [::-1] flips vertically for display.
            imtmp = hscUtil.rebin(img_op, bins)[::-1]
        im_ax = ax.imshow(imtmp, cmap=cmap, **imshow_kwargs)

    # Color bar on the right-hand side, with a twin axis in units of the
    # median (relative scale).
    if showCbar and im_ax and scale != "histeq":
        ylo, yhi = vmin, vmax
        rect = (0.91, 0.2, 0.02, 0.6)
        cax = fig.add_axes(rect)
        cax.set_ylim([ylo, yhi])
        cax.get_xaxis().set_ticks([])
        cax.get_yaxis().get_major_formatter().set_useOffset(False)
        cbar = fig.colorbar(im_ax, cax=cax)
        caxp = cax.twinx()
        caxp.set_ylim([ylo/med, yhi/med])
        caxp.get_xaxis().set_ticks([])
        caxp.get_yaxis().get_major_formatter().set_useOffset(False)
        for t in cax.get_yticklabels():
            t.set_size('small')
        for t in caxp.get_yticklabels():
            t.set_size("small")

    i = 0
    # get some header info to print labels
    hdrcards = ["OBJECT", "FILTER", "EXPTIME", "DATE-OBS"]
    #ax = fig.add_axes((l, 0.05, w, 0.08))
    # kill the ticks
    #for tick in ax.get_xticklines() + ax.get_yticklines() + ax.get_yticklabels() + ax.get_xticklabels():
    #    tick.set_visible(False)
    ops = {'p': 'plus', "m": 'minus', 'd': 'div', "t": 'times'}
    for card in hdrcards:
        # Lazily fetch metadata from the last successfully loaded dataRef.
        if not mdRef1:
            if dataRef1 and datatype not in calibTypes:
                mdRef1 = dataRef1.get(datatype+'_md', immediate=True)
            if visit2 and not mdRef2 and datatype not in calibTypes:
                mdRef2 = dataRef2.get(datatype+'_md', immediate=True)
        if mdRef1 and card in mdRef1.paramNames():
            # NOTE(review): `ax` here is whatever axes the CCD loop left
            # behind; text is placed in figure coordinates so it renders
            # correctly regardless.
            ax.text(0.25, 0.02+0.015*i, card+": "+str(mdRef1.get(card)), fontsize=8,
                    horizontalalignment='left', verticalalignment='center', transform=fig.transFigure)
            if visit2:
                ax.text(0.5, 0.04, ops[op], fontsize=8,
                        horizontalalignment='center', verticalalignment='center', transform=fig.transFigure)
                ax.text(0.6, 0.02+0.015*i, card+": "+str(mdRef2.get(card)), fontsize=8,
                        horizontalalignment='left', verticalalignment='center', transform=fig.transFigure)
        i += 1

    # Build the output-file key from rerun(s), visit(s), op and scale.
    rerunStr = re.sub("/", "_", rerun)
    date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    if visit2:
        if rerun2:
            rerun2Str = re.sub("/", "_", rerun2)
            key = "%s_%s-%d%s%d-%s" % (rerunStr, rerun2Str, visit1, op, visit2, scale[0].upper())
        else:
            key = "%s-%d%s%d-%s" % (rerunStr, visit1, op, visit2, scale[0].upper())
    else:
        key = "%s-%d-%s" % (rerunStr, visit1, scale[0].upper())
    if showAnnotate:
        key += "-%s" % (annotate.split("|")[0])
    fig.suptitle(key + " (" + datatype + ") "+ date)
    fig.savefig("fpa-%s.png" % (key))
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--version", action='version', version="0.0")
    parser.add_argument("rerun", type=str, help="")
    parser.add_argument("visit", type=str, help="")
    # At most one arithmetic combination with a second visit may be given.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--plus", default=None)
    group.add_argument("--minus", default=None)
    group.add_argument("--div", default=None)
    group.add_argument("--times", default=None)
    parser.add_argument("ccd", type=str, help="")
    parser.add_argument("-a", "--annotate", default=None, help="Header values to display. Options may include: T_CCDTV, T_GAIN1, GAINEFF, SKYLEVEL, SKYSIGMA, HIERARCH FLATNESS_PP, DET-TMED, HIERARCH NOBJ_BRIGHT,HIERARCH runEndCpuTime, HIERARCH fwhmRobust")
    parser.add_argument("-A", "--showAnnotate", default=False, action='store_true', help="Use the value of the first annotate term to color the figure. Use '-V' to control the cmap range above/below the ")
    parser.add_argument("-b", '--bins', type=int, default=16, help="Binning to use.")
    parser.add_argument("-C", "--nocbar", default=False, action='store_true', help="Do *NOT* show color bar")
    parser.add_argument("-c", "--cmap", type=str, default='copper',
                        choices=("gray", 'jet', 'copper', "gist_rainbow", "Spectral", "Accent", "Blues",
                                 "BrBG", "BuPu", "Dark2", "GnBu", "Greens", "Greys", "OrRd", "Oranges",
                                 "PRGn", "Paired", "Pastel1", "Pastel2", "PiYG", "PuBu", "PuBuGn", "PuOr",
                                 "PuRd", "Purples", "RdBu", "RdGy", "RdPu", "RdYlBu", "RdYlGn", "Reds",
                                 "Set1", "Set2", "Set3", "YlGn", "YlGnBu", "YlOrBr", "YlOrRd", "autumn",
                                 "binary", "bone", "cool", "flag", "gist_earth", "gist_gray", "gist_heat",
                                 "gist_ncar", "gist_rainbow", "gist_stern", "gist_yarg", "gray", "hot",
                                 "hsv", "pink", "prism", "spectral", "sprint", "summer", "winter"),
                        help="Specify a color map")
    parser.add_argument("-d", "--datatype", default="calexp", help="Type of image to try to load",
                        choices=("raw", "calexp", "postISRCCD", 'dark', 'bias', 'flat'))
    parser.add_argument("-H", '--hilite', type=int, default=None, help="Hilight amp by number.")
    parser.add_argument("-p", "--percent", default=None, type=float,
                        help="Override vsig with a range in percent")
    parser.add_argument("-R", "--rerun2", default=None, help="Rerun for visit2 if different from visit1")
    parser.add_argument("-s", "--scale", type=str, default='linear', help="Gray scaling.",
                        choices=("histeq", "linear"))
    parser.add_argument("-i", "--invert", default=False, action='store_true', help="invert the gray scale")
    parser.add_argument("-r", "--root", type=str, default=None, help="")
    parser.add_argument("-V", "--vsig", type=float, default=None, help="Sigma for color map")
    parser.add_argument("-x", "--vmax", type=str, default=None, help="Scaling max value.")
    args = parser.parse_args()

    # Translate the mutually-exclusive math options into (visit2, op code);
    # op codes match compute()'s 'p'/'m'/'d'/'t'.
    visit2 = None
    op = None
    if args.plus:
        visit2 = args.plus
        op = 'p'
    if args.minus:
        visit2 = args.minus
        op = 'm'
    if args.div:
        visit2 = args.div
        op = 'd'
    if args.times:
        visit2 = args.times
        op = 't'

    if visit2 and args.showAnnotate:
        # Python 2 print statement.
        print "Cannot use showAnnotate with math operation ... yet."
        sys.exit(1)

    main(args.rerun, args.visit, visit2, op, args.ccd, scale=args.scale, root=args.root,
         datatype=args.datatype, invert=args.invert, cmap=args.cmap, vmax=args.vmax,
         annotate=args.annotate, bins=args.bins, rerun2=args.rerun2,
         showCbar=(not args.nocbar), vsig=args.vsig,
         showAnnotate=args.showAnnotate, percent=args.percent, hilite=args.hilite)
|
metabrainz/musicbrainz-logs
|
refs/heads/master
|
musicbrainz-logs/ml_server.py
|
1
|
#!/usr/bin/env python
# Entry point for the MusicBrainz logs web app, pointed at a local Solr.
from mblogs import app

# Solr connection settings read by the mblogs app.
app.SOLR_SERVER = "localhost"
app.SOLR_PORT = 8983

if __name__ == '__main__':
    # NOTE(review): debug=True with host 0.0.0.0 exposes the interactive
    # debugger to the whole network -- development use only.
    app.run(host="0.0.0.0", port=5000, debug=True)
|
rch/flask-extdirect-example
|
refs/heads/master
|
example.py
|
1
|
from flask import Flask, redirect, url_for, session, request
# NOTE(review): flask.ext.* is the long-deprecated extension namespace;
# modern Flask would import flask_sencha directly.
from flask.ext.sencha import direct
from example_utils import RedisSessionInterface

app = Flask(__name__)
app.config['SESSION_COOKIE_NAME'] = 'example'
# Store sessions server-side in Redis instead of in signed cookies.
app.session_interface = RedisSessionInterface()
# Mount the Ext Direct API under /direct.
app.register_blueprint(direct.blueprint, url_prefix='/direct')


@app.route("/")
def index():
    # Serve the static single-page app.
    return redirect(url_for('static', filename='index.html'))


@app.route("/clear")
def clear():
    # Drop the server-side session, then reload the app.
    session.clear()
    return redirect(url_for('static', filename='index.html'))


@app.route("/chk")
def check():
    # Debug endpoint dumping request/app state (Python 2 print statements).
    print request.headers
    print request.cookies
    print app.config
    return 'OK'


# NOTE(review): hard-coded secret key checked into source -- acceptable for
# an example, never for production.
app.secret_key = '\xf4\xf70Q\x90\xe7k\xbe=\x1b\x8f\xee\xb8zFh\x0c\xdd\x18x\x12;N{'

if __name__ == "__main__":
    app.run(debug=True)
|
joelddiaz/openshift-tools
|
refs/heads/prod
|
openshift/installer/vendored/openshift-ansible-3.4.40/roles/lib_openshift/src/lib/project.py
|
82
|
# pylint: skip-file
# flake8: noqa
# pylint: disable=too-many-instance-attributes
class ProjectConfig(OpenShiftCLIConfig):
    ''' project config object '''

    def __init__(self, rname, namespace, kubeconfig, project_options):
        # NOTE(review): `namespace` is accepted but not forwarded -- the
        # parent is called with None in its place; confirm this is
        # intentional (projects are cluster-scoped).
        super(ProjectConfig, self).__init__(rname, None, kubeconfig, project_options)
class Project(Yedit):
    ''' Class to wrap the oc command line tools '''
    # Dotted yedit path to the annotations mapping.
    annotations_path = "metadata.annotations"
    kind = 'Project'
    # All annotations managed by this class live under this prefix.
    annotation_prefix = 'openshift.io/'

    def __init__(self, content):
        '''Project constructor'''
        super(Project, self).__init__(content=content)

    def get_annotations(self):
        ''' return the annotations'''
        return self.get(Project.annotations_path) or {}

    def add_annotations(self, inc_annos):
        ''' add an annotation to the other annotations'''
        if not isinstance(inc_annos, list):
            inc_annos = [inc_annos]

        annos = self.get_annotations()
        if not annos:
            self.put(Project.annotations_path, inc_annos)
        else:
            # Merge each incoming {key: value} dict into the existing map.
            for anno in inc_annos:
                for key, value in anno.items():
                    annos[key] = value

        return True

    def find_annotation(self, key):
        ''' find an annotation by unprefixed key; returns its value or None'''
        annotations = self.get_annotations()
        for anno in annotations:
            if Project.annotation_prefix + key == anno:
                return annotations[anno]

        return None

    def delete_annotation(self, inc_anno_keys):
        ''' remove an annotation from a project'''
        if not isinstance(inc_anno_keys, list):
            inc_anno_keys = [inc_anno_keys]

        annos = self.get(Project.annotations_path) or {}

        if not annos:
            return True

        removed = False
        for inc_anno in inc_anno_keys:
            anno = self.find_annotation(inc_anno)
            # BUG FIX: delete by the annotation *key* (prefix + inc_anno);
            # the original deleted prefix + the found *value*, which raised
            # KeyError or removed the wrong entry. Also use `is not None`
            # so falsy values ('' / False / 0) are still deletable.
            if anno is not None:
                del annos[Project.annotation_prefix + inc_anno]
                removed = True

        return removed

    def update_annotation(self, key, value):
        ''' update (or create) an annotation on a project'''
        annos = self.get(Project.annotations_path) or {}

        if not annos:
            return True

        updated = False
        anno = self.find_annotation(key)
        # BUG FIX: `is not None` so an existing falsy value is updated in
        # place instead of duplicated via add_annotations.
        if anno is not None:
            annos[Project.annotation_prefix + key] = value
            updated = True
        else:
            self.add_annotations({Project.annotation_prefix + key: value})

        return updated
|
petermalcolm/osf.io
|
refs/heads/develop
|
website/addons/googledrive/settings/defaults.py
|
45
|
# Drive credentials -- placeholders; override in local settings.
# BUG FIX: CLIENT_ID was misspelled 'chaneme'; it should match the
# 'changeme' placeholder convention used by CLIENT_SECRET.
CLIENT_ID = 'changeme'
CLIENT_SECRET = 'changeme'

# How long (seconds) before a cached OAuth token is refreshed.
REFRESH_TIME = 5 * 60  # 5 minutes

# Check https://developers.google.com/drive/scopes for all available scopes
OAUTH_SCOPE = [
    'https://www.googleapis.com/auth/userinfo.profile',
    'https://www.googleapis.com/auth/drive',
]

OAUTH_BASE_URL = 'https://accounts.google.com/o/oauth2/'
API_BASE_URL = 'https://www.googleapis.com/'
|
otfried/cs101
|
refs/heads/master
|
code/robots/maze1.py
|
1
|
# This program lets the robot go around his world counterclockwise,
# stopping when he comes back to his starting point.
from cs1robots import *

# Build the world and the robot: 10 beepers to spend, blue movement trace.
load_world("../worlds/maze1.wld")
hubo = Robot(beepers = 10)
hubo.set_trace("blue")
def turn_right():
    """Turn the robot 90 degrees clockwise (three successive left turns)."""
    hubo.turn_left()
    hubo.turn_left()
    hubo.turn_left()
def mark_starting_point_and_move():
    """Drop a beeper to mark the start, face an open direction, and step.

    Rotates left until the cell ahead is clear, so the robot always leaves
    the starting square through a wall-free edge.
    """
    hubo.drop_beeper()
    while not hubo.front_is_clear():
        hubo.turn_left()
    hubo.move()
def follow_right_wall():
    """Advance one step while keeping the wall on the robot's right side."""
    if hubo.right_is_clear():
        # Opening on the right: keep to the right by turning into it.
        turn_right()
        hubo.move()
    elif hubo.front_is_clear():
        # Wall on the right, clear ahead: move following the right wall.
        hubo.move()
    else:
        # Dead end / corner: rotate left and retry on the next call.
        hubo.turn_left()
# end of definitions, begin solution
mark_starting_point_and_move()
# Bound method reference; each finished() call re-checks the current cell
# for the beeper dropped at the starting point.
finished = hubo.on_beeper
# NOTE(review): Python 2 print statement; looks like a leftover debug line —
# confirm it can be removed.
print type(finished)
while not finished():
    follow_right_wall()
|
eugena/django
|
refs/heads/master
|
tests/template_tests/syntax_tests/test_cycle.py
|
199
|
from django.template import TemplateSyntaxError
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango110Warning
from ..utils import setup
class CycleTagTests(SimpleTestCase):
    """Syntax tests for the ``{% cycle %}`` template tag.

    Covers the legacy comma-separated syntax (deprecated, removed in
    Django 1.10), the modern space-separated syntax, named cycles
    (``as`` clause), the ``silent`` flag, autoescaping behaviour, and
    error cases.
    """

    # Makes {% load cycle from future %} resolvable in the legacy tests.
    libraries = {'future': 'django.templatetags.future'}

    # A single un-named argument is a syntax error.
    @setup({'cycle01': '{% cycle a %}'})
    def test_cycle01(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle01')

    # --- legacy comma syntax ------------------------------------------
    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle02': '{% cycle a,b,c as abc %}{% cycle abc %}'})
    def test_cycle02(self):
        output = self.engine.render_to_string('cycle02')
        self.assertEqual(output, 'ab')

    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle03': '{% cycle a,b,c as abc %}{% cycle abc %}{% cycle abc %}'})
    def test_cycle03(self):
        output = self.engine.render_to_string('cycle03')
        self.assertEqual(output, 'abc')

    # Cycling past the end wraps around to the first value.
    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle04': '{% cycle a,b,c as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}'})
    def test_cycle04(self):
        output = self.engine.render_to_string('cycle04')
        self.assertEqual(output, 'abca')

    @setup({'cycle05': '{% cycle %}'})
    def test_cycle05(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle05')

    @setup({'cycle06': '{% cycle a %}'})
    def test_cycle06(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle06')

    # Referencing an undefined named cycle is a syntax error.
    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle07': '{% cycle a,b,c as foo %}{% cycle bar %}'})
    def test_cycle07(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle07')

    # The name of a cycle is also a context variable holding its value.
    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle08': '{% cycle a,b,c as foo %}{% cycle foo %}{{ foo }}{{ foo }}{% cycle foo %}{{ foo }}'})
    def test_cycle08(self):
        output = self.engine.render_to_string('cycle08')
        self.assertEqual(output, 'abbbcc')

    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle09': '{% for i in test %}{% cycle a,b %}{{ i }},{% endfor %}'})
    def test_cycle09(self):
        output = self.engine.render_to_string('cycle09', {'test': list(range(5))})
        self.assertEqual(output, 'a0,b1,a2,b3,a4,')

    # --- modern space-separated syntax --------------------------------
    @setup({'cycle10': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}"})
    def test_cycle10(self):
        output = self.engine.render_to_string('cycle10')
        self.assertEqual(output, 'ab')

    @setup({'cycle11': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}"})
    def test_cycle11(self):
        output = self.engine.render_to_string('cycle11')
        self.assertEqual(output, 'abc')

    @setup({'cycle12': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}"})
    def test_cycle12(self):
        output = self.engine.render_to_string('cycle12')
        self.assertEqual(output, 'abca')

    @setup({'cycle13': "{% for i in test %}{% cycle 'a' 'b' %}{{ i }},{% endfor %}"})
    def test_cycle13(self):
        output = self.engine.render_to_string('cycle13', {'test': list(range(5))})
        self.assertEqual(output, 'a0,b1,a2,b3,a4,')

    # Cycle arguments may be context variables, with or without filters.
    @setup({'cycle14': '{% cycle one two as foo %}{% cycle foo %}'})
    def test_cycle14(self):
        output = self.engine.render_to_string('cycle14', {'one': '1', 'two': '2'})
        self.assertEqual(output, '12')

    @setup({'cycle15': '{% for i in test %}{% cycle aye bee %}{{ i }},{% endfor %}'})
    def test_cycle15(self):
        output = self.engine.render_to_string('cycle15', {'test': list(range(5)), 'aye': 'a', 'bee': 'b'})
        self.assertEqual(output, 'a0,b1,a2,b3,a4,')

    @setup({'cycle16': '{% cycle one|lower two as foo %}{% cycle foo %}'})
    def test_cycle16(self):
        output = self.engine.render_to_string('cycle16', {'one': 'A', 'two': '2'})
        self.assertEqual(output, 'a2')

    # The "silent" flag suppresses all output from the cycle tag itself.
    @setup({'cycle17': "{% cycle 'a' 'b' 'c' as abc silent %}"
                       "{% cycle abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}"})
    def test_cycle17(self):
        output = self.engine.render_to_string('cycle17')
        self.assertEqual(output, '')

    @setup({'cycle18': "{% cycle 'a' 'b' 'c' as foo invalid_flag %}"})
    def test_cycle18(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('cycle18')

    # "silent" used as a cycle NAME (not a flag) behaves normally.
    @setup({'cycle19': "{% cycle 'a' 'b' as silent %}{% cycle silent %}"})
    def test_cycle19(self):
        output = self.engine.render_to_string('cycle19')
        self.assertEqual(output, 'ab')

    # --- autoescaping -------------------------------------------------
    @setup({'cycle20': '{% cycle one two as foo %} & {% cycle foo %}'})
    def test_cycle20(self):
        output = self.engine.render_to_string('cycle20', {'two': 'C & D', 'one': 'A & B'})
        self.assertEqual(output, 'A & B & C & D')

    @setup({'cycle21': '{% filter force_escape %}'
                       '{% cycle one two as foo %} & {% cycle foo %}{% endfilter %}'})
    def test_cycle21(self):
        output = self.engine.render_to_string('cycle21', {'two': 'C & D', 'one': 'A & B'})
        self.assertEqual(output, 'A &amp; B & C &amp; D')

    @setup({'cycle22': "{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{{ x }}{% endfor %}"})
    def test_cycle22(self):
        output = self.engine.render_to_string('cycle22', {'values': [1, 2, 3, 4]})
        self.assertEqual(output, '1234')

    # A silent cycle still advances; its value is readable via {{ abc }}.
    @setup({'cycle23': "{% for x in values %}"
                       "{% cycle 'a' 'b' 'c' as abc silent %}{{ abc }}{{ x }}{% endfor %}"})
    def test_cycle23(self):
        output = self.engine.render_to_string('cycle23', {'values': [1, 2, 3, 4]})
        self.assertEqual(output, 'a1b2c3a4')

    # The named-cycle variable is visible inside {% include %}.
    @setup({
        'cycle24': "{% for x in values %}"
                   "{% cycle 'a' 'b' 'c' as abc silent %}{% include 'included-cycle' %}{% endfor %}",
        'included-cycle': '{{ abc }}',
    })
    def test_cycle24(self):
        output = self.engine.render_to_string('cycle24', {'values': [1, 2, 3, 4]})
        self.assertEqual(output, 'abca')

    @setup({'cycle25': '{% cycle a as abc %}'})
    def test_cycle25(self):
        output = self.engine.render_to_string('cycle25', {'a': '<'})
        self.assertEqual(output, '<')

    # With {% load cycle from future %} output is NOT autoescaped the
    # legacy way — values pass through unescaped.
    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle26': '{% load cycle from future %}{% cycle a b as ab %}{% cycle ab %}'})
    def test_cycle26(self):
        output = self.engine.render_to_string('cycle26', {'a': '<', 'b': '>'})
        self.assertEqual(output, '<>')

    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle27': '{% load cycle from future %}'
                       '{% autoescape off %}{% cycle a b as ab %}{% cycle ab %}{% endautoescape %}'})
    def test_cycle27(self):
        output = self.engine.render_to_string('cycle27', {'a': '<', 'b': '>'})
        self.assertEqual(output, '<>')

    @ignore_warnings(category=RemovedInDjango110Warning)
    @setup({'cycle28': '{% load cycle from future %}{% cycle a|safe b as ab %}{% cycle ab %}'})
    def test_cycle28(self):
        output = self.engine.render_to_string('cycle28', {'a': '<', 'b': '>'})
        self.assertEqual(output, '<>')
|
matthaywardwebdesign/rethinkdb
|
refs/heads/next
|
external/v8_3.30.33.16/build/gyp/pylib/gyp/generator/gypsh.py
|
2779
|
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.

# Generator variables that are echoed back verbatim as '<(VAR)' so that
# expansion leaves them untouched for later inspection in the shell.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# gypsh defines no generator defaults of its own; every identity variable
# maps back to itself in '<(...)' form.
generator_default_variables = {
}
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python shell with the gyp data pre-bound.

  The shell's namespace contains `target_list`, `target_dicts`, and `data`
  so the processed .gyp structures can be explored directly. `params` is
  accepted for generator-interface compatibility but unused.
  """
  # Renamed from `locals` to avoid shadowing the builtin of the same name.
  interact_locals = {
        'target_list': target_list,
        'target_dicts': target_dicts,
        'data': data,
      }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
         (sys.version, sys.platform, repr(sorted(interact_locals.keys())))

  code.interact(banner, local=interact_locals)
|
barthisrael/OmniDB
|
refs/heads/master
|
OmniDB/OmniDB_app/include/paramiko/buffered_pipe.py
|
2
|
# Copyright (C) 2006-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Attempt to generalize the "feeder" part of a `.Channel`: an object which can be
read from and closed, but is reading from a buffer fed by another thread. The
read operations are blocking and can have a timeout set.
"""
import array
import threading
import time
from paramiko.py3compat import PY2, b
class PipeTimeout(IOError):
    """Raised when a blocking read on a `.BufferedPipe` exceeds its timeout."""
class BufferedPipe(object):
    """
    A buffer that obeys normal read (with timeout) & close semantics for a
    file or socket, but is fed data from another thread.  This is used by
    `.Channel`.
    """

    def __init__(self):
        # The Condition shares _lock, so feed/read/close all synchronize on
        # the same underlying lock.
        self._lock = threading.Lock()
        self._cv = threading.Condition(self._lock)
        # Optional threading.Event mirroring "data ready or closed".
        self._event = None
        # Unsigned-byte array acting as the FIFO byte buffer.
        self._buffer = array.array("B")
        self._closed = False

    # array.array's bytes conversion API differs between Python 2 and 3;
    # pick the right pair of helpers at class-definition time.
    if PY2:
        def _buffer_frombytes(self, data):
            self._buffer.fromstring(data)

        def _buffer_tobytes(self, limit=None):
            return self._buffer[:limit].tostring()
    else:
        def _buffer_frombytes(self, data):
            self._buffer.frombytes(data)

        def _buffer_tobytes(self, limit=None):
            return self._buffer[:limit].tobytes()

    def set_event(self, event):
        """
        Set an event on this buffer.  When data is ready to be read (or the
        buffer has been closed), the event will be set.  When no data is
        ready, the event will be cleared.

        :param threading.Event event: the event to set/clear
        """
        self._lock.acquire()
        try:
            self._event = event
            # Make sure the event starts in `set` state if we appear to already
            # be closed; otherwise, if we start in `clear` state & are closed,
            # nothing will ever call `.feed` and the event (& OS pipe, if we're
            # wrapping one - see `Channel.fileno`) will permanently stay in
            # `clear`, causing deadlock if e.g. `select`ed upon.
            if self._closed or len(self._buffer) > 0:
                event.set()
            else:
                event.clear()
        finally:
            self._lock.release()

    def feed(self, data):
        """
        Feed new data into this pipe.  This method is assumed to be called
        from a separate thread, so synchronization is done.

        :param data: the data to add, as a ``str`` or ``bytes``
        """
        self._lock.acquire()
        try:
            if self._event is not None:
                self._event.set()
            self._buffer_frombytes(b(data))
            # Wake every reader blocked in read(); they re-check the buffer
            # under the lock, so spurious wakeups are harmless.
            self._cv.notifyAll()
        finally:
            self._lock.release()

    def read_ready(self):
        """
        Returns true if data is buffered and ready to be read from this
        feeder.  A ``False`` result does not mean that the feeder has closed;
        it means you may need to wait before more data arrives.

        :return:
            ``True`` if a `read` call would immediately return at least one
            byte; ``False`` otherwise.
        """
        self._lock.acquire()
        try:
            if len(self._buffer) == 0:
                return False
            return True
        finally:
            self._lock.release()

    def read(self, nbytes, timeout=None):
        """
        Read data from the pipe.  The return value is a string representing
        the data received.  The maximum amount of data to be received at once
        is specified by ``nbytes``.  If a string of length zero is returned,
        the pipe has been closed.

        The optional ``timeout`` argument can be a nonnegative float expressing
        seconds, or ``None`` for no timeout.  If a float is given, a
        `.PipeTimeout` will be raised if the timeout period value has elapsed
        before any data arrives.

        :param int nbytes: maximum number of bytes to read
        :param float timeout:
            maximum seconds to wait (or ``None``, the default, to wait forever)
        :return: the read data, as a ``str`` or ``bytes``

        :raises:
            `.PipeTimeout` -- if a timeout was specified and no data was ready
            before that timeout
        """
        out = bytes()
        self._lock.acquire()
        try:
            if len(self._buffer) == 0:
                if self._closed:
                    return out
                # should we block?
                if timeout == 0.0:
                    raise PipeTimeout()
                # loop here in case we get woken up but a different thread has
                # grabbed everything in the buffer.
                while (len(self._buffer) == 0) and not self._closed:
                    then = time.time()
                    self._cv.wait(timeout)
                    if timeout is not None:
                        # Shrink the remaining timeout by the time we slept
                        # so repeated spurious wakeups cannot wait forever.
                        timeout -= time.time() - then
                        if timeout <= 0.0:
                            raise PipeTimeout()

            # something's in the buffer and we have the lock!
            if len(self._buffer) <= nbytes:
                # Taking everything: drain the buffer and clear the "data
                # ready" event (unless closed, which keeps it set).
                out = self._buffer_tobytes()
                del self._buffer[:]
                if (self._event is not None) and not self._closed:
                    self._event.clear()
            else:
                # Partial read: leave the remainder (and the event) intact.
                out = self._buffer_tobytes(nbytes)
                del self._buffer[:nbytes]
        finally:
            self._lock.release()

        return out

    def empty(self):
        """
        Clear out the buffer and return all data that was in it.

        :return:
            any data that was in the buffer prior to clearing it out, as a
            `str`
        """
        self._lock.acquire()
        try:
            out = self._buffer_tobytes()
            del self._buffer[:]
            if (self._event is not None) and not self._closed:
                self._event.clear()
            return out
        finally:
            self._lock.release()

    def close(self):
        """
        Close this pipe object.  Future calls to `read` after the buffer
        has been emptied will return immediately with an empty string.
        """
        self._lock.acquire()
        try:
            self._closed = True
            # Wake blocked readers and permanently set the event so that
            # select()-style waiters notice the close.
            self._cv.notifyAll()
            if self._event is not None:
                self._event.set()
        finally:
            self._lock.release()

    def __len__(self):
        """
        Return the number of bytes buffered.

        :return: number (`int`) of bytes buffered
        """
        self._lock.acquire()
        try:
            return len(self._buffer)
        finally:
            self._lock.release()
|
Gimpneek/exclusive-raid-gym-tracker
|
refs/heads/master
|
app/migrations/0014_auto_20171218_0008.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-12-18 00:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Recreates profile.tracked_gyms as an optional many-to-many to Gym
    # (drop + add, since the field type is changing).

    dependencies = [
        ('app', '0013_auto_20171218_0006'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='tracked_gyms',
        ),
        migrations.AddField(
            model_name='profile',
            name='tracked_gyms',
            # NOTE(review): null=True has no effect on ManyToManyField —
            # confirm it was intended (blank=True alone suffices for forms).
            field=models.ManyToManyField(blank=True, null=True, to='app.Gym'),
        ),
    ]
|
donSchoe/p2pool-cache
|
refs/heads/master
|
wstools/tests/test_wstools.py
|
308
|
#!/usr/bin/env python
############################################################################
# Joshua R. Boverhof, David W. Robertson, LBNL
# See LBNLCopyright for copyright notice!
###########################################################################
import unittest, tarfile, os, ConfigParser
import test_wsdl
# Config section and file that name the tar archives to unpack for tests.
SECTION='files'
CONFIG_FILE = 'config.txt'
def extractFiles(section, option):
    """Extract the tar archives listed under config [section] option.

    Reads CONFIG_FILE, evaluates the option's value as a Python list of
    archive paths, and extracts each archive unless its first member is
    already readable on disk (i.e. it was extracted previously).
    """
    config = ConfigParser.ConfigParser()
    config.read(CONFIG_FILE)
    archives = config.get(section, option)
    # SECURITY: eval() executes arbitrary code from the config file.  Kept
    # for compatibility with the existing config format, but the file must
    # be trusted; consider ast.literal_eval if the format allows it.
    archives = eval(archives)
    # Locals renamed from `file`/`i` so as not to shadow builtins.
    for archive in archives:
        tar = tarfile.open(archive)
        if not os.access(tar.membernames[0], os.R_OK):
            for member in tar.getnames():
                tar.extract(member)
def makeTestSuite():
    """Build the suite of WSDL service-file tests from test_wsdl."""
    suite = unittest.TestSuite()
    suite.addTest(test_wsdl.makeTestSuite("services_by_file"))
    return suite
def main():
    """Unpack the test archives, then run the WSDL test suite."""
    extractFiles(SECTION, 'archives')
    unittest.main(defaultTest="makeTestSuite")

if __name__ == "__main__" : main()
|
acenario/Payable
|
refs/heads/master
|
lib/python2.7/site-packages/django/utils/tree.py
|
88
|
"""
A class for storing a tree graph. Primarily used for filter constructs in the
ORM.
"""
import copy
class Node(object):
    """
    A single internal node in the tree graph. A Node should be viewed as a
    connection (the root) with the children being either leaf nodes or other
    Node instances.
    """
    # Standard connector type. Clients usually won't use this at all and
    # subclasses will usually override the value.
    default = 'DEFAULT'

    def __init__(self, children=None, connector=None, negated=False):
        """
        Constructs a new Node. If no connector is given, the default will be
        used.
        """
        # Copy the children list so the caller's list is never aliased.
        self.children = children[:] if children else []
        self.connector = connector or self.default
        self.negated = negated

    # We need this because of django.db.models.query_utils.Q. Q. __init__() is
    # problematic, but it is a natural Node subclass in all other respects.
    @classmethod
    def _new_instance(cls, children=None, connector=None, negated=False):
        """
        This is called to create a new instance of this class when we need new
        Nodes (or subclasses) in the internal code in this class. Normally, it
        just shadows __init__(). However, subclasses with an __init__ signature
        that is not an extension of Node.__init__ might need to implement this
        method to allow a Node to create a new instance of them (if they have
        any extra setting up to do).
        """
        # Build via Node.__init__, then retarget the class — this sidesteps
        # subclass __init__ signatures (see the Q note above).
        obj = Node(children, connector, negated)
        obj.__class__ = cls
        return obj

    def __str__(self):
        if self.negated:
            return '(NOT (%s: %s))' % (self.connector, ', '.join(str(c) for c
                    in self.children))
        return '(%s: %s)' % (self.connector, ', '.join(str(c) for c in
                self.children))

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self)

    def __deepcopy__(self, memodict):
        """
        Utility method used by copy.deepcopy().
        """
        obj = Node(connector=self.connector, negated=self.negated)
        obj.__class__ = self.__class__
        obj.children = copy.deepcopy(self.children, memodict)
        return obj

    def __len__(self):
        """
        The size of a node if the number of children it has.
        """
        return len(self.children)

    def __bool__(self):
        """
        For truth value testing.
        """
        return bool(self.children)

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def __contains__(self, other):
        """
        Returns True is 'other' is a direct child of this instance.
        """
        return other in self.children

    def _prepare_data(self, data):
        """
        A subclass hook for doing subclass specific transformations of the
        given data on combine() or add().
        """
        return data

    def add(self, data, conn_type, squash=True):
        """
        Combines this tree and the data represented by data using the
        connector conn_type. The combine is done by squashing the node other
        away if possible.

        This tree (self) will never be pushed to a child node of the
        combined tree, nor will the connector or negated properties change.

        The function returns a node which can be used in place of data
        regardless if the node other got squashed or not.

        If `squash` is False the data is prepared and added as a child to
        this tree without further logic.
        """
        if data in self.children:
            return data
        data = self._prepare_data(data)
        if not squash:
            self.children.append(data)
            return data
        if self.connector == conn_type:
            # We can reuse self.children to append or squash the node other.
            if (isinstance(data, Node) and not data.negated
                    and (data.connector == conn_type or len(data) == 1)):
                # We can squash the other node's children directly into this
                # node. We are just doing (AB)(CD) == (ABCD) here, with the
                # addition that if the length of the other node is 1 the
                # connector doesn't matter. However, for the len(self) == 1
                # case we don't want to do the squashing, as it would alter
                # self.connector.
                self.children.extend(data.children)
                return self
            else:
                # We could use perhaps additional logic here to see if some
                # children could be used for pushdown here.
                self.children.append(data)
                return data
        else:
            # Different connector: demote the current contents into a child
            # node, then pair it with `data` under the new connector.
            obj = self._new_instance(self.children, self.connector,
                                     self.negated)
            self.connector = conn_type
            self.children = [obj, data]
            return data

    def negate(self):
        """
        Negate the sense of the root connector.
        """
        self.negated = not self.negated
|
jeremiahyan/odoo
|
refs/heads/master
|
odoo/addons/test_lint/tests/test_pylint.py
|
1
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
try:
import pylint
except ImportError:
pylint = None
import subprocess
from distutils.version import LooseVersion
import os
from os.path import join
import sys
from odoo.tests.common import TransactionCase
from odoo import tools
from odoo.modules import get_modules, get_module_path
# Directory of this test module; prepended to PYTHONPATH below so the
# custom checker plugins that live next to it can be loaded by pylint.
HERE = os.path.dirname(os.path.realpath(__file__))
_logger = logging.getLogger(__name__)
class TestPyLint(TransactionCase):
    """Run pylint (with Odoo's custom checkers) over the server and addons."""

    # Message codes to enable; everything else is disabled below.
    ENABLED_CODES = [
        'used-before-assignment',
        'undefined-variable',
        'eval-used',
        'unreachable',
        'function-redefined',

        # custom checkers
        'sql-injection',
        'gettext-variable',
        'raise-unlink-override',
    ]

    # Builtins whose use should be flagged (bad-builtin checker).
    BAD_FUNCTIONS = [
        'input',
    ]

    # Modules whose import should be flagged as deprecated/disallowed.
    BAD_MODULES = [
        'csv',
        'urllib',
        'cgi',
    ] + list(tools.SUPPORTED_DEBUGGER)

    def _skip_test(self, reason):
        # Log before skipping so the reason shows up in the build output.
        _logger.warning(reason)
        self.skipTest(reason)

    def test_pylint(self):
        if pylint is None:
            self._skip_test('please install pylint')
        # NOTE(review): distutils (LooseVersion) is deprecated in recent
        # Pythons — confirm a migration plan to packaging.version.
        required_pylint_version = LooseVersion('1.6.4')
        if sys.version_info >= (3, 6):
            required_pylint_version = LooseVersion('1.7.0')
        if LooseVersion(getattr(pylint, '__version__', '0.0.1')) < required_pylint_version:
            self._skip_test('please upgrade pylint to >= %s' % required_pylint_version)

        # Lint the server root plus any addons living outside it.
        paths = [tools.config['root_path']]
        for module in get_modules():
            module_path = get_module_path(module)
            if not module_path.startswith(join(tools.config['root_path'], 'addons')):
                paths.append(module_path)

        options = [
            '--rcfile=%s' % os.devnull,
            '--disable=all',
            '--enable=%s' % ','.join(self.ENABLED_CODES),
            '--reports=n',
            "--msg-template='{msg} ({msg_id}) at {path}:{line}'",
            '--load-plugins=pylint.extensions.bad_builtin,_odoo_checker_sql_injection,_odoo_checker_gettext,_odoo_checker_unlink_override',
            '--bad-functions=%s' % ','.join(self.BAD_FUNCTIONS),
            '--deprecated-modules=%s' % ','.join(self.BAD_MODULES)
        ]

        # Expose this directory on PYTHONPATH so the _odoo_checker_* plugins
        # referenced above can be imported by the pylint subprocess.
        pypath = HERE + os.pathsep + os.environ.get('PYTHONPATH', '')
        env = dict(os.environ, PYTHONPATH=pypath)

        try:
            pylint_bin = tools.which('pylint')
            process = subprocess.Popen(
                [pylint_bin] + options + paths,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                env=env,
            )
        except (OSError, IOError):
            self._skip_test('pylint executable not found in the path')
        else:
            out, err = process.communicate()
            if process.returncode:
                self.fail("pylint test failed:\n" + (b"\n" + out + b"\n" + err).decode('utf-8').strip())
|
esatterly/splunk-cassandra
|
refs/heads/master
|
bin/extern.py
|
1
|
#!/usr/bin/env python
#
# Copyright 2011 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# UNDONE: Need to locate installed Python on Windows
"""Common 'wrapper' script used to invoke an 'external' Python scripts. This
module is intended to be invoked using Splunk's internal Python stack and
uses the subprocess module to execute another Python script using the
platform's installed Python stack."""
from os import path
from subprocess import Popen, STDOUT
import sys
def extern(fname):
    """Invoke the given 'external' python script."""
    argv = [fname]
    argv.extend(sys.argv[1:])
    run(argv)
def run(argv):
    """Execute the platform's Python with the given argv, streaming output.

    Deliberately uses the system interpreter (/usr/bin/python) rather than
    Splunk's bundled one — see the module docstring.
    """
    # NOTE(review): env={} drops ALL environment variables (PATH included);
    # confirm the child scripts really need nothing from the environment.
    process = Popen(["/usr/bin/python"] + argv, env={}, stderr=STDOUT)
    process.communicate()
    # communicate() already waits for termination; this wait() is redundant
    # but harmless.
    process.wait()

if __name__ == "__main__":
    run(sys.argv[1:])
|
vkosuri/pyang
|
refs/heads/master
|
pyang/plugins/depend.py
|
1
|
"""Makefile dependency rule output plugin
"""
import optparse
import sys
import os.path
from pyang import plugin
from pyang import error
def pyang_plugin_init():
    """Entry point called by pyang to register this plugin."""
    plugin.register_plugin(DependPlugin())
class DependPlugin(plugin.PyangPlugin):
    """Output plugin emitting Makefile dependency rules for YANG modules."""

    def add_opts(self, optparser):
        # Register the --depend-* command-line options with pyang.
        optlist = [
            optparse.make_option("--depend-target",
                                 dest="depend_target",
                                 help="Makefile rule target"),
            optparse.make_option("--depend-no-submodules",
                                 dest="depend_no_submodules",
                                 action="store_true",
                                 help="Do not generate dependencies for " \
                                 "included submodules"),
            optparse.make_option("--depend-from-submodules",
                                 dest="depend_from_submodules",
                                 action="store_true",
                                 help="Generate dependencies from " \
                                 "included submodules"),
            optparse.make_option("--depend-recurse",
                                 dest="depend_recurse",
                                 action="store_true",
                                 help="Generate dependencies to all " \
                                 "imports, recursively"),
            optparse.make_option("--depend-extension",
                                 dest="depend_extension",
                                 help="YANG module file name extension"),
            optparse.make_option("--depend-include-path",
                                 dest="depend_include_path",
                                 action="store_true",
                                 help="Include file path in the prerequisites"),
            optparse.make_option("--depend-ignore-module",
                                 dest="depend_ignore",
                                 default=[],
                                 action="append",
                                 help="(sub)module to ignore in the" \
                                 " prerequisites.  This option can be" \
                                 " given multiple times."),
            ]
        g = optparser.add_option_group("Depend output specific options")
        g.add_options(optlist)

    def add_output_format(self, fmts):
        self.multiple_modules = True
        fmts['depend'] = self

    def emit(self, ctx, modules, fd):
        # cannot do this unless everything is ok for our module
        modulenames = [m.arg for m in modules]
        for (epos, etag, eargs) in ctx.errors:
            if ((epos.top is None or epos.top.arg in modulenames) and
                error.is_error(error.err_level(etag))):
                raise error.EmitError("%s contains errors" % epos.top.arg)
        emit_depend(ctx, modules, fd)
def emit_depend(ctx, modules, fd):
    """Write one Makefile rule per module: '<target> : <prerequisites>'."""
    for module in modules:
        # Target is the module's own file unless --depend-target overrides it.
        if ctx.opts.depend_target is None:
            fd.write('%s :' % module.pos.ref)
        else:
            fd.write('%s :' % ctx.opts.depend_target)
        prereqs = []
        add_prereqs(ctx, module, prereqs)
        for i in prereqs:
            if i in ctx.opts.depend_ignore:
                continue
            if ctx.opts.depend_include_path:
                # Emit the prerequisite's real file path, optionally with a
                # substituted extension (--depend-extension).
                m = ctx.get_module(i)
                if ctx.opts.depend_extension is None:
                    filename = m.pos.ref
                else:
                    basename = os.path.splitext(m.pos.ref)[0]
                    filename = '%s%s' % (basename, ctx.opts.depend_extension)
                fd.write(' %s' % filename)
            else:
                # Emit just the module name plus the optional extension.
                if ctx.opts.depend_extension is None:
                    ext = ""
                else:
                    ext = ctx.opts.depend_extension
                fd.write(' %s%s' % (i, ext))
        fd.write('\n')
def add_prereqs(ctx, module, prereqs):
    """Append module's dependencies to `prereqs` (mutated in place).

    Collects imports, optionally includes (unless --depend-no-submodules),
    optionally imports of included submodules (--depend-from-submodules),
    and recurses over new entries when --depend-recurse is set.
    """
    new = [i.arg for i in module.search("import") if i.arg not in prereqs]
    if not ctx.opts.depend_no_submodules:
        new += [i.arg for i in module.search("include")
                if i.arg not in prereqs and i.arg not in new]
    if ctx.opts.depend_from_submodules:
        for i in module.search("include"):
            subm = ctx.get_module(i.arg)
            if subm is not None:
                new += [i.arg for i in subm.search("import")
                        if i.arg not in prereqs and i.arg not in new]
    prereqs.extend(new)
    if ctx.opts.depend_recurse:
        # Only the newly discovered modules need recursion; `prereqs`
        # membership guards against cycles and duplicates.
        for i in new:
            m = ctx.get_module(i)
            add_prereqs(ctx, m, prereqs)
|
duqiao/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex/1_auto.py
|
1155
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Placeholder migration used by the squashed-migrations test fixtures."""

    operations = [
        # RunPython.noop applies (and reverses, if given) without any effect.
        migrations.RunPython(migrations.RunPython.noop)
    ]
|
dahlstrom-g/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyRedeclarationInspection/typeDeclarationPrecedesRedeclaredDefinition.py
|
12
|
x = 1
x: int
<warning descr="Redeclared 'x' defined above without usage">x</warning> = 2
|
lepistone/purchase-workflow
|
refs/heads/8.0
|
purchase_requisition_multicurrency/model/__init__.py
|
27
|
# -*- coding: utf-8 -*-
from . import purchase_requisition
from . import purchase_order
|
jcora-nyt/fpa_app
|
refs/heads/master
|
lib/werkzeug/serving.py
|
309
|
# -*- coding: utf-8 -*-
"""
werkzeug.serving
~~~~~~~~~~~~~~~~
There are many ways to serve a WSGI application. While you're developing
it you usually don't want a full blown webserver like Apache but a simple
standalone one. From Python 2.5 onwards there is the `wsgiref`_ server in
the standard library. If you're using older versions of Python you can
download the package from the cheeseshop.
However there are some caveats. Sourcecode won't reload itself when
changed and each time you kill the server using ``^C`` you get an
`KeyboardInterrupt` error. While the latter is easy to solve the first
one can be a pain in the ass in some situations.
The easiest way is creating a small ``start-myproject.py`` that runs the
application::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from myproject import make_app
from werkzeug.serving import run_simple
app = make_app(...)
run_simple('localhost', 8080, app, use_reloader=True)
You can also pass it a `extra_files` keyword argument with a list of
additional files (like configuration files) you want to observe.
For bigger applications you should consider using `werkzeug.script`
instead of a simple start file.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import os
import socket
import sys
import time
import signal
import subprocess
try:
import thread
except ImportError:
import _thread as thread
try:
from SocketServer import ThreadingMixIn, ForkingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
except ImportError:
from socketserver import ThreadingMixIn, ForkingMixIn
from http.server import HTTPServer, BaseHTTPRequestHandler
import werkzeug
from werkzeug._internal import _log
from werkzeug._compat import iteritems, PY2, reraise, text_type, \
wsgi_encoding_dance
from werkzeug.urls import url_parse, url_unquote
from werkzeug.exceptions import InternalServerError, BadRequest
class WSGIRequestHandler(BaseHTTPRequestHandler, object):
"""A request handler that implements WSGI dispatching."""
    @property
    def server_version(self):
        # Reported in the "Server" response header, e.g. "Werkzeug/0.9".
        return 'Werkzeug/' + werkzeug.__version__
    def make_environ(self):
        """Build the WSGI environ dict for the current request."""
        request_url = url_parse(self.path)

        def shutdown_server():
            # Exposed as 'werkzeug.server.shutdown' so an application can
            # request the dev server to stop serving.
            self.server.shutdown_signal = True

        url_scheme = self.server.ssl_context is None and 'http' or 'https'
        path_info = url_unquote(request_url.path)

        environ = {
            'wsgi.version':         (1, 0),
            'wsgi.url_scheme':      url_scheme,
            'wsgi.input':           self.rfile,
            'wsgi.errors':          sys.stderr,
            'wsgi.multithread':     self.server.multithread,
            'wsgi.multiprocess':    self.server.multiprocess,
            'wsgi.run_once':        False,
            'werkzeug.server.shutdown':
                                    shutdown_server,
            'SERVER_SOFTWARE':      self.server_version,
            'REQUEST_METHOD':       self.command,
            'SCRIPT_NAME':          '',
            'PATH_INFO':            wsgi_encoding_dance(path_info),
            'QUERY_STRING':         wsgi_encoding_dance(request_url.query),
            'CONTENT_TYPE':         self.headers.get('Content-Type', ''),
            'CONTENT_LENGTH':       self.headers.get('Content-Length', ''),
            'REMOTE_ADDR':          self.client_address[0],
            'REMOTE_PORT':          self.client_address[1],
            'SERVER_NAME':          self.server.server_address[0],
            'SERVER_PORT':          str(self.server.server_address[1]),
            'SERVER_PROTOCOL':      self.request_version
        }

        # Copy request headers into HTTP_* keys; Content-Type/Length were
        # already mapped to their dedicated CGI keys above.
        for key, value in self.headers.items():
            key = 'HTTP_' + key.upper().replace('-', '_')
            if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
                environ[key] = value

        if request_url.netloc:
            environ['HTTP_HOST'] = request_url.netloc

        return environ
    def run_wsgi(self):
        """Dispatch the current request through the WSGI application."""
        if self.headers.get('Expect', '').lower().strip() == '100-continue':
            self.wfile.write(b'HTTP/1.1 100 Continue\r\n\r\n')

        environ = self.make_environ()
        # headers_set holds [status, headers] once start_response ran;
        # headers_sent mirrors it after the first body write flushed them.
        headers_set = []
        headers_sent = []

        def write(data):
            assert headers_set, 'write() before start_response'
            if not headers_sent:
                # First body write: flush the status line and headers.
                status, response_headers = headers_sent[:] = headers_set
                try:
                    code, msg = status.split(None, 1)
                except ValueError:
                    code, msg = status, ""
                self.send_response(int(code), msg)
                header_keys = set()
                for key, value in response_headers:
                    self.send_header(key, value)
                    key = key.lower()
                    header_keys.add(key)
                # Without Content-Length we must close the connection to
                # delimit the response body.
                if 'content-length' not in header_keys:
                    self.close_connection = True
                    self.send_header('Connection', 'close')
                if 'server' not in header_keys:
                    self.send_header('Server', self.version_string())
                if 'date' not in header_keys:
                    self.send_header('Date', self.date_time_string())
                self.end_headers()

            assert type(data) is bytes, 'applications must write bytes'
            self.wfile.write(data)
            self.wfile.flush()

        def start_response(status, response_headers, exc_info=None):
            if exc_info:
                try:
                    # Headers already on the wire: re-raise per PEP 3333.
                    if headers_sent:
                        reraise(*exc_info)
                finally:
                    exc_info = None
            elif headers_set:
                raise AssertionError('Headers already set')
            headers_set[:] = [status, response_headers]
            return write

        def execute(app):
            application_iter = app(environ, start_response)
            try:
                for data in application_iter:
                    write(data)
                # Force the headers out even for an empty response body.
                if not headers_sent:
                    write(b'')
            finally:
                if hasattr(application_iter, 'close'):
                    application_iter.close()
                application_iter = None

        try:
            execute(self.server.app)
        except (socket.error, socket.timeout) as e:
            self.connection_dropped(e, environ)
        except Exception:
            if self.server.passthrough_errors:
                raise
            from werkzeug.debug.tbtools import get_current_traceback
            traceback = get_current_traceback(ignore_system_exceptions=True)
            try:
                # if we haven't yet sent the headers but they are set
                # we roll back to be able to set them again.
                if not headers_sent:
                    del headers_set[:]
                execute(InternalServerError())
            except Exception:
                pass
            self.server.log('error', 'Error on request:\n%s',
                            traceback.plaintext)
def handle(self):
"""Handles a request ignoring dropped connections."""
rv = None
try:
rv = BaseHTTPRequestHandler.handle(self)
except (socket.error, socket.timeout) as e:
self.connection_dropped(e)
except Exception:
if self.server.ssl_context is None or not is_ssl_error():
raise
if self.server.shutdown_signal:
self.initiate_shutdown()
return rv
def initiate_shutdown(self):
"""A horrible, horrible way to kill the server for Python 2.6 and
later. It's the best we can do.
"""
# Windows does not provide SIGKILL, go with SIGTERM then.
sig = getattr(signal, 'SIGKILL', signal.SIGTERM)
# reloader active
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
os.kill(os.getpid(), sig)
# python 2.7
self.server._BaseServer__shutdown_request = True
# python 2.6
self.server._BaseServer__serving = False
def connection_dropped(self, error, environ=None):
"""Called if the connection was closed by the client. By default
nothing happens.
"""
def handle_one_request(self):
"""Handle a single HTTP request."""
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
elif self.parse_request():
return self.run_wsgi()
def send_response(self, code, message=None):
"""Send the response header and log the response code."""
self.log_request(code)
if message is None:
message = code in self.responses and self.responses[code][0] or ''
if self.request_version != 'HTTP/0.9':
hdr = "%s %d %s\r\n" % (self.protocol_version, code, message)
self.wfile.write(hdr.encode('ascii'))
def version_string(self):
return BaseHTTPRequestHandler.version_string(self).strip()
def address_string(self):
return self.client_address[0]
def log_request(self, code='-', size='-'):
self.log('info', '"%s" %s %s', self.requestline, code, size)
def log_error(self, *args):
self.log('error', *args)
def log_message(self, format, *args):
self.log('info', format, *args)
def log(self, type, message, *args):
_log(type, '%s - - [%s] %s\n' % (self.address_string(),
self.log_date_time_string(),
message % args))
#: Backwards compatible alias kept for code that subclasses the old name.
BaseRequestHandler = WSGIRequestHandler
def generate_adhoc_ssl_pair(cn=None):
    """Generate a throwaway self-signed certificate and private key.

    :param cn: the common name to use; defaults to ``'*'``.
    :return: a ``(cert, pkey)`` tuple of pyOpenSSL objects.
    """
    from random import random
    from OpenSSL import crypto
    # pretty damn sure that this is not actually accepted by anyone
    if cn is None:
        cn = '*'
    cert = crypto.X509()
    # `sys.maxint` only exists on Python 2; fall back to `sys.maxsize` so
    # this also works on Python 3 (this file already imports from
    # `http.server`, so Python 3 is a supported target).
    cert.set_serial_number(int(random() * getattr(sys, 'maxint', sys.maxsize)))
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(60 * 60 * 24 * 365)
    subject = cert.get_subject()
    subject.CN = cn
    subject.O = 'Dummy Certificate'
    issuer = cert.get_issuer()
    issuer.CN = 'Untrusted Authority'
    issuer.O = 'Self-Signed'
    pkey = crypto.PKey()
    # NOTE(review): 768-bit RSA with md5 signing is weak by modern
    # standards; kept as-is since this cert is dev-only and throwaway.
    pkey.generate_key(crypto.TYPE_RSA, 768)
    cert.set_pubkey(pkey)
    cert.sign(pkey, 'md5')
    return cert, pkey
def make_ssl_devcert(base_path, host=None, cn=None):
    """Creates an SSL key for development. This should be used instead of
    the ``'adhoc'`` key which generates a new cert on each server start.
    It accepts a path for where it should store the key and cert and
    either a host or CN. If a host is given it will use the CN
    ``*.host/CN=host``.
    For more information see :func:`run_simple`.
    .. versionadded:: 0.9
    :param base_path: the path to the certificate and key. The extension
                      ``.crt`` is added for the certificate, ``.key`` is
                      added for the key.
    :param host: the name of the host. This can be used as an alternative
                 for the `cn`.
    :param cn: the `CN` to use.
    """
    from OpenSSL import crypto
    if host is not None:
        cn = '*.%s/CN=%s' % (host, host)
    cert, pkey = generate_adhoc_ssl_pair(cn=cn)
    cert_file = base_path + '.crt'
    pkey_file = base_path + '.key'
    # dump_certificate/dump_privatekey return bytes on Python 3, so the
    # files must be opened in binary mode ('wb' also works on Python 2).
    with open(cert_file, 'wb') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    with open(pkey_file, 'wb') as f:
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
    return cert_file, pkey_file
def generate_adhoc_ssl_context():
    """Create a throwaway SSL context backed by a freshly generated
    self-signed certificate (development use only)."""
    from OpenSSL import SSL
    certificate, private_key = generate_adhoc_ssl_pair()
    context = SSL.Context(SSL.SSLv23_METHOD)
    context.use_privatekey(private_key)
    context.use_certificate(certificate)
    return context
def load_ssl_context(cert_file, pkey_file):
    """Build an SSL context from a PEM certificate file and the matching
    private key file."""
    from OpenSSL import SSL
    context = SSL.Context(SSL.SSLv23_METHOD)
    context.use_certificate_file(cert_file)
    context.use_privatekey_file(pkey_file)
    return context
def is_ssl_error(error=None):
    """Return ``True`` if *error* -- or, when *error* is ``None``, the
    exception currently being handled -- is an OpenSSL error."""
    from OpenSSL import SSL
    if error is None:
        error = sys.exc_info()[1]
    return isinstance(error, SSL.Error)
class _SSLConnectionFix(object):
    """Wrapper around SSL connection to provide a working makefile()."""
    def __init__(self, con):
        # The wrapped pyOpenSSL connection object.
        self._con = con
    def makefile(self, mode, bufsize):
        # pyOpenSSL connections lack a usable makefile(); reuse the
        # stdlib's internal file-object wrapper instead.
        # NOTE(review): socket._fileobject is Python 2 only -- this class
        # presumably is never used on Python 3; confirm.
        return socket._fileobject(self._con, mode, bufsize)
    def __getattr__(self, attrib):
        # Delegate everything else to the underlying connection.
        return getattr(self._con, attrib)
    def shutdown(self, arg=None):
        # Socket.shutdown() takes a "how" argument but the pyOpenSSL
        # variant takes none and may raise if the peer is gone; accept
        # and swallow both for plain-socket parity.
        try:
            self._con.shutdown()
        except Exception:
            pass
def select_ip_version(host, port):
    """Returns AF_INET4 or AF_INET6 depending on where to connect to."""
    # Asking getaddrinfo() would be the proper way to pick the address
    # family, but that proved unreliable across operating systems and
    # ipv6 stacks, so a simple heuristic is used instead: a colon in the
    # host means an IPv6 literal.
    if ':' in host and hasattr(socket, 'AF_INET6'):
        return socket.AF_INET6
    return socket.AF_INET
class BaseWSGIServer(HTTPServer, object):
    """Simple single-threaded, single-process WSGI server."""
    # Advertised to applications through wsgi.multithread /
    # wsgi.multiprocess in the environ (see make_environ above).
    multithread = False
    multiprocess = False
    # Backlog passed to socket.listen().
    request_queue_size = 128
    def __init__(self, host, port, app, handler=None,
                 passthrough_errors=False, ssl_context=None):
        """Bind the listening socket and remember the WSGI app.

        :param handler: request handler class, defaults to
                        :class:`WSGIRequestHandler`.
        :param passthrough_errors: when true, application errors are
                                   re-raised instead of being handled.
        :param ssl_context: an OpenSSL context, a ``(cert, key)`` file
                            tuple, the string ``'adhoc'``, or ``None``.
        """
        if handler is None:
            handler = WSGIRequestHandler
        self.address_family = select_ip_version(host, port)
        HTTPServer.__init__(self, (host, int(port)), handler)
        self.app = app
        self.passthrough_errors = passthrough_errors
        self.shutdown_signal = False
        if ssl_context is not None:
            try:
                from OpenSSL import tsafe
            except ImportError:
                raise TypeError('SSL is not available if the OpenSSL '
                                'library is not installed.')
            if isinstance(ssl_context, tuple):
                ssl_context = load_ssl_context(*ssl_context)
            if ssl_context == 'adhoc':
                ssl_context = generate_adhoc_ssl_context()
            # Wrap the already-bound socket in a thread-safe SSL
            # connection.
            self.socket = tsafe.Connection(ssl_context, self.socket)
            self.ssl_context = ssl_context
        else:
            self.ssl_context = None
    def log(self, type, message, *args):
        _log(type, message, *args)
    def serve_forever(self):
        # Reset the shutdown flag so a server instance can be reused.
        self.shutdown_signal = False
        try:
            HTTPServer.serve_forever(self)
        except KeyboardInterrupt:
            pass
    def handle_error(self, request, client_address):
        # With passthrough_errors the exception propagates to the caller
        # (useful for attaching debuggers); otherwise defer to the base
        # class, which logs a traceback.
        if self.passthrough_errors:
            raise
        else:
            return HTTPServer.handle_error(self, request, client_address)
    def get_request(self):
        con, info = self.socket.accept()
        if self.ssl_context is not None:
            # Give the SSL connection a working makefile() for rfile/wfile.
            con = _SSLConnectionFix(con)
        return con, info
class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer):
    """A WSGI server that does threading."""
    # Reported to applications via the 'wsgi.multithread' environ key.
    multithread = True
class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):
    """A WSGI server that does forking."""
    # Reported to applications via the 'wsgi.multiprocess' environ key.
    multiprocess = True
    def __init__(self, host, port, app, processes=40, handler=None,
                 passthrough_errors=False, ssl_context=None):
        # :param processes: cap on concurrent worker processes, stored
        # as ForkingMixIn's max_children.
        BaseWSGIServer.__init__(self, host, port, app, handler,
                                passthrough_errors, ssl_context)
        self.max_children = processes
def make_server(host, port, app=None, threaded=False, processes=1,
                request_handler=None, passthrough_errors=False,
                ssl_context=None):
    """Create a new server instance that is either threaded, or forks
    or just processes one request after another.
    """
    # Threading and forking are mutually exclusive.
    if threaded and processes > 1:
        raise ValueError("cannot have a multithreaded and "
                         "multi process server.")
    if threaded:
        return ThreadedWSGIServer(host, port, app, request_handler,
                                  passthrough_errors, ssl_context)
    if processes > 1:
        return ForkingWSGIServer(host, port, app, processes, request_handler,
                                 passthrough_errors, ssl_context)
    return BaseWSGIServer(host, port, app, request_handler,
                          passthrough_errors, ssl_context)
def _iter_module_files():
# The list call is necessary on Python 3 in case the module
# dictionary modifies during iteration.
for module in list(sys.modules.values()):
filename = getattr(module, '__file__', None)
if filename:
old = None
while not os.path.isfile(filename):
old = filename
filename = os.path.dirname(filename)
if filename == old:
break
else:
if filename[-4:] in ('.pyc', '.pyo'):
filename = filename[:-1]
yield filename
def _reloader_stat_loop(extra_files=None, interval=1):
    """When this function is run from the main thread, it will force other
    threads to exit when any modules currently loaded change.
    Copyright notice. This function is based on the autoreload.py from
    the CherryPy trac which originated from WSGIKit which is now dead.
    :param extra_files: a list of additional files it should watch.
    """
    from itertools import chain
    # filename -> last observed modification time
    mtimes = {}
    while 1:
        for filename in chain(_iter_module_files(), extra_files or ()):
            try:
                mtime = os.stat(filename).st_mtime
            except OSError:
                # File vanished (editor temp files etc.); skip it.
                continue
            old_time = mtimes.get(filename)
            if old_time is None:
                # First sighting: record a baseline, don't trigger.
                mtimes[filename] = mtime
                continue
            elif mtime > old_time:
                _log('info', ' * Detected change in %r, reloading' % filename)
                # Exit code 3 signals restart_with_reloader() to respawn.
                sys.exit(3)
        time.sleep(interval)
def _reloader_inotify(extra_files=None, interval=None):
    """Reloader loop backed by pyinotify instead of stat polling.

    Exits the process with code 3 (the "please respawn" signal) once any
    watched file changes. *interval* is only used as the event-poll
    timeout.
    """
    # Mutated by inotify loop when changes occur.
    changed = [False]
    # Setup inotify watches
    from pyinotify import WatchManager, Notifier
    # this API changed at one point, support both
    try:
        from pyinotify import EventsCodes as ec
        ec.IN_ATTRIB
    except (ImportError, AttributeError):
        import pyinotify as ec
    wm = WatchManager()
    mask = ec.IN_DELETE_SELF | ec.IN_MOVE_SELF | ec.IN_MODIFY | ec.IN_ATTRIB
    def signal_changed(event):
        # Callback invoked by pyinotify; latch the flag exactly once.
        if changed[0]:
            return
        _log('info', ' * Detected change in %r, reloading' % event.path)
        changed[:] = [True]
    for fname in extra_files or ():
        wm.add_watch(fname, mask, signal_changed)
    # ... And now we wait...
    notif = Notifier(wm)
    try:
        while not changed[0]:
            # always reiterate through sys.modules, adding them
            for fname in _iter_module_files():
                wm.add_watch(fname, mask, signal_changed)
            notif.process_events()
            if notif.check_events(timeout=interval):
                notif.read_events()
            # TODO Set timeout to something small and check parent liveliness
    finally:
        notif.stop()
    sys.exit(3)
# currently we always use the stat loop reloader for the simple reason
# that the inotify one does not respond to added files properly. Also
# it's quite buggy and the API is a mess.
reloader_loop = _reloader_stat_loop
def restart_with_reloader():
    """Spawn a new Python interpreter with the same arguments as this one,
    but running the reloader thread.
    """
    while True:
        _log('info', ' * Restarting with reloader')
        child_args = [sys.executable] + sys.argv
        child_env = os.environ.copy()
        child_env['WERKZEUG_RUN_MAIN'] = 'true'
        # a weird bug on windows. sometimes unicode strings end up in the
        # environment and subprocess.call does not like this, encode them
        # to latin1 and continue.
        if PY2 and os.name == 'nt':
            for key, value in iteritems(child_env):
                if isinstance(value, text_type):
                    child_env[key] = value.encode('iso-8859-1')
        exit_code = subprocess.call(child_args, env=child_env)
        # Exit code 3 is the reloader's "file changed" signal; anything
        # else means the child terminated for real, so pass it through.
        if exit_code != 3:
            return exit_code
def run_with_reloader(main_func, extra_files=None, interval=1):
    """Run the given function in an independent python interpreter."""
    import signal
    signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        # We are the reloader's child process: run the real server in a
        # background thread and keep the file-watch loop in the main
        # thread (sys.exit(3) must happen there to end the process).
        # NOTE(review): `thread` is the Python 2 module name -- presumably
        # aliased/imported earlier in this file for Python 3; confirm.
        thread.start_new_thread(main_func, ())
        try:
            reloader_loop(extra_files, interval)
        except KeyboardInterrupt:
            return
    try:
        # Parent process: respawn the child until it exits with a code
        # other than 3, then propagate that exit code.
        sys.exit(restart_with_reloader())
    except KeyboardInterrupt:
        pass
def run_simple(hostname, port, application, use_reloader=False,
               use_debugger=False, use_evalex=True,
               extra_files=None, reloader_interval=1, threaded=False,
               processes=1, request_handler=None, static_files=None,
               passthrough_errors=False, ssl_context=None):
    """Start an application using wsgiref and with an optional reloader. This
    wraps `wsgiref` to fix the wrong default reporting of the multithreaded
    WSGI variable and adds optional multithreading and fork support.
    This function has a command-line interface too::
        python -m werkzeug.serving --help
    .. versionadded:: 0.5
       `static_files` was added to simplify serving of static files as well
       as `passthrough_errors`.
    .. versionadded:: 0.6
       support for SSL was added.
    .. versionadded:: 0.8
       Added support for automatically loading a SSL context from certificate
       file and private key.
    .. versionadded:: 0.9
       Added command-line interface.
    :param hostname: The host for the application.  eg: ``'localhost'``
    :param port: The port for the server.  eg: ``8080``
    :param application: the WSGI application to execute
    :param use_reloader: should the server automatically restart the python
                         process if modules were changed?
    :param use_debugger: should the werkzeug debugging system be used?
    :param use_evalex: should the exception evaluation feature be enabled?
    :param extra_files: a list of files the reloader should watch
                        additionally to the modules.  For example configuration
                        files.
    :param reloader_interval: the interval for the reloader in seconds.
    :param threaded: should the process handle each request in a separate
                     thread?
    :param processes: if greater than 1 then handle each request in a new process
                      up to this maximum number of concurrent processes.
    :param request_handler: optional parameter that can be used to replace
                            the default one.  You can use this to replace it
                            with a different
                            :class:`~BaseHTTPServer.BaseHTTPRequestHandler`
                            subclass.
    :param static_files: a dict of paths for static files.  This works exactly
                         like :class:`SharedDataMiddleware`, it's actually
                         just wrapping the application in that middleware before
                         serving.
    :param passthrough_errors: set this to `True` to disable the error catching.
                               This means that the server will die on errors but
                               it can be useful to hook debuggers in (pdb etc.)
    :param ssl_context: an SSL context for the connection. Either an OpenSSL
                        context, a tuple in the form ``(cert_file, pkey_file)``,
                        the string ``'adhoc'`` if the server should
                        automatically create one, or `None` to disable SSL
                        (which is the default).
    """
    if use_debugger:
        from werkzeug.debug import DebuggedApplication
        application = DebuggedApplication(application, use_evalex)
    if static_files:
        from werkzeug.wsgi import SharedDataMiddleware
        application = SharedDataMiddleware(application, static_files)
    def inner():
        # Built lazily so that, with the reloader, the child process is
        # the one that actually binds the socket and serves.
        make_server(hostname, port, application, threaded,
                    processes, request_handler,
                    passthrough_errors, ssl_context).serve_forever()
    if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        # Only the outermost process logs the banner; the reloader child
        # would otherwise repeat it on every restart.
        display_hostname = hostname != '*' and hostname or 'localhost'
        if ':' in display_hostname:
            display_hostname = '[%s]' % display_hostname
        _log('info', ' * Running on %s://%s:%d/', ssl_context is None
             and 'http' or 'https', display_hostname, port)
    if use_reloader:
        # Create and destroy a socket so that any exceptions are raised before
        # we spawn a separate Python interpreter and lose this ability.
        address_family = select_ip_version(hostname, port)
        test_socket = socket.socket(address_family, socket.SOCK_STREAM)
        test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        test_socket.bind((hostname, port))
        test_socket.close()
        run_with_reloader(inner, extra_files, reloader_interval)
    else:
        inner()
def main():
    '''A simple command-line interface for :py:func:`run_simple`.'''
    # in contrast to argparse, this works at least under Python < 2.7
    import optparse
    from werkzeug.utils import import_string

    arg_parser = optparse.OptionParser(
        usage='Usage: %prog [options] app_module:app_object')
    arg_parser.add_option('-b', '--bind', dest='address',
                          help='The hostname:port the app should listen on.')
    arg_parser.add_option('-d', '--debug', dest='use_debugger',
                          action='store_true', default=False,
                          help='Use Werkzeug\'s debugger.')
    arg_parser.add_option('-r', '--reload', dest='use_reloader',
                          action='store_true', default=False,
                          help='Reload Python process if modules change.')
    options, args = arg_parser.parse_args()

    # --bind accepts "host" or "host:port"; both pieces are optional and
    # fall back to 127.0.0.1:5000 below.
    hostname, port = None, None
    if options.address:
        pieces = options.address.split(':')
        hostname = pieces[0]
        if len(pieces) > 1:
            port = pieces[1]

    if len(args) != 1:
        sys.stdout.write('No application supplied, or too much. See --help\n')
        sys.exit(1)

    run_simple(
        hostname=(hostname or '127.0.0.1'), port=int(port or 5000),
        application=import_string(args[0]),
        use_reloader=options.use_reloader,
        use_debugger=options.use_debugger
    )
if __name__ == '__main__':
    main()
|
gauribhoite/personfinder
|
refs/heads/master
|
env/site-packages/pygments/lexers/dylan.py
|
72
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.dylan
~~~~~~~~~~~~~~~~~~~~~
Lexers for the Dylan language.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Literal
__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
class DylanLexer(RegexLexer):
    """
    For the `Dylan <http://www.opendylan.org/>`_ language.
    .. versionadded:: 0.7
    """
    name = 'Dylan'
    aliases = ['dylan']
    filenames = ['*.dylan', '*.dyl', '*.intr']
    mimetypes = ['text/x-dylan']
    # Dylan identifiers and keywords are case-insensitive.
    flags = re.IGNORECASE
    # Adjective/defining words re-tagged as Name.Builtin by
    # get_tokens_unprocessed below.
    builtins = set((
        'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
        'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
        'each-subclass', 'exception', 'exclude', 'function', 'generic',
        'handler', 'inherited', 'inline', 'inline-only', 'instance',
        'interface', 'import', 'keyword', 'library', 'macro', 'method',
        'module', 'open', 'primary', 'required', 'sealed', 'sideways',
        'singleton', 'slot', 'thread', 'variable', 'virtual'))
    keywords = set((
        'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
        'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
        'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
        'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
        'while'))
    operators = set((
        '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
        '>', '>=', '&', '|'))
    functions = set((
        'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
        'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
        'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
        'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
        'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
        'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
        'condition-format-arguments', 'condition-format-string', 'conjoin',
        'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
        'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
        'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
        'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
        'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
        'function-arguments', 'function-return-values',
        'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
        'generic-function-methods', 'head', 'head-setter', 'identity',
        'initialize', 'instance?', 'integral?', 'intersection',
        'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
        'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
        'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
        'min', 'modulo', 'negative', 'negative?', 'next-method',
        'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
        'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
        'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
        'remove-duplicates', 'remove-duplicates!', 'remove-key!',
        'remove-method', 'replace-elements!', 'replace-subsequence!',
        'restart-query', 'return-allowed?', 'return-description',
        'return-query', 'reverse', 'reverse!', 'round', 'round/',
        'row-major-index', 'second', 'second-setter', 'shallow-copy',
        'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
        'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
        'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
        'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
        'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
        'vector', 'zero?'))
    # Regex fragment matching a (possibly \-escaped) Dylan identifier.
    valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
    def get_tokens_unprocessed(self, text):
        """Post-process the regex token stream, re-tagging generic
        ``Name`` tokens as builtins, keywords, functions or operators
        using the word sets above (lowercased, since Dylan is
        case-insensitive)."""
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name:
                lowercase_value = value.lower()
                if lowercase_value in self.builtins:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.keywords:
                    yield index, Keyword, value
                    continue
                if lowercase_value in self.functions:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.operators:
                    yield index, Operator, value
                    continue
            yield index, token, value
    tokens = {
        'root': [
            # Whitespace
            (r'\s+', Text),
            # single line comment
            (r'//.*?\n', Comment.Single),
            # lid header
            (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
                bygroups(Name.Attribute, Operator, Text, String)),
            default('code')  # no header match, switch to code
        ],
        'code': [
            # Whitespace
            (r'\s+', Text),
            # single line comment
            (r'//.*?\n', Comment.Single),
            # multi-line comment
            (r'/\*', Comment.Multiline, 'comment'),
            # strings and characters
            (r'"', String, 'string'),
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
            # binary integer
            (r'#b[01]+', Number.Bin),
            # octal integer
            (r'#o[0-7]+', Number.Oct),
            # floating point
            (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
            # decimal integer
            (r'[-+]?\d+', Number.Integer),
            # hex integer
            (r'#x[0-9a-f]+', Number.Hex),
            # Macro parameters
            (r'(\?' + valid_name + ')(:)'
             r'(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'\?' + valid_name, Name.Tag),
            # Punctuation
            (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
            # Most operators are picked up as names and then re-flagged.
            # This one isn't valid in a name though, so we pick it up now.
            (r':=', Operator),
            # Pick up #t / #f before we match other stuff with #.
            (r'#[tf]', Literal),
            # #"foo" style keywords
            (r'#"', String.Symbol, 'keyword'),
            # #rest, #key, #all-keys, etc.
            (r'#[a-z0-9-]+', Keyword),
            # required-init-keyword: style keywords.
            (valid_name + ':', Keyword),
            # class names
            (r'<' + valid_name + '>', Name.Class),
            # define variable forms.
            # Raw strings here: '\*' in a plain string is an invalid
            # escape sequence (DeprecationWarning today, a SyntaxError in
            # future Python versions).
            (r'\*' + valid_name + r'\*', Name.Variable.Global),
            # define constant forms.
            (r'\$' + valid_name, Name.Constant),
            # everything else. We re-flag some of these in the method above.
            (valid_name, Name),
        ],
        'comment': [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'keyword': [
            (r'"', String.Symbol, '#pop'),
            (r'[^\\"]+', String.Symbol),  # all other characters
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ]
    }
class DylanLidLexer(RegexLexer):
    """
    For Dylan LID (Library Interchange Definition) files.
    .. versionadded:: 1.6
    """
    name = 'DylanLID'
    aliases = ['dylan-lid', 'lid']
    filenames = ['*.lid', '*.hdp']
    mimetypes = ['text/x-dylan-lid']
    # LID header keys are case-insensitive.
    flags = re.IGNORECASE
    tokens = {
        'root': [
            # Whitespace
            (r'\s+', Text),
            # single line comment
            (r'//.*?\n', Comment.Single),
            # lid header: "key: value" pairs where a value may continue
            # on following indented lines.
            (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
                bygroups(Name.Attribute, Operator, Text, String)),
        ]
    }
class DylanConsoleLexer(Lexer):
    """
    For Dylan interactive console output like:
    .. sourcecode:: dylan-console
        ? let a = 1;
        => 1
        ? a
        => 1
    This is based on a copy of the RubyConsoleLexer.
    .. versionadded:: 1.6
    """
    name = 'Dylan session'
    aliases = ['dylan-console', 'dylan-repl']
    filenames = ['*.dylan-console']
    mimetypes = ['text/x-dylan-console']
    # Splits the input into physical lines.
    _line_re = re.compile('.*?\n')
    # Matches the console prompt ("? ") or its continuation indent.
    # NOTE(review): non-raw '\?' relies on Python passing unknown escape
    # sequences through verbatim; consider r'\?| '.
    _prompt_re = re.compile('\?| ')
    def get_tokens_unprocessed(self, text):
        """Split the session into prompt/code lines (delegated to
        :class:`DylanLexer`) and plain output lines."""
        dylexer = DylanLexer(**self.options)
        # Accumulated source code between output lines, plus the prompt
        # tokens to re-insert at the recorded offsets.
        curcode = ''
        insertions = []
        for match in self._line_re.finditer(text):
            line = match.group()
            m = self._prompt_re.match(line)
            if m is not None:
                # Prompt line: keep the prompt as Generic.Prompt and
                # buffer the rest for the Dylan sub-lexer.
                end = m.end()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:end])]))
                curcode += line[end:]
            else:
                # Output line: flush any buffered code first.
                if curcode:
                    for item in do_insertions(insertions,
                                              dylexer.get_tokens_unprocessed(curcode)):
                        yield item
                    curcode = ''
                    insertions = []
                yield match.start(), Generic.Output, line
        # Flush trailing code that was not followed by output.
        if curcode:
            for item in do_insertions(insertions,
                                      dylexer.get_tokens_unprocessed(curcode)):
                yield item
|
soldag/home-assistant
|
refs/heads/dev
|
tests/components/nws/test_config_flow.py
|
5
|
"""Test the National Weather Service (NWS) config flow."""
import aiohttp
from homeassistant import config_entries, setup
from homeassistant.components.nws.const import DOMAIN
from tests.async_mock import patch
async def test_form(hass, mock_simple_nws_config):
    """Test we get the form."""
    hass.config.latitude = 35
    hass.config.longitude = -90
    await setup.async_setup_component(hass, "persistent_notification", {})
    # Starting the flow from the user step should show an empty form.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}
    # Patch out component setup so submitting the form only exercises
    # the config flow itself.
    with patch(
        "homeassistant.components.nws.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.nws.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"api_key": "test"}
        )
        await hass.async_block_till_done()
    # The entry picks up hass's lat/long and the mocked station "ABC".
    assert result2["type"] == "create_entry"
    assert result2["title"] == "ABC"
    assert result2["data"] == {
        "api_key": "test",
        "latitude": 35,
        "longitude": -90,
        "station": "ABC",
    }
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_cannot_connect(hass, mock_simple_nws_config):
    """Test we handle cannot connect error."""
    # Simulate a network failure while the flow validates the station.
    nws_instance = mock_simple_nws_config.return_value
    nws_instance.set_station.side_effect = aiohttp.ClientError
    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    configure_result = await hass.config_entries.flow.async_configure(
        init_result["flow_id"],
        {"api_key": "test"},
    )
    # The form is shown again with the connection error attached.
    assert configure_result["type"] == "form"
    assert configure_result["errors"] == {"base": "cannot_connect"}
async def test_form_unknown_error(hass, mock_simple_nws_config):
    """Test we handle unknown error."""
    # An unexpected exception type should map to the generic error key.
    nws_instance = mock_simple_nws_config.return_value
    nws_instance.set_station.side_effect = ValueError
    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    configure_result = await hass.config_entries.flow.async_configure(
        init_result["flow_id"],
        {"api_key": "test"},
    )
    assert configure_result["type"] == "form"
    assert configure_result["errors"] == {"base": "unknown"}
async def test_form_already_configured(hass, mock_simple_nws_config):
    """Test we handle duplicate entries."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # First submission succeeds and creates an entry.
    with patch(
        "homeassistant.components.nws.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.nws.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"api_key": "test"},
        )
        await hass.async_block_till_done()
    assert result2["type"] == "create_entry"
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
    # Second flow with identical data must abort instead of creating a
    # duplicate entry.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    with patch(
        "homeassistant.components.nws.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.nws.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"api_key": "test"},
        )
    assert result2["type"] == "abort"
    assert result2["reason"] == "already_configured"
    await hass.async_block_till_done()
    # No setup may have been triggered by the aborted flow.
    assert len(mock_setup.mock_calls) == 0
    assert len(mock_setup_entry.mock_calls) == 0
|
amarandon/opencore
|
refs/heads/master
|
opencore/scripts/site_announce.py
|
4
|
# Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: tmoroz.org
# 2010-2011 Large Blue
# Fergus Doyle: fergus.doyle@largeblue.com
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""Set or clear the site announcement:
site_announce (show the current announcement text).
site_announce --clear (clear any existing announcement text).
site_announce <text> (update the announcement text).
"""
from opencore.scripting import get_default_config
from opencore.scripting import open_root
from optparse import OptionParser
import transaction
import logging
logging.basicConfig()
def clear_site_announce(root, *args):
    """Blank out the site announcement; return (previous, new) text."""
    previous = getattr(root, 'site_announcement', '')
    root.site_announcement = ''
    return previous, ''
def set_site_announce(root, *args):
    """Set the announcement to the space-joined *args*; with no args
    just report the current text. Returns (previous, current)."""
    previous = getattr(root, 'site_announcement', '')
    if args:
        root.site_announcement = ' '.join(args)
        return previous, root.site_announcement
    return previous, previous
def main():
parser = OptionParser(description=__doc__,
usage='usage: %prog [options] username password')
parser.add_option('-C', '--config', dest='config', default=None,
help="Specify a paster config file. "
"Defaults to$CWD/etc/openhcd.ini")
parser.add_option('--clear', dest='clear', default=False,
action='store_true',
help="Clear any existing announcement. Default false.")
parser.add_option('-n', '--dry-run', dest='dry_run', default=False,
action='store_true',
help="Don't actually commit any change.")
parser.add_option('-v', '--verbose', dest='verbose', action='count',
default='1', help="Show more information.")
parser.add_option('-q', '--quiet', dest='verbose', action='store_const',
const=0, help="Show no extra information.")
options, args = parser.parse_args()
if options.clear:
if args:
parser.error("No arguments allowed with '--clear'")
func = clear_site_announce
else:
func = set_site_announce
config = options.config
if config is None:
config = get_default_config()
root, closer = open_root(config)
try:
previous, now = func(root, *args)
except:
transaction.abort()
raise
if options.verbose:
print 'Before:', previous
print 'After:', now
if options.dry_run:
transaction.abort()
else:
transaction.commit()
# Script entry point: only run when executed directly, not on import.
if __name__ == '__main__':
    main()
|
hobson/pug-dj
|
refs/heads/master
|
pug/dj/crawlnmine/fabfile/django_fabric_aws.py
|
1
|
'''
--------------------------------------------------------------------------------------
django_fabric_aws.py
--------------------------------------------------------------------------------------
A set of fabric commands to manage a Django deployment on AWS
author : Ashok Fernandez (github.com/ashokfernandez/)
credit : Derived from files in https://github.com/gcollazo/Fabulous
date : 11 / 3 / 2014
Commands include:
- fab spawn instance
- Spawns a new EC2 instance (as definied in project_conf.py) and return's it's public dns
This takes around 8 minutes to complete.
- fab update_packages
- Updates the python packages on the server to match those found in requirements/common.txt and
requirements/prod.txt
- fab deploy
- Pulls the latest commit from the master branch on the server, collects the static files, syncs the db and
restarts the server
- fab reload_gunicorn
- Pushes the gunicorn startup script to the servers and restarts the gunicorn process, use this if you
have made changes to templates/start_gunicorn.bash
- fab reload_nginx
- Pushes the nginx config files to the servers and restarts the nginx, use this if you
have made changes to templates/nginx-app-proxy or templates/nginx.conf
- fab reload_supervisor
- Pushes the supervisor config files to the servers and restarts the supervisor, use this if you
have made changes to templates/supervisord-init or templates/supervisord.conf
- fab manage:command="management command"
- Runs a python manage.py command on the server. To run this command we need to specify an argument, eg for syncdb
type the command -> fab manage:command="syncdb --no-input"
'''
from fabric.api import *
from fabric.colors import green as _green, yellow as _yellow
from project_conf import *
import tasks
import boto
import boto.ec2
import time
# AWS user credentials: fabric SSHes into every host as this user, using
# this private key.
env.user = fabconf['SERVER_USERNAME']
env.key_filename = fabconf['SSH_PRIVATE_KEY_PATH']
# List of EC2 instances to work on (public DNS names from project_conf.py)
env.hosts = fabconf['EC2_INSTANCES']
# ------------------------------------------------------------------------------------------------------------------
# MAIN FABRIC TASKS - Type fab <function_name> in the command line to execute any one of these
# ------------------------------------------------------------------------------------------------------------------
def spawn():
    # First half of the two-word "fab spawn instance" invocation: clear
    # the host list so the following instance() task does not try to
    # connect to the instances already listed in project_conf.py.
    env.hosts = []
def instance():
    """
    Creates an EC2 instance from an Ubuntu AMI and configures it as a Django server
    with nginx + gunicorn
    """
    # Record the starting time and print a starting message
    start_time = time.time()
    print(_green("Started..."))
    # Use boto to create an EC2 instance; its public DNS becomes the
    # fabric host for all configuration commands that follow.
    env.host_string = _create_ec2_instance()
    print(_green("Waiting 30 seconds for server to boot..."))
    time.sleep(30)
    # Configure the instance that was just created: each item's 'action'
    # is dispatched to the matching module-level helper (_apt, _pip,
    # _run, _sudo, _put, ...); the 'message' key is optional progress text.
    for item in tasks.configure_instance:
        try:
            print(_yellow(item['message']))
        except KeyError:
            pass
        globals()["_" + item['action']](item['params'])
    # Print out the final runtime and the public dns of the new instance
    end_time = time.time()
    print(_green("Runtime: %f minutes" % ((end_time - start_time) / 60)))
    # NOTE(review): the trailing commas only suppress newlines under the
    # Python 2 print statement -- confirm this fabfile targets Python 2.
    print(_green("\nPLEASE ADD ADDRESS THIS TO YOUR ")),
    print(_yellow("project_conf.py")),
    print(_green(" FILE UNDER ")),
    print(_yellow("fabconf['EC2_INSTANCES'] : ")),
    print(_green(env.host_string))
def deploy():
    """
    Pulls the latest commit from bitbucket, resyncs the database, collects the static files and restarts the
    server.
    """
    # The actual command sequence lives in tasks.deploy; _run_task runs it
    # on every host in fabconf['EC2_INSTANCES'].
    _run_task(tasks.deploy, "Updating server to latest commit in the bitbucket repo...", "Finished updating the server")
def update_packages():
    """
    Updates the python packages on the server as defined in requirements/common.txt and
    requirements/prod.txt
    """
    # Command sequence lives in tasks.update_packages.
    _run_task(tasks.update_packages, "Updating server packages with pip...", "Finished updating python packages")
def reload_nginx():
    """
    Reloads the nginx config files and restarts nginx
    """
    # Use after editing templates/nginx-app-proxy or templates/nginx.conf.
    _run_task(tasks.reload_nginx, "Reloading the nginx config files...", "Finished reloading nginx")
def reload_supervisor():
    """
    Reloads the supervisor config files and restarts supervisord
    """
    # Use after editing templates/supervisord-init or supervisord.conf.
    _run_task(tasks.reload_supervisor, "Reloading the supervisor config files...", "Finished reloading supervisor")
def reload_gunicorn():
    """
    Reloads the Gunicorn startup script and restarts gunicorn
    """
    # Use after editing templates/start_gunicorn.bash.
    _run_task(tasks.reload_gunicorn, "Reloading the gunicorn startup script...", "Finished reloading the gunicorn startup script")
def manage(command):
    """
    Runs a python manage.py command on the server

    :param command: the manage.py subcommand plus its arguments, e.g.
        ``fab manage:command="syncdb --no-input"``.
    """
    # Get the instances to run commands on
    env.hosts = fabconf['EC2_INSTANCES']
    # Run the management command inside the virtualenv; %(PROJECT_PATH)s
    # is interpolated from fabconf by _render().
    _virtualenv("python %(PROJECT_PATH)s/manage.py " + command)
# ------------------------------------------------------------------------------------------------------------------
# SUPPORT FUNCTIONS
# ------------------------------------------------------------------------------------------------------------------
def _run_task(task, start_message, finished_message):
    """
    Takes a task list from tasks.py and runs each command on the server.

    :param task: list of dicts, each with an 'action' naming a module-level
        helper (_apt, _pip, _run, _sudo, ...), its 'params', and an optional
        progress 'message'.
    :param start_message: printed before the first command runs.
    :param finished_message: printed, with the elapsed time, once done.
    """
    # Get the hosts and record the start time
    env.hosts = fabconf['EC2_INSTANCES']
    start = time.time()
    # Bail out early when no instances are configured.
    if not env.hosts:
        # BUG FIX: the message said "There are EC2 instances defined" when
        # it means the opposite.
        print("There are no EC2 instances defined in project_conf.py, please add some instances and try again")
        print("or run 'fab spawn_instance' to create an instance")
        return
    # Print the starting message
    print(_yellow(start_message))
    # Run the task items, dispatching each 'action' to the matching
    # module-level helper.
    for item in task:
        try:
            print(_yellow(item['message']))
        except KeyError:
            pass
        globals()["_" + item['action']](item['params'])
    # Print the final message and the elapsed time
    print(_yellow("%s in %.2fs" % (finished_message, time.time() - start)))
def _create_ec2_instance():
    """
    Creates EC2 Instance

    Boots one instance from the first AMI in ec2_amis, tags it with the
    configured name, then polls every 10 seconds until it leaves the
    'pending' state.

    :returns: the public DNS name of the new instance.
    """
    print(_yellow("Creating instance"))
    conn = boto.ec2.connect_to_region(ec2_region, aws_access_key_id=fabconf['AWS_ACCESS_KEY'], aws_secret_access_key=fabconf['AWS_SECRET_KEY'])
    image = conn.get_all_images(ec2_amis)
    # run(min_count, max_count, ...) -- exactly one instance is requested.
    reservation = image[0].run(1, 1, ec2_keypair, ec2_secgroups,
        instance_type=ec2_instancetype)
    instance = reservation.instances[0]
    conn.create_tags([instance.id], {"Name":fabconf['INSTANCE_NAME_TAG']})
    # Busy-wait until AWS reports the instance as running.
    while instance.state == u'pending':
        print(_yellow("Instance state: %s" % instance.state))
        time.sleep(10)
        instance.update()
    print(_green("Instance state: %s" % instance.state))
    print(_green("Public dns: %s" % instance.public_dns_name))
    return instance.public_dns_name
def _virtualenv(params):
    """
    Allows running commands on the server
    with an active virtualenv

    *params* is rendered against fabconf before execution; the command
    runs from inside fabconf['APPS_DIR'].
    """
    with cd(fabconf['APPS_DIR']):
        _virtualenv_command(_render(params))
def _apt(params):
    """Install each Debian package named in *params* via apt-get (quiet)."""
    for package_name in params:
        _sudo("apt-get install -qq %s" % package_name)
def _pip(params):
    """pip-install each requirement named in *params*, as root."""
    for requirement in params:
        _sudo("pip install %s" % requirement)
def _run(params):
    """Render *params* against fabconf and run it as the fabric user."""
    run(_render(params))
def _sudo(params):
    """Render *params* against fabconf and run it as root."""
    sudo(_render(params))
def _put(params):
    """Upload params['file'] to params['destination'] (both rendered)."""
    local_path = _render(params['file'])
    remote_path = _render(params['destination'])
    put(local_path, remote_path)
def _put_template(params):
    """
    Same as _put() but it loads a file and does variable replacement

    Reads the local template at params['template'], renders it against
    fabconf, and writes the result to params['destination'] on the server.
    """
    # BUG FIX: use a context manager so the local file handle is closed
    # even on error; the original leaked it.
    with open(_render(params['template']), 'r') as f:
        template = f.read()
    run(_write_to(_render(template), _render(params['destination'])))
def _render(template, context=fabconf):
    """
    Does variable replacement

    Interpolates %%-style placeholders in *template* from *context*,
    which defaults to the global fabconf dict.
    """
    return template % context
def _write_to(string, path):
"""
Writes a string to a file on the server
"""
return "echo '" + string + "' > " + path
def _append_to(string, path):
"""
Appends to a file on the server
"""
return "echo '" + string + "' >> " + path
def _virtualenv_command(command):
    """
    Activates virtualenv and runs command

    Runs as fabconf['SERVER_USERNAME'] (not root), from inside
    fabconf['APPS_DIR'].
    """
    with cd(fabconf['APPS_DIR']):
        sudo(fabconf['ACTIVATE'] + ' && ' + command, user=fabconf['SERVER_USERNAME'])
|
armikhael/software-center
|
refs/heads/master
|
test/disabled_test_gnomekeyring.py
|
4
|
#!/usr/bin/python
from gi.repository import GObject
import gnomekeyring as gk
import unittest
class testGnomeKeyringUsage(unittest.TestCase):
    """Integration tests against a live gnome-keyring daemon.

    A dedicated throwaway keyring is created per run and removed in
    tearDown so the user's real keyrings are never touched.
    """
    # Application name registered with the keyring daemon.
    APP = "gk-test"
    # Name of the throwaway test keyring.
    KEYRING_NAME = "gk-test-keyring"
    def setUp(self):
        GObject.set_application_name(self.APP)
    def test_keyring_available(self):
        # Requires a gnome-keyring daemon running in the session.
        available = gk.is_available()
        self.assertTrue(available)
    def test_keyring_populate(self):
        # OAuth-style secret plus searchable attributes, mirroring what
        # software-center stores.
        attr = { 'token' : 'the-token',
                 'consumer-key' : 'the-consumer-key',
                 'usage' : 'software-center-agent-token',
               }
        secret = 'consumer_secret=xxx&token=xxx&consumer_key=xxx&token_secret=xxx&name=s-c'
        keyring_names = gk.list_keyring_names_sync()
        # NOTE(review): Python 2 print statement -- this module is
        # Python 2 only.
        print keyring_names
        # The test keyring must not pre-exist (a leftover would mean a
        # previous run failed before tearDown).
        self.assertFalse(self.KEYRING_NAME in keyring_names)
        # "" creates the keyring with an empty password.
        gk.create_sync(self.KEYRING_NAME, "")
        keyring_names = gk.list_keyring_names_sync()
        self.assertTrue(self.KEYRING_NAME in keyring_names)
        res = gk.item_create_sync(self.KEYRING_NAME,
                                  gk.ITEM_GENERIC_SECRET,
                                  "Software Center Agent token",
                                  attr,
                                  secret,
                                  True) # update if exists
        self.assertTrue(res)
        # get the token from the keyring using the 'usage' field
        # in the attr
        search_attr = { 'usage' : 'software-center-agent-token',
                      }
        found = gk.find_items_sync(gk.ITEM_GENERIC_SECRET,
                                   search_attr)
        self.assertEqual(len(found), 1)
        for item in found:
            self.assertEqual(item.keyring, self.KEYRING_NAME)
            #print item.item_id, item.attributes
            self.assertEqual(item.secret, secret)
    def tearDown(self):
        # Best-effort cleanup: remove the test keyring if it was created.
        try:
            gk.delete_sync(self.KEYRING_NAME)
        except gk.NoSuchKeyringError:
            pass
if __name__ == "__main__":
    # Enable verbose logging when run as a script, then hand over to
    # unittest's CLI runner.
    import logging
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()
|
trueblue2704/AskMeAnything
|
refs/heads/master
|
lib/python2.7/site-packages/flask/ctx.py
|
776
|
# -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .module import blueprint_is_module
from .signals import appcontext_pushed, appcontext_popped
class _AppCtxGlobals(object):
"""A plain object."""
def get(self, name, default=None):
return self.__dict__.get(name, default)
def __contains__(self, item):
return item in self.__dict__
def __iter__(self):
return iter(self.__dict__)
def __repr__(self):
top = _app_ctx_stack.top
if top is not None:
return '<flask.g of %r>' % top.app.name
return object.__repr__(self)
def after_this_request(f):
    """Executes a function after this request.  This is useful to modify
    response objects.  The function is passed the response object and has
    to return the same or a new one.
    Example::
        @app.route('/')
        def index():
            @after_this_request
            def add_header(response):
                response.headers['X-Foo'] = 'Parachute'
                return response
            return 'Hello World!'
    This is more useful if a function other than the view function wants to
    modify a response.  For instance think of a decorator that wants to add
    some headers without converting the return value into a response object.
    .. versionadded:: 0.9
    """
    # NOTE(review): assumes an active request context; with nothing on the
    # stack, `.top` is None and this raises AttributeError.
    _request_ctx_stack.top._after_request_functions.append(f)
    # Return f unchanged so this works as a plain decorator.
    return f
def copy_current_request_context(f):
    """A helper function that decorates a function to retain the current
    request context.  This is useful when working with greenlets.  The moment
    the function is decorated a copy of the request context is created and
    then pushed when the function is called.
    Example::
        import gevent
        from flask import copy_current_request_context
        @app.route('/')
        def index():
            @copy_current_request_context
            def do_some_work():
                # do some work here, it can access flask.request like you
                # would otherwise in the view function.
                ...
            gevent.spawn(do_some_work)
            return 'Regular response'
    .. versionadded:: 0.10
    """
    top = _request_ctx_stack.top
    if top is None:
        raise RuntimeError('This decorator can only be used at local scopes '
            'when a request context is on the stack.  For instance within '
            'view functions.')
    # The copy is taken at decoration time (not call time), so the wrapped
    # function sees the request exactly as it was here.
    reqctx = top.copy()
    def wrapper(*args, **kwargs):
        with reqctx:
            return f(*args, **kwargs)
    return update_wrapper(wrapper, f)
def has_request_context():
    """If you have code that wants to test if a request context is there or
    not this function can be used.  For instance, you may want to take advantage
    of request information if the request object is available, but fail
    silently if it is unavailable.
    ::
        class User(db.Model):
            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and has_request_context():
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr
    Alternatively you can also just test any of the context bound objects
    (such as :class:`request` or :class:`g` for truthness)::
        class User(db.Model):
            def __init__(self, username, remote_addr=None):
                self.username = username
                if remote_addr is None and request:
                    remote_addr = request.remote_addr
                self.remote_addr = remote_addr
    .. versionadded:: 0.7
    """
    # A non-empty request-context stack means we are inside a request.
    return _request_ctx_stack.top is not None
def has_app_context():
    """Works like :func:`has_request_context` but for the application
    context.  You can also just do a boolean check on the
    :data:`current_app` object instead.
    .. versionadded:: 0.9
    """
    # A non-empty app-context stack means an application is bound.
    return _app_ctx_stack.top is not None
class AppContext(object):
    """The application context binds an application object implicitly
    to the current thread or greenlet, similar to how the
    :class:`RequestContext` binds request information.  The application
    context is also implicitly created if a request context is created
    but the application is not on top of the individual application
    context.
    """
    def __init__(self, app):
        self.app = app
        # URL adapter bound to no request -- usable for url_for() outside
        # a request.
        self.url_adapter = app.create_url_adapter(None)
        # The object exposed as ``flask.g`` while this context is active.
        self.g = app.app_ctx_globals_class()
        # Like request context, app contexts can be pushed multiple times
        # but there a basic "refcount" is enough to track them.
        self._refcnt = 0
    def push(self):
        """Binds the app context to the current context."""
        self._refcnt += 1
        _app_ctx_stack.push(self)
        appcontext_pushed.send(self.app)
    def pop(self, exc=None):
        """Pops the app context.

        Teardown functions only run when the outermost push is undone
        (refcount reaches zero); *exc* defaults to the exception
        currently being handled, if any.
        """
        self._refcnt -= 1
        if self._refcnt <= 0:
            if exc is None:
                exc = sys.exc_info()[1]
            self.app.do_teardown_appcontext(exc)
        rv = _app_ctx_stack.pop()
        # Guard against interleaved pushes/pops of different contexts.
        assert rv is self, 'Popped wrong app context.  (%r instead of %r)' \
            % (rv, self)
        appcontext_popped.send(self.app)
    def __enter__(self):
        # Context-manager support: ``with app.app_context(): ...``
        self.push()
        return self
    def __exit__(self, exc_type, exc_value, tb):
        self.pop(exc_value)
class RequestContext(object):
    """The request context contains all request relevant information.  It is
    created at the beginning of the request and pushed to the
    `_request_ctx_stack` and removed at the end of it.  It will create the
    URL adapter and request object for the WSGI environment provided.
    Do not attempt to use this class directly, instead use
    :meth:`~flask.Flask.test_request_context` and
    :meth:`~flask.Flask.request_context` to create this object.
    When the request context is popped, it will evaluate all the
    functions registered on the application for teardown execution
    (:meth:`~flask.Flask.teardown_request`).
    The request context is automatically popped at the end of the request
    for you.  In debug mode the request context is kept around if
    exceptions happen so that interactive debuggers have a chance to
    introspect the data.  With 0.4 this can also be forced for requests
    that did not fail and outside of `DEBUG` mode.  By setting
    ``'flask._preserve_context'`` to `True` on the WSGI environment the
    context will not pop itself at the end of the request.  This is used by
    the :meth:`~flask.Flask.test_client` for example to implement the
    deferred cleanup functionality.
    You might find this helpful for unittests where you need the
    information from the context local around for a little longer.  Make
    sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
    that situation, otherwise your unittests will leak memory.
    """
    def __init__(self, app, environ, request=None):
        self.app = app
        if request is None:
            request = app.request_class(environ)
        self.request = request
        self.url_adapter = app.create_url_adapter(self.request)
        self.flashes = None
        self.session = None
        # Request contexts can be pushed multiple times and interleaved with
        # other request contexts.  Now only if the last level is popped we
        # get rid of them.  Additionally if an application context is missing
        # one is created implicitly so for each level we add this information
        self._implicit_app_ctx_stack = []
        # indicator if the context was preserved.  Next time another context
        # is pushed the preserved context is popped.
        self.preserved = False
        # remembers the exception for pop if there is one in case the context
        # preservation kicks in.
        self._preserved_exc = None
        # Functions that should be executed after the request on the response
        # object.  These will be called before the regular "after_request"
        # functions.
        self._after_request_functions = []
        # Route matching happens eagerly at construction time.
        self.match_request()
        # XXX: Support for deprecated functionality.  This is going away with
        # Flask 1.0
        blueprint = self.request.blueprint
        if blueprint is not None:
            # better safe than sorry, we don't want to break code that
            # already worked
            bp = app.blueprints.get(blueprint)
            if bp is not None and blueprint_is_module(bp):
                self.request._is_old_module = True
    # ``g`` proxies the *application* context's globals object so that
    # ``ctx.g`` and ``flask.g`` stay in sync.
    def _get_g(self):
        return _app_ctx_stack.top.g
    def _set_g(self, value):
        _app_ctx_stack.top.g = value
    g = property(_get_g, _set_g)
    del _get_g, _set_g
    def copy(self):
        """Creates a copy of this request context with the same request object.
        This can be used to move a request context to a different greenlet.
        Because the actual request object is the same this cannot be used to
        move a request context to a different thread unless access to the
        request object is locked.
        .. versionadded:: 0.10
        """
        return self.__class__(self.app,
            environ=self.request.environ,
            request=self.request
        )
    def match_request(self):
        """Can be overridden by a subclass to hook into the matching
        of the request.
        """
        try:
            url_rule, self.request.view_args = \
                self.url_adapter.match(return_rule=True)
            self.request.url_rule = url_rule
        except HTTPException as e:
            # Routing failures (404, 405, redirects) are stored on the
            # request and raised later by the dispatcher.
            self.request.routing_exception = e
    def push(self):
        """Binds the request context to the current context."""
        # If an exception occurs in debug mode or if context preservation is
        # activated under exception situations exactly one context stays
        # on the stack.  The rationale is that you want to access that
        # information under debug situations.  However if someone forgets to
        # pop that context again we want to make sure that on the next push
        # it's invalidated, otherwise we run at risk that something leaks
        # memory.  This is usually only a problem in testsuite since this
        # functionality is not active in production environments.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop(top._preserved_exc)
        # Before we push the request context we have to ensure that there
        # is an application context.
        app_ctx = _app_ctx_stack.top
        if app_ctx is None or app_ctx.app != self.app:
            # Remember the implicitly-created app context so pop() knows
            # to pop it again; None marks "app context was already there".
            app_ctx = self.app.app_context()
            app_ctx.push()
            self._implicit_app_ctx_stack.append(app_ctx)
        else:
            self._implicit_app_ctx_stack.append(None)
        _request_ctx_stack.push(self)
        # Open the session at the moment that the request context is
        # available.  This allows a custom open_session method to use the
        # request context (e.g. code that access database information
        # stored on `g` instead of the appcontext).
        self.session = self.app.open_session(self.request)
        if self.session is None:
            self.session = self.app.make_null_session()
    def pop(self, exc=None):
        """Pops the request context and unbinds it by doing that.  This will
        also trigger the execution of functions registered by the
        :meth:`~flask.Flask.teardown_request` decorator.
        .. versionchanged:: 0.9
           Added the `exc` argument.
        """
        app_ctx = self._implicit_app_ctx_stack.pop()
        clear_request = False
        # Teardown and request cleanup only happen on the outermost pop.
        if not self._implicit_app_ctx_stack:
            self.preserved = False
            self._preserved_exc = None
            if exc is None:
                exc = sys.exc_info()[1]
            self.app.do_teardown_request(exc)
            # If this interpreter supports clearing the exception information
            # we do that now.  This will only go into effect on Python 2.x,
            # on 3.x it disappears automatically at the end of the exception
            # stack.
            if hasattr(sys, 'exc_clear'):
                sys.exc_clear()
            request_close = getattr(self.request, 'close', None)
            if request_close is not None:
                request_close()
            clear_request = True
        rv = _request_ctx_stack.pop()
        assert rv is self, 'Popped wrong request context.  (%r instead of %r)' \
            % (rv, self)
        # get rid of circular dependencies at the end of the request
        # so that we don't require the GC to be active.
        if clear_request:
            rv.request.environ['werkzeug.request'] = None
        # Get rid of the app as well if necessary.
        if app_ctx is not None:
            app_ctx.pop(exc)
    def auto_pop(self, exc):
        # Either keep this context alive (debug / test-client preservation)
        # or pop it for real.
        if self.request.environ.get('flask._preserve_context') or \
           (exc is not None and self.app.preserve_context_on_exception):
            self.preserved = True
            self._preserved_exc = exc
        else:
            self.pop(exc)
    def __enter__(self):
        self.push()
        return self
    def __exit__(self, exc_type, exc_value, tb):
        # do not pop the request stack if we are in debug mode and an
        # exception happened.  This will allow the debugger to still
        # access the request object in the interactive shell.  Furthermore
        # the context can be force kept alive for the test client.
        # See flask.testing for how this works.
        self.auto_pop(exc_value)
    def __repr__(self):
        return '<%s \'%s\' [%s] of %s>' % (
            self.__class__.__name__,
            self.request.url,
            self.request.method,
            self.app.name,
        )
|
btskinner/grm
|
refs/heads/master
|
grm/gr.py
|
1
|
# gr.py
# -*- coding: utf-8 -*-
from .utils import *
from .api import *
from .loc import *
import json
import os
class GR:
'''
Class for GitRoom initialization/reading functions
'''
def __init__(self, rgo = None, lgo = None):
self.rgo = rgo
self.lgo = lgo
def __str__(self):
text = '\nGitHub ID: {}\n'.format(self.rgo.admin.ghid)
text += 'GitHub token file: {}\n'.format(self.rgo.admin.token_file)
text += 'Protocol: {}\n'.format(self.rgo.admin.protocol)
text += 'Organization name: {}\n'.format(self.rgo.org.name)
text += 'Roster file: {}\n'.format(self.rgo.roster.path)
text += 'Local master repo.: {}\n'.format(self.lgo.master_repo)
text += 'Local student repo. directory: {}\n'.format(self.lgo.student_repo_dir)
return text
def _updateGitRoom(self):
print('\nGetting information from organization remote...')
self.rgo.getMembers()
self.rgo.getTeams()
self.rgo.getRepos()
return 0
def _storeGitRoomInfo(self):
while True:
prompt = 'Please give directory for saving: '
odir = os.path.expanduser(input(prompt).strip())
if not os.path.isdir(odir):
errorMessage('Please input a directory path.')
continue
info = {'github_login': self.rgo.admin.ghid,
'github_token_file': self.rgo.admin.token_file,
'github_protocol': self.rgo.admin.protocol,
'organization_name': self.rgo.org.name,
'roster_file': self.rgo.roster.path,
'master_repo': self.lgo.master_repo,
'student_repo_dir': self.lgo.student_repo_dir}
with open(odir + '/' + self.rgo.org.name + '_grm.json', 'w') as f:
json.dump(info, f, indent = 4)
break
def _initGitRoom(self, github_login = None, token_file = None,
orgname = None, roster_file = None, master_repo = None,
student_repo_dir = None, github_protocol = None):
# remote git inits
self.rgo = RemoteGit()
self.rgo.setAPICreds(ghid = github_login,
tokenfile = token_file,
protocol = github_protocol)
self.rgo.setOrg(name = orgname)
self.rgo.buildRoster(rosterfile = roster_file)
# local git inits
self.lgo = LocalGit()
self.lgo.set_master_repo(master_repo = master_repo)
self.lgo.set_student_repo_dir(student_repo_dir = student_repo_dir)
# option to store inits if any were missing
if (not github_login or not token_file or not orgname
or not roster_file or not master_repo or not student_repo_dir
or not github_protocol):
print('*' * 50)
print('\nThis is what you have entered:\n')
print(self)
print('*' * 50)
prompt = 'Is this correct?'
choice = pickOpt(prompt, ['Yes','No'])
if choice == 0:
prompt = 'Do you want to store GitRoom information in JSON file?'
choice = pickOpt(prompt, ['Yes','No'])
if choice == 0:
self._storeGitRoomInfo()
else:
prompt = 'Would you like to try again?'
choice = pickOpt(prompt, ['Yes','No'])
if choice == 0:
self._initGitRoom(github_login = None, token_file = None,
orgname = None, roster_file = None,
master_repo = None, student_repo_dir = None,
github_protocol = None)
else:
progExit()
try:
self.rgo.getMembers()
except requests.exceptions.ConnectionError:
errorMessage('Not able to connect to remote.')
return 1
return self._updateGitRoom()
def _readGitRoomInfo(self, init_file_path):
with open(init_file_path, 'r') as f:
info = json.load(f)
req_keys = ['github_login', 'github_token_file',
'organization_name', 'roster_file',
'master_repo', 'student_repo_dir',
'github_protocol']
for k in req_keys:
try:
info[k]
except KeyError:
info[k] = None
connect_code = self._initGitRoom(github_login = info['github_login'],
token_file = info['github_token_file'],
orgname = info['organization_name'],
roster_file = info['roster_file'],
master_repo = info['master_repo'],
student_repo_dir = info['student_repo_dir'],
github_protocol = info['github_protocol'])
return connect_code
def getGitRoomObjs(self, connect_code):
if self.rgo and self.lgo:
if connect_code != 0:
try:
self.rgo.getMembers()
except requests.exceptions.ConnectionError:
return 1
return self._updateGitRoom()
else:
return 0
else:
while True:
prompt = 'How would you like to enter GitRoom information?'
opts = ['Manually', 'From JSON file', '<< Exit Program >>']
choice = pickOpt(prompt, opts)
if choice == len(opts) - 1:
progExit()
elif choice == 0:
self._initGitRoom()
else:
prompt = 'Please give path to GitRoom JSON file: '
grjson = os.path.expanduser(input(prompt).strip())
if os.path.isfile(grjson):
connect_code = self._readGitRoomInfo(grjson)
return connect_code
else:
errorMessage('Not a file!')
continue
# ----------------------------------
# GitRoom main menu choices
# ----------------------------------
def buildGR(self, from_scratch = False):
to_add = []
if not from_scratch:
# compare roster to remote
current = []
for k,v in self.rgo.org.members.items():
current.append(v.ghid)
for k,v in self.rgo.roster.students.items():
if v.ghid not in current:
to_add.append(k)
else:
for k,v in self.rgo.roster.students.items():
to_add.append(k)
if len(to_add) == 0:
print('All local students on remote')
return
m = 'Students to be added to {}'.format(self.rgo.org.name)
promptMessage(m, char = '')
for name in to_add:
fn = self.rgo.roster.students[name].first_name
ln = self.rgo.roster.students[name].last_name
print('{} {}'.format(fn, ln))
prompt = 'Should repos be private?'
choice = pickOpt(prompt, ['Yes','No'])
if choice == 0:
priv_bool = True
else:
priv_bool = False
# STEPS
# 1. [Remote] Add student as GitRoom member
# 2. [Remote] Init remote repo
# 3. [Remote] Add team for student's repo
# 4. [Remote] Add student to team
# 5. [Remote] Add repo to team
# 6. [Local] Create student's local repo
# 7. [Local] Copy files from master to student's repo
# 8. [Local] Add, commit, push local student repo to remote
for name in to_add:
ln = self.rgo.roster.students[name].last_name
gh = self.rgo.roster.students[name].ghid
rn = 'student_{}'.format(ln.lower())
rp = os.path.join(self.lgo.student_repo_dir, rn)
# 1
resp = self.rgo.addMember(member = name)
if round(resp.status_code, -2) == 400:
print(resp.json()['message'])
elif resp.json()['state'] == 'active':
print('{} is already an active member.'.format(name))
elif resp.json()['state'] == 'pending':
print('{} has a pending membership.'.format(name))
# 2
resp = self.rgo.createRemoteRepo(repo_name = rn, private = priv_bool)
if resp.status_code == 422:
text = '\nEither:\n\n'
text += '(1) Remote already exists\n'
text += '(2) Your organization plan doesn\'t allow for private repos \n'
text += ' and you must change the setting to public \n'
text += ' or upgrade your plan through GitHub.'
errorMessage(text)
elif round(resp.status_code, -2) == 200:
print('Successfully created remote {}'.format(rn))
# 3
resp = self.rgo.createTeam(team_name = rn)
if resp.status_code == 422:
print('Team {} already exists!\n'.format(rn))
elif round(resp.status_code, -2) == 200:
print('Successfully created team: {}'.format(rn))
resp = resp.json()
team = Team(team_id = resp['id'], name = resp['name'])
self.rgo.org.teams[team.name] = team
# 4
resp = self.rgo.addMemberToTeam(team_name = rn, member = name)
if round(resp.status_code, -2) == 200:
state = resp.json()['state']
print('{}\'s membership on team {} is now {}.'.format(name,
rn,
state))
mem = Member(ghid = gh)
members = {}
members[gh] = mem
# 5
resp = self.rgo.addRepoToTeam(team_name = rn, repo_name = rn)
if round(resp.status_code, -2) == 200:
print('{} now has access to repo {}'.format(rn, rn))
# 6
self.lgo.createLocalRepo(student_repo = rn)
# 7
self.lgo.masterToStudent(student_repo = rn)
# 8
remote = '{}{}/{}.git'.format(self.rgo.admin.proturl, self.rgo.org.name, rn)
self.lgo.gitInit(repo = rp)
self.lgo.gitRemoteAdd(repo = rp, remote = remote)
self.lgo.gitAdd(repo = rp)
self.lgo.gitCommit(repo = rp, message = 'Init course repo')
self.lgo.gitPush(repo = rp)
def addGRAdmin(self):
prompt = 'Please give new administrator\'s GitHub id: '
ghid = input(prompt).strip()
resp = self.rgo.addAdmin(github_id = ghid)
if round(resp.status_code, -2) == 400:
errorMessage(resp.json()['message'])
elif round(resp.status_code, -2) == 200:
print('Successfully added {} to {} as admin.'.format(ghid,
self.org.name))
def cloneGR(self):
self.rgo.getRepos()
while True:
prompt = 'Please give path to local clone directory: '
cdir = input(prompt).strip()
try:
cdir = os.path.expanduser(cdir)
except AttributeError:
errorMessage('Please give a proper path.')
continue
cdir = cdir.rstrip('/')
break
for k,v in self.rgo.org.repos.items():
repo = os.path.join(cdir, k)
remote = '{}{}/{}.git'.format(self.rgo.admin.proturl, self.rgo.org.name, k)
self.lgo.gitClone(repo = repo, remote = remote)
def updateGR(self):
self.rgo.getRepos()
while True:
prompt = 'Please enter commit message: '
comment = input(prompt).strip()
prompt = 'Is this okay or do you want to try again?\n\n'
prompt += 'COMMENT: {}'.format(comment)
choice = pickOpt(prompt, ['Good', 'Try again'])
if choice == 0:
break
for k,v in self.rgo.org.repos.items():
repo = k
repo_path = os.path.join(self.lgo.student_repo_dir, repo)
if not os.path.isdir(repo_path):
continue
self.lgo.gitPull(repo = repo_path)
self.lgo.masterToStudent(student_repo = repo)
self.lgo.gitAdd(repo = repo_path)
self.lgo.gitCommit(repo = repo_path, message = comment)
self.lgo.gitPush(repo = repo_path)
def pullGR(self):
    """Pull one locally present student repository, or all of them."""
    self.rgo.getRepos()
    prompt = 'Please select the student repository you wish to pull'
    # Offer only repositories that exist on disk, plus an "all" option first.
    opts = ['-- All student repositories --']
    for repo_name in self.rgo.org.repos:
        if os.path.isdir(os.path.join(self.lgo.student_repo_dir, repo_name)):
            opts.append(repo_name)
    choice = pickOpt(prompt, opts)
    if choice != 0:
        # A single repository was selected.
        self.lgo.gitPull(
            repo = os.path.join(self.lgo.student_repo_dir, opts[choice]))
        return
    for repo_name in self.rgo.org.repos:
        repo_path = os.path.join(self.lgo.student_repo_dir, repo_name)
        if os.path.isdir(repo_path):
            self.lgo.gitPull(repo = repo_path)
def pushGR(self):
    """Commit and push one locally present student repository, or all of them."""
    self.rgo.getRepos()
    prompt = 'Please select the student repository you wish to push'
    opts = ['-- All student repositories --']
    for repo_name in self.rgo.org.repos:
        if os.path.isdir(os.path.join(self.lgo.student_repo_dir, repo_name)):
            opts.append(repo_name)
    choice = pickOpt(prompt, opts)
    # Loop until the user confirms the commit message.
    while True:
        comment = input('Please enter commit message: ').strip()
        confirm = 'Is this okay or do you want to try again?\n\n'
        confirm += 'COMMENT: {}'.format(comment)
        if pickOpt(confirm, ['Good', 'Try again']) == 0:
            break
    def sync(repo_path):
        # Pull before committing so the push cannot be rejected as
        # non-fast-forward.
        self.lgo.gitPull(repo = repo_path)
        self.lgo.gitAdd(repo = repo_path)
        self.lgo.gitCommit(repo = repo_path, message = comment)
        self.lgo.gitPush(repo = repo_path)
    if choice == 0:
        for repo_name in self.rgo.org.repos:
            repo_path = os.path.join(self.lgo.student_repo_dir, repo_name)
            if os.path.isdir(repo_path):
                sync(repo_path)
    else:
        sync(os.path.join(self.lgo.student_repo_dir, opts[choice]))
|
indictranstech/phrerp
|
refs/heads/develop
|
erpnext/stock/report/stock_projected_qty/stock_projected_qty.py
|
24
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
    """Report entry point: return (columns, rows) for Stock Projected Qty.

    ``filters`` may restrict by item_code/brand (item side) and
    company/warehouse (warehouse side); each optional filter becomes a
    parameterized WHERE fragment via the helpers below.
    """
    columns = get_columns()
    # Join Bin rows against filtered warehouse and item sub-selects.  The
    # filter values themselves are passed to frappe.db.sql as bind
    # parameters (%(item_code)s etc.), so no user input is interpolated
    # into the SQL text — only the pre-built WHERE fragments are.
    data = frappe.db.sql("""select
        item.name, item.item_name, description, item_group, brand, warehouse, item.stock_uom,
        actual_qty, planned_qty, indented_qty, ordered_qty, reserved_qty,
        projected_qty, item.re_order_level, item.re_order_qty,
        (item.re_order_level - projected_qty) as shortage_qty
        from `tabBin` bin,
        (select name, company from tabWarehouse
            {warehouse_conditions}) wh,
        (select name, item_name, description, stock_uom, item_group,
            brand, re_order_level, re_order_qty
            from `tabItem` {item_conditions}) item
        where item_code = item.name and warehouse = wh.name
        order by item.name, wh.name"""\
        .format(item_conditions=get_item_conditions(filters),
            warehouse_conditions=get_warehouse_conditions(filters)), filters)
    return columns, data
def get_columns():
    """Return the report column definitions as "Label:Fieldtype:Width" strings.

    Labels are passed through frappe's translation function ``_``; an empty
    fieldtype (plain data column) yields the "Label::Width" form.
    """
    spec = [
        ("Item Code", "Link/Item", 140),
        ("Item Name", "", 100),
        ("Description", "", 200),
        ("Item Group", "Link/Item Group", 100),
        ("Brand", "Link/Brand", 100),
        ("Warehouse", "Link/Warehouse", 120),
        ("UOM", "Link/UOM", 100),
        ("Actual Qty", "Float", 100),
        ("Planned Qty", "Float", 100),
        ("Requested Qty", "Float", 110),
        ("Ordered Qty", "Float", 100),
        ("Reserved Qty", "Float", 100),
        ("Projected Qty", "Float", 100),
        ("Reorder Level", "Float", 100),
        ("Reorder Qty", "Float", 100),
        ("Shortage Qty", "Float", 100),
    ]
    return ["{0}:{1}:{2}".format(_(label), fieldtype, width)
            for label, fieldtype, width in spec]
def get_item_conditions(filters):
    """Build the WHERE clause for the item sub-query.

    Only parameter placeholders (%(item_code)s, %(brand)s) are emitted;
    the actual values are bound later by frappe.db.sql.  Returns "" when
    no item-side filter is set.
    """
    fragments = [cond for key, cond in (
        ("item_code", "name=%(item_code)s"),
        ("brand", "brand=%(brand)s"),
    ) if filters.get(key)]
    if not fragments:
        return ""
    return "where {}".format(" and ".join(fragments))
def get_warehouse_conditions(filters):
    """Build the WHERE clause for the warehouse sub-query.

    Mirrors get_item_conditions: emits only bind-parameter placeholders
    and returns "" when neither company nor warehouse is filtered.
    """
    fragments = [cond for key, cond in (
        ("company", "company=%(company)s"),
        ("warehouse", "name=%(warehouse)s"),
    ) if filters.get(key)]
    if not fragments:
        return ""
    return "where {}".format(" and ".join(fragments))
|
JasonKessler/scattertext
|
refs/heads/master
|
scattertext/test/test_indexStoreFromDict.py
|
1
|
from unittest import TestCase
from scattertext.indexstore.IndexStoreFromDict import IndexStoreFromDict
class TestIndexStoreFromDict(TestCase):
    def test_index_store_from_dict(self):
        """A store built from a vocab dict reproduces every term's index."""
        # Ten alphabetically ordered terms mapped to consecutive indices.
        terms = ['baloney', 'by', 'first', 'has', 'it',
                 'meyer', 'my', 'name', 'oscar', 'second']
        vocab = {term: position for position, term in enumerate(terms)}
        idxstore = IndexStoreFromDict.build(vocab)
        # Every term resolves to its original index...
        for term, position in vocab.items():
            self.assertEqual(idxstore.getidx(term), position)
        # ...and the store reports exactly as many entries as the vocab.
        self.assertEqual(idxstore.getnumvals(), len(vocab))
|
myselfHimanshu/Udacity-DataML
|
refs/heads/master
|
Intro-To-Data-Science/Lesson3/PS3_6.py
|
2
|
import numpy as np
import scipy
import matplotlib.pyplot as plt
def plot_residuals(turnstile_weather, predictions):
    '''
    Plot a histogram of the model residuals — the difference between the
    original hourly entry counts and the predicted values.

    A roughly symmetric, bell-shaped residual histogram suggests a
    reasonable fit; see
    http://www.itl.nist.gov/div898/handbook/pri/section2/pri24.htm
    for background on residual analysis.
    '''
    residuals = turnstile_weather['ENTRIESn_hourly'] - predictions
    plt.figure()
    residuals.hist(bins=50)
    return plt
|
kennedyshead/home-assistant
|
refs/heads/dev
|
tests/components/smappee/test_init.py
|
8
|
"""Tests for the Smappee component init module."""
from unittest.mock import patch
from homeassistant.components.smappee.const import DOMAIN
from homeassistant.config_entries import SOURCE_ZEROCONF
from tests.common import MockConfigEntry
async def test_unload_config_entry(hass):
    """Test unload config entry flow."""
    # Patch out every Smappee local-API call so no real device is contacted:
    # logon succeeds, the advanced config reports an mDNS host name, there
    # are no command/control modules, and one instantaneous reading exists.
    with patch("pysmappee.api.SmappeeLocalApi.logon", return_value={}), patch(
        "pysmappee.api.SmappeeLocalApi.load_advanced_config",
        return_value=[{"key": "mdnsHostName", "value": "Smappee1006000212"}],
    ), patch(
        "pysmappee.api.SmappeeLocalApi.load_command_control_config", return_value=[]
    ), patch(
        "pysmappee.api.SmappeeLocalApi.load_instantaneous",
        return_value=[{"key": "phase0ActivePower", "value": 0}],
    ):
        # Simulate a zeroconf-discovered local device entry.
        config_entry = MockConfigEntry(
            domain=DOMAIN,
            data={"host": "1.2.3.4"},
            unique_id="smappee1006000212",
            source=SOURCE_ZEROCONF,
        )
        config_entry.add_to_hass(hass)
        assert len(hass.config_entries.async_entries(DOMAIN)) == 1
        entry = hass.config_entries.async_entries(DOMAIN)[0]
        await hass.config_entries.async_unload(entry.entry_id)
        await hass.async_block_till_done()
        # After unload the integration must have removed its domain data.
        assert not hass.data.get(DOMAIN)
|
JackKelly/neuralnilm_prototype
|
refs/heads/master
|
scripts/e544.py
|
2
|
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource)
from neuralnilm.source import (standardise, discretize, fdiff, power_and_fdiff,
RandomSegments, RandomSegmentsInMemory,
SameLocation)
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import (MixtureDensityLayer, DeConv1DLayer,
SharedWeightsDenseLayer)
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter, Plotter, RectangularOutputPlotter, StartEndMeanPlotter
from neuralnilm.updates import clipped_nesterov_momentum
from neuralnilm.disaggregate import disaggregate
from neuralnilm.rectangulariser import rectangularise
from lasagne.nonlinearities import sigmoid, rectify, tanh, identity, softmax
from lasagne.objectives import squared_error, binary_crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import (DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer,
DimshuffleLayer, DropoutLayer, ConcatLayer, PadLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
import gc
"""
447: first attempt at disaggregation
"""
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 25000
N_SEQ_PER_BATCH = 64
MAX_TARGET_POWER = 300
# Configuration for the UK-DALE training data source.  The first appliance
# entry is the target; the others act as distractors in the aggregate signal.
source_dict = dict(
    filename='/data/dk3810/ukdale.h5',
    appliances=[
        ['fridge freezer', 'fridge', 'freezer'],
        ['washer dryer', 'washing machine'],
        'kettle',
        'HTPC',
        'dish washer'
    ],
    # Per-appliance settings below are index-aligned with `appliances`.
    max_appliance_powers=[MAX_TARGET_POWER, 2400, 2400, 200, 2500],
    on_power_thresholds=[5] * 5,
    # Durations are presumably seconds — TODO confirm.
    min_on_durations=[60, 1800, 30, 60, 1800],
    min_off_durations=[12, 600, 1, 12, 1800],
    # date finished installing meters in house 1 = 2013-04-12
    window=("2013-04-12", "2014-12-10"),
    seq_length=512,
    output_one_appliance=True,  # network targets only the first appliance
    train_buildings=[1],
    validation_buildings=[1],
    n_seq_per_batch=N_SEQ_PER_BATCH,
    standardise_input=True,
    independently_center_inputs=False,
    skip_probability=0.75,
    # skip_probability_for_first_appliance=0.5,
    target_is_start_and_end_and_mean=True,
    one_target_per_seq=False
)
# Shared network/training configuration; exp_a() copies and extends this.
net_dict = dict(
    save_plot_interval=SAVE_PLOT_INTERVAL,
    # Plain mean squared error on the network output.
    loss_function=lambda x, t: squared_error(x, t).mean(),
    updates_func=nesterov_momentum,
    learning_rate=1e-3,
    # Step the learning rate down late in training (keyed by iteration count).
    learning_rate_changes_by_iteration={
        500000: 1e-4,
        600000: 1e-5
    },
    do_save_activations=True,
    auto_reshape=False,
    plotter=StartEndMeanPlotter(
        n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER)
)
def exp_a(name):
    """Build the network for experiment 'a': two 1-D convolutions over time
    followed by a stack of dense layers shrinking to the target length.

    Also (re)creates the module-level `source` consumed by the experiment
    runner.  `name` is the full experiment name, used for logging.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        logger=logging.getLogger(name)
    ))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    NUM_FILTERS = 16
    # Length of the network target after the source's pre-processing
    # (start/end/mean representation).
    target_seq_length = source.output_shape_after_processing()[1]
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            # Pad the time axis — presumably to offset the shortening caused
            # by the two 'valid' convolutions below; TODO confirm exact
            # PadLayer semantics (per-side vs total width).
            'type': PadLayer,
            'width': 4
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1),  # back to (batch, time, features)
            'label': 'dimshuffle3'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 8,
            'nonlinearity': rectify,
            'label': 'dense0'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 6,
            'nonlinearity': rectify,
            'label': 'dense1'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 4,
            'nonlinearity': rectify,
            'label': 'dense2'
        },
        {
            'type': DenseLayer,
            'num_units': 512,
            'nonlinearity': rectify
        },
        {
            # Linear output layer sized to the processed target sequence.
            'type': DenseLayer,
            'num_units': target_seq_length,
            'nonlinearity': None
        }
    ]
    net = Net(**net_dict_copy)
    return net
def main():
    """Run each configured experiment in turn until done or interrupted.

    KeyboardInterrupt stops the whole run; any other exception is logged
    with its traceback and the next experiment proceeds.
    """
    EXPERIMENTS = list('a')
    for experiment in EXPERIMENTS:
        full_exp_name = NAME + experiment
        func_call = init_experiment(PATH, experiment, full_exp_name)
        logger = logging.getLogger(full_exp_name)
        try:
            # NOTE(review): init_experiment presumably returns a call string
            # such as "exp_a('e544a')" which is eval'd here — confirm.  eval
            # on a constructed string is acceptable only because the input is
            # fully internal to this script; never feed it external data.
            net = eval(func_call)
            run_experiment(net, epochs=None)
        except KeyboardInterrupt:
            logger.info("KeyboardInterrupt")
            break
        except Exception:
            # FIX: dropped the unused "as exception" binding; the traceback
            # is already captured by logger.exception().
            logger.exception("Exception")
            # raise
        finally:
            # Flush/close logging handlers after each experiment.
            logging.shutdown()
if __name__ == "__main__":
main()
"""
Emacs variables
Local Variables:
compile-command: "cp /home/jack/workspace/python/neuralnilm/scripts/e544.py /mnt/sshfs/imperial/workspace/python/neuralnilm/scripts/"
End:
"""
|
salguarnieri/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyTypeCheckerInspection/UnionReturnTypes.py
|
79
|
def test(c):
def f1(c):
if c < 0:
return []
elif c > 0:
return 'foo'
else:
return None
def f2(x):
"""
:type x: str
"""
pass
def f3(x):
"""
:type x: int
"""
x1 = f1(c)
f2(x1) # Weaker union types
f3(<warning descr="Expected type 'int', got 'Union[list, str, None]' instead">x1</warning>)
f2(<warning descr="Expected type 'str', got 'int' instead">x1.count('')</warning>)
f3(x1.count(''))
f2(x1.strip())
f3(<warning descr="Expected type 'int', got 'str' instead">x1.strip()</warning>)
|
40223202/test
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/pydoc.py
|
637
|
#!/usr/bin/env python3
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on the given port on the
local machine. Port number 0 can be used to get an arbitrary unused port.
Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/X.Y/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__all__ = ['help']
__author__ = "Ka-Ping Yee <ping@lfw.org>"
__date__ = "26 February 2001"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import builtins
import imp
import importlib.machinery
#brython fix me
import inspect
import io
import os
#brython fix me
#import pkgutil
import platform
import re
import sys
import time
import tokenize
import warnings
from collections import deque
from reprlib import Repr
#fix me brython
#from traceback import extract_tb, format_exception_only
# --------------------------------------------------------- common routines
def pathdirs():
    """Convert sys.path into a list of absolute, existing, unique paths."""
    unique_dirs = []
    seen_normalized = []
    for entry in sys.path:
        absdir = os.path.abspath(entry or '.')
        normalized = os.path.normcase(absdir)
        # Deduplicate case-insensitively (normcase) and drop non-directories.
        if normalized not in seen_normalized and os.path.isdir(absdir):
            unique_dirs.append(absdir)
            seen_normalized.append(normalized)
    return unique_dirs
def getdoc(object):
    """Get the doc string or comments for an object."""
    raw = inspect.getdoc(object) or inspect.getcomments(object)
    if not raw:
        return ''
    # Trim trailing whitespace and any leading blank line; fall back to ''
    # when nothing but whitespace remains.
    return re.sub('^ *\n', '', raw.rstrip()) or ''
def splitdoc(doc):
    """Split a doc string into a synopsis line (if any) and the rest."""
    lines = doc.strip().split('\n')
    if len(lines) == 1:
        # Single line: the whole thing is the synopsis.
        return lines[0], ''
    if len(lines) >= 2 and not lines[1].rstrip():
        # First line followed by a blank line: synopsis + body.
        return lines[0], '\n'.join(lines[2:])
    # No blank separator: there is no synopsis, only a body.
    return '', '\n'.join(lines)
def classname(object, modname):
    """Get a class name and qualify it with a module name if necessary."""
    qualified = object.__name__
    # Prefix with the defining module unless it is the current one.
    if object.__module__ != modname:
        qualified = object.__module__ + '.' + qualified
    return qualified
def isdata(object):
    """Check if an object is of a type that probably means it's data."""
    # Anything that is not a module, class, routine, frame, traceback or
    # code object is treated as plain data.
    non_data_checks = (inspect.ismodule, inspect.isclass, inspect.isroutine,
                       inspect.isframe, inspect.istraceback, inspect.iscode)
    return not any(check(object) for check in non_data_checks)
def replace(text, *pairs):
    """Do a series of global replacements on a string."""
    # Consume (old, new) pairs left to right; split/join is equivalent to
    # str.replace for each pair.
    for i in range(0, len(pairs), 2):
        old, new = pairs[i], pairs[i + 1]
        text = new.join(text.split(old))
    return text
def cram(text, maxlen):
    """Omit part of a string if needed to make it fit in a maximum length."""
    if len(text) <= maxlen:
        return text
    # Keep roughly half of the budget (minus the 3-char ellipsis) per side.
    head = max(0, (maxlen - 3) // 2)
    tail = max(0, maxlen - 3 - head)
    return text[:head] + '...' + text[len(text) - tail:]
# Matches the trailing " at 0x..." memory address that CPython reprs append.
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
    """Remove the hexadecimal id from a Python object representation."""
    # The behaviour of %p is implementation-dependent in terms of case.
    #fix me brython
    #return _re_stripid.sub(r'\1', text)
    # NOTE: under Brython the regex substitution is disabled, so the text is
    # currently returned unchanged.
    return text
def _is_some_method(obj):
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
inspect.isbuiltin(obj) or
inspect.ismethoddescriptor(obj))
def allmethods(cl):
    """Collect every method reachable on *cl*, including inherited ones."""
    def _looks_like_method(obj):
        # Same test as the module-level _is_some_method helper, inlined so
        # this function is self-contained.
        return (inspect.isfunction(obj) or inspect.ismethod(obj) or
                inspect.isbuiltin(obj) or inspect.ismethoddescriptor(obj))
    collected = {}
    for attr_name, _value in inspect.getmembers(cl, _looks_like_method):
        collected[attr_name] = 1
    for base in cl.__bases__:
        collected.update(allmethods(base))  # all your base are belong to us
    # Re-fetch each attribute from cl so overriding definitions win.
    return {attr_name: getattr(cl, attr_name) for attr_name in collected}
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
    """Decide whether to show documentation on a variable."""
    # Certain special names are redundant or internal.
    hidden_specials = {'__author__', '__builtins__', '__cached__', '__credits__',
                       '__date__', '__doc__', '__file__', '__initializing__',
                       '__loader__', '__module__', '__name__', '__package__',
                       '__path__', '__qualname__', '__slots__', '__version__'}
    if name in hidden_specials:
        return 0
    # Private names are hidden, but special (dunder) names are displayed.
    if name.startswith('__') and name.endswith('__'):
        return 1
    # Namedtuples have public fields and methods with a single leading underscore.
    if name.startswith('_') and hasattr(obj, '_fields'):
        return True
    if all is not None:
        # Only document that which the programmer exported in __all__.
        return name in all
    return not name.startswith('_')
def classify_class_attrs(object):
    """Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
    fixed = []
    for attr_name, kind, cls, value in inspect.classify_class_attrs(object):
        if inspect.isdatadescriptor(value):
            # inspect reports more specific kinds (e.g. 'property'); pydoc
            # groups every data descriptor under one kind.
            kind = 'data descriptor'
        fixed.append((attr_name, kind, cls, value))
    return fixed
# ----------------------------------------------------- module manipulation
def ispackage(path):
    """Guess whether a path refers to a package directory."""
    if not os.path.isdir(path):
        return False
    # A package directory must carry an __init__ in some importable form.
    return any(os.path.isfile(os.path.join(path, '__init__' + ext))
               for ext in ('.py', '.pyc', '.pyo'))
def source_synopsis(file):
    """Return the first line of the module docstring in *file*, or None."""
    # Skip leading comment lines and blank lines.
    line = file.readline()
    while line[:1] == '#' or not line.strip():
        line = file.readline()
        if not line:
            break
    line = line.strip()
    # Accept r"""...""" as well as """..."""; anything else means there is
    # no recognisable docstring.
    if line[:4] == 'r"""':
        line = line[1:]
    if line[:3] != '"""':
        return None
    line = line[3:]
    if line[-1:] == '\\':
        line = line[:-1]
    # The synopsis may start on a later line of the docstring.
    while not line.strip():
        line = file.readline()
        if not line:
            break
    return line.split('"""')[0].strip()
def synopsis(filename, cache={}):
    """Get the one-line summary out of a module file."""
    # NOTE: the mutable default `cache` is deliberate — it is a persistent
    # per-process cache mapping filename -> (mtime, synopsis).
    mtime = os.stat(filename).st_mtime
    lastupdate, result = cache.get(filename, (None, None))
    # Recompute only when the file changed since the cached entry.
    if lastupdate is None or lastupdate < mtime:
        try:
            file = tokenize.open(filename)
        except IOError:
            # module can't be opened, so skip it
            return None
        binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
        binary_suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
        if any(filename.endswith(x) for x in binary_suffixes):
            # binary modules have to be imported
            file.close()
            if any(filename.endswith(x) for x in
                   importlib.machinery.BYTECODE_SUFFIXES):
                loader = importlib.machinery.SourcelessFileLoader('__temp__',
                                                                  filename)
            else:
                loader = importlib.machinery.ExtensionFileLoader('__temp__',
                                                                 filename)
            try:
                module = loader.load_module('__temp__')
            except:
                # Any import failure at all means no synopsis is available.
                return None
            result = (module.__doc__ or '').splitlines()[0]
            # Drop the throwaway module so it cannot shadow a real one.
            del sys.modules['__temp__']
        else:
            # text modules can be directly examined
            result = source_synopsis(file)
            file.close()
        cache[filename] = (mtime, result)
    return result
class ErrorDuringImport(Exception):
    """Errors that occurred while trying to import something to document it."""
    def __init__(self, filename, exc_info):
        # exc_info is a (type, value, traceback) triple as from sys.exc_info().
        self.filename = filename
        (self.exc, self.value, self.tb) = exc_info

    def __str__(self):
        exc_name = self.exc.__name__
        return 'problem in %s - %s: %s' % (self.filename, exc_name, self.value)
def importfile(path):
    """Import a Python source file or compiled file given its path."""
    magic = imp.get_magic()
    with open(path, 'rb') as file:
        # A file starting with the bytecode magic number is a compiled module.
        if file.read(len(magic)) == magic:
            kind = imp.PY_COMPILED
        else:
            kind = imp.PY_SOURCE
        # Rewind so load_module sees the file from the beginning.
        file.seek(0)
        filename = os.path.basename(path)
        name, ext = os.path.splitext(filename)
        try:
            module = imp.load_module(name, file, path, (ext, 'r', kind))
        except:
            # Wrap any import-time failure with the offending path attached.
            raise ErrorDuringImport(path, sys.exc_info())
    return module
def safeimport(path, forceload=0, cache={}):
    """Import a module; handle errors; return None if the module isn't found.

    If the module *is* found but an exception occurs, it's wrapped in an
    ErrorDuringImport exception and reraised.  Unlike __import__, if a
    package path is specified, the module at the end of the path is returned,
    not the package at the beginning.  If the optional 'forceload' argument
    is 1, we reload the module from disk (unless it's a dynamic extension)."""
    # NOTE: the mutable default `cache` is deliberate — evicted modules are
    # parked here so they are not garbage collected (see below).
    try:
        # If forceload is 1 and the module has been previously loaded from
        # disk, we always have to reload the module.  Checking the file's
        # mtime isn't good enough (e.g. the module could contain a class
        # that inherits from another module that has changed).
        if forceload and path in sys.modules:
            if path not in sys.builtin_module_names:
                # Remove the module from sys.modules and re-import to try
                # and avoid problems with partially loaded modules.
                # Also remove any submodules because they won't appear
                # in the newly loaded module's namespace if they're already
                # in sys.modules.
                subs = [m for m in sys.modules if m.startswith(path + '.')]
                for key in [path] + subs:
                    # Prevent garbage collection.
                    cache[key] = sys.modules[key]
                    del sys.modules[key]
        module = __import__(path)
    except:
        # Did the error occur before or after the module was found?
        (exc, value, tb) = info = sys.exc_info()
        if path in sys.modules:
            # An error occurred while executing the imported module.
            raise ErrorDuringImport(sys.modules[path].__file__, info)
        elif exc is SyntaxError:
            # A SyntaxError occurred before we could execute the module.
            raise ErrorDuringImport(value.filename, info)
        #fix me brython
        #elif exc is ImportError and value.name == path:
        elif exc is ImportError and str(value) == str(path):
            # No such module in the path.
            return None
        else:
            # Some other error occurred during the importing process.
            raise ErrorDuringImport(path, sys.exc_info())
    # __import__ returns the top-level package; walk down to the leaf module.
    for part in path.split('.')[1:]:
        try: module = getattr(module, part)
        except AttributeError: return None
    return module
# ---------------------------------------------------- formatter base class
class Doc:
    """Base class for documentation formatters; subclasses implement the
    doc* methods for the object kinds they can render."""

    # Root URL for the standard-library reference docs, overridable through
    # the PYTHONDOCS environment variable.
    PYTHONDOCS = os.environ.get("PYTHONDOCS",
                                "http://docs.python.org/%d.%d/library"
                                % sys.version_info[:2])

    def document(self, object, name=None, *args):
        """Generate documentation for an object."""
        args = (object, name) + args
        # 'try' clause is to attempt to handle the possibility that inspect
        # identifies something in a way that pydoc itself has issues handling;
        # think 'super' and how it is a descriptor (which raises the exception
        # by lacking a __name__ attribute) and an instance.
        if inspect.isgetsetdescriptor(object): return self.docdata(*args)
        if inspect.ismemberdescriptor(object): return self.docdata(*args)
        try:
            if inspect.ismodule(object): return self.docmodule(*args)
            if inspect.isclass(object): return self.docclass(*args)
            if inspect.isroutine(object): return self.docroutine(*args)
        except AttributeError:
            pass
        if isinstance(object, property): return self.docproperty(*args)
        return self.docother(*args)

    def fail(self, object, name=None, *args):
        """Raise an exception for unimplemented types."""
        message = "don't know how to document object%s of type %s" % (
            name and ' ' + repr(name), type(object).__name__)
        raise TypeError(message)

    # Subclasses override whichever of these they support; anything left
    # unimplemented falls through to fail().
    docmodule = docclass = docroutine = docother = docproperty = docdata = fail

    def getdocloc(self, object):
        """Return the location of module docs or None"""
        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'
        docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)
        basedir = os.path.join(sys.base_exec_prefix, "lib",
                               "python%d.%d" % sys.version_info[:2])
        # Only modules that look like part of the standard library (by
        # well-known name, or by living under basedir but outside
        # site-packages) get a documentation location.
        if (isinstance(object, type(os)) and
            (object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
                                 'marshal', 'posix', 'signal', 'sys',
                                 '_thread', 'zipimport') or
             (file.startswith(basedir) and
              not file.startswith(os.path.join(basedir, 'site-packages')))) and
            object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
            if docloc.startswith("http://"):
                docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__)
            else:
                docloc = os.path.join(docloc, object.__name__ + ".html")
        else:
            docloc = None
        return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
    """Class for safely making an HTML representation of a Python object."""
    def __init__(self):
        Repr.__init__(self)
        # Slightly more generous truncation limits than reprlib's defaults.
        self.maxlist = self.maxtuple = 20
        self.maxdict = 10
        self.maxstring = self.maxother = 100

    def escape(self, text):
        """HTML-escape &, < and >.

        FIX: the replacement strings must be the HTML entities; they had
        degraded to the literal characters (a no-op escape), which would let
        markup through unescaped.  '&' is replaced first so it does not
        re-escape the entities introduced for '<' and '>'.
        """
        return replace(text, '&', '&amp;', '<', '&lt;', '>', '&gt;')

    def repr(self, object):
        return Repr.repr(self, object)

    def repr1(self, x, level):
        # Dispatch to a type-specific repr_<typename> method when available.
        if hasattr(type(x), '__name__'):
            methodname = 'repr_' + '_'.join(type(x).__name__.split())
            if hasattr(self, methodname):
                return getattr(self, methodname)(x, level)
        return self.escape(cram(stripid(repr(x)), self.maxother))

    def repr_string(self, x, level):
        test = cram(x, self.maxstring)
        testrepr = repr(test)
        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
            # Backslashes are only literal in the string and are never
            # needed to make any special characters, so show a raw string.
            return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
        # Highlight escape sequences inside the repr.
        return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
                      r'<font color="#c040c0">\1</font>',
                      self.escape(testrepr))

    repr_str = repr_string

    def repr_instance(self, x, level):
        try:
            return self.escape(cram(stripid(repr(x)), self.maxstring))
        except:
            # repr() itself may raise; fall back to a generic description.
            return self.escape('<%s instance>' % x.__class__.__name__)

    repr_unicode = repr_string
class HTMLDoc(Doc):
    """Formatter class for HTML documentation."""
    # ------------------------------------------- HTML formatting utilities
    # One shared HTMLRepr instance; its bound methods are exposed as plain
    # class attributes so self.repr/self.escape work everywhere below.
    _repr_instance = HTMLRepr()
    repr = _repr_instance.repr
    escape = _repr_instance.escape
    def page(self, title, contents):
        """Format an HTML page."""
        # Wrap already-formatted body *contents* in the standard pydoc HTML
        # skeleton (UTF-8, pale-blue background).
        return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
def heading(self, title, fgcol, bgcol, extras=''):
"""Format a page heading."""
return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom> <br>
<font color="%s" face="helvetica, arial"> <br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
''' % (bgcol, fgcol, title, fgcol, extras or ' ')
def section(self, title, fgcol, bgcol, contents, width=6,
prelude='', marginalia=None, gap=' '):
"""Format a section with a heading."""
if marginalia is None:
marginalia = '<tt>' + ' ' * width + '</tt>'
result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom> <br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
''' % (bgcol, fgcol, title)
if prelude:
result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
else:
result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(text.expandtabs())
return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
' ', ' ', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)//cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100//cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
def modpkglink(self, modpkginfo):
"""Make a link for a module or package to display in an index."""
name, path, ispackage, shadowed = modpkginfo
if shadowed:
return self.grey(name)
if path:
url = '%s.%s.html' % (path, name)
else:
url = '%s.html' % name
if ispackage:
text = '<strong>%s</strong> (package)' % name
else:
text = name
return '<a href="%s">%s</a>' % (url, text)
def filelink(self, url, path):
"""Make a link to source file."""
return '<a href="file:%s">%s</a>' % (url, path)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
url = escape(all).replace('"', '"')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
# ---------------------------------------------- type-specific routines
    def formattree(self, tree, modname, parent=None):
        """Produce HTML for a class tree as given by inspect.getclasstree()."""
        result = ''
        for entry in tree:
            if type(entry) is type(()):
                # A (class, bases) tuple: emit the class with its base list.
                c, bases = entry
                result = result + '<dt><font face="helvetica, arial">'
                result = result + self.classlink(c, modname)
                # Omit the base list when the only base is the parent we are
                # already nested under.
                if bases and bases != (parent,):
                    parents = []
                    for base in bases:
                        parents.append(self.classlink(base, modname))
                    result = result + '(' + ', '.join(parents) + ')'
                result = result + '\n</font></dt>'
            elif type(entry) is type([]):
                # A nested list holds the subclasses of the class from the
                # preceding tuple; `c` carries over from that iteration
                # (getclasstree emits a tuple before each such list).
                result = result + '<dd>\n%s</dd>\n' % self.formattree(
                    entry, modname, c)
        return '<dl>\n%s</dl>\n' % result
    def docmodule(self, object, name=None, mod=None, *ignored):
        """Produce HTML documentation for a module object.

        Renders a heading with breadcrumb links, the module docstring,
        then sections for package contents / modules, classes,
        functions, data, author and credits.
        """
        name = object.__name__ # ignore the passed-in name
        try:
            all = object.__all__
        except AttributeError:
            all = None
        # Breadcrumbs: each dotted-path component links to its own page.
        parts = name.split('.')
        links = []
        for i in range(len(parts)-1):
            links.append(
                '<a href="%s.html"><font color="#ffffff">%s</font></a>' %
                ('.'.join(parts[:i+1]), parts[i]))
        linkedname = '.'.join(links + parts[-1:])
        head = '<big><big><strong>%s</strong></big></big>' % linkedname
        try:
            path = inspect.getabsfile(object)
            url = path
            if sys.platform == 'win32':
                import nturl2path
                url = nturl2path.pathname2url(path)
            filelink = self.filelink(url, path)
        except TypeError:
            # getabsfile raises TypeError for built-in modules.
            filelink = '(built-in)'
        info = []
        if hasattr(object, '__version__'):
            version = str(object.__version__)
            # Strip RCS/CVS "$Revision: ... $" wrappers.
            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
                version = version[11:-1].strip()
            info.append('version %s' % self.escape(version))
        if hasattr(object, '__date__'):
            info.append(self.escape(str(object.__date__)))
        if info:
            head = head + ' (%s)' % ', '.join(info)
        docloc = self.getdocloc(object)
        if docloc is not None:
            docloc = '<br><a href="%(docloc)s">Module Reference</a>' % locals()
        else:
            docloc = ''
        result = self.heading(
            head, '#ffffff', '#7799ee',
            '<a href=".">index</a><br>' + filelink + docloc)
        modules = inspect.getmembers(object, inspect.ismodule)
        classes, cdict = [], {}
        for key, value in inspect.getmembers(object, inspect.isclass):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                (inspect.getmodule(value) or object) is object):
                if visiblename(key, all, object):
                    classes.append((key, value))
                    cdict[key] = cdict[value] = '#' + key
        # Cross-link base classes defined in other modules.
        for key, value in classes:
            for base in value.__bases__:
                key, modname = base.__name__, base.__module__
                module = sys.modules.get(modname)
                if modname != name and module and hasattr(module, key):
                    if getattr(module, key) is base:
                        if not key in cdict:
                            cdict[key] = cdict[base] = modname + '.html#' + key
        funcs, fdict = [], {}
        for key, value in inspect.getmembers(object, inspect.isroutine):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
                if visiblename(key, all, object):
                    funcs.append((key, value))
                    fdict[key] = '#-' + key
                    if inspect.isfunction(value): fdict[value] = fdict[key]
        data = []
        for key, value in inspect.getmembers(object, isdata):
            if visiblename(key, all, object):
                data.append((key, value))
        doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc
        if hasattr(object, '__path__'):
            # Packages: list their contained modules instead of imports.
            modpkgs = []
            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
                modpkgs.append((modname, name, ispkg, 0))
            modpkgs.sort()
            contents = self.multicolumn(modpkgs, self.modpkglink)
            result = result + self.bigsection(
                'Package Contents', '#ffffff', '#aa55cc', contents)
        elif modules:
            contents = self.multicolumn(
                modules, lambda t: self.modulelink(t[1]))
            result = result + self.bigsection(
                'Modules', '#ffffff', '#aa55cc', contents)
        if classes:
            classlist = [value for (key, value) in classes]
            contents = [
                self.formattree(inspect.getclasstree(classlist, 1), name)]
            for key, value in classes:
                contents.append(self.document(value, key, name, fdict, cdict))
            result = result + self.bigsection(
                'Classes', '#ffffff', '#ee77aa', ' '.join(contents))
        if funcs:
            contents = []
            for key, value in funcs:
                contents.append(self.document(value, key, name, fdict, cdict))
            result = result + self.bigsection(
                'Functions', '#ffffff', '#eeaa77', ' '.join(contents))
        if data:
            contents = []
            for key, value in data:
                contents.append(self.document(value, key))
            result = result + self.bigsection(
                'Data', '#ffffff', '#55aa55', '<br>\n'.join(contents))
        if hasattr(object, '__author__'):
            contents = self.markup(str(object.__author__), self.preformat)
            result = result + self.bigsection(
                'Author', '#ffffff', '#7799ee', contents)
        if hasattr(object, '__credits__'):
            contents = self.markup(str(object.__credits__), self.preformat)
            result = result + self.bigsection(
                'Credits', '#ffffff', '#7799ee', contents)
        return result
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
*ignored):
"""Produce HTML documentation for a class object."""
print('docclass')
realname = object.__name__
name = name or realname
bases = object.__bases__
contents = []
push = contents.append
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('<hr>\n')
self.needone = 1
hr = HorizontalRule()
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
hr.maybe()
push('<dl><dt>Method resolution order:</dt>\n')
for base in mro:
push('<dd>%s</dd>\n' % self.classlink(base,
object.__module__))
push('</dl>\n')
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value, name, mod,
funcs, classes, mdict, object))
push('\n')
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
base = self.docother(getattr(object, name), name, mod)
if callable(value) or inspect.isdatadescriptor(value):
doc = getattr(value, "__doc__", None)
else:
doc = None
if doc is None:
push('<dl><dt>%s</dl>\n' % base)
else:
doc = self.markup(getdoc(value), self.preformat,
funcs, classes, mdict)
doc = '<dd><tt>%s</tt>' % doc
push('<dl><dt>%s%s</dl>\n' % (base, doc))
push('\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
mdict = {}
for key, kind, homecls, value in attrs:
mdict[key] = anchor = '#' + name + '-' + key
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
pass
try:
# The value may not be hashable (e.g., a data attr with
# a dict or list value).
mdict[value] = anchor
except TypeError:
pass
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = 'defined here'
else:
tag = 'inherited from %s' % self.classlink(thisclass,
object.__module__)
tag += ':<br>\n'
# Sort attrs by name.
attrs.sort(key=lambda t: t[0])
# Pump out the attrs, segregated by kind.
attrs = spill('Methods %s' % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill('Class methods %s' % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill('Static methods %s' % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata('Data and other attributes %s' % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = ''.join(contents)
if name == realname:
title = '<a name="%s">class <strong>%s</strong></a>' % (
name, realname)
else:
title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
name, name, realname)
if bases:
parents = []
for base in bases:
parents.append(self.classlink(base, object.__module__))
title = title + '(%s)' % ', '.join(parents)
doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
doc = doc and '<tt>%s<br> </tt>' % doc
return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object))
    def docroutine(self, object, name=None, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object.

        Renders an anchored title, the argument spec, and the marked-up
        docstring; cross-references bound-method classes where possible.
        """
        realname = object.__name__
        name = name or realname
        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''
        skipdocs = 0
        if inspect.ismethod(object):
            imclass = object.__self__.__class__
            if cl:
                if imclass is not cl:
                    # Inherited method: say where it actually comes from.
                    note = ' from ' + self.classlink(imclass, mod)
            else:
                if object.__self__ is not None:
                    note = ' method of %s instance' % self.classlink(
                        object.__self__.__class__, mod)
                else:
                    note = ' unbound %s method' % self.classlink(imclass,mod)
            # Document the underlying function from here on.
            object = object.__func__
        if name == realname:
            title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
        else:
            if (cl and realname in cl.__dict__ and
                cl.__dict__[realname] is object):
                # Alias of a method documented elsewhere on this page:
                # link to it and skip repeating the docs.
                reallink = '<a href="#%s">%s</a>' % (
                    cl.__name__ + '-' + realname, realname)
                skipdocs = 1
            else:
                reallink = realname
            title = '<a name="%s"><strong>%s</strong></a> = %s' % (
                anchor, name, reallink)
        if inspect.isfunction(object):
            args, varargs, kwonlyargs, kwdefaults, varkw, defaults, ann = \
                inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, kwonlyargs, kwdefaults, varkw, defaults, ann,
                formatvalue=self.formatvalue,
                formatannotation=inspect.formatannotationrelativeto(object))
            if realname == '<lambda>':
                title = '<strong>%s</strong> <em>lambda</em> ' % name
                # XXX lambda's won't usually have func_annotations['return']
                # since the syntax doesn't support but it is possible.
                # So removing parentheses isn't truly safe.
                argspec = argspec[1:-1] # remove parentheses
        else:
            # Built-ins etc. expose no introspectable signature.
            argspec = '(...)'
        decl = title + argspec + (note and self.grey(
            '<font face="helvetica, arial">%s</font>' % note))
        if skipdocs:
            return '<dl><dt>%s</dt></dl>\n' % decl
        else:
            doc = self.markup(
                getdoc(object), self.preformat, funcs, classes, methods)
            doc = doc and '<dd><tt>%s</tt></dd>' % doc
            return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if value.__doc__ is not None:
doc = self.markup(getdoc(value), self.preformat)
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
    def docproperty(self, object, name=None, mod=None, cl=None):
        """Produce html documentation for a property."""
        # Properties render like any other descriptor: name plus docstring.
        return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
    def docdata(self, object, name=None, mod=None, cl=None):
        """Produce html documentation for a data descriptor."""
        # Same rendering path as docproperty: delegate to _docdescriptor.
        return self._docdescriptor(name, object, mod)
    def index(self, dir, shadowed=None):
        """Generate an HTML index for a directory of modules.

        *shadowed* maps module names already listed (by an earlier
        directory on the path) and is updated in place as a side effect.
        """
        modpkgs = []
        if shadowed is None: shadowed = {}
        for importer, name, ispkg in pkgutil.iter_modules([dir]):
            if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
                # ignore a module if its name contains a surrogate character
                continue
            modpkgs.append((name, '', ispkg, name in shadowed))
            shadowed[name] = 1
        modpkgs.sort()
        contents = self.multicolumn(modpkgs, self.modpkglink)
        return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
    """Class for safely making a text representation of a Python object."""
    def __init__(self):
        # Tighter limits than reprlib's defaults, tuned for console output.
        Repr.__init__(self)
        self.maxdict = 10
        self.maxstring = self.maxother = 100
        self.maxlist = self.maxtuple = 20
    def repr_string(self, x, level):
        """Repr a string, preferring a raw literal when that is cleaner."""
        shortened = cram(x, self.maxstring)
        quoted = repr(shortened)
        if '\\' in shortened and '\\' not in replace(quoted, r'\\', ''):
            # Backslashes are only literal in the string and are never
            # needed to make any special characters, so show a raw string.
            return 'r' + quoted[0] + shortened + quoted[0]
        return quoted
    repr_str = repr_string
    def repr_instance(self, x, level):
        """Repr an arbitrary object, guarding against a failing __repr__."""
        try:
            return cram(stripid(repr(x)), self.maxstring)
        except:
            # NOTE(review): bare except kept to preserve original behavior.
            return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
    """Formatter class for text documentation."""
    # ------------------------------------------- text formatting utilities
    # Shared TextRepr instance; 'repr' becomes an unbound callable usable
    # as self.repr(obj).
    _repr_instance = TextRepr()
    repr = _repr_instance.repr
    def bold(self, text):
        """Format a string in bold by overstriking."""
        # Terminal convention: "X\bX" renders X in bold on ttys/pagers.
        return ''.join(ch + '\b' + ch for ch in text)
    def indent(self, text, prefix='    '):
        """Indent text by prepending a given prefix to each line."""
        if not text: return ''
        lines = [prefix + line for line in text.split('\n')]
        # Avoid trailing whitespace on the last line.
        if lines: lines[-1] = lines[-1].rstrip()
        return '\n'.join(lines)
    def section(self, title, contents):
        """Format a section with a given heading."""
        clean_contents = self.indent(contents).rstrip()
        return self.bold(title) + '\n' + clean_contents + '\n\n'
    # ---------------------------------------------- type-specific routines
    def formattree(self, tree, modname, parent=None, prefix=''):
        """Render in text a class tree as returned by inspect.getclasstree()."""
        result = ''
        for entry in tree:
            if type(entry) is type(()):
                # (class, bases) tuple: one line per class.
                c, bases = entry
                result = result + prefix + classname(c, modname)
                if bases and bases != (parent,):
                    parents = (classname(c, modname) for c in bases)
                    result = result + '(%s)' % ', '.join(parents)
                result = result + '\n'
            elif type(entry) is type([]):
                # Nested list = subclasses of the preceding tuple's class;
                # 'c' deliberately carries over from the branch above.
                result = result + self.formattree(
                    entry, modname, c, prefix + '    ')
        return result
    def docmodule(self, object, name=None, mod=None):
        """Produce text documentation for a given module object."""
        name = object.__name__ # ignore the passed-in name
        synop, desc = splitdoc(getdoc(object))
        result = self.section('NAME', name + (synop and ' - ' + synop))
        all = getattr(object, '__all__', None)
        docloc = self.getdocloc(object)
        if docloc is not None:
            result = result + self.section('MODULE REFERENCE', docloc + """
The following documentation is automatically generated from the Python
source files. It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations. When in doubt, consult the module reference at the
location listed above.
""")
        if desc:
            result = result + self.section('DESCRIPTION', desc)
        classes = []
        for key, value in inspect.getmembers(object, inspect.isclass):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None
                or (inspect.getmodule(value) or object) is object):
                if visiblename(key, all, object):
                    classes.append((key, value))
        funcs = []
        for key, value in inspect.getmembers(object, inspect.isroutine):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
                if visiblename(key, all, object):
                    funcs.append((key, value))
        data = []
        for key, value in inspect.getmembers(object, isdata):
            if visiblename(key, all, object):
                data.append((key, value))
        modpkgs = []
        modpkgs_names = set()
        if hasattr(object, '__path__'):
            # Packages: list submodules/subpackages found on __path__.
            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
                modpkgs_names.add(modname)
                if ispkg:
                    modpkgs.append(modname + ' (package)')
                else:
                    modpkgs.append(modname)
            modpkgs.sort()
            result = result + self.section(
                'PACKAGE CONTENTS', '\n'.join(modpkgs))
        # Detect submodules as sometimes created by C extensions
        submodules = []
        for key, value in inspect.getmembers(object, inspect.ismodule):
            if value.__name__.startswith(name + '.') and key not in modpkgs_names:
                submodules.append(key)
        if submodules:
            submodules.sort()
            result = result + self.section(
                'SUBMODULES', '\n'.join(submodules))
        if classes:
            classlist = [value for key, value in classes]
            contents = [self.formattree(
                inspect.getclasstree(classlist, 1), name)]
            for key, value in classes:
                contents.append(self.document(value, key, name))
            result = result + self.section('CLASSES', '\n'.join(contents))
        if funcs:
            contents = []
            for key, value in funcs:
                contents.append(self.document(value, key, name))
            result = result + self.section('FUNCTIONS', '\n'.join(contents))
        if data:
            contents = []
            for key, value in data:
                contents.append(self.docother(value, key, name, maxlen=70))
            result = result + self.section('DATA', '\n'.join(contents))
        if hasattr(object, '__version__'):
            version = str(object.__version__)
            # Strip RCS/CVS "$Revision: ... $" wrappers.
            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
                version = version[11:-1].strip()
            result = result + self.section('VERSION', version)
        if hasattr(object, '__date__'):
            result = result + self.section('DATE', str(object.__date__))
        if hasattr(object, '__author__'):
            result = result + self.section('AUTHOR', str(object.__author__))
        if hasattr(object, '__credits__'):
            result = result + self.section('CREDITS', str(object.__credits__))
        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'
        result = result + self.section('FILE', file)
        return result
    def docclass(self, object, name=None, mod=None, *ignored):
        """Produce text documentation for a given class object."""
        realname = object.__name__
        name = name or realname
        bases = object.__bases__
        def makename(c, m=object.__module__):
            # Class name, qualified relative to this class's module.
            return classname(c, m)
        if name == realname:
            title = 'class ' + self.bold(realname)
        else:
            title = self.bold(name) + ' = class ' + realname
        if bases:
            parents = map(makename, bases)
            title = title + '(%s)' % ', '.join(parents)
        doc = getdoc(object)
        contents = doc and [doc + '\n'] or []
        push = contents.append
        # List the mro, if non-trivial.
        mro = deque(inspect.getmro(object))
        if len(mro) > 2:
            push("Method resolution order:")
            for base in mro:
                push('    ' + makename(base))
            push('')
        # Cute little class to pump out a horizontal rule between sections.
        class HorizontalRule:
            def __init__(self):
                self.needone = 0
            def maybe(self):
                if self.needone:
                    push('-' * 70)
                self.needone = 1
        hr = HorizontalRule()
        def spill(msg, attrs, predicate):
            # Document the attrs matching predicate; return the rest.
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    try:
                        value = getattr(object, name)
                    except Exception:
                        # Some descriptors may meet a failure in their __get__.
                        # (bug #1785)
                        push(self._docdescriptor(name, value, mod))
                    else:
                        push(self.document(value,
                                           name, mod, object))
            return attrs
        def spilldescriptors(msg, attrs, predicate):
            # Like spill, but renders data descriptors (name + docstring).
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self._docdescriptor(name, value, mod))
            return attrs
        def spilldata(msg, attrs, predicate):
            # Like spill, but renders plain data attributes.
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    if callable(value) or inspect.isdatadescriptor(value):
                        doc = getdoc(value)
                    else:
                        doc = None
                    push(self.docother(getattr(object, name),
                                       name, mod, maxlen=70, doc=doc) + '\n')
            return attrs
        attrs = [(name, kind, cls, value)
                 for name, kind, cls, value in classify_class_attrs(object)
                 if visiblename(name, obj=object)]
        # Walk the MRO, emitting each class's own attributes in turn.
        while attrs:
            if mro:
                thisclass = mro.popleft()
            else:
                thisclass = attrs[0][2]
            attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
            if thisclass is builtins.object:
                attrs = inherited
                continue
            elif thisclass is object:
                tag = "defined here"
            else:
                tag = "inherited from %s" % classname(thisclass,
                                                      object.__module__)
            # Sort attrs by name.
            attrs.sort()
            # Pump out the attrs, segregated by kind.
            attrs = spill("Methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'method')
            attrs = spill("Class methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'class method')
            attrs = spill("Static methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'static method')
            attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
                                     lambda t: t[1] == 'data descriptor')
            attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
                              lambda t: t[1] == 'data')
            assert attrs == []
            attrs = inherited
        contents = '\n'.join(contents)
        if not contents:
            return title + '\n'
        return title + '\n' + self.indent(contents.rstrip(), ' |  ') + '\n'
    def formatvalue(self, object):
        """Format an argument default value as text."""
        return '=' + self.repr(object)
    def docroutine(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a function or method object."""
        realname = object.__name__
        name = name or realname
        note = ''
        skipdocs = 0
        if inspect.ismethod(object):
            imclass = object.__self__.__class__
            if cl:
                if imclass is not cl:
                    # Inherited method: note where it actually comes from.
                    note = ' from ' + classname(imclass, mod)
            else:
                if object.__self__ is not None:
                    note = ' method of %s instance' % classname(
                        object.__self__.__class__, mod)
                else:
                    note = ' unbound %s method' % classname(imclass,mod)
            # Document the underlying function from here on.
            object = object.__func__
        if name == realname:
            title = self.bold(realname)
        else:
            if (cl and realname in cl.__dict__ and
                cl.__dict__[realname] is object):
                # Alias of a method documented elsewhere: skip its docs.
                skipdocs = 1
            title = self.bold(name) + ' = ' + realname
        if inspect.isfunction(object):
            args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
                inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
                formatvalue=self.formatvalue,
                formatannotation=inspect.formatannotationrelativeto(object))
            if realname == '<lambda>':
                title = self.bold(name) + ' lambda '
                # XXX lambda's won't usually have func_annotations['return']
                # since the syntax doesn't support but it is possible.
                # So removing parentheses isn't truly safe.
                argspec = argspec[1:-1] # remove parentheses
        else:
            # Built-ins etc. expose no introspectable signature.
            argspec = '(...)'
        decl = title + argspec + note
        if skipdocs:
            return decl + '\n'
        else:
            doc = getdoc(object) or ''
            return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')
    def _docdescriptor(self, name, value, mod):
        """Render a descriptor as text: bold name plus indented docstring."""
        results = []
        push = results.append
        if name:
            push(self.bold(name))
            push('\n')
        doc = getdoc(value) or ''
        if doc:
            push(self.indent(doc))
            push('\n')
        return ''.join(results)
    def docproperty(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a property."""
        return self._docdescriptor(name, object, mod)
    def docdata(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a data descriptor."""
        return self._docdescriptor(name, object, mod)
    def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
        """Produce text documentation for a data object."""
        repr = self.repr(object)
        if maxlen:
            # Truncate the repr so "name = repr" fits within maxlen columns.
            line = (name and name + ' = ' or '') + repr
            chop = maxlen - len(line)
            if chop < 0: repr = repr[:chop] + '...'
        line = (name and self.bold(name) + ' = ' or '') + repr
        if doc is not None:
            line += '\n' + self.indent(str(doc))
        return line
class _PlainTextDoc(TextDoc):
    """TextDoc variant that disables string styling (no boldface)."""
    def bold(self, text):
        # Plain output: suppress the overstrike-based bold effect entirely.
        return text
# --------------------------------------------------------- user interfaces
def pager(text):
    """The first time this is called, determine what kind of pager to use."""
    # Self-replacing stub: the module-level name 'pager' is rebound to the
    # concrete pager chosen by getpager(), so the probe runs only once.
    global pager
    pager = getpager()
    pager(text)
def getpager():
    """Decide what method to use for paging through text.

    Returns a callable taking the text to display.  Preference order:
    honor $PAGER, then 'less', then 'more', then a built-in tty pager,
    falling back to plain stdout when not attached to a terminal.
    """
    if not hasattr(sys.stdout, "isatty"):
        return plainpager
    if not sys.stdin.isatty() or not sys.stdout.isatty():
        return plainpager
    if 'PAGER' in os.environ:
        if sys.platform == 'win32': # pipes completely broken in Windows
            return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
        elif os.environ.get('TERM') in ('dumb', 'emacs'):
            # Dumb terminals can't render overstrike bold; strip it first.
            return lambda text: pipepager(plain(text), os.environ['PAGER'])
        else:
            return lambda text: pipepager(text, os.environ['PAGER'])
    if os.environ.get('TERM') in ('dumb', 'emacs'):
        return plainpager
    if sys.platform == 'win32' or sys.platform.startswith('os2'):
        return lambda text: tempfilepager(plain(text), 'more <')
    # Probe for 'less' by running it with no input.
    if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
        return lambda text: pipepager(text, 'less')
    import tempfile
    (fd, filename) = tempfile.mkstemp()
    os.close(fd)
    try:
        # Probe for 'more' on an empty temporary file.
        if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
            return lambda text: pipepager(text, 'more')
        else:
            return ttypager
    finally:
        os.unlink(filename)
def plain(text):
    """Strip overstrike-style boldface (char + backspace pairs) from text."""
    # Bold characters are rendered as "X\bX"; dropping every char-backspace
    # pair leaves the plain character behind.
    overstrike = re.compile('.\b')
    return overstrike.sub('', text)
def pipepager(text, cmd):
    """Page through text by feeding it to another program."""
    pipe = os.popen(cmd, 'w')
    try:
        pipe.write(text)
        pipe.close()
    except IOError:
        pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
    """Page through text by invoking a program on a temporary file.

    The file is created securely, handed to *cmd*, and always removed,
    even if writing the text fails.
    """
    import tempfile
    # BUG FIX: tempfile.mktemp is race-prone and deprecated; mkstemp
    # atomically creates the file and hands back an open descriptor.
    fd, filename = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as file:
            file.write(text)
        os.system(cmd + ' "' + filename + '"')
    finally:
        os.unlink(filename)
def ttypager(text):
    """Page through text on a text terminal, one screen at a time.

    Uses cbreak mode when the tty module is available so single
    keypresses advance the display; falls back to line-buffered input
    otherwise.  'q' quits, Enter advances one line, 'b'/Esc pages back.
    """
    lines = plain(text).split('\n')
    try:
        import tty
        fd = sys.stdin.fileno()
        old = tty.tcgetattr(fd)
        tty.setcbreak(fd)
        getchar = lambda: sys.stdin.read(1)
    except (ImportError, AttributeError):
        tty = None
        getchar = lambda: sys.stdin.readline()[:-1][:1]
    try:
        # BUG FIX: $LINES arrives from the environment as a *string*; the
        # original "os.environ.get('LINES', 25) - 1" raised TypeError
        # whenever LINES was actually set.
        r = inc = int(os.environ.get('LINES', 25)) - 1
        sys.stdout.write('\n'.join(lines[:inc]) + '\n')
        while lines[r:]:
            sys.stdout.write('-- more --')
            sys.stdout.flush()
            c = getchar()
            if c in ('q', 'Q'):
                sys.stdout.write('\r          \r')
                break
            elif c in ('\r', '\n'):
                sys.stdout.write('\r          \r' + lines[r] + '\n')
                r = r + 1
                continue
            if c in ('b', 'B', '\x1b'):
                # Page back two screens (the one shown plus one more).
                r = r - inc - inc
                if r < 0: r = 0
            sys.stdout.write('\n' + '\n'.join(lines[r:r+inc]) + '\n')
            r = r + inc
    finally:
        if tty:
            tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
    """Write the text, stripped of boldface, straight to stdout (no paging)."""
    stream = sys.stdout
    stream.write(plain(text))
def describe(thing):
    """Produce a short description of the given thing."""
    if inspect.ismodule(thing):
        modname = thing.__name__
        if modname in sys.builtin_module_names:
            return 'built-in module ' + modname
        # Packages carry a __path__; plain modules do not.
        return ('package ' if hasattr(thing, '__path__') else 'module ') + modname
    if inspect.isbuiltin(thing):
        return 'built-in function ' + thing.__name__
    # Descriptors are described with their owning class's full location.
    for tester, kind in ((inspect.isgetsetdescriptor, 'getset'),
                         (inspect.ismemberdescriptor, 'member')):
        if tester(thing):
            owner = thing.__objclass__
            return '%s descriptor %s.%s.%s' % (
                kind, owner.__module__, owner.__name__, thing.__name__)
    for tester, label in ((inspect.isclass, 'class '),
                          (inspect.isfunction, 'function '),
                          (inspect.ismethod, 'method ')):
        if tester(thing):
            return label + thing.__name__
    # Anything else: fall back to the type's name.
    return type(thing).__name__
def locate(path, forceload=0):
    """Locate an object by name or dotted path, importing as necessary."""
    parts = [part for part in path.split('.') if part]
    # Import the longest importable prefix of the dotted path.
    module, depth = None, 0
    while depth < len(parts):
        candidate = safeimport('.'.join(parts[:depth + 1]), forceload)
        if not candidate:
            break
        module, depth = candidate, depth + 1
    # Resolve any remaining components as attribute lookups, starting from
    # the imported module or, failing that, from the builtins namespace.
    found = module if module else builtins
    for part in parts[depth:]:
        try:
            found = getattr(found, part)
        except AttributeError:
            return None
    return found
# --------------------------------------- interactive interpreter interface
# Module-level renderer singletons used by render_doc()/doc()/writedoc().
text = TextDoc()  # overstruck (boldface) plain-text renderer
plaintext = _PlainTextDoc()  # plain-text renderer with styling disabled
html = HTMLDoc()  # HTML renderer
def resolve(thing, forceload=0):
    """Given an object or a path to an object, get the object and its name."""
    if not isinstance(thing, str):
        # Already an object: pair it with its __name__ when that is a string.
        name = getattr(thing, '__name__', None)
        if not isinstance(name, str):
            name = None
        return thing, name
    found = locate(thing, forceload)
    if not found:
        raise ImportError('no Python documentation found for %r' % thing)
    return found, thing
def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
               renderer=None):
    """Render text documentation, given an object or a path to an object.

    *renderer* defaults to the module-level TextDoc instance; pass the
    plaintext or html renderer for other output formats.
    """
    if renderer is None:
        renderer = text
    object, name = resolve(thing, forceload)
    desc = describe(object)
    module = inspect.getmodule(object)
    if name and '.' in name:
        desc += ' in ' + name[:name.rfind('.')]
    elif module and module is not object:
        desc += ' in module ' + module.__name__
    if not (inspect.ismodule(object) or
            inspect.isclass(object) or
            inspect.isroutine(object) or
            inspect.isgetsetdescriptor(object) or
            inspect.ismemberdescriptor(object) or
            isinstance(object, property)):
        # If the passed object is a piece of data or an instance,
        # document its available methods instead of its value.
        object = type(object)
        desc += ' object'
    return title % desc + '\n\n' + renderer.document(object, name)
def doc(thing, title='Python Library Documentation: %s', forceload=0,
        output=None):
    """Display text documentation, given an object or a path to an object.

    With no *output* stream the rendered text goes through the pager;
    otherwise the unstyled (plaintext) rendering is written to *output*.
    """
    try:
        if output is None:
            pager(render_doc(thing, title, forceload))
        else:
            output.write(render_doc(thing, title, forceload, plaintext))
    except (ImportError, ErrorDuringImport) as value:
        # Report lookup/import failures instead of propagating them.
        print(value)
def writedoc(thing, forceload=0):
    """Write HTML documentation to a file in the current directory.

    The output file is named '<object name>.html'; lookup/import errors
    are reported rather than raised.
    """
    try:
        object, name = resolve(thing, forceload)
        page = html.page(describe(object), html.document(object, name))
        # 'with' guarantees the file handle is closed even if write fails.
        with open(name + '.html', 'w', encoding='utf-8') as file:
            file.write(page)
        print('wrote', name + '.html')
    except (ImportError, ErrorDuringImport) as value:
        print(value)
def writedocs(dir, pkgpath='', done=None):
    """Write out HTML documentation for all modules in a directory tree."""
    # 'done' is accepted for backward compatibility; the visible code only
    # initializes it and never reads or updates it afterwards.
    if done is None: done = {}
    for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
        writedoc(modname)
    return
class Helper:
# These dictionaries map a topic name to either an alias, or a tuple
# (label, seealso-items). The "label" is the label of the corresponding
# section in the .rst file under Doc/ and an index into the dictionary
# in pydoc_data/topics.py.
#
# CAUTION: if you change one of these dictionaries, be sure to adapt the
# list of needed labels in Doc/tools/sphinxext/pyspecific.py and
# regenerate the pydoc_data/topics.py file by running
# make pydoc-topics
# in Doc/ and copying the output file into the Lib/ directory.
keywords = {
'False': '',
'None': '',
'True': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
'break': ('break', 'while for'),
'class': ('class', 'CLASSES SPECIALMETHODS'),
'continue': ('continue', 'while for'),
'def': ('function', ''),
'del': ('del', 'BASICMETHODS'),
'elif': 'if',
'else': ('else', 'while for'),
'except': 'try',
'finally': 'try',
'for': ('for', 'break continue while'),
'from': 'import',
'global': ('global', 'nonlocal NAMESPACES'),
'if': ('if', 'TRUTHVALUE'),
'import': ('import', 'MODULES'),
'in': ('in', 'SEQUENCEMETHODS'),
'is': 'COMPARISON',
'lambda': ('lambda', 'FUNCTIONS'),
'nonlocal': ('nonlocal', 'global NAMESPACES'),
'not': 'BOOLEAN',
'or': 'BOOLEAN',
'pass': ('pass', ''),
'raise': ('raise', 'EXCEPTIONS'),
'return': ('return', 'FUNCTIONS'),
'try': ('try', 'EXCEPTIONS'),
'while': ('while', 'break continue if TRUTHVALUE'),
'with': ('with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
'yield': ('yield', ''),
}
# Either add symbols to this dictionary or to the symbols dictionary
# directly: Whichever is easier. They are merged later.
_symbols_inverse = {
'STRINGS' : ("'", "'''", "r'", "b'", '"""', '"', 'r"', 'b"'),
'OPERATORS' : ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&',
'|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>'),
'COMPARISON' : ('<', '>', '<=', '>=', '==', '!=', '<>'),
'UNARY' : ('-', '~'),
'AUGMENTEDASSIGNMENT' : ('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//='),
'BITWISE' : ('<<', '>>', '&', '|', '^', '~'),
'COMPLEX' : ('j', 'J')
}
symbols = {
'%': 'OPERATORS FORMATTING',
'**': 'POWER',
',': 'TUPLES LISTS FUNCTIONS',
'.': 'ATTRIBUTES FLOAT MODULES OBJECTS',
'...': 'ELLIPSIS',
':': 'SLICINGS DICTIONARYLITERALS',
'@': 'def class',
'\\': 'STRINGS',
'_': 'PRIVATENAMES',
'__': 'PRIVATENAMES SPECIALMETHODS',
'`': 'BACKQUOTES',
'(': 'TUPLES FUNCTIONS CALLS',
')': 'TUPLES FUNCTIONS CALLS',
'[': 'LISTS SUBSCRIPTS SLICINGS',
']': 'LISTS SUBSCRIPTS SLICINGS'
}
for topic, symbols_ in _symbols_inverse.items():
for symbol in symbols_:
topics = symbols.get(symbol, topic)
if topic not in topics:
topics = topics + ' ' + topic
symbols[symbol] = topics
topics = {
'TYPES': ('types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '
'FUNCTIONS CLASSES MODULES FILES inspect'),
'STRINGS': ('strings', 'str UNICODE SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'STRINGMETHODS': ('string-methods', 'STRINGS FORMATTING'),
'FORMATTING': ('formatstrings', 'OPERATORS'),
'UNICODE': ('strings', 'encodings unicode SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'NUMBERS': ('numbers', 'INTEGER FLOAT COMPLEX TYPES'),
'INTEGER': ('integers', 'int range'),
'FLOAT': ('floating', 'float math'),
'COMPLEX': ('imaginary', 'complex cmath'),
'SEQUENCES': ('typesseq', 'STRINGMETHODS FORMATTING range LISTS'),
'MAPPINGS': 'DICTIONARIES',
'FUNCTIONS': ('typesfunctions', 'def TYPES'),
'METHODS': ('typesmethods', 'class def CLASSES TYPES'),
'CODEOBJECTS': ('bltin-code-objects', 'compile FUNCTIONS TYPES'),
'TYPEOBJECTS': ('bltin-type-objects', 'types TYPES'),
'FRAMEOBJECTS': 'TYPES',
'TRACEBACKS': 'TYPES',
'NONE': ('bltin-null-object', ''),
'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'),
'FILES': ('bltin-file-objects', ''),
'SPECIALATTRIBUTES': ('specialattrs', ''),
'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'),
'MODULES': ('typesmodules', 'import'),
'PACKAGES': 'import',
'EXPRESSIONS': ('operator-summary', 'lambda or and not in is BOOLEAN '
'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '
'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '
'LISTS DICTIONARIES'),
'OPERATORS': 'EXPRESSIONS',
'PRECEDENCE': 'EXPRESSIONS',
'OBJECTS': ('objects', 'TYPES'),
'SPECIALMETHODS': ('specialnames', 'BASICMETHODS ATTRIBUTEMETHODS '
'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '
'NUMBERMETHODS CLASSES'),
'BASICMETHODS': ('customization', 'hash repr str SPECIALMETHODS'),
'ATTRIBUTEMETHODS': ('attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
'CALLABLEMETHODS': ('callable-types', 'CALLS SPECIALMETHODS'),
'SEQUENCEMETHODS': ('sequence-types', 'SEQUENCES SEQUENCEMETHODS '
'SPECIALMETHODS'),
'MAPPINGMETHODS': ('sequence-types', 'MAPPINGS SPECIALMETHODS'),
'NUMBERMETHODS': ('numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT '
'SPECIALMETHODS'),
'EXECUTION': ('execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
'NAMESPACES': ('naming', 'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),
'DYNAMICFEATURES': ('dynamic-features', ''),
'SCOPING': 'NAMESPACES',
'FRAMES': 'NAMESPACES',
'EXCEPTIONS': ('exceptions', 'try except finally raise'),
'CONVERSIONS': ('conversions', ''),
'IDENTIFIERS': ('identifiers', 'keywords SPECIALIDENTIFIERS'),
'SPECIALIDENTIFIERS': ('id-classes', ''),
'PRIVATENAMES': ('atom-identifiers', ''),
'LITERALS': ('atom-literals', 'STRINGS NUMBERS TUPLELITERALS '
'LISTLITERALS DICTIONARYLITERALS'),
'TUPLES': 'SEQUENCES',
'TUPLELITERALS': ('exprlists', 'TUPLES LITERALS'),
'LISTS': ('typesseq-mutable', 'LISTLITERALS'),
'LISTLITERALS': ('lists', 'LISTS LITERALS'),
'DICTIONARIES': ('typesmapping', 'DICTIONARYLITERALS'),
'DICTIONARYLITERALS': ('dict', 'DICTIONARIES LITERALS'),
'ATTRIBUTES': ('attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
'SUBSCRIPTS': ('subscriptions', 'SEQUENCEMETHODS'),
'SLICINGS': ('slicings', 'SEQUENCEMETHODS'),
'CALLS': ('calls', 'EXPRESSIONS'),
'POWER': ('power', 'EXPRESSIONS'),
'UNARY': ('unary', 'EXPRESSIONS'),
'BINARY': ('binary', 'EXPRESSIONS'),
'SHIFTING': ('shifting', 'EXPRESSIONS'),
'BITWISE': ('bitwise', 'EXPRESSIONS'),
'COMPARISON': ('comparisons', 'EXPRESSIONS BASICMETHODS'),
'BOOLEAN': ('booleans', 'EXPRESSIONS TRUTHVALUE'),
'ASSERTION': 'assert',
'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'),
'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'),
'DELETION': 'del',
'RETURNING': 'return',
'IMPORTING': 'import',
'CONDITIONAL': 'if',
'LOOPING': ('compound', 'for while break continue'),
'TRUTHVALUE': ('truth', 'if while and or not BASICMETHODS'),
'DEBUGGING': ('debugger', 'pdb'),
'CONTEXTMANAGERS': ('context-managers', 'with'),
}
    def __init__(self, input=None, output=None):
        """Bind the helper to input/output streams.

        When a stream is not supplied, sys.stdin/sys.stdout are used.
        """
        self._input = input
        self._output = output
        # fix me brython
        self.input = self._input or sys.stdin
        self.output = self._output or sys.stdout
        # fix me brython: upstream pydoc resolves these lazily via
        # properties; this port binds them eagerly in __init__ instead.
        #input = property(lambda self: self._input or sys.stdin)
        #output = property(lambda self: self._output or sys.stdout)
    def __repr__(self):
        """Start interactive help when `help` (no parens) is evaluated at the prompt.

        The caller's frame name is '?' when repr() is triggered by the
        interactive interpreter echoing the bare name.
        """
        if inspect.stack()[1][3] == '?':
            self()
            return ''
        return '<pydoc.Helper instance>'
    # Sentinel default meaning "no explicit request: go interactive".
    _GoInteractive = object()

    def __call__(self, request=_GoInteractive):
        """Show help for *request*, or run an interactive session if omitted."""
        if request is not self._GoInteractive:
            self.help(request)
        else:
            self.intro()
            self.interact()
            self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')
    def interact(self):
        """Run the ``help> `` prompt loop until EOF, interrupt, or quit."""
        self.output.write('\n')
        while True:
            try:
                request = self.getline('help> ')
                if not request: break
            except (KeyboardInterrupt, EOFError):
                break
            # Strip stray quotes so pasted help("topic") style input works.
            request = replace(request, '"', '', "'", '').strip()
            if request.lower() in ('q', 'quit'): break
            self.help(request)
def getline(self, prompt):
"""Read one line, using input() when appropriate."""
if self.input is sys.stdin:
return input(prompt)
else:
self.output.write(prompt)
self.output.flush()
return self.input.readline()
    def help(self, request):
        """Dispatch one help request: a string command/name, an object, or a Helper."""
        if type(request) is type(''):
            request = request.strip()
            # Order matters: built-in commands first, then symbols,
            # keywords and topics, and finally dotted object paths.
            if request == 'help': self.intro()
            elif request == 'keywords': self.listkeywords()
            elif request == 'symbols': self.listsymbols()
            elif request == 'topics': self.listtopics()
            elif request == 'modules': self.listmodules()
            elif request[:8] == 'modules ':
                self.listmodules(request.split()[1])
            elif request in self.symbols: self.showsymbol(request)
            elif request in ['True', 'False', 'None']:
                # special case these keywords since they are objects too
                doc(eval(request), 'Help on %s:')
            elif request in self.keywords: self.showtopic(request)
            elif request in self.topics: self.showtopic(request)
            elif request: doc(request, 'Help on %s:', output=self._output)
        elif isinstance(request, Helper): self()
        else: doc(request, 'Help on %s:', output=self._output)
        self.output.write('\n')
    def intro(self):
        """Write the interactive-help welcome banner to the output stream."""
        self.output.write('''
Welcome to Python %s! This is the interactive help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % tuple([sys.version[:3]]*2))
    def list(self, items, columns=4, width=80):
        """Write *items*, sorted, in *columns* columns padded to *width* chars."""
        items = list(sorted(items))
        colw = width // columns
        rows = (len(items) + columns - 1) // columns
        for row in range(rows):
            for col in range(columns):
                # Column-major layout: consecutive items run down a column.
                i = col * rows + row
                if i < len(items):
                    self.output.write(items[i])
                    if col < columns - 1:
                        # Pad to the column width (no padding after the last column).
                        self.output.write(' ' + ' ' * (colw - 1 - len(items[i])))
            self.output.write('\n')
    def listkeywords(self):
        """List every Python keyword that has a help entry."""
        self.output.write('''
Here is a list of the Python keywords. Enter any keyword to get more help.
''')
        self.list(self.keywords.keys())
    def listsymbols(self):
        """List the punctuation symbols that have help entries."""
        self.output.write('''
Here is a list of the punctuation symbols which Python assigns special meaning
to. Enter any symbol to get more help.
''')
        self.list(self.symbols.keys())
    def listtopics(self):
        """List the names of all documented help topics."""
        self.output.write('''
Here is a list of available topics. Enter any topic name to get more help.
''')
        self.list(self.topics.keys())
    def showtopic(self, topic, more_xrefs=''):
        """Page the documentation for *topic*, then print related topics.

        *more_xrefs* is a space-separated string of extra cross-references
        appended to the topic's own (used by showsymbol).
        """
        try:
            import pydoc_data.topics
        except ImportError:
            self.output.write('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''')
            return
        target = self.topics.get(topic, self.keywords.get(topic))
        if not target:
            self.output.write('no documentation found for %s\n' % repr(topic))
            return
        if type(target) is type(''):
            # A plain string entry is an alias to another topic; follow it.
            return self.showtopic(target, more_xrefs)
        label, xrefs = target
        try:
            doc = pydoc_data.topics.topics[label]
        except KeyError:
            self.output.write('no documentation found for %s\n' % repr(topic))
            return
        pager(doc.strip() + '\n')
        if more_xrefs:
            xrefs = (xrefs or '') + ' ' + more_xrefs
        if xrefs:
            # Flow the cross-reference list through a dumb formatter for wrapping.
            import formatter
            buffer = io.StringIO()
            formatter.DumbWriter(buffer).send_flowing_data(
                'Related help topics: ' + ', '.join(xrefs.split()) + '\n')
            self.output.write('\n%s\n' % buffer.getvalue())
    def _gettopic(self, topic, more_xrefs=''):
        """Return unbuffered tuple of (topic, xrefs).

        If an error occurs here, the exception is caught and displayed by
        the url handler.

        This function duplicates the showtopic method but returns its
        result directly so it can be formatted for display in an html page.
        """
        try:
            import pydoc_data.topics
        except ImportError:
            return('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''' , '')
        target = self.topics.get(topic, self.keywords.get(topic))
        if not target:
            raise ValueError('could not find topic')
        if isinstance(target, str):
            # A plain string entry is an alias to another topic; follow it.
            return self._gettopic(target, more_xrefs)
        label, xrefs = target
        doc = pydoc_data.topics.topics[label]
        if more_xrefs:
            xrefs = (xrefs or '') + ' ' + more_xrefs
        return doc, xrefs
def showsymbol(self, symbol):
target = self.symbols[symbol]
topic, _, xrefs = target.partition(' ')
self.showtopic(topic, xrefs)
    def listmodules(self, key=''):
        """List modules whose synopsis matches *key*, or all top-level modules."""
        if key:
            self.output.write('''
Here is a list of matching modules. Enter any module name to get more help.
''')
            apropos(key)
        else:
            self.output.write('''
Please wait a moment while I gather a list of all available modules...
''')
            modules = {}
            def callback(path, modname, desc, modules=modules):
                # Record only top-level names; display packages specially.
                if modname and modname[-9:] == '.__init__':
                    modname = modname[:-9] + ' (package)'
                if modname.find('.') < 0:
                    modules[modname] = 1
            def onerror(modname):
                # Still list modules that fail to import.
                callback(None, modname, None)
            ModuleScanner().run(callback, onerror=onerror)
            self.list(modules.keys())
            self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
# Default interactive helper instance (used by the cli() entry point below).
# Intentionally shadows the builtin name at module level.
help = Helper()
class Scanner:
    """A generic tree iterator.

    Walks depth-first from the given roots via repeated next() calls.
    Root nodes themselves are expanded but never returned; next() yields
    None once the walk is exhausted.
    """

    def __init__(self, roots, children, descendp):
        # Copy the root list so the caller's list is never mutated.
        self.roots = roots[:]
        self.state = []
        self.children = children
        self.descendp = descendp

    def next(self):
        """Return the next node in the walk, or None when finished."""
        # Drop exhausted stack frames; start the next root when the
        # stack runs dry, and stop when no roots remain either.
        while True:
            if not self.state:
                if not self.roots:
                    return None
                top = self.roots.pop(0)
                self.state = [(top, self.children(top))]
            node, pending = self.state[-1]
            if pending:
                break
            self.state.pop()
        nxt = pending.pop(0)
        if self.descendp(nxt):
            self.state.append((nxt, self.children(nxt)))
        return nxt
class ModuleScanner:
    """An interruptible scanner that searches module synopses."""

    def run(self, callback, key=None, completer=None, onerror=None):
        """Scan built-in then importable modules, reporting through callbacks.

        callback(path, modname, desc) is invoked per module; when *key* is
        given, only modules whose "name - synopsis" contains it (case
        insensitive) are reported.  completer() runs at the end, and
        onerror(modname) on scan failures.  Setting self.quit (e.g. from
        another thread) stops the package walk.
        """
        if key: key = key.lower()
        self.quit = False
        seen = {}
        for modname in sys.builtin_module_names:
            if modname != '__main__':
                seen[modname] = 1
                if key is None:
                    callback(None, modname, '')
                else:
                    # Synopsis of a builtin = first line of its docstring.
                    name = __import__(modname).__doc__ or ''
                    desc = name.split('\n')[0]
                    name = modname + ' - ' + desc
                    if name.lower().find(key) >= 0:
                        callback(None, modname, desc)
        for importer, modname, ispkg in pkgutil.walk_packages(onerror=onerror):
            if self.quit:
                break
            if key is None:
                callback(None, modname, '')
            else:
                try:
                    loader = importer.find_module(modname)
                except SyntaxError:
                    # raised by tests for bad coding cookies or BOM
                    continue
                if hasattr(loader, 'get_source'):
                    # Prefer reading the source: avoids importing the module.
                    try:
                        source = loader.get_source(modname)
                    except Exception:
                        if onerror:
                            onerror(modname)
                        continue
                    desc = source_synopsis(io.StringIO(source)) or ''
                    if hasattr(loader, 'get_filename'):
                        path = loader.get_filename(modname)
                    else:
                        path = None
                else:
                    # No source available: fall back to importing it.
                    try:
                        module = loader.load_module(modname)
                    except ImportError:
                        if onerror:
                            onerror(modname)
                        continue
                    desc = (module.__doc__ or '').splitlines()[0]
                    path = getattr(module,'__file__',None)
                name = modname + ' - ' + desc
                if name.lower().find(key) >= 0:
                    callback(path, modname, desc)
        if completer:
            completer()
def apropos(key):
    """Print all the one-line module summaries that contain a substring."""
    def callback(path, modname, desc):
        if modname[-9:] == '.__init__':
            modname = modname[:-9] + ' (package)'
        print(modname, desc and '- ' + desc)
    def onerror(modname):
        # Best-effort scan: silently skip modules that fail to load.
        pass
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore') # ignore problems during import
        ModuleScanner().run(callback, key, onerror=onerror)
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
    """Start an HTTP server thread on a specific port.
    Start an HTML/text server thread, so HTML or text documents can be
    browsed dynamically and interactively with a Web browser. Example use:
    >>> import time
    >>> import pydoc
    Define a URL handler. To determine what the client is asking
    for, check the URL and content_type.
    Then get or generate some text or HTML code and return it.
    >>> def my_url_handler(url, content_type):
    ...     text = 'the URL sent was: (%s, %s)' % (url, content_type)
    ...     return text
    Start server thread on port 0.
    If you use port 0, the server will pick a random port number.
    You can then use serverthread.port to get the port number.
    >>> port = 0
    >>> serverthread = pydoc._start_server(my_url_handler, port)
    Check that the server is really started. If it is, open browser
    and get first page. Use serverthread.url as the starting page.
    >>> if serverthread.serving:
    ...     import webbrowser
    The next two lines are commented out so a browser doesn't open if
    doctest is run on this module.
    #...     webbrowser.open(serverthread.url)
    #True
    Let the server do its thing. We just need to monitor its status.
    Use time.sleep so the loop doesn't hog the CPU.
    >>> starttime = time.time()
    >>> timeout = 1 #seconds
    This is a short timeout for testing purposes.
    >>> while serverthread.serving:
    ...     time.sleep(.01)
    ...     if serverthread.serving and time.time() - starttime > timeout:
    ...         serverthread.stop()
    ...         break
    Print any errors that may have occurred.
    >>> print(serverthread.error)
    None
    """
    import http.server
    import email.message
    import select
    import threading

    class DocHandler(http.server.BaseHTTPRequestHandler):

        def do_GET(self):
            """Process a request from an HTML browser.
            The URL received is in self.path.
            Get an HTML page from self.urlhandler and send it.
            """
            if self.path.endswith('.css'):
                content_type = 'text/css'
            else:
                content_type = 'text/html'
            self.send_response(200)
            self.send_header('Content-Type', '%s; charset=UTF-8' % content_type)
            self.end_headers()
            self.wfile.write(self.urlhandler(
                self.path, content_type).encode('utf-8'))

        def log_message(self, *args):
            # Don't log messages.
            pass

    class DocServer(http.server.HTTPServer):

        def __init__(self, port, callback):
            # NOTE(review): sys.platform == 'mac' never matches on modern
            # Pythons, so host is effectively always 'localhost'.
            self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
            self.address = ('', port)
            self.callback = callback
            self.base.__init__(self, self.address, self.handler)
            self.quit = False

        def serve_until_quit(self):
            # Poll with a 1s timeout so self.quit is noticed promptly.
            while not self.quit:
                rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
                if rd:
                    self.handle_request()
            self.server_close()

        def server_activate(self):
            self.base.server_activate(self)
            if self.callback:
                self.callback(self)

    class ServerThread(threading.Thread):

        def __init__(self, urlhandler, port):
            self.urlhandler = urlhandler
            self.port = int(port)
            threading.Thread.__init__(self)
            self.serving = False
            self.error = None

        def run(self):
            """Start the server."""
            try:
                # base/handler are class attributes consumed by DocServer.__init__.
                DocServer.base = http.server.HTTPServer
                DocServer.handler = DocHandler
                DocHandler.MessageClass = email.message.Message
                DocHandler.urlhandler = staticmethod(self.urlhandler)
                docsvr = DocServer(self.port, self.ready)
                self.docserver = docsvr
                docsvr.serve_until_quit()
            except Exception as e:
                # Surface the failure to the caller via thread.error.
                self.error = e

        def ready(self, server):
            self.serving = True
            self.host = server.host
            self.port = server.server_port
            self.url = 'http://%s:%d/' % (self.host, self.port)

        def stop(self):
            """Stop the server and this thread nicely"""
            self.docserver.quit = True
            self.serving = False
            self.url = None

    thread = ServerThread(urlhandler, port)
    thread.start()
    # Wait until thread.serving is True to make sure we are
    # really up before returning.
    while not thread.error and not thread.serving:
        time.sleep(.01)
    return thread
def _url_handler(url, content_type="text/html"):
    """The pydoc url handler for use with the pydoc server.
    If the content_type is 'text/css', the _pydoc.css style
    sheet is read and returned if it exits.
    If the content_type is 'text/html', then the result of
    get_html_page(url) is returned.
    """
    class _HTMLDoc(HTMLDoc):
        # HTMLDoc specialization that wires pages to the server's CSS and
        # routes file links through the getfile handler.

        def page(self, title, contents):
            """Format an HTML page."""
            css_path = "pydoc_data/_pydoc.css"
            css_link = (
                '<link rel="stylesheet" type="text/css" href="%s">' %
                css_path)
            return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Pydoc: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)

        def filelink(self, url, path):
            return '<a href="getfile?key=%s">%s</a>' % (url, path)

    html = _HTMLDoc()

    def html_navbar():
        # Top-of-page navigation bar with version info and search forms.
        version = html.escape("%s [%s, %s]" % (platform.python_version(),
                                               platform.python_build()[0],
                                               platform.python_compiler()))
        return """
<div style='float:left'>
Python %s<br>%s
</div>
<div style='float:right'>
<div style='text-align:center'>
<a href="index.html">Module Index</a>
: <a href="topics.html">Topics</a>
: <a href="keywords.html">Keywords</a>
</div>
<div>
<form action="get" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Get">
</form>
<form action="search" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Search">
</form>
</div>
</div>
""" % (version, html.escape(platform.platform(terse=True)))

    def html_index():
        """Module Index page."""
        def bltinlink(name):
            return '<a href="%s.html">%s</a>' % (name, name)
        heading = html.heading(
            '<big><big><strong>Index of Modules</strong></big></big>',
            '#ffffff', '#7799ee')
        names = [name for name in sys.builtin_module_names
                 if name != '__main__']
        contents = html.multicolumn(names, bltinlink)
        contents = [heading, '<p>' + html.bigsection(
            'Built-in Modules', '#ffffff', '#ee77aa', contents)]
        seen = {}
        for dir in sys.path:
            contents.append(html.index(dir, seen))
        contents.append(
            '<p align=right><font color="#909090" face="helvetica,'
            'arial"><strong>pydoc</strong> by Ka-Ping Yee'
            '<ping@lfw.org></font>')
        return 'Index of Modules', ''.join(contents)

    def html_search(key):
        """Search results page."""
        # scan for modules
        search_result = []
        def callback(path, modname, desc):
            if modname[-9:] == '.__init__':
                modname = modname[:-9] + ' (package)'
            search_result.append((modname, desc and '- ' + desc))
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore') # ignore problems during import
            ModuleScanner().run(callback, key)
        # format page
        def bltinlink(name):
            return '<a href="%s.html">%s</a>' % (name, name)
        results = []
        heading = html.heading(
            '<big><big><strong>Search Results</strong></big></big>',
            '#ffffff', '#7799ee')
        for name, desc in search_result:
            results.append(bltinlink(name) + desc)
        contents = heading + html.bigsection(
            'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
        return 'Search Results', contents

    def html_getfile(path):
        """Get and display a source file listing safely."""
        path = path.replace('%20', ' ')
        # tokenize.open honours PEP 263 coding cookies in the source file.
        with tokenize.open(path) as fp:
            lines = html.escape(fp.read())
        body = '<pre>%s</pre>' % lines
        heading = html.heading(
            '<big><big><strong>File Listing</strong></big></big>',
            '#ffffff', '#7799ee')
        contents = heading + html.bigsection(
            'File: %s' % path, '#ffffff', '#ee77aa', body)
        return 'getfile %s' % path, contents

    def html_topics():
        """Index of topic texts available."""
        def bltinlink(name):
            return '<a href="topic?key=%s">%s</a>' % (name, name)
        heading = html.heading(
            '<big><big><strong>INDEX</strong></big></big>',
            '#ffffff', '#7799ee')
        names = sorted(Helper.topics.keys())
        contents = html.multicolumn(names, bltinlink)
        contents = heading + html.bigsection(
            'Topics', '#ffffff', '#ee77aa', contents)
        return 'Topics', contents

    def html_keywords():
        """Index of keywords."""
        heading = html.heading(
            '<big><big><strong>INDEX</strong></big></big>',
            '#ffffff', '#7799ee')
        names = sorted(Helper.keywords.keys())
        def bltinlink(name):
            return '<a href="topic?key=%s">%s</a>' % (name, name)
        contents = html.multicolumn(names, bltinlink)
        contents = heading + html.bigsection(
            'Keywords', '#ffffff', '#ee77aa', contents)
        return 'Keywords', contents

    def html_topicpage(topic):
        """Topic or keyword help page."""
        buf = io.StringIO()
        htmlhelp = Helper(buf, buf)
        contents, xrefs = htmlhelp._gettopic(topic)
        if topic in htmlhelp.keywords:
            title = 'KEYWORD'
        else:
            title = 'TOPIC'
        heading = html.heading(
            '<big><big><strong>%s</strong></big></big>' % title,
            '#ffffff', '#7799ee')
        contents = '<pre>%s</pre>' % html.markup(contents)
        contents = html.bigsection(topic , '#ffffff','#ee77aa', contents)
        if xrefs:
            xrefs = sorted(xrefs.split())
            def bltinlink(name):
                return '<a href="topic?key=%s">%s</a>' % (name, name)
            xrefs = html.multicolumn(xrefs, bltinlink)
            xrefs = html.section('Related help topics: ',
                                 '#ffffff', '#ee77aa', xrefs)
        return ('%s %s' % (title, topic),
                ''.join((heading, contents, xrefs)))

    def html_getobj(url):
        # Resolve a dotted name to an object and document it.
        obj = locate(url, forceload=1)
        if obj is None and url != 'None':
            raise ValueError('could not find object')
        title = describe(obj)
        content = html.document(obj, url)
        return title, content

    def html_error(url, exc):
        heading = html.heading(
            '<big><big><strong>Error</strong></big></big>',
            '#ffffff', '#7799ee')
        contents = '<br>'.join(html.escape(line) for line in
                               format_exception_only(type(exc), exc))
        contents = heading + html.bigsection(url, '#ffffff', '#bb0000',
                                             contents)
        return "Error - %s" % url, contents

    def get_html_page(url):
        """Generate an HTML page for url."""
        complete_url = url
        if url.endswith('.html'):
            url = url[:-5]
        try:
            if url in ("", "index"):
                title, content = html_index()
            elif url == "topics":
                title, content = html_topics()
            elif url == "keywords":
                title, content = html_keywords()
            elif '=' in url:
                op, _, url = url.partition('=')
                if op == "search?key":
                    title, content = html_search(url)
                elif op == "getfile?key":
                    title, content = html_getfile(url)
                elif op == "topic?key":
                    # try topics first, then objects.
                    try:
                        title, content = html_topicpage(url)
                    except ValueError:
                        title, content = html_getobj(url)
                elif op == "get?key":
                    # try objects first, then topics.
                    if url in ("", "index"):
                        title, content = html_index()
                    else:
                        try:
                            title, content = html_getobj(url)
                        except ValueError:
                            title, content = html_topicpage(url)
                else:
                    raise ValueError('bad pydoc url')
            else:
                title, content = html_getobj(url)
        except Exception as exc:
            # Catch any errors and display them in an error page.
            title, content = html_error(complete_url, exc)
        return html.page(title, content)

    if url.startswith('/'):
        url = url[1:]
    if content_type == 'text/css':
        # Serve the stylesheet from next to this module on disk.
        path_here = os.path.dirname(os.path.realpath(__file__))
        css_path = os.path.join(path_here, url)
        with open(css_path) as fp:
            return ''.join(fp.readlines())
    elif content_type == 'text/html':
        return get_html_page(url)
    # Errors outside the url handler are caught by the server.
    raise TypeError('unknown content type %r for url %s' % (content_type, url))
def browse(port=0, *, open_browser=True):
    """Start the enhanced pydoc Web server and open a Web browser.
    Use port '0' to start the server on an arbitrary port.
    Set open_browser to False to suppress opening a browser.
    """
    import webbrowser
    serverthread = _start_server(_url_handler, port)
    if serverthread.error:
        print(serverthread.error)
        return
    if serverthread.serving:
        server_help_msg = 'Server commands: [b]rowser, [q]uit'
        if open_browser:
            webbrowser.open(serverthread.url)
        try:
            print('Server ready at', serverthread.url)
            print(server_help_msg)
            # Simple command loop: 'b' reopens the browser, 'q' quits.
            while serverthread.serving:
                cmd = input('server> ')
                cmd = cmd.lower()
                if cmd == 'q':
                    break
                elif cmd == 'b':
                    webbrowser.open(serverthread.url)
                else:
                    print(server_help_msg)
        except (KeyboardInterrupt, EOFError):
            print()
        finally:
            # Always shut the server thread down on the way out.
            if serverthread.serving:
                serverthread.stop()
                print('Server stopped')
# -------------------------------------------------- command-line interface
def ispath(x):
    """Return True if *x* is a string that contains a path separator."""
    return isinstance(x, str) and os.sep in x
def cli():
    """Command-line interface (looks at sys.argv to decide what to do)."""
    import getopt
    class BadUsage(Exception): pass

    # Scripts don't get the current directory in their path by default
    # unless they are run with the '-m' switch
    if '' not in sys.path:
        scriptdir = os.path.dirname(sys.argv[0])
        if scriptdir in sys.path:
            sys.path.remove(scriptdir)
        sys.path.insert(0, '.')

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
        writing = False
        start_server = False
        open_browser = False
        port = None
        for opt, val in opts:
            if opt == '-b':
                # browse: serve on an arbitrary port and open a browser
                start_server = True
                open_browser = True
            if opt == '-k':
                # keyword search and exit
                apropos(val)
                return
            if opt == '-p':
                # serve on an explicit port
                start_server = True
                port = val
            if opt == '-w':
                # write HTML docs instead of showing text help
                writing = True
        if start_server:
            if port is None:
                port = 0
            browse(port, open_browser=open_browser)
            return
        if not args: raise BadUsage
        for arg in args:
            if ispath(arg) and not os.path.exists(arg):
                print('file %r does not exist' % arg)
                break
            try:
                # A path to a .py file is imported and documented as a module.
                if ispath(arg) and os.path.isfile(arg):
                    arg = importfile(arg)
                if writing:
                    if ispath(arg) and os.path.isdir(arg):
                        writedocs(arg)
                    else:
                        writedoc(arg)
                else:
                    help.help(arg)
            except ErrorDuringImport as value:
                print(value)
    except (getopt.error, BadUsage):
        cmd = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        print("""pydoc - the Python documentation tool
{cmd} <name> ...
Show text documentation on something. <name> may be the name of a
Python keyword, topic, function, module, or package, or a dotted
reference to a class or function within a module or module in a
package. If <name> contains a '{sep}', it is used as the path to a
Python source file to document. If name is 'keywords', 'topics',
or 'modules', a listing of these things is displayed.
{cmd} -k <keyword>
Search for a keyword in the synopsis lines of all available modules.
{cmd} -p <port>
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
{cmd} -b
Start an HTTP server on an arbitrary unused port and open a Web browser
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
""".format(cmd=cmd, sep=os.sep))
# Allow running this module directly as the pydoc command-line tool.
if __name__ == '__main__':
    cli()
|
YannThorimbert/ThorPy-1.4.1
|
refs/heads/master
|
thorpy/painting/painters/inserterframe.py
|
6
|
from pygame import Surface, Rect, SRCALPHA
from thorpy.painting.painters.classicframe import ClassicFrame
from thorpy.miscgui import style
# get_fusion()
# get_surface()
# draw()
# personal helpers...
class InserterFrame(ClassicFrame):
    """Painter for an inserter element: a title on the left followed by a
    pressed text-entry frame filling the remaining width."""

    def __init__(self, size=None, color=None, pressed=False, space=8, dark=None,
                 light=None, thick=1, clip="auto", hovered=False):
        """Initialize the painter.

        size/color default to the theme's style values; *space* is the gap in
        pixels between the title and the text zone.

        Fix: ``hovered`` was previously forwarded to ClassicFrame without
        being defined anywhere, so constructing an InserterFrame always
        raised NameError.  It is now an explicit keyword parameter
        (appended last, defaulting to False, for backward compatibility).
        """
        size = style.XLARGE_SIZE if size is None else size
        color = style.DEF_COLOR2 if color is None else color
        # NOTE(review): assumes ClassicFrame.__init__ accepts a 'hovered'
        # keyword — confirm against classicframe.py.
        ClassicFrame.__init__(self,
                              size=size,
                              color=color,
                              pressed=pressed,
                              hovered=hovered,
                              dark=dark,
                              light=light,
                              thick=thick,
                              clip=clip)
        self.space = space
        # Rect of the text-entry area; populated by draw().
        self.txt_zone = None

    def get_fusion(self, title, center_title=None, hover=False):
        """Fusion the painter.img and the title.img and returns this fusion.
        center_title is ignored."""
        # Vertically center the title, pinned to the left edge.
        title.center_on(self.size)
        title._pos = (0, title._pos[1])
        title_length = title.img.get_size()[0]
        painter_img = self.get_surface(title_length)
##        painter_img.blit(title.img, title._pos)
        title.blit_on(painter_img)
        return painter_img

    def get_surface(self, title_length):
        """Return the drawn surface with this painter's clip applied."""
        surface = self.draw(title_length)
        surface.set_clip(self.clip)
        return surface

    def draw(self, title_length):
        """Render the transparent surface holding the text-entry frame."""
        # actual surface (transparent):
        surface = Surface(self.size, flags=SRCALPHA).convert_alpha()
        surface.fill((0, 0, 0, 0))
        # computing frame length: tot = title + space + frame
        frame_length = self.size[0] - title_length - self.space
        frame_size = (frame_length, self.size[1])
        self.txt_zone = Rect((title_length + self.space, 0), frame_size)
        frame_painter = ClassicFrame(frame_size, self.color, True, self.dark,
                                     self.light, self.thick)
        # frame in which text will be inserted:
        frame = frame_painter.get_surface()
        surface.blit(frame, (title_length + self.space, 0))
        return surface
|
mj10777/QGIS
|
refs/heads/master
|
tests/src/python/test_qgsshortcutsmanager.py
|
2
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsActionManager.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '28/05/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA
from qgis.core import QgsSettings
from qgis.gui import QgsShortcutsManager, QgsGui
from qgis.PyQt.QtCore import QCoreApplication
from qgis.PyQt.QtWidgets import QWidget, QAction, QShortcut
from qgis.testing import start_app, unittest
class TestQgsShortcutsManager(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # Point QSettings at a throwaway test profile and wipe it so each
        # test run starts from a clean settings state.
        QCoreApplication.setOrganizationName("QGIS_Test")
        QCoreApplication.setOrganizationDomain("QGIS_TestPyQgsWFSProviderGUI.com")
        QCoreApplication.setApplicationName("QGIS_TestPyQgsWFSProviderGUI")
        QgsSettings().clear()
        start_app()
    def testInstance(self):
        """ test retrieving global instance """
        self.assertTrue(QgsGui.shortcutsManager())
        # register an action to the singleton
        action = QAction('test', None)
        QgsGui.shortcutsManager().registerAction(action)
        # check that the same instance is returned
        self.assertEqual(QgsGui.shortcutsManager().listActions(), [action])
        # a freshly constructed manager is independent of the singleton
        s2 = QgsShortcutsManager()
        self.assertEqual(s2.listActions(), [])
    def testConstructor(self):
        """ test constructing managers"""
        s = QgsShortcutsManager(None, '/my_path/')
        self.assertEqual(s.settingsPath(), '/my_path/')
    def testSettingsPath(self):
        """ test that settings path is respected """
        QgsSettings().clear()
        # store sequences under two different settings paths
        s1 = QgsShortcutsManager(None, '/path1/')
        s2 = QgsShortcutsManager(None, '/path2/')
        action1 = QAction('action', None)
        s1.registerAction(action1)
        s1.setKeySequence(action1, 'B')
        action2 = QAction('action', None)
        s2.registerAction(action2)
        s2.setKeySequence(action2, 'C')
        # test retrieving: same-named actions must pick up the sequence
        # saved under their manager's own path
        r1 = QgsShortcutsManager(None, '/path1/')
        r2 = QgsShortcutsManager(None, '/path2/')
        raction1 = QAction('action', None)
        r1.registerAction(raction1)
        raction2 = QAction('action', None)
        r2.registerAction(raction2)
        self.assertEqual(raction1.shortcut().toString(), 'B')
        self.assertEqual(raction2.shortcut().toString(), 'C')
    def testRegisterAction(self):
        """ test registering actions """
        QgsSettings().clear()
        s = QgsShortcutsManager(None)
        action1 = QAction('action1', None)
        action1.setShortcut('x')
        self.assertTrue(s.registerAction(action1, 'A'))
        action2 = QAction('action2', None)
        action2.setShortcut('y')
        self.assertTrue(s.registerAction(action2, 'B'))
        self.assertCountEqual(s.listActions(), [action1, action2])
        # try re-registering an existing action - should fail, but leave action registered
        self.assertFalse(s.registerAction(action2, 'B'))
        self.assertCountEqual(s.listActions(), [action1, action2])
        # actions should have been set to default sequences
        self.assertEqual(action1.shortcut().toString(), 'A')
        self.assertEqual(action2.shortcut().toString(), 'B')
        # test that adding an action should set its shortcut automatically
        s.setKeySequence('action1', 'C')
        s.setKeySequence('action2', 'D')
        s = QgsShortcutsManager(None)
        self.assertTrue(s.registerAction(action1, 'A'))
        self.assertTrue(s.registerAction(action2, 'B'))
        # actions should have been set to previous shortcuts
        self.assertEqual(action1.shortcut().toString(), 'C')
        self.assertEqual(action2.shortcut().toString(), 'D')
        # test registering an action containing '&' in name
        s = QgsShortcutsManager(None)
        action = QAction('&action1', None)
        self.assertTrue(s.registerAction(action))
        # NOTE(review): this asserts on action1, not the '&action1' action
        # just registered - possibly a copy/paste slip; confirm intent.
        self.assertEqual(action1.shortcut().toString(), 'C')
    def testRegisterShortcut(self):
        """ test registering shortcuts """
        QgsSettings().clear()
        s = QgsShortcutsManager(None)
        shortcut1 = QShortcut(None)
        shortcut1.setKey('x')
        shortcut1.setObjectName('shortcut1')
        self.assertTrue(s.registerShortcut(shortcut1, 'A'))
        shortcut2 = QShortcut(None)
        shortcut2.setKey('y')
        shortcut2.setObjectName('shortcut2')
        self.assertTrue(s.registerShortcut(shortcut2, 'B'))
        # shortcuts should have been set to default sequences
        self.assertEqual(shortcut1.key().toString(), 'A')
        self.assertEqual(shortcut2.key().toString(), 'B')
        # test that adding a shortcut should set its sequence automatically
        s.setKeySequence(shortcut1, 'C')
        s.setKeySequence(shortcut2, 'D')
        s = QgsShortcutsManager(None)
        self.assertTrue(s.registerShortcut(shortcut1, 'A'))
        self.assertTrue(s.registerShortcut(shortcut2, 'B'))
        # shortcuts should have been set to previous sequences
        self.assertEqual(shortcut1.key().toString(), 'C')
        self.assertEqual(shortcut2.key().toString(), 'D')
    def testRegisterAll(self):
        """ test registering all children """
        # widget tree: w contains action1/shortcut1 directly, and a child
        # widget w2 holding action2/shortcut2
        w = QWidget()
        action1 = QAction('action1', w)
        shortcut1 = QShortcut(w)
        shortcut1.setObjectName('shortcut1')
        w2 = QWidget(w)
        action2 = QAction('action2', w2)
        shortcut2 = QShortcut(w2)
        shortcut2.setObjectName('shortcut2')
        # recursive
        s = QgsShortcutsManager()
        s.registerAllChildActions(w, True)
        self.assertEqual(set(s.listActions()), set([action1, action2]))
        s.registerAllChildShortcuts(w, True)
        self.assertEqual(set(s.listShortcuts()), set([shortcut1, shortcut2]))
        # non recursive
        s = QgsShortcutsManager()
        s.registerAllChildActions(w, False)
        self.assertEqual(set(s.listActions()), set([action1]))
        s.registerAllChildShortcuts(w, False)
        self.assertEqual(set(s.listShortcuts()), set([shortcut1]))
        # recursive
        s = QgsShortcutsManager()
        s.registerAllChildren(w, True)
        self.assertEqual(set(s.listActions()), set([action1, action2]))
        self.assertEqual(set(s.listShortcuts()), set([shortcut1, shortcut2]))
        # non recursive
        s = QgsShortcutsManager()
        s.registerAllChildren(w, False)
        self.assertEqual(set(s.listActions()), set([action1]))
        self.assertEqual(set(s.listShortcuts()), set([shortcut1]))
    def testUnregister(self):
        """ test unregistering from manager """
        QgsSettings().clear()
        s = QgsShortcutsManager(None)
        shortcut1 = QShortcut(None)
        shortcut1.setKey('x')
        shortcut1.setObjectName('shortcut1')
        shortcut2 = QShortcut(None)
        shortcut2.setKey('y')
        shortcut2.setObjectName('shortcut2')
        action1 = QAction('action1', None)
        action1.setShortcut('x')
        action2 = QAction('action2', None)
        action2.setShortcut('y')
        # try unregistering objects not registered in manager
        self.assertFalse(s.unregisterShortcut(shortcut1))
        self.assertFalse(s.unregisterAction(action1))
        # try unregistering objects from manager
        s.registerShortcut(shortcut1)
        s.registerShortcut(shortcut2)
        s.registerAction(action1)
        s.registerAction(action2)
        self.assertEqual(set(s.listActions()), set([action1, action2]))
        self.assertEqual(set(s.listShortcuts()), set([shortcut1, shortcut2]))
        self.assertTrue(s.unregisterAction(action1))
        self.assertTrue(s.unregisterShortcut(shortcut1))
        self.assertEqual(set(s.listActions()), set([action2]))
        self.assertEqual(set(s.listShortcuts()), set([shortcut2]))
        self.assertTrue(s.unregisterAction(action2))
        self.assertTrue(s.unregisterShortcut(shortcut2))
def testList(self):
    """Registered objects show up in listActions/listShortcuts/listAll."""
    QgsSettings().clear()
    manager = QgsShortcutsManager(None)

    # an empty manager lists nothing
    self.assertEqual(manager.listActions(), [])
    self.assertEqual(manager.listShortcuts(), [])
    self.assertEqual(manager.listAll(), [])

    shortcut1 = QShortcut(None)
    shortcut2 = QShortcut(None)
    action1 = QAction('action1', None)
    action2 = QAction('action2', None)
    for shortcut in (shortcut1, shortcut2):
        manager.registerShortcut(shortcut)
    for action in (action1, action2):
        manager.registerAction(action)

    # compare as sets so ordering is irrelevant
    self.assertEqual(set(manager.listActions()), {action1, action2})
    self.assertEqual(set(manager.listShortcuts()), {shortcut1, shortcut2})
    self.assertEqual(set(manager.listAll()),
                     {action1, action2, shortcut1, shortcut2})
def testDefault(self):
    """Default key sequences: empty before registration, then as registered."""
    QgsSettings().clear()
    manager = QgsShortcutsManager(None)
    shortcut1 = QShortcut(None)
    shortcut2 = QShortcut(None)
    action1 = QAction('action1', None)
    action2 = QAction('action2', None)

    # unregistered objects report an empty default sequence
    for obj in (shortcut1, action1):
        self.assertEqual(manager.defaultKeySequence(obj), '')
        self.assertEqual(manager.objectDefaultKeySequence(obj), '')

    manager.registerShortcut(shortcut1, 'A')
    manager.registerShortcut(shortcut2, 'B')
    manager.registerAction(action1, 'C')
    manager.registerAction(action2, 'D')

    # defaults are reported by both the generic and object-specific APIs
    for obj, sequence in ((shortcut1, 'A'), (shortcut2, 'B'),
                          (action1, 'C'), (action2, 'D')):
        self.assertEqual(manager.defaultKeySequence(obj), sequence)
        self.assertEqual(manager.objectDefaultKeySequence(obj), sequence)
def testSetSequence(self):
    """ test setting key sequences

    Sequences set through the manager are persisted (via QgsSettings, which
    the test clears up front), so a fresh manager restores them when
    objects with the same names are registered again.
    """
    QgsSettings().clear()
    s = QgsShortcutsManager(None)
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    shortcut2 = QShortcut(None)
    shortcut2.setObjectName('shortcut2')
    action1 = QAction('action1', None)
    action2 = QAction('action2', None)
    s.registerShortcut(shortcut1, 'A')
    s.registerShortcut(shortcut2, 'B')
    s.registerAction(action1, 'C')
    s.registerAction(action2, 'D')
    # test setting by action/shortcut
    self.assertTrue(s.setKeySequence(shortcut1, 'E'))
    self.assertTrue(s.setKeySequence(shortcut2, 'F'))
    self.assertTrue(s.setKeySequence(action1, 'G'))
    self.assertTrue(s.setKeySequence(action2, 'H'))
    # test that action/shortcuts have been updated
    self.assertEqual(shortcut1.key().toString(), 'E')
    self.assertEqual(shortcut2.key().toString(), 'F')
    self.assertEqual(action1.shortcut().toString(), 'G')
    self.assertEqual(action2.shortcut().toString(), 'H')
    # new manager
    s = QgsShortcutsManager(None)
    # new shortcuts
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    shortcut2 = QShortcut(None)
    shortcut2.setObjectName('shortcut2')
    action1 = QAction('action1', None)
    action2 = QAction('action2', None)
    # register them
    s.registerShortcut(shortcut1, 'A')
    s.registerShortcut(shortcut2, 'B')
    s.registerAction(action1, 'C')
    s.registerAction(action2, 'D')
    # check that previously set sequence has been restored
    # (persisted settings override the registration defaults A-D)
    self.assertEqual(shortcut1.key().toString(), 'E')
    self.assertEqual(shortcut2.key().toString(), 'F')
    self.assertEqual(action1.shortcut().toString(), 'G')
    self.assertEqual(action2.shortcut().toString(), 'H')
    # same test, using setObjectKeySequence
    QgsSettings().clear()
    s = QgsShortcutsManager(None)
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    action1 = QAction('action1', None)
    s.registerShortcut(shortcut1, 'A')
    s.registerAction(action1, 'C')
    self.assertTrue(s.setObjectKeySequence(shortcut1, 'E'))
    self.assertTrue(s.setObjectKeySequence(action1, 'G'))
    self.assertEqual(shortcut1.key().toString(), 'E')
    self.assertEqual(action1.shortcut().toString(), 'G')
    s = QgsShortcutsManager(None)
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    action1 = QAction('action1', None)
    s.registerShortcut(shortcut1, 'A')
    s.registerAction(action1, 'C')
    self.assertEqual(shortcut1.key().toString(), 'E')
    self.assertEqual(action1.shortcut().toString(), 'G')
    # same test, using setKeySequence by name
    QgsSettings().clear()
    s = QgsShortcutsManager(None)
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    action1 = QAction('action1', None)
    s.registerShortcut(shortcut1, 'A')
    s.registerAction(action1, 'C')
    # setting a sequence for an unknown name reports failure
    self.assertFalse(s.setKeySequence('invalid_name', 'E'))
    self.assertTrue(s.setKeySequence('shortcut1', 'E'))
    self.assertTrue(s.setKeySequence('action1', 'G'))
    self.assertEqual(shortcut1.key().toString(), 'E')
    self.assertEqual(action1.shortcut().toString(), 'G')
    s = QgsShortcutsManager(None)
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    action1 = QAction('action1', None)
    s.registerShortcut(shortcut1, 'A')
    s.registerAction(action1, 'C')
    self.assertEqual(shortcut1.key().toString(), 'E')
    self.assertEqual(action1.shortcut().toString(), 'G')
def testBySequence(self):
    """ test retrieving by sequence

    objectForSequence finds either kind; shortcutForSequence and
    actionForSequence only match their own kind. Lookups use the CURRENT
    sequence, not the registration default.
    """
    QgsSettings().clear()
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    shortcut2 = QShortcut(None)
    shortcut2.setObjectName('shortcut2')
    action1 = QAction('action1', None)
    action2 = QAction('action2', None)
    s = QgsShortcutsManager(None)
    # nothing registered yet, so no sequence resolves
    self.assertFalse(s.actionForSequence('E'))
    self.assertFalse(s.objectForSequence('F'))
    s.registerShortcut(shortcut1, 'E')
    s.registerShortcut(shortcut2, 'A')
    s.registerAction(action1, 'F')
    s.registerAction(action2, 'B')
    # use another way of registering sequences
    # (overrides the defaults A/B for shortcut2/action2)
    self.assertTrue(s.setKeySequence(shortcut2, 'G'))
    self.assertTrue(s.setKeySequence(action2, 'H'))
    self.assertEqual(s.objectForSequence('E'), shortcut1)
    self.assertEqual(s.objectForSequence('F'), action1)
    self.assertEqual(s.objectForSequence('G'), shortcut2)
    self.assertEqual(s.objectForSequence('H'), action2)
    # the superseded default sequences no longer resolve
    self.assertFalse(s.objectForSequence('A'))
    self.assertFalse(s.objectForSequence('B'))
    self.assertEqual(s.shortcutForSequence('E'), shortcut1)
    self.assertFalse(s.shortcutForSequence('F'))
    self.assertEqual(s.shortcutForSequence('G'), shortcut2)
    self.assertFalse(s.shortcutForSequence('H'))
    self.assertFalse(s.actionForSequence('E'))
    self.assertEqual(s.actionForSequence('F'), action1)
    self.assertFalse(s.actionForSequence('G'))
    self.assertEqual(s.actionForSequence('H'), action2)
def testByName(self):
    """Look up registered actions and shortcuts by their object name."""
    QgsSettings().clear()
    shortcut1 = QShortcut(None)
    shortcut1.setObjectName('shortcut1')
    shortcut2 = QShortcut(None)
    shortcut2.setObjectName('shortcut2')
    action1 = QAction('action1', None)
    action2 = QAction('action2', None)
    manager = QgsShortcutsManager(None)

    # lookups fail while nothing is registered
    self.assertFalse(manager.actionByName('action1'))
    self.assertFalse(manager.shortcutByName('shortcut1'))

    for shortcut in (shortcut1, shortcut2):
        manager.registerShortcut(shortcut)
    for action in (action1, action2):
        manager.registerAction(action)

    # shortcutByName resolves only shortcuts...
    self.assertEqual(manager.shortcutByName('shortcut1'), shortcut1)
    self.assertFalse(manager.shortcutByName('action1'))
    self.assertEqual(manager.shortcutByName('shortcut2'), shortcut2)
    self.assertFalse(manager.shortcutByName('action2'))

    # ...and actionByName resolves only actions
    self.assertFalse(manager.actionByName('shortcut1'))
    self.assertEqual(manager.actionByName('action1'), action1)
    self.assertFalse(manager.actionByName('shortcut2'))
    self.assertEqual(manager.actionByName('action2'), action2)
# Run this module's tests when executed directly.
if __name__ == '__main__':
    unittest.main()
|
konradxyz/cloudify-manager
|
refs/heads/master
|
plugins/windows-plugin-installer/windows_plugin_installer/tests/resources/mock-plugin/mock_for_test/module.py
|
13
|
# **************************************************************************
# * Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# ***************************************************************************/
# Module-level marker attribute; presumably inspected by the plugin-installer
# tests to confirm this mock module imports correctly — TODO confirm caller.
var = "var"
|
russel/pychapel
|
refs/heads/master
|
docs/source/examples/test_arbitrary_chapel_use.py
|
2
|
from pych.extern import Chapel
import os.path
# Directory containing this file; used to locate the bundled Chapel sources.
currentloc = os.path.dirname(os.path.realpath(__file__))


@Chapel(module_dirs=[os.path.join(currentloc + '/sfiles/chapel/')], sfile="user.chpl")
def useArbitrary():
    """Chapel-backed stub: the body is supplied by sfiles/chapel/user.chpl
    via the @Chapel decorator; the Python body is intentionally empty."""
    return None


if __name__ == "__main__":
    useArbitrary()
import testcase
# contains the general testing method, which allows us to gather output
def test_using_other_chapel_code():
    """
    ensures that a sfile definition of a Chapel function with a use statement
    will work when the module being used lives in a directory specified with
    the decorator argument "module_dirs"
    """
    # run this file as a script and capture everything it prints
    captured = testcase.runpy(os.path.realpath(__file__))
    expected_tail = '6\n14 14 3 14 14\n14 14 3 14 14\n(contents = 3.0)\n(contents = 3.0)\n'
    assert captured.endswith(expected_tail)
|
EmmanuelJohnson/ssquiz
|
refs/heads/master
|
flask/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/oursql.py
|
59
|
# mysql/oursql.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+oursql
:name: OurSQL
:dbapi: oursql
:connectstring: mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
:url: http://packages.python.org/oursql/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
"""
import re
from .base import (BIT, MySQLDialect, MySQLExecutionContext)
from ... import types as sqltypes, util
class _oursqlBIT(BIT):
    """BIT variant that skips SQLAlchemy-side result conversion, since the
    oursql driver already decodes MySQL bit values itself."""

    def result_processor(self, dialect, coltype):
        """oursql already converts mysql bits, so."""
        # Returning None tells SQLAlchemy no post-processing is needed.
        return None
class MySQLExecutionContext_oursql(MySQLExecutionContext):
    """Execution context that exposes the '_oursql_plain_query' option."""

    @property
    def plain_query(self):
        # True when the statement must bypass oursql's parameterized API.
        return self.execution_options.get('_oursql_plain_query', False)
class MySQLDialect_oursql(MySQLDialect):
    """MySQL dialect built on the oursql DBAPI.

    oursql uses true parameterized queries, but several MySQL
    administrative statements refuse parameterization; those are routed
    through a "plain query" execution option (see :meth:`do_execute`).
    """

    driver = 'oursql'

    if util.py2k:
        # oursql handles unicode natively under Python 2.
        supports_unicode_binds = True
        supports_unicode_statements = True

    supports_native_decimal = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True
    execution_ctx_cls = MySQLExecutionContext_oursql

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _oursqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        # Imported lazily so this module can load without oursql installed.
        return __import__('oursql')

    def do_execute(self, cursor, statement, parameters, context=None):
        """Provide an implementation of
        *cursor.execute(statement, parameters)*."""

        if context and context.plain_query:
            # Bypass parameterization for statements that reject it.
            cursor.execute(statement, plain_query=True)
        else:
            cursor.execute(statement, parameters)

    def do_begin(self, connection):
        connection.cursor().execute('BEGIN', plain_query=True)

    def _xa_query(self, connection, query, xid):
        """Run an XA statement, escaping and quoting *xid* manually
        (XA statements cannot be parameterized)."""
        if util.py2k:
            arg = connection.connection._escape_string(xid)
        else:
            charset = self._connection_charset
            arg = connection.connection._escape_string(
                xid.encode(charset)).decode(charset)
        arg = "'%s'" % arg
        connection.execution_options(
            _oursql_plain_query=True).execute(query % arg)

    # Because mysql is bad, these methods have to be
    # reimplemented to use _PlainQuery. Basically, some queries
    # refuse to return any data if they're run through
    # the parameterized query API, or refuse to be parameterized
    # in the first place.
    def do_begin_twophase(self, connection, xid):
        self._xa_query(connection, 'XA BEGIN %s', xid)

    def do_prepare_twophase(self, connection, xid):
        self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA PREPARE %s', xid)

    def do_rollback_twophase(self, connection, xid, is_prepared=True,
                             recover=False):
        if not is_prepared:
            self._xa_query(connection, 'XA END %s', xid)
        self._xa_query(connection, 'XA ROLLBACK %s', xid)

    def do_commit_twophase(self, connection, xid, is_prepared=True,
                           recover=False):
        if not is_prepared:
            self.do_prepare_twophase(connection, xid)
        self._xa_query(connection, 'XA COMMIT %s', xid)

    # Q: why didn't we need all these "plain_query" overrides earlier ?
    # am i on a newer/older version of OurSQL ?
    def has_table(self, connection, table_name, schema=None):
        return MySQLDialect.has_table(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema
        )

    def get_table_options(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_table_options(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_columns(self, connection, table_name, schema=None, **kw):
        return MySQLDialect.get_columns(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            table_name,
            schema=schema,
            **kw
        )

    def get_view_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_view_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema=schema,
            **kw
        )

    def get_table_names(self, connection, schema=None, **kw):
        return MySQLDialect.get_table_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            schema
        )

    def get_schema_names(self, connection, **kw):
        return MySQLDialect.get_schema_names(
            self,
            connection.connect().execution_options(_oursql_plain_query=True),
            **kw
        )

    def initialize(self, connection):
        return MySQLDialect.initialize(
            self,
            connection.execution_options(_oursql_plain_query=True)
        )

    def _show_create_table(self, connection, table, charset=None,
                           full_name=None):
        return MySQLDialect._show_create_table(
            self,
            connection.contextual_connect(close_with_result=True).
            execution_options(_oursql_plain_query=True),
            table, charset, full_name
        )

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.ProgrammingError):
            # oursql raises ProgrammingError with errno None for
            # operations on a closed connection.
            return e.errno is None and 'cursor' not in e.args[1] \
                and e.args[1].endswith('closed')
        else:
            return e.errno in (2006, 2013, 2014, 2045, 2055)

    def create_connect_args(self, url):
        """Translate a SQLAlchemy URL into oursql connect() arguments."""
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        util.coerce_kw_type(opts, 'port', int)
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'autoping', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        util.coerce_kw_type(opts, 'default_charset', bool)
        if opts.pop('default_charset', False):
            opts['charset'] = None
        else:
            util.coerce_kw_type(opts, 'charset', str)
        opts['use_unicode'] = opts.get('use_unicode', True)
        util.coerce_kw_type(opts, 'use_unicode', bool)

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        opts.setdefault('found_rows', True)

        # Collect ssl_* options into the nested dict oursql expects.
        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
                    'ssl_capath', 'ssl_cipher']:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        return [[], opts]

    def _get_server_version_info(self, connection):
        """Parse the server version string into a tuple of ints/strings."""
        dbapi_con = connection.connection
        version = []
        # Raw string: '\-' in a plain literal is an invalid escape sequence
        # (DeprecationWarning on modern Pythons); the pattern is unchanged.
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.server_info):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _extract_error_code(self, exception):
        return exception.errno

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        return connection.connection.charset

    def _compat_fetchall(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        """oursql isn't super-broken like MySQLdb, yaaay."""
        return rp.fetchone()

    def _compat_first(self, rp, charset=None):
        return rp.first()
# Module-level entry point picked up by SQLAlchemy's dialect loader.
dialect = MySQLDialect_oursql
|
blademainer/intellij-community
|
refs/heads/master
|
python/testData/inspections/importFromModuleStar/source.py
|
83
|
from target import *
<error descr="Unresolved reference 'xyzzy'">x<caret>yzzy</error>
shazam()
|
goliveirab/odoo
|
refs/heads/8.0
|
addons/stock/wizard/stock_transfer_details.py
|
169
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.odoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from datetime import datetime
class stock_transfer_details(models.TransientModel):
    """Transient wizard used to enter/adjust transfer details for one picking."""
    _name = 'stock.transfer_details'
    _description = 'Picking wizard'

    picking_id = fields.Many2one('stock.picking', 'Picking')
    # Lines with a product are "items"; lines without one are pack operations.
    item_ids = fields.One2many('stock.transfer_details_items', 'transfer_id', 'Items', domain=[('product_id', '!=', False)])
    packop_ids = fields.One2many('stock.transfer_details_items', 'transfer_id', 'Packs', domain=[('product_id', '=', False)])
    picking_source_location_id = fields.Many2one('stock.location', string="Head source location", related='picking_id.location_id', store=False, readonly=True)
    picking_destination_location_id = fields.Many2one('stock.location', string="Head destination location", related='picking_id.location_dest_id', store=False, readonly=True)

    def default_get(self, cr, uid, fields, context=None):
        """Prefill the wizard lines from the active picking's pack operations."""
        if context is None: context = {}
        res = super(stock_transfer_details, self).default_get(cr, uid, fields, context=context)
        picking_ids = context.get('active_ids', [])
        active_model = context.get('active_model')

        if not picking_ids or len(picking_ids) != 1:
            # Partial Picking Processing may only be done for one picking at a time
            return res
        # Trailing comma makes this genuine tuple membership; without it,
        # ('stock.picking') is just a string and 'in' does a substring test.
        assert active_model in ('stock.picking',), 'Bad context propagation'
        picking_id, = picking_ids
        picking = self.pool.get('stock.picking').browse(cr, uid, picking_id, context=context)
        items = []
        packs = []
        if not picking.pack_operation_ids:
            # Generate the pack operations if the picking has none yet.
            picking.do_prepare_partial()
        for op in picking.pack_operation_ids:
            item = {
                'packop_id': op.id,
                'product_id': op.product_id.id,
                'product_uom_id': op.product_uom_id.id,
                'quantity': op.product_qty,
                'package_id': op.package_id.id,
                'lot_id': op.lot_id.id,
                'sourceloc_id': op.location_id.id,
                'destinationloc_id': op.location_dest_id.id,
                'result_package_id': op.result_package_id.id,
                'date': op.date,
                'owner_id': op.owner_id.id,
            }
            if op.product_id:
                items.append(item)
            elif op.package_id:
                packs.append(item)
        res.update(item_ids=items)
        res.update(packop_ids=packs)
        return res

    @api.one
    def do_detailed_transfer(self):
        """Sync the wizard lines back to pack operations and run the transfer."""
        processed_ids = []
        # Create new and update existing pack operations
        for lstits in [self.item_ids, self.packop_ids]:
            for prod in lstits:
                pack_datas = {
                    'product_id': prod.product_id.id,
                    'product_uom_id': prod.product_uom_id.id,
                    'product_qty': prod.quantity,
                    'package_id': prod.package_id.id,
                    'lot_id': prod.lot_id.id,
                    'location_id': prod.sourceloc_id.id,
                    'location_dest_id': prod.destinationloc_id.id,
                    'result_package_id': prod.result_package_id.id,
                    'date': prod.date if prod.date else datetime.now(),
                    'owner_id': prod.owner_id.id,
                }
                if prod.packop_id:
                    # no_recompute avoids recomputing related quantities per write
                    prod.packop_id.with_context(no_recompute=True).write(pack_datas)
                    processed_ids.append(prod.packop_id.id)
                else:
                    pack_datas['picking_id'] = self.picking_id.id
                    packop_id = self.env['stock.pack.operation'].create(pack_datas)
                    processed_ids.append(packop_id.id)
        # Delete the others
        packops = self.env['stock.pack.operation'].search(['&', ('picking_id', '=', self.picking_id.id), '!', ('id', 'in', processed_ids)])
        packops.unlink()

        # Execute the transfer of the picking
        self.picking_id.do_transfer()
        return True

    @api.multi
    def wizard_view(self):
        """Return the act_window dict that re-opens this wizard."""
        view = self.env.ref('stock.view_stock_enter_transfer_details')

        return {
            'name': _('Enter transfer details'),
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'stock.transfer_details',
            'views': [(view.id, 'form')],
            'view_id': view.id,
            'target': 'new',
            'res_id': self.ids[0],
            'context': self.env.context,
        }
class stock_transfer_details_items(models.TransientModel):
    """One line of the transfer-details wizard (a product item or a pack op)."""
    _name = 'stock.transfer_details_items'
    _description = 'Picking wizard items'

    transfer_id = fields.Many2one('stock.transfer_details', 'Transfer')
    packop_id = fields.Many2one('stock.pack.operation', 'Operation')
    product_id = fields.Many2one('product.product', 'Product')
    product_uom_id = fields.Many2one('product.uom', 'Product Unit of Measure')
    quantity = fields.Float('Quantity', digits=dp.get_precision('Product Unit of Measure'), default = 1.0)
    package_id = fields.Many2one('stock.quant.package', 'Source package', domain="['|', ('location_id', 'child_of', sourceloc_id), ('location_id','=',False)]")
    lot_id = fields.Many2one('stock.production.lot', 'Lot/Serial Number')
    sourceloc_id = fields.Many2one('stock.location', 'Source Location', required=True)
    destinationloc_id = fields.Many2one('stock.location', 'Destination Location', required=True)
    result_package_id = fields.Many2one('stock.quant.package', 'Destination package', domain="['|', ('location_id', 'child_of', destinationloc_id), ('location_id','=',False)]")
    date = fields.Datetime('Date')
    owner_id = fields.Many2one('res.partner', 'Owner', help="Owner of the quants")

    @api.multi
    def split_quantities(self):
        """Split one unit off every selected line with quantity > 1 into a
        new, unlinked line, then re-open the wizard."""
        for det in self:
            if det.quantity>1:
                det.quantity = (det.quantity-1)
                new_id = det.copy(context=self.env.context)
                new_id.quantity = 1
                # the copy represents a brand-new operation, not an existing one
                new_id.packop_id = False
        if self and self[0]:
            return self[0].transfer_id.wizard_view()

    @api.multi
    def put_in_pack(self):
        """Assign every selected line lacking a destination package to a
        single newly created package, then re-open the wizard."""
        newpack = None
        for packop in self:
            if not packop.result_package_id:
                if not newpack:
                    # NOTE(review): old-API create() via self.pool inside a
                    # new-API method — works on 8.0 but mixes API styles;
                    # confirm this is intentional.
                    newpack = self.pool['stock.quant.package'].create(self._cr, self._uid, {'location_id': packop.destinationloc_id.id if packop.destinationloc_id else False}, self._context)
                packop.result_package_id = newpack
        if self and self[0]:
            return self[0].transfer_id.wizard_view()

    @api.multi
    def product_id_change(self, product, uom=False):
        """Onchange: default the unit of measure from the selected product."""
        result = {}
        if product:
            prod = self.env['product.product'].browse(product)
            result['product_uom_id'] = prod.uom_id and prod.uom_id.id
        return {'value': result, 'domain': {}, 'warning':{} }

    @api.multi
    def source_package_change(self, sourcepackage):
        """Onchange: default the source location from the selected package."""
        result = {}
        if sourcepackage:
            pack = self.env['stock.quant.package'].browse(sourcepackage)
            result['sourceloc_id'] = pack.location_id and pack.location_id.id
        return {'value': result, 'domain': {}, 'warning':{} }
|
dominicelse/scipy
|
refs/heads/master
|
benchmarks/benchmarks/go_benchmark_functions/go_funcs_Z.py
|
47
|
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
from numpy import abs, sum, sign, arange
from .go_benchmark import Benchmark
class Zacharov(Benchmark):
    r"""
    Zacharov objective function.

    This class defines the Zacharov [1]_ global optimization problem. This
    is a multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Zacharov}}(x) = \sum_{i=1}^{n} x_i^2 + \left ( \frac{1}{2}
                                 \sum_{i=1}^{n} i x_i \right )^2
                                 + \left ( \frac{1}{2} \sum_{i=1}^{n} i x_i
                                 \right )^4

    Here, :math:`n` represents the number of dimensions and
    :math:`x_i \in [-5, 10]` for :math:`i = 1, ..., n`.

    *Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for
    :math:`i = 1, ..., n`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        # identical bounds [-5, 10] for every coordinate
        self._bounds = [(-5.0, 10.0)] * self.N
        self.custom_bounds = ([-1, 1], [-1, 1])

        self.global_optimum = [[0] * self.N]
        self.fglob = 0.0
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += 1

        # quadratic term plus even powers of the index-weighted half-sum
        squared = sum(x ** 2)
        half_weighted = 0.5 * sum(arange(1, self.N + 1) * x)
        return squared + half_weighted ** 2 + half_weighted ** 4
class ZeroSum(Benchmark):
    r"""
    ZeroSum objective function.

    This class defines the ZeroSum [1]_ global optimization problem. This
    is a multimodal minimization problem defined as follows:

    .. math::

        f_{\text{ZeroSum}}(x) = \begin{cases}
                                0 & \textrm{if} \sum_{i=1}^n x_i = 0 \\
                                1 + \left(10000 \left |\sum_{i=1}^n x_i\right|
                                \right)^{0.5} & \textrm{otherwise}
                                \end{cases}

    Here, :math:`n` represents the number of dimensions and
    :math:`x_i \in [-10, 10]` for :math:`i = 1, ..., n`.

    *Global optimum*: :math:`f(x) = 0` where :math:`\sum_{i=1}^n x_i = 0`

    .. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        # identical bounds [-10, 10] for every coordinate
        self._bounds = [(-10.0, 10.0)] * self.N

        self.global_optimum = [[]]
        self.fglob = 0.0
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += 1

        # tolerance for "sums to zero" with floating-point inputs
        total = abs(sum(x))
        if total < 3e-16:
            return 0.0
        return 1.0 + (10000.0 * total) ** 0.5
class Zettl(Benchmark):
    r"""
    Zettl objective function.

    This class defines the Zettl [1]_ global optimization problem. This is a
    multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Zettl}}(x) = \frac{1}{4} x_{1} + \left(x_{1}^{2} - 2 x_{1}
                              + x_{2}^{2}\right)^{2}

    with :math:`x_i \in [-1, 5]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x) = -0.0037912` for :math:`x = [-0.029896, 0.0]`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        # NOTE(review): the docstring above says x_i in [-1, 5], but the
        # coded bounds are [-5, 10] — confirm which matches the reference.
        self._bounds = list(zip([-5.0] * self.N, [10.0] * self.N))

        self.global_optimum = [[-0.02989597760285287, 0.0]]
        self.fglob = -0.003791237220468656

    def fun(self, x, *args):
        self.nfev += 1

        # (x1^2 + x2^2 - 2 x1)^2 + x1/4, matching the formula above
        return (x[0] ** 2 + x[1] ** 2 - 2 * x[0]) ** 2 + 0.25 * x[0]
class Zimmerman(Benchmark):
    r"""
    Zimmerman objective function.

    This class defines the Zimmerman [1]_ global optimization problem. This
    is a multimodal minimization problem defined as follows:

    .. math::

        f_{\text{Zimmerman}}(x) = \max \left[Zh1(x), Zp(Zh2(x))
                                  \textrm{sgn}(Zh2(x)), Zp(Zh3(x))
                                  \textrm{sgn}(Zh3(x)),
                                  Zp(-x_1)\textrm{sgn}(x_1),
                                  Zp(-x_2)\textrm{sgn}(x_2) \right]

    Where, in this exercise:

    .. math::

        \begin{cases}
        Zh1(x) = 9 - x_1 - x_2 \\
        Zh2(x) = (x_1 - 3)^2 + (x_2 - 2)^2 \\
        Zh3(x) = x_1x_2 - 14 \\
        Zp(t) = 100(1 + t)
        \end{cases}

    Where :math:`x` is a vector and :math:`t` is a scalar.

    Here, :math:`x_i \in [0, 100]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x) = 0` for :math:`x = [7, 2]`

    .. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015

    TODO implementation from Gavana
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = [(0.0, 100.0)] * self.N
        self.custom_bounds = ([0.0, 8.0], [0.0, 8.0])

        self.global_optimum = [[7.0, 2.0]]
        self.fglob = 0.0

    def fun(self, x, *args):
        self.nfev += 1

        def penalty(t):
            # Zp(t) = 100 (1 + t)
            return 100.0 * (1.0 + t)

        h1 = 9.0 - x[0] - x[1]
        h2 = (x[0] - 3.0) ** 2.0 + (x[1] - 2.0) ** 2.0 - 16.0
        h3 = x[0] * x[1] - 14.0
        return max(h1,
                   penalty(h2) * sign(h2),
                   penalty(h3) * sign(h3),
                   penalty(-x[0]) * sign(x[0]),
                   penalty(-x[1]) * sign(x[1]))
class Zirilli(Benchmark):
    r"""
    Zirilli objective function.

    This class defines the Zirilli [1]_ global optimization problem. This is a
    unimodal minimization problem defined as follows:

    .. math::

        f_{\text{Zirilli}}(x) = 0.25x_1^4 - 0.5x_1^2 + 0.1x_1 + 0.5x_2^2

    Here, :math:`n` represents the number of dimensions and
    :math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.

    *Global optimum*: :math:`f(x) = -0.3523` for :math:`x = [-1.0465, 0]`

    .. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
    For Global Optimization Problems Int. Journal of Mathematical Modelling
    and Numerical Optimisation, 2013, 4, 150-194.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)

        self._bounds = [(-10.0, 10.0)] * self.N
        self.custom_bounds = ([-2.0, 2.0], [-2.0, 2.0])

        self.global_optimum = [[-1.0465, 0.0]]
        self.fglob = -0.35238603

    def fun(self, x, *args):
        self.nfev += 1

        x1, x2 = x[0], x[1]
        return 0.25 * x1 ** 4 - 0.5 * x1 ** 2 + 0.1 * x1 + 0.5 * x2 ** 2
|
Maccimo/intellij-community
|
refs/heads/master
|
python/testData/mover/multiLineSelectionDifferentIndentLevelsMoveToEmptyLine_afterDown.py
|
8
|
pass
<caret><selection>n = 0
while n:
print("spam")</selection>
pass
|
MarcJoan/django
|
refs/heads/master
|
tests/timezones/urls.py
|
406
|
from django.conf.urls import url
from . import admin as tz_admin # NOQA: register tz_admin
# Route the admin site through the timezone test-suite admin instance.
urlpatterns = [
    url(r'^admin/', tz_admin.site.urls),
]
|
samuelcolvin/codeshow
|
refs/heads/master
|
codeshow.py
|
1
|
from flask import Flask
from flask import render_template
from pygments import highlight
import pygments.lexers as pyg_lexers
from pygments.formatters import HtmlFormatter
import requests, re
from flask import request
from urlparse import urlparse
from flask import redirect, url_for
import pygments.styles as pyg_styles
app = Flask(__name__)
@app.route("/")
def index():
    """Landing page: redirect to a link list or a raw view, else show the form."""
    target = request.args.get('url', None)
    size = request.args.get('fontsize', 100)
    style = request.args.get('pystyle', 'default')
    if target:
        # a page URL was submitted: scan it for raw links
        return redirect(url_for('found', fontsize = size, pystyle = style, url = target))
    raw = request.args.get('rawurl', None)
    if raw:
        # a raw file URL was submitted: highlight it directly
        return redirect(url_for('show', fontsize = size, pystyle = style, url = raw))
    sizes = [100, 120, 150, 180, 200]
    styles = pyg_styles.get_all_styles()
    return render_template('index.jinja', pystyles = styles, fontsizes = sizes)
@app.route("/found/<int:fontsize>/<pystyle>/<path:url>")
def found(fontsize = 100, pystyle = 'default', url = None):
    """Scrape *url* for hrefs containing 'raw' and list them as show links."""
    output = 'url: %s\n' % url
    parsed_uri = urlparse(url)
    # rebuild scheme://netloc so root-relative hrefs can be made absolute
    domain = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri).strip('/')
    output += 'domain: %s\n' % domain
    r = requests.get(url)
    # for m in re.finditer('href="(.*?)".*?>(.*?)<', r.text, re.MULTILINE):
    # output += 'MATCH: %s\n' % str(m.groups())
    urls = re.findall('href="(.*?)"', r.text)
    links = []
    for urlfound in urls:
        if 'raw' in urlfound:
            if urlfound.startswith('/'):
                urlfound = domain + urlfound
            output += 'url: %s\n' % urlfound
            links.append({'name': urlfound, 'url': url_for('show',
                fontsize = fontsize,
                pystyle = pystyle,
                url = urlfound)})
    # 'output' is a debug trace; only rendered if the commented arg is restored
    return render_template('find_links.jinja', url = url, links = links) # , output = output
@app.route("/show/<int:fontsize>/<pystyle>/<path:url>")
def show(fontsize = 100, pystyle = 'default', url = None):
print 'pystyle', pystyle
r = requests.get(url)
try:
fname = url.split('/')[-1]
except:
fname = 'unknown.txt'
contype = r.headers.get('content-type', None)
if contype and ';' in contype:
contype = contype.split(';')[0]
try:
lexer = pyg_lexers.get_lexer_for_filename(fname)
except:
try:
lexer = pyg_lexers.get_lexer_for_mimetype(contype)
except:
lexer = pyg_lexers.get_lexer_for_filename('.txt')
formatter = HtmlFormatter(linenos=True, cssclass='code', style = pystyle)
css = formatter.get_style_defs('.code').encode('utf8')
code = highlight(r.text, lexer, formatter)
return render_template('showcode.jinja', title = fname, code = code, css = css, fontsize = fontsize)
# Start the Flask development server when run directly.
if __name__ == "__main__":
    app.run(debug=True)#
|
gencer/sentry
|
refs/heads/master
|
src/sentry/analytics/events/organization_created.py
|
4
|
from __future__ import absolute_import, print_function
from sentry import analytics
class OrganizationCreatedEvent(analytics.Event):
    """Analytics event recorded when an organization is created."""
    type = 'organization.created'

    # 'actor_id' is optional — presumably organizations can be created
    # without an acting user (TODO confirm against callers).
    attributes = (
        analytics.Attribute('id'), analytics.Attribute('name'), analytics.Attribute('slug'),
        analytics.Attribute('actor_id', required=False),
    )


# Register the event class with the analytics subsystem.
analytics.register(OrganizationCreatedEvent)
|
axilleas/ansible
|
refs/heads/devel
|
lib/ansible/plugins/lookup/indexed_items.py
|
127
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
    """Lookup backing ``with_indexed_items``.

    Yields ``(0, item0), (1, item1), ...`` so a task can reference both an
    element and its position in the (flattened) input list.
    """

    def __init__(self, basedir=None, **kwargs):
        self.basedir = basedir

    def run(self, terms, variables, **kwargs):
        """Return a list of (index, item) pairs for the flattened terms.

        Raises AnsibleError when *terms* is not a list.
        """
        if not isinstance(terms, list):
            raise AnsibleError("with_indexed_items expects a list")

        items = self._flatten(terms)
        # BUG FIX: materialize the result -- under Python 3 zip() returns a
        # lazy iterator, but lookup plugins must return a list of terms.
        return list(zip(range(len(items)), items))
|
OakDevel/Teacup_Firmware
|
refs/heads/master
|
configtool/heaterspage.py
|
2
|
import wx
from configtool.page import Page
from configtool.data import pinNames, BSIZESMALL
from configtool.heaterlist import HeaterList
from configtool.addheaterdlg import AddHeaterDlg
class HeatersPage(wx.Panel, Page):
    """Configuration notebook page that manages the list of heaters.

    Shows the heater table plus Add/Modify/Delete buttons, validates the
    table against the currently legal pin names, and pushes every change up
    to the parent via parent.setHeaters().
    """
    def __init__(self, parent, nb, idPg, font):
        wx.Panel.__init__(self, nb, wx.ID_ANY)
        Page.__init__(self, font)
        self.parent = parent
        self.font = font
        self.id = idPg

        sz = wx.GridBagSizer()
        sz.AddSpacer((30, 30), pos=(0, 0))

        self.heaters = []          # heater tuples: (name, pin, invert, pwm)
        self.validPins = pinNames  # pins a heater may currently use

        self.lb = HeaterList(self, font)
        sz.Add(self.lb, pos=(1, 1))
        sz.AddSpacer((20, 20), pos=(1, 2))

        bsz = wx.BoxSizer(wx.VERTICAL)

        self.bAdd = wx.Button(self, wx.ID_ANY, "Add", size=BSIZESMALL)
        self.bAdd.SetBackgroundColour(self.deco.getBackgroundColour())
        self.bAdd.SetFont(font)
        self.Bind(wx.EVT_BUTTON, self.doAdd, self.bAdd)
        self.bAdd.SetToolTipString("Add a heater to the configuration.")
        bsz.Add(self.bAdd)
        bsz.AddSpacer((10, 10))

        self.bModify = wx.Button(self, wx.ID_ANY, "Modify", size=BSIZESMALL)
        self.bModify.SetBackgroundColour(self.deco.getBackgroundColour())
        self.bModify.SetFont(font)
        self.bModify.Enable(False)  # enabled once a row is selected
        self.Bind(wx.EVT_BUTTON, self.doModify, self.bModify)
        self.bModify.SetToolTipString("Modify the selected heater.")
        bsz.Add(self.bModify)
        bsz.AddSpacer((10, 10))

        self.bDelete = wx.Button(self, wx.ID_ANY, "Delete", size=BSIZESMALL)
        self.bDelete.SetBackgroundColour(self.deco.getBackgroundColour())
        self.bDelete.SetFont(font)
        self.bDelete.Enable(False)  # enabled once a row is selected
        self.Bind(wx.EVT_BUTTON, self.doDelete, self.bDelete)
        self.bDelete.SetToolTipString("Remove the selected heater from the "
                                      "configuration.")
        bsz.Add(self.bDelete)

        sz.Add(bsz, pos=(1, 3))
        self.SetSizer(sz)
        self.enableAll(False)

    def enableAll(self, flag=True):
        """Enable/disable the whole page, including the Add button."""
        self.bAdd.Enable(flag)
        Page.enableAll(self, flag)

    def setItemSelected(self, n):
        """Record the selected row index (or None) and sync button states."""
        self.selection = n
        if n is None:
            self.bDelete.Enable(False)
            self.bModify.Enable(False)
        else:
            self.bDelete.Enable(True)
            self.bModify.Enable(True)

    def getFreePins(self):
        """Return the valid pins not yet assigned to any heater."""
        usedPins = [s[1] for s in self.heaters]
        return [p for p in self.validPins if p not in usedPins]

    def doAdd(self, evt):
        """Handle the Add button: prompt for and append a new heater."""
        nm = [s[0] for s in self.heaters]
        dlg = AddHeaterDlg(self, nm, self.getFreePins(), self.font)
        rc = dlg.ShowModal()
        if rc == wx.ID_OK:
            ht = dlg.getValues()
        dlg.Destroy()
        if rc != wx.ID_OK:
            return

        self.heaters.append(ht)
        self.lb.updateList(self.heaters)
        self.validateTable()
        self.parent.setHeaters(self.heaters)
        self.assertModified(True)

    def doModify(self, evt):
        """Handle the Modify button: edit the selected heater in place."""
        if self.selection is None:
            return

        nm = [s[0] for s in self.heaters]
        h = self.heaters[self.selection]
        # offer the heater's current pin in addition to the free ones
        dlg = AddHeaterDlg(self, nm, [h[1]] + self.getFreePins(), self.font,
                           name=h[0], pin=h[1], invert=h[2], pwm=h[3])
        rc = dlg.ShowModal()
        if rc == wx.ID_OK:
            ht = dlg.getValues()
        dlg.Destroy()
        if rc != wx.ID_OK:
            return

        self.heaters[self.selection] = ht
        self.lb.updateList(self.heaters)
        self.validateTable()
        self.parent.setHeaters(self.heaters)
        self.assertModified(True)

    def doDelete(self, evt):
        """Handle the Delete button: remove the selected heater."""
        if self.selection is None:
            return

        # BUG FIX: assertModified(True) was called both before and after the
        # removal; a single call at the end matches doAdd/doModify.
        del self.heaters[self.selection]
        self.lb.updateList(self.heaters)
        self.validateTable()
        self.parent.setHeaters(self.heaters)
        self.assertModified(True)

    def setHeaters(self, heaters):
        """Replace the heater list wholesale (e.g. when loading a config)."""
        self.heaters = heaters
        self.lb.updateList(self.heaters)
        self.validateTable()
        self.parent.setHeaters(self.heaters)

    def setCandidatePins(self, plist):
        """Set the pins heaters may use; fall back to all known pin names."""
        if not plist:
            self.validPins = pinNames
        else:
            self.validPins = plist
        self.validateTable()

    def heaterNames(self):
        """Return the list of configured heater names."""
        return [heater[0] for heater in self.heaters]

    def validateTable(self):
        """Flag rows whose pin is no longer valid; update page validity."""
        self.lb.setTableValidity(True)
        self.setFieldValidity('HEATERLIST', True)
        for i in range(len(self.heaters)):
            if self.heaters[i][1] not in self.validPins:
                self.lb.setRowValidity(i, False)
                self.setFieldValidity('HEATERLIST', False)

    def setHelpText(self, ht):
        """Install help text; DEFINE_HEATER's entry becomes the Add tooltip."""
        Page.setHelpText(self, ht)
        k = 'DEFINE_HEATER'
        if k in ht.keys():
            self.bAdd.SetToolTipString(ht[k])
|
frugalware/melkotesting
|
refs/heads/master
|
t/fpmjunk.py
|
13
|
#!/usr/bin/env python
try:
import pacman
except ImportError:
import alpm
pacman = alpm
import os, tempfile, shutil, sys, re
# Parse command-line options; with no recognised option the first argument
# is taken as the architecture to check.
remove = False
if len(sys.argv) > 1:
    if sys.argv[1] == "--help":
        print "no longer necessary %s fpms" % sys.argv[2]
        sys.exit(0)
    elif sys.argv[1] == "--remove":
        remove = True
        arch = sys.argv[2]
    else:
        arch = sys.argv[1]
# NOTE(review): if no arguments are given, `arch` is never assigned and the
# loop below raises NameError -- confirm callers always pass an arch.
for i in ['frugalware-%s' % arch]:
    arch = i[11:]  # strip the 'frugalware-' prefix back off
    # Build a throwaway pacman root so db_update() can sync the tree into it.
    root = tempfile.mkdtemp()
    pacman.initialize(root)
    if os.getcwd().split('/')[-2] == "frugalware-current":
        treename = "frugalware-current"
        archive = treename
    else:
        treename = "frugalware"
        archive = treename + "-stable"
    db = pacman.db_register(treename)
    pacman.db_setserver(db, "file://" + os.getcwd() + "/../frugalware-" + arch)
    pacman.db_update(1, db)
    # Collect the expected .fpm file name of every package in the database.
    fdb = []
    j = pacman.db_getpkgcache(db)
    while j:
        pkg = pacman.void_to_PM_PKG(pacman.list_getdata(j))
        pkgname = pacman.void_to_char(pacman.pkg_getinfo(pkg, pacman.PKG_NAME))
        pkgver = pacman.void_to_char(pacman.pkg_getinfo(pkg, pacman.PKG_VERSION))
        fdb.append("%s-%s-%s.fpm" % (pkgname, pkgver, arch))
        j = pacman.list_next(j)
    pacman.release()
    shutil.rmtree(root)
    # Print (and with --remove, archive away) files not referenced by the db.
    for j in os.listdir(os.getcwd() + "/../frugalware-" + arch):
        if j not in fdb and j != treename + ".fdb" and j != ".gitignore":
            print "frugalware-" + arch + "/" + j
            if remove:
                os.rename("../frugalware-" + arch + "/" + j, "/home/ftp/pub/archive/fpmjunk/" + archive + "/frugalware-" + arch + "/" + j)
|
Jgarcia-IAS/SAT
|
refs/heads/master
|
openerp/addons-extra/odoo-pruebas/odoo-server/addons-extra/l10n_pe_vat/base_vat.py
|
3
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import string
from openerp.osv import fields, osv
from openerp.tools.translate import _
class res_partner(osv.osv):
    """Partner extension adding Peruvian RUC (VAT) validation."""
    _inherit = 'res.partner'

    def check_vat_pe(self, vat):
        """Validate a Peruvian RUC number.

        A RUC is 11 digits; the last digit is a check digit obtained by
        weighting the first ten digits with 5,4,3,2,7,6,5,4,3,2 and taking
        (sum * 10) % 11 % 10.

        Returns True when *vat* is a well-formed, check-digit-valid RUC.
        """
        ruc = str(vat)
        # BUG FIX: the original tested `ruc.isdigit` without calling it; a
        # bound method is always truthy, so non-numeric RUCs were accepted.
        if not ruc.isdigit():
            return False
        if len(ruc) != 11:
            return False
        control = "5432765432"
        n = 0
        for i, j in enumerate(ruc[:-1]):
            n += int(control[i]) * int(j)
        n = (n * 10) % 11 % 10
        return n == int(ruc[-1])

res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
morelab/appcomposer
|
refs/heads/master
|
alembic/versions/452902fab185_added_auth_related_f.py
|
3
|
"""Added auth-related fields to User
Revision ID: 452902fab185
Revises: 501404b36cef
Create Date: 2013-09-24 12:42:04.461000
"""
# revision identifiers, used by Alembic.
revision = '452902fab185'
down_revision = '501404b36cef'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the auth_system/auth_data columns to Users and seed test users."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('Users', sa.Column('auth_data', sa.Unicode(length=255), nullable=True))
    op.add_column('Users', sa.Column('auth_system', sa.Unicode(length=20), nullable=True))
    ### end Alembic commands ###
    # NOTE(review): these seed rows look like development fixtures (plaintext
    # 'password' values) -- confirm this migration never runs in production.
    op.execute("INSERT INTO Users (login, name, auth_system, auth_data, password) VALUES ('testuser', 'Test User', 'userpass', 'password', 'password')")
    op.execute("INSERT INTO Users (login, name, auth_system, auth_data, password) VALUES ('testuser2', 'Second Test User', 'userpass', 'password', 'password')")
def downgrade():
    """No-op downgrade: the columns added by upgrade() are kept.

    NOTE(review): the generated drop_column calls below are commented out,
    so downgrading does not reverse upgrade() -- confirm this is deliberate.
    """
    pass
    ### commands auto generated by Alembic - please adjust! ###
    #op.drop_column('Users', 'auth_system')
    #op.drop_column('Users', 'auth_data')
    ### end Alembic commands ###
|
dkagedal/stgit
|
refs/heads/safe
|
stgit/commands/branch.py
|
3
|
__copyright__ = """
Copyright (C) 2005, Chuck Lever <cel@netapp.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see http://www.gnu.org/licenses/.
"""
import sys, os, time, re
from stgit.argparse import opt
from stgit.commands.common import *
from stgit.utils import *
from stgit.out import *
from stgit import argparse, stack, git, basedir
from stgit.lib import log
help = 'Branch operations: switch, list, create, rename, delete, ...'
kind = 'stack'
usage = ['',
'[--merge] [--] <branch>',
'--list',
'--create [--] <new-branch> [<committish>]',
'--clone [--] [<new-branch>]',
'--rename [--] <old-name> <new-name>',
'--protect [--] [<branch>]',
'--unprotect [--] [<branch>]',
'--delete [--force] [--] <branch>',
'--cleanup [--force] [--] [<branch>]',
'--description=<description> [--] [<branch>]']
description = """
Create, clone, switch between, rename, or delete development branches
within a git repository.
'stg branch'::
Display the name of the current branch.
'stg branch' <branch>::
Switch to the given branch."""
args = [argparse.all_branches]
options = [
opt('-l', '--list', action = 'store_true',
short = 'List the branches contained in this repository', long = """
List each branch in the current repository, followed by its
branch description (if any). The current branch is prefixed
with '>'. Branches that have been initialized for StGit (with
linkstg:init[]) are prefixed with 's'. Protected branches are
prefixed with 'p'."""),
opt('-c', '--create', action = 'store_true',
short = 'Create (and switch to) a new branch', long = """
Create (and switch to) a new branch. The new branch is already
initialized as an StGit patch stack, so you do not have to run
linkstg:init[] manually. If you give a committish argument,
the new branch is based there; otherwise, it is based at the
current HEAD.
StGit will try to detect the branch off of which the new
branch is forked, as well as the remote repository from which
that parent branch is taken (if any), so that running
linkstg:pull[] will automatically pull new commits from the
correct branch. It will warn if it cannot guess the parent
branch (e.g. if you do not specify a branch name as
committish)."""),
opt('--clone', action = 'store_true',
short = 'Clone the contents of the current branch', long = """
Clone the current branch, under the name <new-branch> if
specified, or using the current branch's name plus a
timestamp.
The description of the new branch is set to tell it is a clone
of the current branch. The parent information of the new
branch is copied from the current branch."""),
opt('-r', '--rename', action = 'store_true',
short = 'Rename an existing branch'),
opt('-p', '--protect', action = 'store_true',
short = 'Prevent StGit from modifying a branch', long = """
Prevent StGit from modifying a branch -- either the current
one, or one named on the command line."""),
opt('-u', '--unprotect', action = 'store_true',
short = 'Allow StGit to modify a branch', long = """
Allow StGit to modify a branch -- either the current one, or
one named on the command line. This undoes the effect of an
earlier 'stg branch --protect' command."""),
opt('--delete', action = 'store_true',
short = 'Delete a branch', long = """
Delete the named branch. If there are any patches left in the
branch, StGit will refuse to delete it unless you give the
'--force' flag.
A protected branch cannot be deleted; it must be unprotected
first (see '--unprotect' above).
If you delete the current branch, you are switched to the
"master" branch, if it exists."""),
opt('--cleanup', action = 'store_true',
short = 'Clean up the StGit metadata for a branch', long = """
Remove the StGit information for the current or given branch. If there
are patches left in the branch, StGit refuses the operation unless
'--force' is given.
A protected branch cannot be cleaned up; it must be unprotected first
(see '--unprotect' above).
A cleaned up branch can be re-initialised using the 'stg init'
command."""),
opt('-d', '--description', short = 'Set the branch description'),
opt('--merge', action = 'store_true',
short = 'Merge work tree changes into the other branch'),
opt('--force', action = 'store_true',
short = 'Force a delete when the series is not empty')]
directory = DirectoryGotoToplevel(log = False)
def __is_current_branch(branch_name):
    """Return True when *branch_name* is the branch currently checked out."""
    return branch_name == crt_series.get_name()
def __print_branch(branch_name, length):
    """Emit one line of `stg branch --list` output for *branch_name*.

    The flags column shows '>' for the current branch, 's' for an
    StGit-initialised branch and 'p' for a protected one.
    """
    branch = stack.Series(branch_name)
    current = '>' if __is_current_branch(branch_name) else ' '
    initialized = 's' if branch.is_initialised() else ' '
    protected = 'p' if branch.get_protected() else ' '
    line = (current + ' ' + initialized + protected + '\t'
            + branch_name.ljust(length) + ' | ' + branch.get_description())
    out.stdout(line)
def __delete_branch(doomed_name, force = False):
    """Delete branch *doomed_name*.

    Refuses to delete the current or a protected branch; *force* is passed
    through to Series.delete() to allow deleting a non-empty stack.
    """
    doomed = stack.Series(doomed_name)

    if __is_current_branch(doomed_name):
        raise CmdException('Cannot delete the current branch')
    if doomed.get_protected():
        # CONSISTENCY: use the call form of raise, matching the line above
        # (the old `raise X, msg` comma form is Python 2 only).
        raise CmdException('This branch is protected. Delete is not permitted')

    out.start('Deleting branch "%s"' % doomed_name)
    doomed.delete(force)
    out.done()
def __cleanup_branch(name, force = False):
    """Remove the StGit metadata for branch *name*, keeping the git branch.

    Refuses to touch a protected branch; *force* allows cleaning up a
    branch that still has patches.
    """
    branch = stack.Series(name)
    if branch.get_protected():
        # BUG FIX: the original raised the misspelled name 'CmdExcpetion',
        # producing a NameError instead of the intended error message.
        raise CmdException('This branch is protected. Clean up is not permitted')

    out.start('Cleaning up branch "%s"' % name)
    branch.delete(force = force, cleanup = True)
    out.done()
def func(parser, options, args):
    """Dispatch the `stg branch` sub-operations.

    Exactly one of the mutually exclusive options (--create, --clone,
    --delete, --cleanup, --list, --protect, --rename, --unprotect,
    --description) is handled per invocation; a bare branch argument
    switches the working tree to it, and no arguments prints the name of
    the current branch.
    """
    if options.create:
        # --create [--] <new-branch> [<committish>]
        if len(args) == 0 or len(args) > 2:
            parser.error('incorrect number of arguments')
        check_local_changes()
        check_conflicts()
        check_head_top_equal(crt_series)

        tree_id = None
        if len(args) >= 2:
            parentbranch = None
            try:
                branchpoint = git.rev_parse(args[1])

                # parent branch?
                head_re = re.compile('refs/(heads|remotes)/')
                ref_re = re.compile(args[1] + '$')
                for ref in git.all_refs():
                    if head_re.match(ref) and ref_re.search(ref):
                        # args[1] is a valid ref from the branchpoint
                        # setting above
                        parentbranch = args[1]
                        break;
            except git.GitException:
                # should use a more specific exception to catch only
                # non-git refs ?
                out.info('Don\'t know how to determine parent branch'
                         ' from "%s"' % args[1])
                # exception in branch = rev_parse() leaves branchpoint unbound
                branchpoint = None

            tree_id = git_id(crt_series, branchpoint or args[1])

            if parentbranch:
                out.info('Recording "%s" as parent branch' % parentbranch)
            else:
                out.info('Don\'t know how to determine parent branch'
                         ' from "%s"' % args[1])
        else:
            # branch stack off current branch
            parentbranch = git.get_head_file()

        if parentbranch:
            parentremote = git.identify_remote(parentbranch)
            if parentremote:
                out.info('Using remote "%s" to pull parent from'
                         % parentremote)
            else:
                out.info('Recording as a local branch')
        else:
            # no known parent branch, can't guess the remote
            parentremote = None

        stack.Series(args[0]).init(create_at = tree_id,
                                   parent_remote = parentremote,
                                   parent_branch = parentbranch)

        out.info('Branch "%s" created' % args[0])
        log.compat_log_entry('branch --create')
        return

    elif options.clone:
        # --clone [--] [<new-branch>]; default name is <branch>-<timestamp>
        if len(args) == 0:
            clone = crt_series.get_name() + \
                    time.strftime('-%C%y%m%d-%H%M%S')
        elif len(args) == 1:
            clone = args[0]
        else:
            parser.error('incorrect number of arguments')

        check_local_changes()
        check_conflicts()
        check_head_top_equal(crt_series)

        out.start('Cloning current branch to "%s"' % clone)
        crt_series.clone(clone)
        out.done()

        log.copy_log(log.default_repo(), crt_series.get_name(), clone,
                     'branch --clone')
        return

    elif options.delete:
        # --delete [--force] [--] <branch>
        if len(args) != 1:
            parser.error('incorrect number of arguments')
        __delete_branch(args[0], options.force)
        log.delete_log(log.default_repo(), args[0])
        return

    elif options.cleanup:
        # --cleanup [--force] [--] [<branch>]; defaults to the current branch
        if not args:
            name = crt_series.get_name()
        elif len(args) == 1:
            name = args[0]
        else:
            parser.error('incorrect number of arguments')
        __cleanup_branch(name, options.force)
        log.delete_log(log.default_repo(), name)
        return

    elif options.list:
        # --list: show every head, hiding the internal '<name>.stgit' refs
        if len(args) != 0:
            parser.error('incorrect number of arguments')

        branches = set(git.get_heads())
        # iterate over a copy: branches is mutated inside the loop
        for br in set(branches):
            m = re.match(r'^(.*)\.stgit$', br)
            if m and m.group(1) in branches:
                branches.remove(br)

        if branches:
            out.info('Available branches:')
            max_len = max([len(i) for i in branches])
            for i in sorted(branches):
                __print_branch(i, max_len)
        else:
            out.info('No branches')
        return

    elif options.protect:
        # --protect [--] [<branch>]; defaults to the current branch
        if len(args) == 0:
            branch_name = crt_series.get_name()
        elif len(args) == 1:
            branch_name = args[0]
        else:
            parser.error('incorrect number of arguments')
        branch = stack.Series(branch_name)

        if not branch.is_initialised():
            raise CmdException, 'Branch "%s" is not controlled by StGIT' \
                  % branch_name

        out.start('Protecting branch "%s"' % branch_name)
        branch.protect()
        out.done()
        return

    elif options.rename:
        # --rename [--] <old-name> <new-name>
        if len(args) != 2:
            parser.error('incorrect number of arguments')

        if __is_current_branch(args[0]):
            raise CmdException, 'Renaming the current branch is not supported'

        stack.Series(args[0]).rename(args[1])

        out.info('Renamed branch "%s" to "%s"' % (args[0], args[1]))
        log.rename_log(log.default_repo(), args[0], args[1], 'branch --rename')
        return

    elif options.unprotect:
        # --unprotect [--] [<branch>]; defaults to the current branch
        if len(args) == 0:
            branch_name = crt_series.get_name()
        elif len(args) == 1:
            branch_name = args[0]
        else:
            parser.error('incorrect number of arguments')
        branch = stack.Series(branch_name)

        if not branch.is_initialised():
            raise CmdException, 'Branch "%s" is not controlled by StGIT' \
                  % branch_name

        out.info('Unprotecting branch "%s"' % branch_name)
        branch.unprotect()
        out.done()
        return

    elif options.description is not None:
        # --description=<description> [--] [<branch>]
        if len(args) == 0:
            branch_name = crt_series.get_name()
        elif len(args) == 1:
            branch_name = args[0]
        else:
            parser.error('incorrect number of arguments')
        branch = stack.Series(branch_name)

        if not branch.is_initialised():
            raise CmdException, 'Branch "%s" is not controlled by StGIT' \
                  % branch_name

        branch.set_description(options.description)
        return

    elif len(args) == 1:
        # bare <branch> argument: switch the working tree to it
        if __is_current_branch(args[0]):
            raise CmdException, 'Branch "%s" is already the current branch' \
                  % args[0]

        # with --merge, local changes are carried over into the new branch
        if not options.merge:
            check_local_changes()
        check_conflicts()
        check_head_top_equal(crt_series)

        out.start('Switching to branch "%s"' % args[0])
        git.switch_branch(args[0])
        out.done()
        return

    # default action: print the current branch
    if len(args) != 0:
        parser.error('incorrect number of arguments')

    print crt_series.get_name()
|
firebase/firebase-android-sdk
|
refs/heads/master
|
ci/fireci/tests/copyright_test.py
|
1
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from fireciplugins.copyright import (
match_any,
matches,
walk,
)
from .fileutil import (
Artifact,
create_artifacts,
in_tempdir,
)
class CopyrightCheckTest(unittest.TestCase):
    """Unit tests for the copyright-header check helpers."""

    def test_match_any(self):
        """match_any() reports whether any element satisfies the predicate."""
        test_data = (
            ((1, 2, 3), lambda x: x == 2, True),
            ((1, 2, 3), lambda x: x == 5, False),
            ((), lambda x: x == 1, False),
        )
        for iterable, predicate, expected_result in test_data:
            with self.subTest():
                self.assertEqual(match_any(iterable, predicate), expected_result)

    def test_matches(self):
        """matches() applies glob-style patterns to relative paths."""
        test_data = (
            ('file.py', '*.py', True),
            ('file.xml', '*.py', False),
            ('hello/file.py', '*.py', True),
            ('hello/file.xml', 'hello/**', True),
            ('some/file.xml', 'hello/**', False),
        )
        # BUG FIX: a stray dead `pass` statement sat between the for header
        # and the subTest block; removed.
        for path, path_to_match, expected_result in test_data:
            with self.subTest("'{}' matches '{}' must be {}".format(
                    path, path_to_match, expected_result)):
                self.assertEqual(matches(path, [path_to_match]), expected_result)

    @in_tempdir
    def test_walk_in_empty_dir(self):
        """walk() over an empty directory yields nothing."""
        paths = walk('.', [], ['py', 'xml'])
        self.assertTrue(len(list(paths)) == 0)

    @in_tempdir
    def test_walk_should_filter_out_non_matching_files(self):
        """walk() skips excluded path patterns and non-listed extensions."""
        create_artifacts(
            Artifact('hello/world/foo.py'), Artifact('dir1/subdir2/file.py'),
            Artifact('hello/world.py'), Artifact('dir1/subdir2/file.py'),
            Artifact('dir1/subdir2/file.gradle'), Artifact('dir1/subdir2/file.xml'))
        paths = walk('.', ['hello/**'], ['py', 'xml'])
        self.assertEqual(
            set(paths), {'dir1/subdir2/file.py', 'dir1/subdir2/file.xml'})
|
xuru/pyvisdk
|
refs/heads/master
|
pyvisdk/do/apply_storage_recommendation_result.py
|
1
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ApplyStorageRecommendationResult(vim, *args, **kwargs):
    '''Both RecommendDatastores and DatastoreEnterMaintenanceMode methods may invoke
    Storage DRS for recommendations on placing or evacuating virtual disks. All
    initial placement recommendations, and some enterMaintenanceMode
    recommendations need to be approved by the user. Recommendations that are
    approved will be applied using the ApplyStorageDrsRecommendation_Task method.
    This class encapsulates the result of applying a subset of the
    recommendations.NOTE: This data object type and all of its methods are
    experimental and subject to change in future releases.'''

    # Build an empty data object from the SOAP factory, then populate it from
    # positional args (in required+optional order) and keyword args.
    obj = vim.client.factory.create('ns0:ApplyStorageRecommendationResult')

    # do some validation checking...
    # NOTE(review): this auto-generated guard can never fire (a length sum is
    # never < 0) and its message ignores kwargs; the file is marked
    # "Automatically generated, do not edit", so it is left as-is.
    if (len(args) + len(kwargs)) < 0:
        raise IndexError('Expected at least 1 arguments got: %d' % len(args))

    required = [ ]
    optional = [ 'vm', 'dynamicProperty', 'dynamicType' ]

    # positional arguments map onto the attribute names in declaration order
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    # keyword arguments must name a known attribute
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
|
retomerz/intellij-community
|
refs/heads/master
|
python/testData/inspections/ChainedComparison1_after.py
|
83
|
if e < a <= b < c <= d:
print "q"
|
rvhub/onos
|
refs/heads/master
|
tools/test/topos/obeliskHostCheck.py
|
32
|
#!/usr/bin/python
import sys
import os
import json
# TODO: if none given, use OCI
# Read the target ONOS node address from the command line.
# NOTE(review): on failure this only prints the error; `onosIp` is then
# unbound and the os.popen call below raises NameError -- confirm the
# script is always invoked with an IP argument.
try:
    onosIp = sys.argv[1]
    print "Reading hosts view from ONOS node " + onosIp + ":"
except Exception as e:
    print "Error reading ONOS IP arguement"
    print e

# Grab the json objects from ONOS
output = os.popen("onos " + onosIp + " \"hosts -j\"" )
hosts = json.loads( output.read() )
#hosts = json.loads( output.split( 'Logging in as karaf\n' )[1] )
# Assume correct attachment until a mismatch is found.
hostAttachment = True

# FIXME: topo-HA/obelisk specific mappings:
# key is mac and value is dpid
mappings = {}
for i in range( 1, 29 ):  # hosts 1 through 28
    # set up correct variables:
    # MAC is 00:00:00:00:00:NN; device ids follow the obelisk topology.
    macId = "00:" * 5 + hex( i ).split( "0x" )[1].upper().zfill(2)
    if i == 1:
        deviceId = "1000".zfill(16)
    elif i == 2:
        deviceId = "2000".zfill(16)
    elif i == 3:
        deviceId = "3000".zfill(16)
    elif i == 4:
        deviceId = "3004".zfill(16)
    elif i == 5:
        deviceId = "5000".zfill(16)
    elif i == 6:
        deviceId = "6000".zfill(16)
    elif i == 7:
        deviceId = "6007".zfill(16)
    elif i >= 8 and i <= 17:
        dpid = '3' + str( i ).zfill( 3 )
        deviceId = dpid.zfill(16)
    elif i >= 18 and i <= 27:
        dpid = '6' + str( i ).zfill( 3 )
        deviceId = dpid.zfill(16)
    elif i == 28:
        deviceId = "2800".zfill(16)
    mappings[ macId ] = deviceId

# NOTE(review): with `or` this branch is also taken when hosts is non-empty
# but contains "Error" -- presumably `and` was intended; verify upstream.
if hosts or "Error" not in hosts:
    if hosts == []:
        print "WARNING: There are no hosts discovered"
    else:
        # Compare every discovered host against the expected attachment map.
        for host in hosts:
            mac = None
            location = None
            device = None
            port = None
            try:
                mac = host.get( 'mac' )
                assert mac, "mac field could not be found for this host object"

                location = host.get( 'location' )
                assert location, "location field could not be found for this host object"

                # Trim the protocol identifier off deviceId
                device = str( location.get( 'elementId' ) ).split(':')[1]
                assert device, "elementId field could not be found for this host location object"

                port = location.get( 'port' )
                assert port, "port field could not be found for this host location object"

                # Now check if this matches where they should be
                if mac and device and port:
                    if device != mappings[ str( mac ) ]:
                        print "The attachment device is incorrect for host " + str( mac ) +\
                              ". Expected: " + mappings[ str( mac ) ] + "; Actual: " + device
                        hostAttachment = False
                    if str( port ) != "1":
                        print "The attachment port is incorrect for host " + str( mac ) +\
                              ". Expected: 1; Actual: " + str( port)
                        hostAttachment = False
                else:
                    hostAttachment = False
            except AssertionError as e:
                print "ERROR: Json object not as expected:"
                print e
                print "host object: " + repr( host )
                hostAttachment = False
else:
    print "No hosts json output or \"Error\" in output. hosts = " + repr( hosts )
|
esi-mineset/spark
|
refs/heads/master
|
examples/src/main/python/mllib/summary_statistics_example.py
|
128
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
from pyspark import SparkContext
# $example on$
import numpy as np
from pyspark.mllib.stat import Statistics
# $example off$
if __name__ == "__main__":
    # Everything between the $example on$/$example off$ markers is extracted
    # verbatim into the MLlib documentation -- keep that region unchanged.
    sc = SparkContext(appName="SummaryStatisticsExample")  # SparkContext

    # $example on$
    mat = sc.parallelize(
        [np.array([1.0, 10.0, 100.0]), np.array([2.0, 20.0, 200.0]), np.array([3.0, 30.0, 300.0])]
    )  # an RDD of Vectors

    # Compute column summary statistics.
    summary = Statistics.colStats(mat)
    print(summary.mean())  # a dense vector containing the mean value for each column
    print(summary.variance())  # column-wise variance
    print(summary.numNonzeros())  # number of nonzeros in each column
    # $example off$

    # Shut the driver down cleanly.
    sc.stop()
|
mdkennedy3/Scarab_DCT_Control
|
refs/heads/master
|
catkin_ws/devel/lib/python2.7/dist-packages/hfn/msg/_MoveActionFeedback.py
|
1
|
"""autogenerated by genpy from hfn/MoveActionFeedback.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import hfn.msg
import geometry_msgs.msg
import genpy
import actionlib_msgs.msg
import std_msgs.msg
class MoveActionFeedback(genpy.Message):
_md5sum = "7d1870ff6e0decea702b943b5af0b42e"
_type = "hfn/MoveActionFeedback"
_has_header = True #flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
MoveFeedback feedback
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: actionlib_msgs/GoalStatus
GoalID goal_id
uint8 status
uint8 PENDING = 0 # The goal has yet to be processed by the action server
uint8 ACTIVE = 1 # The goal is currently being processed by the action server
uint8 PREEMPTED = 2 # The goal received a cancel request after it started executing
# and has since completed its execution (Terminal State)
uint8 SUCCEEDED = 3 # The goal was achieved successfully by the action server (Terminal State)
uint8 ABORTED = 4 # The goal was aborted during execution by the action server due
# to some failure (Terminal State)
uint8 REJECTED = 5 # The goal was rejected by the action server without being processed,
# because the goal was unattainable or invalid (Terminal State)
uint8 PREEMPTING = 6 # The goal received a cancel request after it started executing
# and has not yet completed execution
uint8 RECALLING = 7 # The goal received a cancel request before it started executing,
# but the action server has not yet confirmed that the goal is canceled
uint8 RECALLED = 8 # The goal received a cancel request before it started executing
# and was successfully cancelled (Terminal State)
uint8 LOST = 9 # An action client can determine that a goal is LOST. This should not be
# sent over the wire by an action server
#Allow for the user to associate a string with GoalStatus for debugging
string text
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: hfn/MoveFeedback
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
geometry_msgs/PoseStamped base_position # Where we are now
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of postion and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
"""
# Field names and their ROS types, in parallel order; genpy uses these
# for attribute storage and generic message introspection.
__slots__ = ['header','status','feedback']
_slot_types = ['std_msgs/Header','actionlib_msgs/GoalStatus','hfn/MoveFeedback']
def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       header,status,feedback

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
    """
    if args or kwds:
        # Let the genpy.Message base class assign whatever the caller
        # provided, then backfill any field that was left as None.
        super(MoveActionFeedback, self).__init__(*args, **kwds)
        # message fields cannot be None, assign default values for those that are
        if self.header is None:
            self.header = std_msgs.msg.Header()
        if self.status is None:
            self.status = actionlib_msgs.msg.GoalStatus()
        if self.feedback is None:
            self.feedback = hfn.msg.MoveFeedback()
    else:
        # No arguments at all: default-construct every field.
        self.header = std_msgs.msg.Header()
        self.status = actionlib_msgs.msg.GoalStatus()
        self.feedback = hfn.msg.MoveFeedback()
def _get_types(self):
    """
    internal API method

    :returns: list of ROS type strings for each slot, in slot order
    """
    return self._slot_types
def serialize(self, buff):
    """
    serialize message into buffer

    Writes the fields in wire order (little-endian): header, status,
    feedback. Strings are encoded as a uint32 length prefix followed
    by UTF-8 bytes.

    :param buff: buffer, ``StringIO``
    """
    try:
        _x = self
        # header: seq, stamp.secs, stamp.nsecs as three uint32
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            # encode first, then re-measure: the byte length may differ
            # from the character length for non-ASCII text
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = self
        # status.goal_id.stamp: secs, nsecs as two uint32
        buff.write(_struct_2I.pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
        _x = self.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        # status.status: single uint8 state code
        buff.write(_struct_B.pack(self.status.status))
        _x = self.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = self
        # feedback.base_position.header: seq, stamp.secs, stamp.nsecs
        buff.write(_struct_3I.pack(_x.feedback.base_position.header.seq, _x.feedback.base_position.header.stamp.secs, _x.feedback.base_position.header.stamp.nsecs))
        _x = self.feedback.base_position.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = self
        # pose: position x/y/z + orientation quaternion x/y/z/w, seven float64
        buff.write(_struct_7d.pack(_x.feedback.base_position.pose.position.x, _x.feedback.base_position.pose.position.y, _x.feedback.base_position.pose.position.z, _x.feedback.base_position.pose.orientation.x, _x.feedback.base_position.pose.orientation.y, _x.feedback.base_position.pose.orientation.z, _x.feedback.base_position.pose.orientation.w))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
    """
    unpack serialized message in str into this message instance

    Mirrors :meth:`serialize`: advances a (start, end) window over the
    byte string, unpacking fixed-size groups with the precompiled
    structs and length-prefixed strings in between.

    :param str: byte array of serialized message, ``str``
    :returns: self, with all fields populated
    :raises genpy.DeserializationError: if the buffer is too short
    """
    try:
        # Ensure nested message objects exist before filling them in.
        if self.header is None:
            self.header = std_msgs.msg.Header()
        if self.status is None:
            self.status = actionlib_msgs.msg.GoalStatus()
        if self.feedback is None:
            self.feedback = hfn.msg.MoveFeedback()
        end = 0
        _x = self
        start = end
        end += 12
        # header: seq, stamp.secs, stamp.nsecs (three uint32)
        (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.header.frame_id = str[start:end].decode('utf-8')
        else:
            self.header.frame_id = str[start:end]
        _x = self
        start = end
        end += 8
        # status.goal_id.stamp: secs, nsecs (two uint32)
        (_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.status.goal_id.id = str[start:end].decode('utf-8')
        else:
            self.status.goal_id.id = str[start:end]
        start = end
        end += 1
        # status.status: single uint8 state code
        (self.status.status,) = _struct_B.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.status.text = str[start:end].decode('utf-8')
        else:
            self.status.text = str[start:end]
        _x = self
        start = end
        end += 12
        (_x.feedback.base_position.header.seq, _x.feedback.base_position.header.stamp.secs, _x.feedback.base_position.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.feedback.base_position.header.frame_id = str[start:end].decode('utf-8')
        else:
            self.feedback.base_position.header.frame_id = str[start:end]
        _x = self
        start = end
        end += 56
        # pose: position x/y/z + orientation quaternion x/y/z/w (seven float64)
        (_x.feedback.base_position.pose.position.x, _x.feedback.base_position.pose.position.y, _x.feedback.base_position.pose.position.z, _x.feedback.base_position.pose.orientation.x, _x.feedback.base_position.pose.orientation.y, _x.feedback.base_position.pose.orientation.z, _x.feedback.base_position.pose.orientation.w,) = _struct_7d.unpack(str[start:end])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer

    Identical to :meth:`serialize` for this message: it has no array
    fields, so the *numpy* module argument is never used.

    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        _x = self
        # header: seq, stamp.secs, stamp.nsecs as three uint32
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = self
        buff.write(_struct_2I.pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
        _x = self.status.goal_id.id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        buff.write(_struct_B.pack(self.status.status))
        _x = self.status.text
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = self
        buff.write(_struct_3I.pack(_x.feedback.base_position.header.seq, _x.feedback.base_position.header.stamp.secs, _x.feedback.base_position.header.stamp.nsecs))
        _x = self.feedback.base_position.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
        _x = self
        # pose: position x/y/z + orientation quaternion x/y/z/w, seven float64
        buff.write(_struct_7d.pack(_x.feedback.base_position.pose.position.x, _x.feedback.base_position.pose.position.y, _x.feedback.base_position.pose.position.z, _x.feedback.base_position.pose.orientation.x, _x.feedback.base_position.pose.orientation.y, _x.feedback.base_position.pose.orientation.z, _x.feedback.base_position.pose.orientation.w))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types

    Identical to :meth:`deserialize` for this message: it has no array
    fields, so the *numpy* module argument is never used.

    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    :returns: self, with all fields populated
    :raises genpy.DeserializationError: if the buffer is too short
    """
    try:
        # Ensure nested message objects exist before filling them in.
        if self.header is None:
            self.header = std_msgs.msg.Header()
        if self.status is None:
            self.status = actionlib_msgs.msg.GoalStatus()
        if self.feedback is None:
            self.feedback = hfn.msg.MoveFeedback()
        end = 0
        _x = self
        start = end
        end += 12
        # header: seq, stamp.secs, stamp.nsecs (three uint32)
        (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.header.frame_id = str[start:end].decode('utf-8')
        else:
            self.header.frame_id = str[start:end]
        _x = self
        start = end
        end += 8
        (_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.status.goal_id.id = str[start:end].decode('utf-8')
        else:
            self.status.goal_id.id = str[start:end]
        start = end
        end += 1
        (self.status.status,) = _struct_B.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.status.text = str[start:end].decode('utf-8')
        else:
            self.status.text = str[start:end]
        _x = self
        start = end
        end += 12
        (_x.feedback.base_position.header.seq, _x.feedback.base_position.header.stamp.secs, _x.feedback.base_position.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.feedback.base_position.header.frame_id = str[start:end].decode('utf-8')
        else:
            self.feedback.base_position.header.frame_id = str[start:end]
        _x = self
        start = end
        end += 56
        # pose: position x/y/z + orientation quaternion x/y/z/w (seven float64)
        (_x.feedback.base_position.pose.position.x, _x.feedback.base_position.pose.position.y, _x.feedback.base_position.pose.position.z, _x.feedback.base_position.pose.orientation.x, _x.feedback.base_position.pose.orientation.y, _x.feedback.base_position.pose.orientation.z, _x.feedback.base_position.pose.orientation.w,) = _struct_7d.unpack(str[start:end])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled little-endian struct packers shared by the
# (de)serialization methods above.
_struct_I = genpy.struct_I  # single uint32 (string length prefixes)
_struct_3I = struct.Struct("<3I")  # Header: seq + stamp secs/nsecs
_struct_B = struct.Struct("<B")  # GoalStatus.status byte
_struct_7d = struct.Struct("<7d")  # Pose: 3 position + 4 quaternion doubles
_struct_2I = struct.Struct("<2I")  # GoalID stamp secs/nsecs
|
pedohorse/hpaste
|
refs/heads/master
|
python2.7libs/hpaste/QWebAuthDialog.py
|
1
|
# NOTE: this module is intentionally disabled -- importing it raises
# immediately. See the TODOs below for why it was shelved.
raise NotImplementedError()
# TODO: Ran into temporary deadend: This requires exposing client_secret for github authentication... so no point using it now
# TODO: need to implement something like device flow, but it is in beta currently
# TODO: or enter personal access token manually

import re

try:
    from PySide2.QtWidgets import QDialog, QVBoxLayout
    from PySide2.QtWebEngineWidgets import QWebEngineView
    from PySide2.QtCore import QUrl
except ImportError:
    # Older houdini builds ship Qt4/PySide without QtWebEngine.
    raise NotImplementedError('web auth implemented only for QT5. Sorry, people who still use houdini 16.5')
class QWebAuthDialog(QDialog):
    """Modal dialog embedding a web view for an OAuth-style login flow.

    The dialog watches every URL the embedded browser navigates to and
    accepts itself as soon as the URL matches ``success_re``; the regex
    match object is then available via :meth:`get_result`.
    """

    def __init__(self, url, success_re, parent=None):
        """
        :param url: initial URL to load (the provider's authorization page)
        :param success_re: regex pattern matched (via ``re.match``) against
            each visited URL; a match closes the dialog with ``Accepted``
        :param parent: optional parent widget
        """
        super(QWebAuthDialog, self).__init__(parent=parent)
        self.__webview = QWebEngineView(parent=self)
        self.__webview.load(QUrl(url))
        self.__succre = re.compile(success_re)
        self.__webview.urlChanged.connect(self.on_url_changed)
        self.__layout = QVBoxLayout()
        self.setLayout(self.__layout)
        self.__layout.addWidget(self.__webview)
        self.__result = None

    def get_result(self):
        """Return the match object from the successful redirect, or None
        if the dialog was closed without hitting the success URL."""
        return self.__result

    def on_url_changed(self, qurl):
        """Slot for ``urlChanged``: accept the dialog on the success URL."""
        url = qurl.toString()
        match = self.__succre.match(url)
        # Fix: removed leftover debug print of every visited URL -- it
        # wrote OAuth redirect URLs (including authorization codes/state)
        # to stdout on each navigation.
        if match:
            self.__result = match
            self.accept()
if __name__ == '__main__':  # testing
    # Manual smoke test: open a GitHub OAuth authorization page and wait
    # for the redirect to the success URL.
    # NOTE: this block is Python 2 only (xrange, dict.iteritems and the
    # bare print statement below).
    import sys
    import string
    import random
    from PySide2.QtWidgets import QApplication

    qapp = QApplication(sys.argv)
    # w = QWebAuthDialog('https://www.google.com', r'https://www.google.com/search\?(.*)')

    # Random CSRF-protection "state" token for the OAuth request.
    webauthstate = ''.join(random.choice(string.ascii_letters) for _ in xrange(32))
    webauthparms = {'client_id': '42e8e8e9d844e45c2d05',
                    'redirect_uri': 'https://github.com/login/oauth/success',
                    'scope': 'gist',
                    'state': webauthstate}
    w = QWebAuthDialog(url='https://github.com/login/oauth/authorize?' +
                           '&'.join('%s=%s' % (k, v) for k, v in webauthparms.iteritems()),
                       success_re=r'https://github.com/login/oauth/success\?(.*)',
                       parent=None)
    w.setGeometry(512, 256, 1024, 768)
    res = w.exec_()
    print(res == QWebAuthDialog.Accepted)
    print(w.get_result())
    if res == QWebAuthDialog.Accepted:
        print w.get_result().groups()
    # qapp.exec_()
|
SUSE/kiwi
|
refs/heads/master
|
doc/source/conf.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# KIWI NG documentation build configuration file
#
import sys
from os.path import abspath, dirname, join, normpath
import shlex
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
_path = normpath(join(dirname(__file__), "../.."))  # repository root
sys.path.insert(0, _path)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules enabled for this documentation build.
# Fix: 'sphinx.ext.autodoc' was listed twice; Sphinx tolerates duplicates,
# but listing each extension once keeps the configuration honest.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.extlinks',
    'sphinx.ext.todo',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.intersphinx',
]
# Modules whose docopt-style module docstrings must not leak into the
# generated documentation.
docopt_ignore = [
    'kiwi.cli',
    'kiwi.tasks.system_build',
    'kiwi.tasks.system_prepare',
    'kiwi.tasks.system_update',
    'kiwi.tasks.system_create',
    'kiwi.tasks.result_list',
    'kiwi.tasks.result_bundle',
    'kiwi.tasks.image_resize',
    'kiwi.tasks.image_info'
]


def remove_module_docstring(app, what, name, obj, options, lines):
    """``autodoc-process-docstring`` hook: blank the docstring of any
    module listed in :data:`docopt_ignore` (mutates *lines* in place)."""
    if what != "module":
        return
    if name not in docopt_ignore:
        return
    lines.clear()
def prologReplace(app, docname, source):
    """``source-read`` hook: substitute every placeholder from the
    ``prolog_replacements`` config value in the document text.

    *source* is a one-element list; the replacement happens in place.
    """
    text = source[0]
    for placeholder, replacement in app.config.prolog_replacements.items():
        text = text.replace(placeholder, replacement)
    source[0] = text
def setup(app):
    """Sphinx extension entry point: register the replacement config
    value, hook both event handlers, and add the custom stylesheet."""
    # Config value must exist before the source-read handler reads it.
    app.add_config_value('prolog_replacements', {}, True)
    app.connect('source-read', prologReplace)
    app.connect("autodoc-process-docstring", remove_module_docstring)
    app.add_css_file('css/custom.css')
prolog_replacements = {
'{exc_image_base_name}': 'Leap-15.1_appliance',
'{exc_description}': 'suse-leap-15.1',
'{exc_netboot}': 'netboot/suse-leap15.1',
'{exc_os_version}': '15.1',
'{exc_image_version}': '1.15.1',
'{exc_repo}': 'obs://openSUSE:Leap:15.1/standard',
'{exc_kiwi_repo}':
'obs://Virtualization:Appliances:Builder/openSUSE_Leap_15.1',
'{schema_version}': '7.2',
'{kiwi}': 'KIWI NG',
'{kiwi-product}': 'KIWI Next Generation (KIWI NG)',
'{kiwi-legacy}': 'KIWI Legacy'
}
latex_documents = [
('index', 'kiwi.tex', 'KIWI NG Documentation', 'Marcus Schäfer', 'manual')
]
latex_elements = {
'papersize': 'a4paper',
'pointsize':'12pt',
'classoptions': ',openany',
'babel': '\\usepackage[english]{babel}',
'preamble': r'''
\makeatletter
\fancypagestyle{normal}{
\fancyhf{}
\fancyfoot[LE,RO]{{\py@HeaderFamily\thepage}}
\fancyfoot[LO]{{\py@HeaderFamily\nouppercase{\rightmark}}}
\fancyfoot[RE]{{\py@HeaderFamily\nouppercase{\leftmark}}}
\fancyhead[LE,RO]{{\py@HeaderFamily \@title, \py@release}}
\renewcommand{\headrulewidth}{0.4pt}
\renewcommand{\footrulewidth}{0.4pt}
}
\makeatother
'''
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# Bare `name` references in reST resolve as :py:obj:`name`.
default_role="py:obj"

# General information about the project.
project = 'KIWI NG'
copyright = '2020, Marcus Schäfer'
author = 'Marcus Schäfer'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '9.23.16'
# The full version, including alpha/beta/rc tags.
release = version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True

# Shorthand roles, e.g. :issue:`123` links to the GitHub issue tracker.
extlinks = {
    'issue': ('https://github.com/OSInside/kiwi/issues/%s', '#'),
    'pr': ('https://github.com/OSInside/kiwi/pull/%s', 'PR #'),
    'ghkiwi': ('https://github.com/OSInside/kiwi/blob/master/%s', '')
}

autosummary_generate = True

# -- Options for HTML output ----------------------------------------------
#html_short_title = '%s-%s' % (project, version)
#html_last_updated_fmt = '%b %d, %Y'
#html_split_index = True
html_logo = '.images/kiwi-logo.png'
html_sidebars = {
    '**': [
        'localtoc.html', 'relations.html',
        'about.html', 'searchbox.html',
    ]
}
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_static_path = ['.static']
html_theme_options = {
    'collapse_navigation': False,
    'display_version': False
}
# -- Options for manual page output ---------------------------------------

# The man page toctree documents.
kiwi_doc = 'commands/kiwi'
result_list_doc = 'commands/result_list'
result_bundle_doc = 'commands/result_bundle'
system_prepare_doc = 'commands/system_prepare'
system_update_doc = 'commands/system_update'
system_build_doc = 'commands/system_build'
system_create_doc = 'commands/system_create'
image_resize_doc = 'commands/image_resize'
image_info_doc = 'commands/image_info'

# One entry per manual page, built from a compact table of
# (source start file, name, description); every page is authored by
# `author` and installed in manual section 8.
man_pages = [
    (doc, name, description, [author], 8)
    for doc, name, description in (
        (kiwi_doc, 'kiwi', 'Creating Operating System Images'),
        (result_list_doc, 'kiwi::result::list', 'List build results'),
        (result_bundle_doc, 'kiwi::result::bundle', 'Bundle build results'),
        (system_prepare_doc, 'kiwi::system::prepare',
         'Prepare image root system'),
        (system_create_doc, 'kiwi::system::create',
         'Create image from prepared root system'),
        (system_update_doc, 'kiwi::system::update',
         'Update/Upgrade image root system'),
        (system_build_doc, 'kiwi::system::build',
         'Build image in combined prepare and create step'),
        (image_resize_doc, 'kiwi::image::resize',
         'Resize disk images to new geometry'),
        (image_info_doc, 'kiwi::image::info',
         'Provide detailed information about an image description'),
    )
]

intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}

# If true, show URL addresses after external links.
#man_show_urls = False
|
sbnoemi/notorhot
|
refs/heads/master
|
notorhot_example/app/settings.py
|
1
|
"""
Django settings for notorhot_example project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n_$x15t$)3@(v%zxg4s2&ggv*0w00is44qbm#ebupd5@1gkpt2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'notorhot_example.app',
'notorhot.contrib.write_in',
'sorl.thumbnail',
'model_utils',
'notorhot',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'notorhot_example.app.urls'
WSGI_APPLICATION = 'notorhot_example.app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'media', 'static')
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media', 'dynamic')
MEDIA_URL = '/media/'
|
mikewesner-wf/glasshouse
|
refs/heads/master
|
glasshouse.indigoPlugin/Contents/Server Plugin/flask/testsuite/test_apps/moduleapp/apps/admin/__init__.py
|
629
|
from flask import Module, render_template

# Admin sub-application mounted under /admin by the main app.
admin = Module(__name__, url_prefix='/admin')


@admin.route('/')
def index():
    # Template path relative to the application's template folder.
    return render_template('admin/index.html')


@admin.route('/index2')
def index2():
    # Same template addressed with an explicit './' prefix -- presumably
    # exercises template-path normalization; confirm against the test suite.
    return render_template('./admin/index.html')
|
molobrakos/home-assistant
|
refs/heads/master
|
tests/components/homekit/test_type_sensors.py
|
15
|
"""Test different accessory types: Sensors."""
from homeassistant.components.homekit.const import (
PROP_CELSIUS, THRESHOLD_CO, THRESHOLD_CO2)
from homeassistant.components.homekit.type_sensors import (
AirQualitySensor, BinarySensor, CarbonMonoxideSensor, CarbonDioxideSensor,
HumiditySensor, LightSensor, TemperatureSensor, BINARY_SENSOR_SERVICE_MAP)
from homeassistant.const import (
ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, STATE_HOME, STATE_NOT_HOME,
STATE_OFF, STATE_ON, STATE_UNKNOWN, TEMP_CELSIUS, TEMP_FAHRENHEIT)
async def test_temperature(hass, hk_driver):
    """TemperatureSensor: default value, Celsius passthrough and
    Fahrenheit-to-Celsius conversion on state change."""
    entity_id = 'sensor.temperature'

    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()
    acc = TemperatureSensor(hass, hk_driver, 'Temperature', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_temp.value == 0.0
    for key, value in PROP_CELSIUS.items():
        assert acc.char_temp.properties[key] == value

    # Unknown state leaves the characteristic at its default.
    hass.states.async_set(entity_id, STATE_UNKNOWN,
                          {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
    await hass.async_block_till_done()
    assert acc.char_temp.value == 0.0

    hass.states.async_set(entity_id, '20',
                          {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
    await hass.async_block_till_done()
    assert acc.char_temp.value == 20

    # 75.2 °F converts to 24 °C.
    hass.states.async_set(entity_id, '75.2',
                          {ATTR_UNIT_OF_MEASUREMENT: TEMP_FAHRENHEIT})
    await hass.async_block_till_done()
    assert acc.char_temp.value == 24
async def test_humidity(hass, hk_driver):
    """HumiditySensor: char_humidity tracks the entity's numeric state."""
    entity_id = 'sensor.humidity'

    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()
    acc = HumiditySensor(hass, hk_driver, 'Humidity', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_humidity.value == 0

    # Unknown state leaves the characteristic at its default.
    hass.states.async_set(entity_id, STATE_UNKNOWN)
    await hass.async_block_till_done()
    assert acc.char_humidity.value == 0

    hass.states.async_set(entity_id, '20')
    await hass.async_block_till_done()
    assert acc.char_humidity.value == 20
async def test_air_quality(hass, hk_driver):
    """AirQualitySensor: density mirrors the state and the quality
    characteristic is derived from it (1 = good ... 5 = poor)."""
    entity_id = 'sensor.air_quality'

    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()
    acc = AirQualitySensor(hass, hk_driver, 'Air Quality', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_density.value == 0
    assert acc.char_quality.value == 0

    # Unknown state leaves both characteristics at their defaults.
    hass.states.async_set(entity_id, STATE_UNKNOWN)
    await hass.async_block_till_done()
    assert acc.char_density.value == 0
    assert acc.char_quality.value == 0

    hass.states.async_set(entity_id, '34')
    await hass.async_block_till_done()
    assert acc.char_density.value == 34
    assert acc.char_quality.value == 1

    hass.states.async_set(entity_id, '200')
    await hass.async_block_till_done()
    assert acc.char_density.value == 200
    assert acc.char_quality.value == 5
async def test_co(hass, hk_driver):
    """CarbonMonoxideSensor: level follows the state, peak is sticky, and
    the detected flag trips above THRESHOLD_CO."""
    entity_id = 'sensor.co'

    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()
    acc = CarbonMonoxideSensor(hass, hk_driver, 'CO', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_level.value == 0
    assert acc.char_peak.value == 0
    assert acc.char_detected.value == 0

    hass.states.async_set(entity_id, STATE_UNKNOWN)
    await hass.async_block_till_done()
    assert acc.char_level.value == 0
    assert acc.char_peak.value == 0
    assert acc.char_detected.value == 0

    # Above threshold: level and peak rise, detection triggers.
    value = 32
    assert value > THRESHOLD_CO
    hass.states.async_set(entity_id, str(value))
    await hass.async_block_till_done()
    assert acc.char_level.value == 32
    assert acc.char_peak.value == 32
    assert acc.char_detected.value == 1

    # Back below threshold: level drops, peak is retained.
    value = 10
    assert value < THRESHOLD_CO
    hass.states.async_set(entity_id, str(value))
    await hass.async_block_till_done()
    assert acc.char_level.value == 10
    assert acc.char_peak.value == 32
    assert acc.char_detected.value == 0
async def test_co2(hass, hk_driver):
    """CarbonDioxideSensor: level follows the state, peak is sticky, and
    the detected flag trips above THRESHOLD_CO2."""
    entity_id = 'sensor.co2'

    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()
    acc = CarbonDioxideSensor(hass, hk_driver, 'CO2', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_level.value == 0
    assert acc.char_peak.value == 0
    assert acc.char_detected.value == 0

    hass.states.async_set(entity_id, STATE_UNKNOWN)
    await hass.async_block_till_done()
    assert acc.char_level.value == 0
    assert acc.char_peak.value == 0
    assert acc.char_detected.value == 0

    # Above threshold: level and peak rise, detection triggers.
    value = 1100
    assert value > THRESHOLD_CO2
    hass.states.async_set(entity_id, str(value))
    await hass.async_block_till_done()
    assert acc.char_level.value == 1100
    assert acc.char_peak.value == 1100
    assert acc.char_detected.value == 1

    # Back below threshold: level drops, peak is retained.
    value = 800
    assert value < THRESHOLD_CO2
    hass.states.async_set(entity_id, str(value))
    await hass.async_block_till_done()
    assert acc.char_level.value == 800
    assert acc.char_peak.value == 1100
    assert acc.char_detected.value == 0
async def test_light(hass, hk_driver):
    """LightSensor: char_light tracks the state; default is 0.0001 lux
    (HomeKit's minimum for ambient light level)."""
    entity_id = 'sensor.light'

    hass.states.async_set(entity_id, None)
    await hass.async_block_till_done()
    acc = LightSensor(hass, hk_driver, 'Light', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_light.value == 0.0001

    hass.states.async_set(entity_id, STATE_UNKNOWN)
    await hass.async_block_till_done()
    assert acc.char_light.value == 0.0001

    hass.states.async_set(entity_id, '300')
    await hass.async_block_till_done()
    assert acc.char_light.value == 300
async def test_binary(hass, hk_driver):
    """BinarySensor: on/home map to detected=1, off/not_home to 0, and a
    removed entity does not disturb the last value."""
    entity_id = 'binary_sensor.opening'

    hass.states.async_set(entity_id, STATE_UNKNOWN,
                          {ATTR_DEVICE_CLASS: 'opening'})
    await hass.async_block_till_done()
    acc = BinarySensor(hass, hk_driver, 'Window Opening', entity_id, 2, None)
    await hass.async_add_job(acc.run)

    assert acc.aid == 2
    assert acc.category == 10  # Sensor

    assert acc.char_detected.value == 0

    hass.states.async_set(entity_id, STATE_ON,
                          {ATTR_DEVICE_CLASS: 'opening'})
    await hass.async_block_till_done()
    assert acc.char_detected.value == 1

    hass.states.async_set(entity_id, STATE_OFF,
                          {ATTR_DEVICE_CLASS: 'opening'})
    await hass.async_block_till_done()
    assert acc.char_detected.value == 0

    # "home"/"not_home" are treated like on/off for presence-style sensors.
    hass.states.async_set(entity_id, STATE_HOME,
                          {ATTR_DEVICE_CLASS: 'opening'})
    await hass.async_block_till_done()
    assert acc.char_detected.value == 1

    hass.states.async_set(entity_id, STATE_NOT_HOME,
                          {ATTR_DEVICE_CLASS: 'opening'})
    await hass.async_block_till_done()
    assert acc.char_detected.value == 0

    # Removing the entity must not raise or change the characteristic.
    hass.states.async_remove(entity_id)
    await hass.async_block_till_done()
    assert acc.char_detected.value == 0
async def test_binary_device_classes(hass, hk_driver):
    """BinarySensor: every device class in BINARY_SENSOR_SERVICE_MAP gets
    its mapped HomeKit service and characteristic."""
    entity_id = 'binary_sensor.demo'

    for device_class, (service, char) in BINARY_SENSOR_SERVICE_MAP.items():
        hass.states.async_set(entity_id, STATE_OFF,
                              {ATTR_DEVICE_CLASS: device_class})
        await hass.async_block_till_done()
        acc = BinarySensor(hass, hk_driver, 'Binary Sensor',
                           entity_id, 2, None)
        assert acc.get_service(service).display_name == service
        assert acc.char_detected.display_name == char
|
LearnEra/LearnEraPlaftform
|
refs/heads/master
|
lms/djangoapps/notification_prefs/tests.py
|
11
|
import json
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.test import TestCase
from django.test.client import Client, RequestFactory
from django.test.utils import override_settings
from mock import Mock, patch
from notification_prefs import NOTIFICATION_PREF_KEY
from notification_prefs.views import ajax_enable, ajax_disable, ajax_status, set_subscription, UsernameCipher
from student.tests.factories import UserFactory
from edxmako.tests import mako_middleware_process_request
from user_api.models import UserPreference
from util.testing import UrlResetMixin
@override_settings(SECRET_KEY="test secret key")
class NotificationPrefViewTest(UrlResetMixin, TestCase):
# Fixed all-zero AES IV used by the hard-coded test tokens below.
INITIALIZATION_VECTOR = "\x00" * 16

@classmethod
def setUpClass(cls):
    # Make sure global state is set up appropriately
    # (an initial throwaway request warms up URL/middleware state before
    # the per-test requests run).
    Client().get("/")
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(NotificationPrefViewTest, self).setUp()
self.user = UserFactory.create(username="testuser")
# Tokens are intentionally hard-coded instead of computed to help us
# avoid breaking existing links.
self.tokens = {
self.user: "AAAAAAAAAAAAAAAAAAAAAA8mMQo96FZfb1YKv1R5X6s=",
# Username with length equal to AES block length to test padding
UserFactory.create(username="sixteencharsuser"):
"AAAAAAAAAAAAAAAAAAAAAPxPWCuI2Ay9TATBVnfw7eIj-hUh6erQ_-VkbDqHqm8D",
# Even longer username
UserFactory.create(username="thisusernameissoveryverylong"):
"AAAAAAAAAAAAAAAAAAAAAPECbYqPI7_W4mRF8LbTaHuHt3tNXPggZ1Bke-zDyEiZ",
# Non-ASCII username
UserFactory.create(username=u"\u4e2d\u56fd"):
"AAAAAAAAAAAAAAAAAAAAAMjfGAhZKIZsI3L-Z7nflTA="
}
self.request_factory = RequestFactory()
def create_prefs(self):
"""Create all test preferences in the database"""
for (user, token) in self.tokens.items():
UserPreference.objects.create(user=user, key=NOTIFICATION_PREF_KEY, value=token)
def assertPrefValid(self, user):
"""Ensure that the correct preference for the user is persisted"""
pref = UserPreference.objects.get(user=user, key=NOTIFICATION_PREF_KEY)
self.assertTrue(pref) # check exists and only 1 (.get)
# now coerce username to utf-8 encoded str, since we test with non-ascii unicdoe above and
# the unittest framework has hard time coercing to unicode.
# decrypt also can't take a unicode input, so coerce its input to str
self.assertEqual(str(user.username.encode('utf-8')), UsernameCipher().decrypt(str(pref.value)))
def assertNotPrefExists(self, user):
"""Ensure that the user does not have a persisted preference"""
self.assertFalse(
UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY).exists()
)
# AJAX status view
def test_ajax_status_get_0(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), {"status":0})
def test_ajax_status_get_1(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), {"status":1})
def test_ajax_status_post(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 405)
def test_ajax_status_anon_user(self):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_status, request)
# AJAX enable view
def test_ajax_enable_get(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_enable(request)
self.assertEqual(response.status_code, 405)
self.assertNotPrefExists(self.user)
def test_ajax_enable_anon_user(self):
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_enable, request)
self.assertNotPrefExists(self.user)
@patch("Crypto.Random.new")
def test_ajax_enable_success(self, mock_random_new):
mock_stream = Mock()
mock_stream.read.return_value = self.INITIALIZATION_VECTOR
mock_random_new.return_value = mock_stream
def test_user(user):
request = self.request_factory.post("dummy")
request.user = user
response = ajax_enable(request)
self.assertEqual(response.status_code, 204)
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
def test_ajax_enable_already_enabled(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_enable(request)
self.assertEqual(response.status_code, 204)
self.assertPrefValid(self.user)
def test_ajax_enable_distinct_values(self):
request = self.request_factory.post("dummy")
request.user = self.user
ajax_enable(request)
other_user = UserFactory.create()
request.user = other_user
ajax_enable(request)
self.assertNotEqual(
UserPreference.objects.get(user=self.user, key=NOTIFICATION_PREF_KEY).value,
UserPreference.objects.get(user=other_user, key=NOTIFICATION_PREF_KEY).value
)
# AJAX disable view
def test_ajax_disable_get(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 405)
self.assertPrefValid(self.user)
def test_ajax_disable_anon_user(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_disable, request)
self.assertPrefValid(self.user)
def test_ajax_disable_success(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 204)
self.assertNotPrefExists(self.user)
def test_ajax_disable_already_disabled(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 204)
self.assertNotPrefExists(self.user)
# Unsubscribe view
def test_unsubscribe_post(self):
request = self.request_factory.post("dummy")
response = set_subscription(request, "dummy", subscribe=False)
self.assertEqual(response.status_code, 405)
def test_unsubscribe_invalid_token(self):
def test_invalid_token(token, message):
request = self.request_factory.get("dummy")
self.assertRaisesRegexp(Http404, "^{}$".format(message), set_subscription, request, token, False)
# Invalid base64 encoding
test_invalid_token("ZOMG INVALID BASE64 CHARS!!!", "base64url")
test_invalid_token("Non-ASCII\xff", "base64url")
test_invalid_token(self.tokens[self.user][:-1], "base64url")
# Token not long enough to contain initialization vector
test_invalid_token("AAAAAAAAAAA=", "initialization_vector")
# Token length not a multiple of AES block length
test_invalid_token(self.tokens[self.user][:-4], "aes")
# Invalid padding (ends in 0 byte)
# Encrypted value: "testuser" + "\x00" * 8
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAMoazRI7ePLjEWXN1N7keLw=", "padding")
# Invalid padding (ends in byte > 16)
# Encrypted value: "testusertestuser"
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAC6iLXGhjkFytJoJSBJZzJ4=", "padding")
# Invalid padding (entire string is padding)
# Encrypted value: "\x10" * 16
test_invalid_token("AAAAAAAAAAAAAAAAAAAAANRGw8HDEmlcLVFawgY9wI8=", "padding")
# Nonexistent user
# Encrypted value: "nonexistentuser\x01"
test_invalid_token("AAAAAAAAAAAAAAAAAAAAACpyUxTGIrUjnpuUsNi7mAY=", "username")
def test_unsubscribe_success(self):
self.create_prefs()
def test_user(user):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
response = set_subscription(request, self.tokens[user], subscribe=False)
self.assertEqual(response.status_code, 200)
self.assertNotPrefExists(user)
for user in self.tokens.keys():
test_user(user)
def test_unsubscribe_twice(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
set_subscription(request, self.tokens[self.user], False)
response = set_subscription(request, self.tokens[self.user], subscribe=False)
self.assertEqual(response.status_code, 200)
self.assertNotPrefExists(self.user)
def test_resubscribe_success(self):
def test_user(user):
# start without a pref key
self.assertFalse(UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY))
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
response = set_subscription(request, self.tokens[user], subscribe=True)
self.assertEqual(response.status_code, 200)
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
|
pombreda/django-hotclub
|
refs/heads/master
|
fixtures/generate/zwitschern.py
|
2
|
import random
from pinax.zwitschern.models import tweet
from django.contrib.auth.models import User
from django.contrib.webdesign.lorem_ipsum import words
from django.template.defaultfilters import capfirst
from timezones.forms import TIMEZONE_CHOICES
# Candidate oEmbed URLs to splice into generated tweets.  The 50 empty
# strings heavily weight random.choice() toward "no embedded media".
OEMBED_CONTENT = [
    'http://www.youtube.com/watch?v=_RyodnisVvU&feature=rec-fresh',
    'http://revision3.com/tekzilla/superhot/',
    'http://www.viddler.com/explore/pop17/videos/93/',
] + ([''] * 50)
def generate():
for user in User.objects.all():
for num_tweets in xrange(random.randint(1, 50)):
num_words = random.randint(1, 100)
content = words(num_words, common=False)
oembed = random.choice(OEMBED_CONTENT)
split_num = random.randint(0, len(content) - 1)
content = capfirst('%s %s %s' % (content[:split_num], oembed,
content[split_num:]))[:139] + '.'
tweet(user, content)
print "Created %s Tweets from User: %s" % (num_tweets, user)
# Script entry point: populate tweets when run directly.
if __name__ == "__main__":
    generate()
|
DeviaVir/qtwebkit
|
refs/heads/phantomjs
|
Tools/Scripts/webkitpy/port/gtk.py
|
113
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import subprocess
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.port.base import Port
from webkitpy.port.pulseaudio_sanitizer import PulseAudioSanitizer
from webkitpy.port.xvfbdriver import XvfbDriver
class GtkPort(Port):
    """webkitpy Port implementation for the GTK+ platform.

    Tests run through XvfbDriver; PulseAudio modules are unloaded for the
    duration of a test run to keep audio tests deterministic.
    """
    port_name = "gtk"
    def __init__(self, *args, **kwargs):
        super(GtkPort, self).__init__(*args, **kwargs)
        # Silences/restores PulseAudio modules around a test run.
        self._pulseaudio_sanitizer = PulseAudioSanitizer()
    def warn_if_bug_missing_in_test_expectations(self):
        # Only warn for WebKit1 runs (not WebKitTestRunner/WebKit2).
        return not self.get_option('webkit_test_runner')
    def _port_flag_for_scripts(self):
        # Flag passed through to the webkitdirs perl scripts.
        return "--gtk"
    def _driver_class(self):
        return XvfbDriver
    def default_timeout_ms(self):
        """Per-test timeout in ms; doubled for Debug builds."""
        if self.get_option('configuration') == 'Debug':
            return 12 * 1000
        return 6 * 1000
    def setup_test_run(self):
        super(GtkPort, self).setup_test_run()
        self._pulseaudio_sanitizer.unload_pulseaudio_module()
    def clean_up_test_run(self):
        super(GtkPort, self).clean_up_test_run()
        self._pulseaudio_sanitizer.restore_pulseaudio_module()
    def setup_environ_for_server(self, server_name=None):
        """Extend the base server environment with GTK-specific variables
        (accessibility module, settings backend, bundle/plugin paths)."""
        environment = super(GtkPort, self).setup_environ_for_server(server_name)
        environment['GTK_MODULES'] = 'gail'
        environment['GSETTINGS_BACKEND'] = 'memory'
        environment['LIBOVERLAY_SCROLLBAR'] = '0'
        environment['TEST_RUNNER_INJECTED_BUNDLE_FILENAME'] = self._build_path('Libraries', 'libTestRunnerInjectedBundle.la')
        environment['TEST_RUNNER_TEST_PLUGIN_PATH'] = self._build_path('TestNetscapePlugin', '.libs')
        environment['WEBKIT_INSPECTOR_PATH'] = self._build_path('Programs', 'resources', 'inspector')
        environment['AUDIO_RESOURCES_PATH'] = self.path_from_webkit_base('Source', 'WebCore', 'platform', 'audio', 'resources')
        self._copy_value_from_environ_if_set(environment, 'WEBKIT_OUTPUTDIR')
        return environment
    def _generate_all_test_configurations(self):
        # One configuration per build type; architecture is fixed at x86.
        configurations = []
        for build_type in self.ALL_BUILD_TYPES:
            configurations.append(TestConfiguration(version=self._version, architecture='x86', build_type=build_type))
        return configurations
    def _path_to_driver(self):
        return self._build_path('Programs', self.driver_name())
    def _path_to_image_diff(self):
        return self._build_path('Programs', 'ImageDiff')
    def _path_to_webcore_library(self):
        # Probe the known GTK library names; first one present wins.
        gtk_library_names = [
            "libwebkitgtk-1.0.so",
            "libwebkitgtk-3.0.so",
            "libwebkit2gtk-1.0.so",
        ]
        for library in gtk_library_names:
            full_library = self._build_path(".libs", library)
            if self._filesystem.isfile(full_library):
                return full_library
        return None
    def _search_paths(self):
        """Baseline search order: wk1/wk2-specific dirs first, then gtk,
        then any user-supplied additional platform directories."""
        search_paths = []
        if self.get_option('webkit_test_runner'):
            search_paths.extend([self.port_name + '-wk2', 'wk2'])
        else:
            search_paths.append(self.port_name + '-wk1')
        search_paths.append(self.port_name)
        search_paths.extend(self.get_option("additional_platform_directory", []))
        return search_paths
    def default_baseline_search_path(self):
        return map(self._webkit_baseline_path, self._search_paths())
    def _port_specific_expectations_files(self):
        # Most-specific expectations last so they win on conflict.
        return [self._filesystem.join(self._webkit_baseline_path(p), 'TestExpectations') for p in reversed(self._search_paths())]
    # FIXME: We should find a way to share this implmentation with Gtk,
    # or teach run-launcher how to call run-safari and move this down to Port.
    def show_results_html_file(self, results_filename):
        """Open the results page in the GTK launcher ('-2' selects WebKit2)."""
        run_launcher_args = ["file://%s" % results_filename]
        if self.get_option('webkit_test_runner'):
            run_launcher_args.append('-2')
        # FIXME: old-run-webkit-tests also added ["-graphicssystem", "raster", "-style", "windows"]
        # FIXME: old-run-webkit-tests converted results_filename path for cygwin.
        self._run_script("run-launcher", run_launcher_args)
    def check_sys_deps(self, needs_http):
        return super(GtkPort, self).check_sys_deps(needs_http) and XvfbDriver.check_xvfb(self)
    def _get_gdb_output(self, coredump_path):
        """Run gdb in batch mode on a coredump; return (backtrace, errors)."""
        cmd = ['gdb', '-ex', 'thread apply all bt 1024', '--batch', str(self._path_to_driver()), coredump_path]
        proc = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        errors = [l.strip().decode('utf8', 'ignore') for l in stderr.splitlines()]
        return (stdout.decode('utf8', 'ignore'), errors)
    def _get_crash_log(self, name, pid, stdout, stderr, newer_than):
        """Build a crash log for a crashed driver process.

        Looks for a matching coredump under WEBKIT_CORE_DUMPS_DIRECTORY and
        runs gdb on it; when no dump is found, returns instructions on how
        to enable core dumps instead.
        """
        pid_representation = str(pid or '<unknown>')
        log_directory = os.environ.get("WEBKIT_CORE_DUMPS_DIRECTORY")
        errors = []
        crash_log = ''
        expected_crash_dump_filename = "core-pid_%s-_-process_%s" % (pid_representation, name)
        def match_filename(filesystem, directory, filename):
            # With a known pid require an exact match; otherwise match on
            # the process name appearing anywhere in the filename.
            if pid:
                return filename == expected_crash_dump_filename
            return filename.find(name) > -1
        if log_directory:
            dumps = self._filesystem.files_under(log_directory, file_filter=match_filename)
            if dumps:
                # Get the most recent coredump matching the pid and/or process name.
                coredump_path = list(reversed(sorted(dumps)))[0]
                if not newer_than or self._filesystem.mtime(coredump_path) > newer_than:
                    crash_log, errors = self._get_gdb_output(coredump_path)
        stderr_lines = errors + (stderr or '<empty>').decode('utf8', 'ignore').splitlines()
        errors_str = '\n'.join(('STDERR: ' + l) for l in stderr_lines)
        if not crash_log:
            if not log_directory:
                log_directory = "/path/to/coredumps"
            core_pattern = os.path.join(log_directory, "core-pid_%p-_-process_%e")
            crash_log = """\
Coredump %(expected_crash_dump_filename)s not found. To enable crash logs:
- run this command as super-user: echo "%(core_pattern)s" > /proc/sys/kernel/core_pattern
- enable core dumps: ulimit -c unlimited
- set the WEBKIT_CORE_DUMPS_DIRECTORY environment variable: export WEBKIT_CORE_DUMPS_DIRECTORY=%(log_directory)s
""" % locals()
        return (stderr, """\
Crash log for %(name)s (pid %(pid_representation)s):
%(crash_log)s
%(errors_str)s""" % locals())
|
isotoma/django-cms
|
refs/heads/master
|
cms/admin/forms.py
|
2
|
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.db.models.fields import BooleanField
from django.forms.util import ErrorList
from django.forms.widgets import HiddenInput
from django.template.defaultfilters import slugify
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _, get_language
from cms.apphook_pool import apphook_pool
from cms.constants import PAGE_TYPES_ID
from cms.forms.widgets import UserSelectAdminWidget, AppHookSelect, ApplicationConfigSelect
from cms.models import Page, PagePermission, PageUser, ACCESS_PAGE, PageUserGroup, Title, EmptyTitle
from cms.utils.compat.dj import get_user_model, force_unicode
from cms.utils.compat.forms import UserCreationForm
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import get_language_tuple, get_language_list
from cms.utils.mail import mail_page_user_change
from cms.utils.page import is_valid_page_slug
from cms.utils.page_resolver import is_valid_url
from cms.utils.permissions import (get_current_user, get_subordinate_users,
get_subordinate_groups,
get_user_permission_level)
from menus.menu_pool import menu_pool
def get_permission_acessor(obj):
    """Return the permission related-manager for a user- or group-like obj."""
    User = get_user_model()
    # Users (and PageUser proxies) expose ``user_permissions``;
    # everything else (groups) exposes ``permissions``.
    if isinstance(obj, (PageUser, User,)):
        return obj.user_permissions
    return obj.permissions
def save_permissions(data, obj):
    """Synchronise add/change/delete auth permissions on ``obj``.

    ``data`` carries boolean flags named ``can_<action>_<name>``; for each
    cms model below the matching Permission is added when the flag is set
    and removed otherwise.
    """
    model_map = (
        (Page, 'page'),
        (PageUser, 'pageuser'),
        (PageUserGroup, 'pageuser'),
        (PagePermission, 'pagepermission'),
    )
    if not obj.pk:
        # save obj, otherwise we can't assign permissions to him
        obj.save()
    accessor = get_permission_acessor(obj)
    for model, name in model_map:
        content_type = ContentType.objects.get_for_model(model)
        for action in ('add', 'change', 'delete'):
            # Resolve the auth Permission for this action on this model.
            codename = getattr(model._meta, 'get_%s_permission' % action)()
            perm = Permission.objects.get(content_type=content_type,
                                          codename=codename)
            if data.get('can_%s_%s' % (action, name), None):
                accessor.add(perm)
            else:
                accessor.remove(perm)
class PageForm(forms.ModelForm):
    """Admin form for creating/editing a Page together with the Title
    fields (title, slug, menu/page title, meta description) of one
    language at a time."""
    language = forms.ChoiceField(label=_("Language"), choices=get_language_tuple(),
                                 help_text=_('The current language of the content fields.'))
    page_type = forms.ChoiceField(label=_("Page type"), required=False)
    title = forms.CharField(label=_("Title"), widget=forms.TextInput(),
                            help_text=_('The default title'))
    slug = forms.CharField(label=_("Slug"), widget=forms.TextInput(),
                           help_text=_('The part of the title that is used in the URL'))
    menu_title = forms.CharField(label=_("Menu Title"), widget=forms.TextInput(),
                                 help_text=_('Overwrite what is displayed in the menu'), required=False)
    page_title = forms.CharField(label=_("Page Title"), widget=forms.TextInput(),
                                 help_text=_('Overwrites what is displayed at the top of your browser or in bookmarks'),
                                 required=False)
    meta_description = forms.CharField(label=_('Description meta tag'), required=False,
                                       widget=forms.Textarea(attrs={'maxlength': '155', 'rows': '4'}),
                                       help_text=_('A description of the page used by search engines.'),
                                       max_length=155)
    class Meta:
        model = Page
        fields = ["parent", "site", 'template']
    def __init__(self, *args, **kwargs):
        """Hide structural fields (set by the admin UI) and populate the
        language and page-type choices for the current site."""
        super(PageForm, self).__init__(*args, **kwargs)
        self.fields['parent'].widget = HiddenInput()
        self.fields['site'].widget = HiddenInput()
        self.fields['template'].widget = HiddenInput()
        self.fields['language'].widget = HiddenInput()
        if not self.fields['site'].initial:
            self.fields['site'].initial = Site.objects.get_current().pk
        site_id = self.fields['site'].initial
        languages = get_language_tuple(site_id)
        self.fields['language'].choices = languages
        if not self.fields['language'].initial:
            self.fields['language'].initial = get_language()
        if 'page_type' in self.fields:
            # Page types live under a reserved page (reverse_id PAGE_TYPES_ID);
            # its descendants become the selectable choices.
            try:
                type_root = Page.objects.get(publisher_is_draft=True, reverse_id=PAGE_TYPES_ID, site=site_id)
            except Page.DoesNotExist:
                type_root = None
            if type_root:
                language = self.fields['language'].initial
                type_ids = type_root.get_descendants().values_list('pk', flat=True)
                titles = Title.objects.filter(page__in=type_ids, language=language)
                choices = [('', '----')]
                for title in titles:
                    choices.append((title.page_id, title.title))
                self.fields['page_type'].choices = choices
    def clean(self):
        """Cross-field validation: site/parent consistency and slug
        uniqueness, including a trial title save to validate the URL."""
        cleaned_data = self.cleaned_data
        slug = cleaned_data.get('slug', '')
        page = self.instance
        lang = cleaned_data.get('language', None)
        # No language, can not go further, but validation failed already
        if not lang:
            return cleaned_data
        if 'parent' not in cleaned_data:
            cleaned_data['parent'] = None
        parent = cleaned_data.get('parent', None)
        try:
            site = self.cleaned_data.get('site', Site.objects.get_current())
        except Site.DoesNotExist:
            raise ValidationError("No site found for current settings.")
        if parent and parent.site != site:
            raise ValidationError("Site doesn't match the parent's page site")
        if site and not is_valid_page_slug(page, parent, lang, slug, site):
            self._errors['slug'] = ErrorList([_('Another page with this slug already exists')])
            del cleaned_data['slug']
        if self.instance and page.title_set.count():
            # Checking titles attached to the page makes sense only here because
            # AdminFormsTests.test_clean_overwrite_url validates the form when no
            # page instance is available.  Looks like just a theoretical corner case.
            title = page.get_title_obj(lang, fallback=False)
            if title and not isinstance(title, EmptyTitle) and slug:
                # Temporarily save the new slug to validate the resulting
                # path, rolling back on failure.
                oldslug = title.slug
                title.slug = slug
                title.save()
                try:
                    is_valid_url(title.path, page)
                except ValidationError as exc:
                    title.slug = oldslug
                    title.save()
                    if 'slug' in cleaned_data:
                        del cleaned_data['slug']
                    if hasattr(exc, 'messages'):
                        errors = exc.messages
                    else:
                        errors = [force_unicode(exc.message)]
                    self._errors['slug'] = ErrorList(errors)
        return cleaned_data
    def clean_slug(self):
        """Normalise the slug via slugify and reject empty results."""
        slug = slugify(self.cleaned_data['slug'])
        if not slug:
            raise ValidationError(_("Slug must not be empty."))
        return slug
    def clean_language(self):
        """Only accept languages enabled in the CMS settings."""
        language = self.cleaned_data['language']
        if not language in get_language_list():
            raise ValidationError("Given language does not match language settings.")
        return language
class PublicationDatesForm(forms.ModelForm):
    """Admin form restricted to publication start/end dates."""
    language = forms.ChoiceField(label=_("Language"), choices=get_language_tuple(),
                                 help_text=_('The current language of the content fields.'))
    def __init__(self, *args, **kwargs):
        # Dates are not language dependent, so let's just fake the language to
        # make the ModelAdmin happy
        super(PublicationDatesForm, self).__init__(*args, **kwargs)
        self.fields['language'].widget = HiddenInput()
        self.fields['site'].widget = HiddenInput()
        site_id = self.fields['site'].initial
        languages = get_language_tuple(site_id)
        self.fields['language'].choices = languages
        if not self.fields['language'].initial:
            self.fields['language'].initial = get_language()
    class Meta:
        model = Page
        fields = ['site', 'publication_date', 'publication_end_date']
class AdvancedSettingsForm(forms.ModelForm):
    """Admin form for a Page's advanced settings: overwrite URL, redirect,
    apphook attachment (and per-apphook configuration), navigation
    extenders and X-Frame-Options."""
    from cms.forms.fields import PageSmartLinkField
    application_urls = forms.ChoiceField(label=_('Application'),
                                         choices=(), required=False,
                                         help_text=_('Hook application to this page.'))
    overwrite_url = forms.CharField(label=_('Overwrite URL'), max_length=255, required=False,
                                    help_text=_('Keep this field empty if standard path should be used.'))
    xframe_options = forms.ChoiceField(
        choices=Page._meta.get_field('xframe_options').choices,
        label=_('X Frame Options'),
        help_text=_('Whether this page can be embedded in other pages or websites'),
        initial=Page._meta.get_field('xframe_options').default,
        required=False
    )
    redirect = PageSmartLinkField(label=_('Redirect'), required=False,
                                  help_text=_('Redirects to this URL.'),
                                  placeholder_text=_('Start typing...'),
                                  ajax_view='admin:cms_page_get_published_pagelist'
    )
    language = forms.ChoiceField(label=_("Language"), choices=get_language_tuple(),
                                 help_text=_('The current language of the content fields.'))
    # This is really a 'fake' field which does not correspond to any Page attribute
    # But creates a stub field to be populate by js
    application_configs = forms.ChoiceField(label=_('Application configurations'),
                                            choices=(), required=False,)
    fieldsets = (
        (None, {
            'fields': ('overwrite_url', 'redirect'),
        }),
        ('Language independent options', {
            'fields': ('site', 'template', 'reverse_id', 'soft_root', 'navigation_extenders',
                       'application_urls', 'application_namespace', 'application_configs',
                       'xframe_options',)
        })
    )
    def __init__(self, *args, **kwargs):
        """Populate language, navigation-extender and apphook choices, and
        wire up the dependent application-configuration selector."""
        super(AdvancedSettingsForm, self).__init__(*args, **kwargs)
        self.fields['language'].widget = HiddenInput()
        self.fields['site'].widget = HiddenInput()
        site_id = self.fields['site'].initial
        languages = get_language_tuple(site_id)
        self.fields['language'].choices = languages
        if not self.fields['language'].initial:
            self.fields['language'].initial = get_language()
        if 'navigation_extenders' in self.fields:
            self.fields['navigation_extenders'].widget = forms.Select(
                {}, [('', "---------")] + menu_pool.get_menus_by_attribute("cms_enabled", True))
        if 'application_urls' in self.fields:
            # Prepare a dict mapping the apps by class name ('PollApp') to
            # their app_name attribute ('polls'), if any.
            app_namespaces = {}
            app_configs = {}
            for hook in apphook_pool.get_apphooks():
                app = apphook_pool.get_apphook(hook[0])
                if app.app_name:
                    app_namespaces[hook[0]] = app.app_name
                if app.app_config:
                    app_configs[hook[0]] = app
            self.fields['application_urls'].widget = AppHookSelect(
                attrs={'id': 'application_urls'},
                app_namespaces=app_namespaces
            )
            self.fields['application_urls'].choices = [('', "---------")] + apphook_pool.get_apphooks()
            if app_configs:
                self.fields['application_configs'].widget = ApplicationConfigSelect(
                    attrs={'id': 'application_configs'},
                    app_configs=app_configs)
                if self.data.get('application_urls', False) and self.data['application_urls'] in app_configs:
                    # The selected apphook supports configurations: offer
                    # its configs and preselect the one matching the
                    # currently stored namespace.
                    self.fields['application_configs'].choices = [(config.pk, force_text(config)) for config in app_configs[self.data['application_urls']].get_configs()]
                    apphook = self.data.get('application_urls', False)
                    try:
                        config = apphook_pool.get_apphook(apphook).get_configs().get(namespace=self.initial['application_namespace'])
                        self.fields['application_configs'].initial = config.pk
                    except ObjectDoesNotExist:
                        # Provided apphook configuration doesn't exist (anymore),
                        # just skip it
                        # The user will choose another value anyway
                        pass
                else:
                    # If an app_config apphook is not selected, drop any value
                    # for application_configs to avoid the stale field data
                    # being validated by the field itself
                    try:
                        del self.data['application_configs']
                    except KeyError:
                        pass
        if 'redirect' in self.fields:
            self.fields['redirect'].widget.language = self.fields['language'].initial
    def clean(self):
        """Validate reverse_id uniqueness and reconcile the apphook with
        its instance namespace / configuration."""
        cleaned_data = super(AdvancedSettingsForm, self).clean()
        if 'reverse_id' in self.fields:
            id = cleaned_data['reverse_id']
            site_id = cleaned_data['site']
            if id:
                if Page.objects.filter(reverse_id=id, site=site_id, publisher_is_draft=True).exclude(
                        pk=self.instance.pk).count():
                    self._errors['reverse_id'] = self.error_class(
                        [_('A page with this reverse URL id exists already.')])
        apphook = cleaned_data.get('application_urls', None)
        # The field 'application_namespace' is a misnomer. It should be
        # 'instance_namespace'.
        instance_namespace = cleaned_data.get('application_namespace', None)
        application_config = cleaned_data.get('application_configs', None)
        if apphook:
            # application_config wins over application_namespace
            if application_config:
                # the value of the application config namespace is saved in
                # the 'usual' namespace field to be backward compatible
                # with existing apphooks
                config = apphook_pool.get_apphook(apphook).get_configs().get(pk=int(application_config))
                self.cleaned_data['application_namespace'] = config.namespace
            else:
                # The attribute on the apps 'app_name' is a misnomer, it should be
                # 'application_namespace'.
                application_namespace = apphook_pool.get_apphook(apphook).app_name
                if application_namespace and not instance_namespace:
                    if Page.objects.filter(
                        publisher_is_draft=True,
                        application_urls=apphook,
                        application_namespace=application_namespace
                    ).exclude(pk=self.instance.pk).count():
                        # Looks like there's already one with the default instance
                        # namespace defined.
                        self._errors['application_urls'] = ErrorList([
                            _('''You selected an apphook with an "app_name".
                                You must enter a unique instance name.''')
                        ])
                    else:
                        # OK, there are zero instances of THIS app that use the
                        # default instance namespace, so, since the user didn't
                        # provide one, we'll use the default. NOTE: The following
                        # line is really setting the "instance namespace" of the
                        # new app to the app’s "application namespace", which is
                        # the default instance namespace.
                        self.cleaned_data['application_namespace'] = application_namespace
        if instance_namespace and not apphook:
            # Namespace without an apphook is meaningless — clear it.
            self.cleaned_data['application_namespace'] = None
        if application_config and not apphook:
            # Same for a dangling application configuration.
            self.cleaned_data['application_configs'] = None
        return cleaned_data
    def clean_application_namespace(self):
        """Reject instance namespaces already used by another draft page."""
        namespace = self.cleaned_data['application_namespace']
        if namespace and Page.objects.filter(publisher_is_draft=True, application_namespace=namespace).exclude(pk=self.instance.pk).count():
            raise ValidationError(_('A instance name with this name already exists.'))
        return namespace
    def clean_xframe_options(self):
        """Fall back to the model default when the choice is left empty."""
        if 'xframe_options' not in self.fields:
            return  # nothing to do, field isn't present
        xframe_options = self.cleaned_data['xframe_options']
        if xframe_options == '':
            return Page._meta.get_field('xframe_options').default
        return xframe_options
    def clean_overwrite_url(self):
        """Validate the manual URL override against the URL rules."""
        if 'overwrite_url' in self.fields:
            url = self.cleaned_data['overwrite_url']
            is_valid_url(url, self.instance)
        return url
    class Meta:
        model = Page
        fields = [
            'site', 'template', 'reverse_id', 'overwrite_url', 'redirect', 'soft_root', 'navigation_extenders',
            'application_urls', 'application_namespace', "xframe_options",
        ]
class PagePermissionForm(forms.ModelForm):
    """Form for page view restrictions: login requirement and menu
    visibility limits."""
    class Meta:
        model = Page
        fields = ['login_required', 'limit_visibility_in_menu']
class PagePermissionInlineAdminForm(forms.ModelForm):
"""
Page permission inline admin form used in inline admin. Required, because
user and group queryset must be changed. User can see only users on the same
level or under him in choosen page tree, and users which were created by him,
but aren't assigned to higher page level than current user.
"""
page = forms.ModelChoiceField(Page, label=_('user'), widget=HiddenInput(), required=True)
    def __init__(self, *args, **kwargs):
        """Restrict the user/group choices to subordinates of the current
        admin user, choosing between a raw-id widget and a custom select
        widget depending on how many candidate users exist."""
        super(PagePermissionInlineAdminForm, self).__init__(*args, **kwargs)
        user = get_current_user() # current user from threadlocals
        sub_users = get_subordinate_users(user)
        limit_choices = True
        use_raw_id = False
        # Unfortunately, if there are > 500 users in the system, non-superusers
        # won't see any benefit here because if we ask Django to put all the
        # user PKs in limit_choices_to in the query string of the popup we're
        # in danger of causing 414 errors so we fall back to the normal input
        # widget.
        if get_cms_setting('RAW_ID_USERS'):
            if sub_users.count() < 500:
                # If there aren't too many users, proceed as normal and use a
                # raw id field with limit_choices_to
                limit_choices = True
                use_raw_id = True
            elif get_user_permission_level(user) == 0:
                # If there are enough choices to possibly cause a 414 request
                # URI too large error, we only proceed with the raw id field if
                # the user is a superuser & thus can legitimately circumvent
                # the limit_choices_to condition.
                limit_choices = False
                use_raw_id = True
        # We don't use the fancy custom widget if the admin form wants to use a
        # raw id field for the user
        if use_raw_id:
            from django.contrib.admin.widgets import ForeignKeyRawIdWidget
            # This check will be False if the number of users in the system
            # is less than the threshold set by the RAW_ID_USERS setting.
            if isinstance(self.fields['user'].widget, ForeignKeyRawIdWidget):
                # We can't set a queryset on a raw id lookup, but we can use
                # the fact that it respects the limit_choices_to parameter.
                if limit_choices:
                    self.fields['user'].widget.rel.limit_choices_to = dict(
                        id__in=list(sub_users.values_list('pk', flat=True))
                    )
        else:
            self.fields['user'].widget = UserSelectAdminWidget()
            self.fields['user'].queryset = sub_users
            self.fields['user'].widget.user = user # assign current user
        self.fields['group'].queryset = get_subordinate_groups(user)
def clean(self):
super(PagePermissionInlineAdminForm, self).clean()
for field in self.Meta.model._meta.fields:
if not isinstance(field, BooleanField) or not field.name.startswith('can_'):
continue
name = field.name
self.cleaned_data[name] = self.cleaned_data.get(name, False)
can_add = self.cleaned_data['can_add']
can_edit = self.cleaned_data['can_change']
# check if access for childrens, or descendants is granted
if can_add and self.cleaned_data['grant_on'] == ACCESS_PAGE:
# this is a missconfiguration - user can add/move page to current
# page but after he does this, he will not have permissions to
# access this page anymore, so avoid this
raise forms.ValidationError(_("Add page permission requires also "
"access to children, or descendants, otherwise added page "
"can't be changed by its creator."))
if can_add and not can_edit:
raise forms.ValidationError(_('Add page permission also requires edit page permission.'))
# TODO: finish this, but is it really required? might be nice to have
# check if permissions assigned in cms are correct, and display
# a message if not - correctness mean: if user has add permission to
# page, but he doesn't have auth permissions to add page object,
# display warning
return self.cleaned_data
def save(self, commit=True):
"""
Makes sure the boolean fields are set to False if they aren't
available in the form.
"""
instance = super(PagePermissionInlineAdminForm, self).save(commit=False)
for field in self._meta.model._meta.fields:
if isinstance(field, BooleanField) and field.name.startswith('can_'):
setattr(instance, field.name, self.cleaned_data.get(field.name, False))
if commit:
instance.save()
return instance
class Meta:
model = PagePermission
class ViewRestrictionInlineAdminForm(PagePermissionInlineAdminForm):
    """
    Inline permission form for view restrictions: the ``can_view`` flag is
    hidden from the user and always forced to ``True``.
    """
    can_view = forms.BooleanField(label=_('can_view'), widget=HiddenInput(), initial=True)

    def clean_can_view(self):
        # A clean_<fieldname>() method must return the cleaned *value* for
        # that single field. The previous code returned self.cleaned_data
        # (the whole dict), which Django then stored as the can_view value,
        # producing a self-referential mapping instead of a boolean.
        self.cleaned_data["can_view"] = True
        return True
class GlobalPagePermissionAdminForm(forms.ModelForm):
    """
    Admin form for global page permissions; a row must target either a user
    or a group, so at least one of the two has to be filled in.
    """

    def clean(self):
        super(GlobalPagePermissionAdminForm, self).clean()
        data = self.cleaned_data
        if not (data['user'] or data['group']):
            raise forms.ValidationError(_('Please select user or group first.'))
        return data
class GenericCmsPermissionForm(forms.ModelForm):
    """Generic form for User & Group permissions in cms
    """
    # Page object permissions.
    can_add_page = forms.BooleanField(label=_('Add'), required=False, initial=True)
    can_change_page = forms.BooleanField(label=_('Change'), required=False, initial=True)
    can_delete_page = forms.BooleanField(label=_('Delete'), required=False)
    can_recover_page = forms.BooleanField(label=_('Recover (any) pages'), required=False)

    # pageuser is for pageuser & group - they are combined together,
    # and read out from PageUser model
    can_add_pageuser = forms.BooleanField(label=_('Add'), required=False)
    can_change_pageuser = forms.BooleanField(label=_('Change'), required=False)
    can_delete_pageuser = forms.BooleanField(label=_('Delete'), required=False)

    # PagePermission object permissions.
    can_add_pagepermission = forms.BooleanField(label=_('Add'), required=False)
    can_change_pagepermission = forms.BooleanField(label=_('Change'), required=False)
    can_delete_pagepermission = forms.BooleanField(label=_('Delete'), required=False)

    def populate_initials(self, obj):
        """Read out permissions from permission system.

        Returns a dict mapping each 'can_<action>_<model>' form field name to
        a bool reflecting whether `obj` currently holds that permission.
        """
        initials = {}
        permission_acessor = get_permission_acessor(obj)
        for model in (Page, PageUser, PagePermission):
            name = model.__name__.lower()
            content_type = ContentType.objects.get_for_model(model)
            permissions = permission_acessor.filter(content_type=content_type).values_list('codename', flat=True)
            for t in ('add', 'change', 'delete'):
                # Ask the model's _meta for the canonical codename of each
                # add/change/delete permission and test membership.
                codename = getattr(model._meta, 'get_%s_permission' % t)()
                initials['can_%s_%s' % (t, name)] = codename in permissions
        return initials
class PageUserForm(UserCreationForm, GenericCmsPermissionForm):
    """
    Creation/change form for :class:`PageUser` that also manages the generic
    CMS permission checkboxes and an optional e-mail notification to the user.
    """
    notify_user = forms.BooleanField(label=_('Notify user'), required=False,
                                     help_text=_(
                                         'Send email notification to user about username or password change. Requires user email.'))

    class Meta:
        model = PageUser

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        if instance:
            initial = initial or {}
            initial.update(self.populate_initials(instance))

        super(PageUserForm, self).__init__(data, files, auto_id, prefix,
                                           initial, error_class, label_suffix, empty_permitted, instance)

        # Default to True: on the creation path UserCreationForm.save() must
        # run so the password gets hashed and stored (see save()). Previously
        # this attribute was only assigned on the change-form branch below,
        # so save() raised AttributeError for newly created users.
        self._password_change = True

        if instance:
            # if it is a change form, keep those fields as not required
            # password will be changed only if there is something entered inside
            self.fields['password1'].required = False
            self.fields['password1'].label = _('New password')
            self.fields['password2'].required = False
            self.fields['password2'].label = _('New password confirmation')

    def clean_username(self):
        # Change forms keep the stored username; skip the uniqueness check
        # that UserCreationForm performs for new users.
        if self.instance:
            return self.cleaned_data['username']
        return super(PageUserForm, self).clean_username()

    # required if the User model's USERNAME_FIELD is the email field
    def clean_email(self):
        if self.instance:
            return self.cleaned_data['email']
        return super(PageUserForm, self).clean_email()

    def clean_password2(self):
        # Both password fields left empty on a change form means
        # "keep the existing password".
        if self.instance and self.cleaned_data['password1'] == '' and self.cleaned_data['password2'] == '':
            self._password_change = False
            return u''
        return super(PageUserForm, self).clean_password2()

    def clean(self):
        cleaned_data = super(PageUserForm, self).clean()
        notify_user = self.cleaned_data['notify_user']
        if notify_user and not self.cleaned_data.get('email', None):
            raise forms.ValidationError(_("Email notification requires valid email address."))
        if self.cleaned_data['can_add_page'] and not self.cleaned_data['can_change_page']:
            raise forms.ValidationError(_("The permission to add new pages requires the permission to change pages!"))
        if self.cleaned_data['can_add_pageuser'] and not self.cleaned_data['can_change_pageuser']:
            raise forms.ValidationError(_("The permission to add new users requires the permission to change users!"))
        if self.cleaned_data['can_add_pagepermission'] and not self.cleaned_data['can_change_pagepermission']:
            raise forms.ValidationError(_("To add permissions you also need to edit them!"))
        return cleaned_data

    def save(self, commit=True):
        """Create user, assign him to staff users, and create permissions for
        him if required. Also assigns creator to user.
        """
        # Pick where in the MRO the cooperative save() starts: when a password
        # is being (re)set we need UserCreationForm.save() to run so it stores
        # the hashed password; otherwise skip it to preserve the old one.
        Super = PageUserForm if self._password_change else UserCreationForm
        user = super(Super, self).save(commit=False)

        user.is_staff = True
        created = not bool(user.pk)
        # assign creator to user (was previously called twice; the first,
        # discarded call has been removed)
        if created:
            user.created_by = get_current_user()

        if commit:
            user.save()
            save_permissions(self.cleaned_data, user)

        if self.cleaned_data['notify_user']:
            mail_page_user_change(user, created, self.cleaned_data['password1'])

        return user
class PageUserGroupForm(GenericCmsPermissionForm):
    """
    Creation/change form for :class:`PageUserGroup` exposing the generic CMS
    permission checkboxes.
    """

    class Meta:
        model = PageUserGroup
        fields = ('name', )

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None):
        if instance:
            initial = initial or {}
            initial.update(self.populate_initials(instance))

        super(PageUserGroupForm, self).__init__(data, files, auto_id, prefix,
                                                initial, error_class, label_suffix, empty_permitted, instance)

    def save(self, commit=True):
        """
        Save the group, stamping ``created_by`` on first save and persisting
        the permission checkboxes.
        """
        # Start the cooperative save() from *this* class. The previous call
        # was super(GenericCmsPermissionForm, self), which skips the named
        # class in the MRO and only behaved identically because it happens
        # not to define save().
        group = super(PageUserGroupForm, self).save(commit=False)

        created = not bool(group.pk)
        # assign the current (thread-local) user as the creator
        if created:
            group.created_by = get_current_user()

        if commit:
            group.save()
            save_permissions(self.cleaned_data, group)

        return group
|
JetBrains/intellij-community
|
refs/heads/master
|
python/testData/postfix/main/severalStatements.py
|
35
|
print("I want to be inside main").main<caret>
print("I want to be inside main too")
|
mbareta/edx-platform-ft
|
refs/heads/open-release/eucalyptus.master
|
common/test/acceptance/pages/studio/overview.py
|
9
|
"""
Course Outline page in Studio.
"""
import datetime
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from selenium.webdriver import ActionChains
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from ..common.utils import click_css, confirm_prompt
from .course_page import CoursePage
from .container import ContainerPage
from .utils import set_input_value_and_save, set_input_value
class CourseOutlineItem(object):
    """
    A mixin class for any :class:`PageObject` shown in a course outline.
    """
    # Note there are a few pylint disable=no-member occurances in this class, because
    # it was written assuming it is going to be a mixin to a PageObject and will have functions
    # such as self.wait_for_ajax, which doesn't exist on a generic `object`.
    BODY_SELECTOR = None
    EDIT_BUTTON_SELECTOR = '.xblock-field-value-edit'
    NAME_SELECTOR = '.item-title'
    NAME_INPUT_SELECTOR = '.xblock-field-input'
    NAME_FIELD_WRAPPER_SELECTOR = '.xblock-title .wrapper-xblock-field'
    STATUS_MESSAGE_SELECTOR = '> div[class$="status"] .status-message'
    CONFIGURATION_BUTTON_SELECTOR = '.action-item .configure-button'

    def __repr__(self):
        # CourseOutlineItem is also used as a mixin for CourseOutlinePage, which doesn't have a locator
        # Check for the existence of a locator so that errors when navigating to the course outline page don't show up
        # as errors in the repr method instead.
        try:
            return "{}(<browser>, {!r})".format(self.__class__.__name__, self.locator)  # pylint: disable=no-member
        except AttributeError:
            return "{}(<browser>)".format(self.__class__.__name__)

    def _bounded_selector(self, selector):
        """
        Returns `selector`, but limited to this particular `CourseOutlineItem` context
        """
        # If the item doesn't have a body selector or locator, then it can't be bounded
        # This happens in the context of the CourseOutlinePage
        # pylint: disable=no-member
        if self.BODY_SELECTOR and hasattr(self, 'locator'):
            return '{}[data-locator="{}"] {}'.format(
                self.BODY_SELECTOR,
                self.locator,
                selector
            )
        else:
            return selector

    @property
    def name(self):
        """
        Returns the display name of this object.
        """
        name_element = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).first  # pylint: disable=no-member
        if name_element:
            return name_element.text[0]
        else:
            return None

    @property
    def has_status_message(self):
        """
        Returns True if the item has a status message, False otherwise.
        """
        return self.q(css=self._bounded_selector(self.STATUS_MESSAGE_SELECTOR)).first.visible  # pylint: disable=no-member

    @property
    def status_message(self):
        """
        Returns the status message of this item.
        """
        return self.q(css=self._bounded_selector(self.STATUS_MESSAGE_SELECTOR)).text[0]  # pylint: disable=no-member

    @property
    def has_staff_lock_warning(self):
        """ Returns True if the 'Contains staff only content' message is visible """
        return self.status_message == 'Contains staff only content' if self.has_status_message else False

    @property
    def is_staff_only(self):
        """ Returns True if the visiblity state of this item is staff only (has a black sidebar) """
        return "is-staff-only" in self.q(css=self._bounded_selector(''))[0].get_attribute("class")  # pylint: disable=no-member

    def edit_name(self):
        """
        Puts the item's name into editable form.
        """
        self.q(css=self._bounded_selector(self.EDIT_BUTTON_SELECTOR)).first.click()  # pylint: disable=no-member

    def enter_name(self, new_name):
        """
        Enters new_name as the item's display name.
        """
        set_input_value(self, self._bounded_selector(self.NAME_INPUT_SELECTOR), new_name)

    def change_name(self, new_name):
        """
        Changes the container's name.
        """
        self.edit_name()
        set_input_value_and_save(self, self._bounded_selector(self.NAME_INPUT_SELECTOR), new_name)
        self.wait_for_ajax()  # pylint: disable=no-member

    def finalize_name(self):
        """
        Presses ENTER, saving the value of the display name for this item.
        """
        # pylint: disable=no-member
        self.q(css=self._bounded_selector(self.NAME_INPUT_SELECTOR)).results[0].send_keys(Keys.ENTER)
        self.wait_for_ajax()

    def set_staff_lock(self, is_locked):
        """
        Sets the explicit staff lock of item on the container page to is_locked.
        """
        modal = self.edit()
        modal.is_explicitly_locked = is_locked
        modal.save()

    def in_editable_form(self):
        """
        Return whether this outline item's display name is in its editable form.
        """
        # pylint: disable=no-member
        return "is-editing" in self.q(
            css=self._bounded_selector(self.NAME_FIELD_WRAPPER_SELECTOR)
        )[0].get_attribute("class")

    def edit(self):
        """
        Puts the item into editable form.
        """
        self.q(css=self._bounded_selector(self.CONFIGURATION_BUTTON_SELECTOR)).first.click()  # pylint: disable=no-member
        modal = CourseOutlineModal(self)
        # Fix: the promise must be fulfill()ed — a bare EmptyPromise is never
        # evaluated, so previously we could return before the modal appeared.
        EmptyPromise(lambda: modal.is_shown(), 'Modal is shown.').fulfill()  # pylint: disable=unnecessary-lambda
        return modal

    @property
    def release_date(self):
        """
        Returns the release date from the page. Date is "mm/dd/yyyy" string.
        """
        element = self.q(css=self._bounded_selector(".status-release-value"))  # pylint: disable=no-member
        return element.first.text[0] if element.present else None

    @property
    def due_date(self):
        """
        Returns the due date from the page. Date is "mm/dd/yyyy" string.
        """
        element = self.q(css=self._bounded_selector(".status-grading-date"))  # pylint: disable=no-member
        return element.first.text[0] if element.present else None

    @property
    def policy(self):
        """
        Select the grading format with `value` in the drop-down list.
        """
        element = self.q(css=self._bounded_selector(".status-grading-value"))  # pylint: disable=no-member
        return element.first.text[0] if element.present else None

    def publish(self):
        """
        Publish the unit.
        """
        click_css(self, self._bounded_selector('.action-publish'), require_notification=False)
        modal = CourseOutlineModal(self)
        # Fix: fulfill() the promise so we actually wait for the modal.
        EmptyPromise(lambda: modal.is_shown(), 'Modal is shown.').fulfill()  # pylint: disable=unnecessary-lambda
        modal.publish()

    @property
    def publish_action(self):
        """
        Returns the link for publishing a unit.
        """
        return self.q(css=self._bounded_selector('.action-publish')).first  # pylint: disable=no-member
class CourseOutlineContainer(CourseOutlineItem):
    """
    A mixin to a CourseOutline page object that adds the ability to load
    a child page object by title or by index.

    CHILD_CLASS must be a :class:`CourseOutlineChild` subclass.
    """
    CHILD_CLASS = None
    ADD_BUTTON_SELECTOR = '> .outline-content > .add-item a.button-new'

    def child(self, title, child_class=None):
        """
        Return the child page object (of type `child_class`, defaulting to
        CHILD_CLASS) whose displayed title equals `title`.

        :type self: object
        """
        if not child_class:
            child_class = self.CHILD_CLASS
        # pylint: disable=no-member
        return child_class(
            self.browser,
            self.q(css=child_class.BODY_SELECTOR).filter(
                lambda el: title in [inner.text for inner in
                                     el.find_elements_by_css_selector(child_class.NAME_SELECTOR)]
            ).attrs('data-locator')[0]
        )

    def children(self, child_class=None):
        """
        Returns all the children page objects of class child_class.
        """
        if not child_class:
            child_class = self.CHILD_CLASS
        # pylint: disable=no-member
        return self.q(css=self._bounded_selector(child_class.BODY_SELECTOR)).map(
            lambda el: child_class(self.browser, el.get_attribute('data-locator'))).results

    def child_at(self, index, child_class=None):
        """
        Returns the child at the specified index.
        :type self: object
        """
        if not child_class:
            child_class = self.CHILD_CLASS
        return self.children(child_class)[index]

    def add_child(self, require_notification=True):
        """
        Adds a child to this xblock, waiting for notifications.
        """
        click_css(
            self,
            self._bounded_selector(self.ADD_BUTTON_SELECTOR),
            require_notification=require_notification,
        )

    def expand_subsection(self):
        """
        Toggle the expansion of this subsection.
        """
        # pylint: disable=no-member
        # Disable jQuery animations so the toggle completes instantly and the
        # expanded/collapsed checks below don't race the animation.
        self.browser.execute_script("jQuery.fx.off = true;")

        def subsection_expanded():
            """
            Returns whether or not this subsection is expanded.
            """
            self.wait_for_element_presence(
                self._bounded_selector(self.ADD_BUTTON_SELECTOR), 'Toggle control is present'
            )
            add_button = self.q(css=self._bounded_selector(self.ADD_BUTTON_SELECTOR)).first.results
            # The "add" button is only displayed while the subsection is open.
            return add_button and add_button[0].is_displayed()

        currently_expanded = subsection_expanded()

        # Need to click slightly off-center in order for the click to be recognized.
        ele = self.browser.find_element_by_css_selector(self._bounded_selector('.ui-toggle-expansion .fa'))
        ActionChains(self.browser).move_to_element_with_offset(ele, 4, 4).click().perform()
        self.wait_for_element_presence(self._bounded_selector(self.ADD_BUTTON_SELECTOR), 'Subsection is expanded')

        EmptyPromise(
            lambda: subsection_expanded() != currently_expanded,
            "Check that the container {} has been toggled".format(self.locator)
        ).fulfill()

        # Re-enable animations for the remainder of the session.
        self.browser.execute_script("jQuery.fx.off = false;")

        return self

    @property
    def is_collapsed(self):
        """
        Return whether this outline item is currently collapsed.
        """
        return "is-collapsed" in self.q(css=self._bounded_selector('')).first.attrs("class")[0]  # pylint: disable=no-member
class CourseOutlineChild(PageObject, CourseOutlineItem):
    """
    A page object used as a child of :class:`CourseOutlineContainer`.
    """
    url = None
    BODY_SELECTOR = '.outline-item'

    def __init__(self, browser, locator):
        super(CourseOutlineChild, self).__init__(browser)
        self.locator = locator

    def is_browser_on_page(self):
        own_selector = '{}[data-locator="{}"]'.format(self.BODY_SELECTOR, self.locator)
        return self.q(css=own_selector).present

    def delete(self, cancel=False):
        """
        Click this item's delete button; if `cancel` is True, dismiss the
        confirmation prompt instead of accepting it.
        """
        click_css(self, self._bounded_selector('.delete-button'), require_notification=False)
        confirm_prompt(self, cancel)

    def _bounded_selector(self, selector):
        """
        Return `selector` restricted to this particular `CourseOutlineChild`.
        """
        return '{}[data-locator="{}"] {}'.format(self.BODY_SELECTOR, self.locator, selector)

    @property
    def name(self):
        """Display name of this item, or None when it has no title element."""
        titles = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).text
        return titles[0] if titles else None

    @property
    def children(self):
        """
        Return the first-generation descendant items of this item.
        """
        descendants = self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(
            lambda el: CourseOutlineChild(self.browser, el.get_attribute('data-locator'))).results

        # Collect the locators of everything two or more levels down, then
        # keep only the descendants that are not someone else's descendant.
        indirect_locators = set()
        for descendant in descendants:
            indirect_locators.update(child.locator for child in descendant.children)
        return [descendant for descendant in descendants if descendant.locator not in indirect_locators]
class CourseOutlineUnit(CourseOutlineChild):
    """
    PageObject wrapping a single unit link on the Studio course outline page.
    """
    url = None
    BODY_SELECTOR = '.outline-unit'
    NAME_SELECTOR = '.unit-title a'

    def go_to(self):
        """
        Open the container page this unit links to and return an initialized
        :class:`.ContainerPage` for it.
        """
        container = ContainerPage(self.browser, self.locator)
        return container.visit()

    def is_browser_on_page(self):
        return self.q(css=self.BODY_SELECTOR).present

    def children(self):
        """Return the nested units contained within this unit, if any."""
        to_unit = lambda el: CourseOutlineUnit(self.browser, el.get_attribute('data-locator'))
        return self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(to_unit).results
class CourseOutlineSubsection(CourseOutlineContainer, CourseOutlineChild):
    """
    :class`.PageObject` wrapping a subsection block on the Studio course
    outline page.
    """
    url = None
    BODY_SELECTOR = '.outline-subsection'
    NAME_SELECTOR = '.subsection-title'
    NAME_FIELD_WRAPPER_SELECTOR = '.subsection-header .wrapper-xblock-field'
    CHILD_CLASS = CourseOutlineUnit

    def unit(self, title):
        """Return the :class:`.CourseOutlineUnit` whose title is `title`."""
        return self.child(title)

    def units(self):
        """Return every unit contained in this subsection."""
        return self.children()

    def unit_at(self, index):
        """Return the unit at position `index` within this subsection."""
        return self.child_at(index)

    def add_unit(self):
        """Create a new unit inside this subsection."""
        add_button_css = self._bounded_selector(self.ADD_BUTTON_SELECTOR)
        self.q(css=add_button_css).click()
class CourseOutlineSection(CourseOutlineContainer, CourseOutlineChild):
    """
    :class`.PageObject` wrapping a section block on the Studio course
    outline page.
    """
    url = None
    BODY_SELECTOR = '.outline-section'
    NAME_SELECTOR = '.section-title'
    NAME_FIELD_WRAPPER_SELECTOR = '.section-header .wrapper-xblock-field'
    CHILD_CLASS = CourseOutlineSubsection

    def subsection(self, title):
        """Return the :class:`.CourseOutlineSubsection` titled `title`."""
        return self.child(title)

    def subsections(self):
        """Return a list of this section's subsections."""
        return self.children()

    def subsection_at(self, index):
        """Return the subsection at position `index` within this section."""
        return self.child_at(index)

    def add_subsection(self):
        """Create a new subsection inside this section."""
        self.add_child()
class ExpandCollapseLinkState(object):
    """
    Represents the three states that the expand/collapse link can be in
    """
    MISSING = 0   # link is not displayed at all
    COLLAPSE = 1  # link offers to collapse all sections
    EXPAND = 2    # link offers to expand all sections
class CourseOutlinePage(CoursePage, CourseOutlineContainer):
    """
    Course Outline page in Studio.
    """
    url_path = "course"
    CHILD_CLASS = CourseOutlineSection
    EXPAND_COLLAPSE_CSS = '.button-toggle-expand-collapse'
    BOTTOM_ADD_SECTION_BUTTON = '.outline > .add-section .button-new'

    def is_browser_on_page(self):
        return all([
            self.q(css='body.view-outline').present,
            self.q(css='.content-primary').present,
            self.q(css='div.ui-loading.is-hidden').present
        ])

    def view_live(self):
        """
        Clicks the "View Live" link and switches to the new tab
        """
        click_css(self, '.view-live-button', require_notification=False)
        self.browser.switch_to_window(self.browser.window_handles[-1])

    def section(self, title):
        """
        Return the :class:`.CourseOutlineSection` with the title `title`.
        """
        return self.child(title)

    def section_at(self, index):
        """
        Returns the :class:`.CourseOutlineSection` at the specified index.
        """
        return self.child_at(index)

    def click_section_name(self, parent_css=''):
        """
        Find and click on first section name in course outline
        """
        self.q(css='{} .section-name'.format(parent_css)).first.click()

    def get_section_name(self, parent_css='', page_refresh=False):
        """
        Get the list of names of all sections present
        """
        if page_refresh:
            self.browser.refresh()
        return self.q(css='{} .section-name'.format(parent_css)).text

    def section_name_edit_form_present(self, parent_css=''):
        """
        Check that section name edit form present
        """
        return self.q(css='{} .section-name input'.format(parent_css)).present

    def change_section_name(self, new_name, parent_css=''):
        """
        Change section name of first section present in course outline
        """
        self.click_section_name(parent_css)
        self.q(css='{} .section-name input'.format(parent_css)).first.fill(new_name)
        self.q(css='{} .section-name .save-button'.format(parent_css)).first.click()
        self.wait_for_ajax()

    def sections(self):
        """
        Returns the sections of this course outline page.
        """
        return self.children()

    def add_section_from_top_button(self):
        """
        Clicks the button for adding a section which resides at the top of the screen.
        """
        click_css(self, '.wrapper-mast nav.nav-actions .button-new')

    def add_section_from_bottom_button(self, click_child_icon=False):
        """
        Clicks the button for adding a section which resides at the bottom of the screen.
        """
        element_css = self.BOTTOM_ADD_SECTION_BUTTON
        if click_child_icon:
            element_css += " .fa-plus"

        click_css(self, element_css)

    def toggle_expand_collapse(self):
        """
        Toggles whether all sections are expanded or collapsed
        """
        self.q(css=self.EXPAND_COLLAPSE_CSS).click()

    def start_reindex(self):
        """
        Starts course reindex by clicking reindex button
        """
        self.reindex_button.click()

    def open_subsection_settings_dialog(self, index=0):
        """
        clicks on the settings button of subsection.
        """
        self.q(css=".subsection-header-actions .configure-button").nth(index).click()
        self.wait_for_element_presence('.course-outline-modal', 'Subsection settings modal is present.')

    def change_problem_release_date(self, date="01/01/2030"):
        """
        Sets a new start date.

        `date` is an input-compatible string (e.g. '01/01/2030'); it defaults
        to the value this helper always used before being parameterized, so
        existing callers are unaffected.
        """
        self.q(css=".subsection-header-actions .configure-button").first.click()
        self.q(css="#start_date").fill(date)
        self.q(css=".action-save").first.click()
        self.wait_for_ajax()

    def change_problem_due_date(self, date):
        """
        Sets a new due date.

        Expects date to be a string that will be accepted by the input (for example, '01/01/1970')
        """
        self.q(css=".subsection-header-actions .configure-button").first.click()
        self.q(css="#due_date").fill(date)
        self.q(css=".action-save").first.click()
        self.wait_for_ajax()

    def select_advanced_tab(self):
        """
        Select the advanced settings tab
        """
        self.q(css=".settings-tab-button[data-tab='advanced']").first.click()
        self.wait_for_element_presence('input.no_special_exam', 'Special exam settings fields not present.')

    def make_exam_proctored(self):
        """
        Makes a Proctored exam.
        """
        self.q(css="input.proctored_exam").first.click()
        self.q(css=".action-save").first.click()
        self.wait_for_ajax()

    def make_exam_timed(self, hide_after_due=False):
        """
        Makes a timed exam.
        """
        self.q(css="input.timed_exam").first.click()
        if hide_after_due:
            self.q(css='.field-hide-after-due input').first.click()
        self.q(css=".action-save").first.click()
        self.wait_for_ajax()

    def select_none_exam(self):
        """
        Choose "none" exam but do not press enter
        """
        self.q(css="input.no_special_exam").first.click()

    def select_timed_exam(self):
        """
        Choose a timed exam but do not press enter
        """
        self.q(css="input.timed_exam").first.click()

    def select_proctored_exam(self):
        """
        Choose a proctored exam but do not press enter
        """
        self.q(css="input.proctored_exam").first.click()

    def select_practice_exam(self):
        """
        Choose a practice exam but do not press enter
        """
        self.q(css="input.practice_exam").first.click()

    def time_allotted_field_visible(self):
        """
        returns whether the time allotted field is visible
        """
        return self.q(css=".field-time-limit").visible

    def exam_review_rules_field_visible(self):
        """
        Returns whether the review rules field is visible
        """
        return self.q(css=".field-exam-review-rules").visible

    def hide_after_due_field_visible(self):
        """
        Returns whether the hide after due field is visible
        """
        return self.q(css=".field-hide-after-due").visible

    def proctoring_items_are_displayed(self):
        """
        Returns True if all the items are found.
        """
        # The None radio button
        if not self.q(css="input.no_special_exam").present:
            return False

        # The Timed exam radio button
        if not self.q(css="input.timed_exam").present:
            return False

        # The Proctored exam radio button
        if not self.q(css="input.proctored_exam").present:
            return False

        # The Practice exam radio button
        if not self.q(css="input.practice_exam").present:
            return False

        return True

    def select_access_tab(self):
        """
        Select the access settings tab.
        """
        self.q(css=".settings-tab-button[data-tab='access']").first.click()
        self.wait_for_element_visibility('#is_prereq', 'Gating settings fields are present.')

    def make_gating_prerequisite(self):
        """
        Makes a subsection a gating prerequisite.
        """
        if not self.q(css="#is_prereq")[0].is_selected():
            self.q(css='label[for="is_prereq"]').click()
        self.q(css=".action-save").first.click()
        self.wait_for_ajax()

    def add_prerequisite_to_subsection(self, min_score):
        """
        Adds a prerequisite to a subsection.
        """
        Select(self.q(css="#prereq")[0]).select_by_index(1)
        self.q(css="#prereq_min_score").fill(min_score)
        self.q(css=".action-save").first.click()
        self.wait_for_ajax()

    def gating_prerequisite_checkbox_is_visible(self):
        """
        Returns True if the gating prerequisite checkbox is visible.
        """
        # The Prerequisite checkbox is visible
        return self.q(css="#is_prereq").visible

    def gating_prerequisite_checkbox_is_checked(self):
        """
        Returns True if the gating prerequisite checkbox is checked.
        """
        # The Prerequisite checkbox is checked
        return self.q(css="#is_prereq:checked").present

    def gating_prerequisites_dropdown_is_visible(self):
        """
        Returns True if the gating prerequisites dropdown is visible.
        """
        # The Prerequisites dropdown is visible
        return self.q(css="#prereq").visible

    def gating_prerequisite_min_score_is_visible(self):
        """
        Returns True if the gating prerequisite minimum score input is visible.
        """
        # The Prerequisites dropdown is visible
        return self.q(css="#prereq_min_score").visible

    @property
    def bottom_add_section_button(self):
        """
        Returns the query representing the bottom add section button.
        """
        return self.q(css=self.BOTTOM_ADD_SECTION_BUTTON).first

    @property
    def has_no_content_message(self):
        """
        Returns true if a message informing the user that the course has no content is visible
        """
        return self.q(css='.outline .no-content').is_present()

    @property
    def has_rerun_notification(self):
        """
        Returns true iff the rerun notification is present on the page.
        """
        return self.q(css='.wrapper-alert.is-shown').is_present()

    def dismiss_rerun_notification(self):
        """
        Clicks the dismiss button in the rerun notification.
        """
        self.q(css='.dismiss-button').click()

    @property
    def expand_collapse_link_state(self):
        """
        Returns the current state of the expand/collapse link
        """
        link = self.q(css=self.EXPAND_COLLAPSE_CSS)[0]
        if not link.is_displayed():
            return ExpandCollapseLinkState.MISSING
        elif "collapse-all" in link.get_attribute("class"):
            return ExpandCollapseLinkState.COLLAPSE
        else:
            return ExpandCollapseLinkState.EXPAND

    @property
    def reindex_button(self):
        """
        Returns reindex button.
        """
        return self.q(css=".button.button-reindex")[0]

    def expand_all_subsections(self):
        """
        Expands all the subsections in this course.
        """
        for section in self.sections():
            if section.is_collapsed:
                section.expand_subsection()
            for subsection in section.subsections():
                if subsection.is_collapsed:
                    subsection.expand_subsection()

    @property
    def xblocks(self):
        """
        Return a list of xblocks loaded on the outline page.
        """
        return self.children(CourseOutlineChild)

    @property
    def license(self):
        """
        Returns the course license text, if present. Else returns None.
        """
        return self.q(css=".license-value").first.text[0]

    @property
    def deprecated_warning_visible(self):
        """
        Returns true if the deprecated warning is visible.
        """
        return self.q(css='.wrapper-alert-error.is-shown').is_present()

    @property
    def warning_heading_text(self):
        """
        Returns deprecated warning heading text.
        """
        return self.q(css='.warning-heading-text').text[0]

    @property
    def components_list_heading(self):
        """
        Returns deprecated warning component list heading text.
        """
        return self.q(css='.components-list-heading-text').text[0]

    @property
    def modules_remove_text_shown(self):
        """
        Returns True if deprecated warning advance modules remove text is visible.
        """
        return self.q(css='.advance-modules-remove-text').visible

    @property
    def modules_remove_text(self):
        """
        Returns deprecated warning advance modules remove text.
        """
        return self.q(css='.advance-modules-remove-text').text[0]

    @property
    def components_visible(self):
        """
        Returns True if components list visible.
        """
        return self.q(css='.components-list').visible

    @property
    def components_display_names(self):
        """
        Returns deprecated warning components display name list.
        """
        return self.q(css='.components-list li>a').text

    @property
    def deprecated_advance_modules(self):
        """
        Returns deprecated advance modules list.
        """
        return self.q(css='.advance-modules-list li').text
class CourseOutlineModal(object):
"""
Page object specifically for a modal window on the course outline page.
"""
MODAL_SELECTOR = ".wrapper-modal-window"
    def __init__(self, page):
        # `page` is the owning PageObject (e.g. CourseOutlinePage); all of
        # this modal's browser queries are delegated to it.
        self.page = page
def _bounded_selector(self, selector):
"""
Returns `selector`, but limited to this particular `CourseOutlineModal` context.
"""
return " ".join([self.MODAL_SELECTOR, selector])
    def is_shown(self):
        """
        Return whether or not the modal defined by self.MODAL_SELECTOR is shown.
        """
        # `present` checks existence in the DOM, not visual visibility.
        return self.page.q(css=self.MODAL_SELECTOR).present
def find_css(self, selector):
"""
Find the given css selector on the page.
"""
return self.page.q(css=self._bounded_selector(selector))
def click(self, selector, index=0):
"""
Perform a Click action on the given selector.
"""
self.find_css(selector).nth(index).click()
def save(self):
"""
Click the save action button, and wait for the ajax call to return.
"""
self.click(".action-save")
self.page.wait_for_ajax()
def publish(self):
"""
Click the publish action button, and wait for the ajax call to return.
"""
self.click(".action-publish")
self.page.wait_for_ajax()
def cancel(self):
"""
Click the cancel action button.
"""
self.click(".action-cancel")
def has_release_date(self):
"""
Check if the input box for the release date exists in the subsection's settings window
"""
return self.find_css("#start_date").present
def has_release_time(self):
"""
Check if the input box for the release time exists in the subsection's settings window
"""
return self.find_css("#start_time").present
def has_due_date(self):
"""
Check if the input box for the due date exists in the subsection's settings window
"""
return self.find_css("#due_date").present
def has_due_time(self):
"""
Check if the input box for the due time exists in the subsection's settings window
"""
return self.find_css("#due_time").present
def has_policy(self):
"""
Check if the input for the grading policy is present.
"""
return self.find_css("#grading_type").present
def set_date(self, property_name, input_selector, date):
"""
Set `date` value to input pointed by `selector` and `property_name`.
"""
month, day, year = map(int, date.split('/'))
self.click(input_selector)
if getattr(self, property_name):
current_month, current_year = map(int, getattr(self, property_name).split('/')[1:])
else: # Use default timepicker values, which are current month and year.
current_month, current_year = datetime.datetime.today().month, datetime.datetime.today().year
date_diff = 12 * (year - current_year) + month - current_month
selector = "a.ui-datepicker-{}".format('next' if date_diff > 0 else 'prev')
for __ in xrange(abs(date_diff)):
self.page.q(css=selector).click()
self.page.q(css="a.ui-state-default").nth(day - 1).click() # set day
self.page.wait_for_element_invisibility("#ui-datepicker-div", "datepicker should be closed")
EmptyPromise(
lambda: getattr(self, property_name) == u'{m}/{d}/{y}'.format(m=month, d=day, y=year),
"{} is updated in modal.".format(property_name)
).fulfill()
def set_time(self, input_selector, time):
"""
Set `time` value to input pointed by `input_selector`
Not using the time picker to make sure it's not being rounded up
"""
self.page.q(css=input_selector).fill(time)
self.page.q(css=input_selector).results[0].send_keys(Keys.ENTER)
@property
def release_date(self):
"""
Returns the unit's release date. Date is "mm/dd/yyyy" string.
"""
return self.find_css("#start_date").first.attrs('value')[0]
@release_date.setter
def release_date(self, date):
"""
Sets the unit's release date to `date`. Date is "mm/dd/yyyy" string.
"""
self.set_date('release_date', "#start_date", date)
@property
def release_time(self):
"""
Returns the current value of the release time. Default is u'00:00'
"""
return self.find_css("#start_time").first.attrs('value')[0]
@release_time.setter
def release_time(self, time):
"""
Time is "HH:MM" string.
"""
self.set_time("#start_time", time)
@property
def due_date(self):
"""
Returns the due date from the page. Date is "mm/dd/yyyy" string.
"""
return self.find_css("#due_date").first.attrs('value')[0]
@due_date.setter
def due_date(self, date):
"""
Sets the due date for the unit. Date is "mm/dd/yyyy" string.
"""
self.set_date('due_date', "#due_date", date)
@property
def due_time(self):
"""
Returns the current value of the release time. Default is u''
"""
return self.find_css("#due_time").first.attrs('value')[0]
@due_time.setter
def due_time(self, time):
"""
Time is "HH:MM" string.
"""
self.set_time("#due_time", time)
@property
def policy(self):
"""
Select the grading format with `value` in the drop-down list.
"""
element = self.find_css('#grading_type')[0]
return self.get_selected_option_text(element)
@policy.setter
def policy(self, grading_label):
"""
Select the grading format with `value` in the drop-down list.
"""
element = self.find_css('#grading_type')[0]
select = Select(element)
select.select_by_visible_text(grading_label)
EmptyPromise(
lambda: self.policy == grading_label,
"Grading label is updated.",
).fulfill()
@property
def is_explicitly_locked(self):
"""
Returns true if the explict staff lock checkbox is checked, false otherwise.
"""
return self.find_css('#staff_lock')[0].is_selected()
@is_explicitly_locked.setter
def is_explicitly_locked(self, value):
"""
Checks the explicit staff lock box if value is true, otherwise unchecks the box.
"""
if value != self.is_explicitly_locked:
self.find_css('label[for="staff_lock"]').click()
EmptyPromise(lambda: value == self.is_explicitly_locked, "Explicit staff lock is updated").fulfill()
def shows_staff_lock_warning(self):
"""
Returns true iff the staff lock warning is visible.
"""
return self.find_css('.staff-lock .tip-warning').visible
def get_selected_option_text(self, element):
"""
Returns the text of the first selected option for the element.
"""
if element:
select = Select(element)
return select.first_selected_option.text
else:
return None
|
mafintosh/phantomjs
|
refs/heads/master
|
src/breakpad/src/tools/gyp/test/sibling/gyptest-all.py
|
151
|
#!/usr/bin/env python

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Builds src/build/all.gyp and verifies that the sibling programs prog1 and
prog2 are built and runnable, locating them in the per-generator output
directory for each gyp format.
"""

import TestGyp

test = TestGyp.TestGyp()

test.run_gyp('build/all.gyp', chdir='src')
test.build('build/all.gyp', test.ALL, chdir='src')

chdir = 'src/build'

# The top-level Makefile is in the directory where gyp was run.
# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
# file? What about when passing in multiple .gyp files? Would sub-project
# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
if test.format == 'make':
    chdir = 'src'

# Xcode places each target's products under its own project directory.
if test.format == 'xcode':
    chdir = 'src/prog1'
test.run_built_executable('prog1',
                          chdir=chdir,
                          stdout="Hello from prog1.c\n")

if test.format == 'xcode':
    chdir = 'src/prog2'
test.run_built_executable('prog2',
                          chdir=chdir,
                          stdout="Hello from prog2.c\n")

test.pass_test()
|
thaumos/ansible
|
refs/heads/devel
|
lib/ansible/plugins/inventory/virtualbox.py
|
10
|
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: virtualbox
plugin_type: inventory
short_description: virtualbox inventory source
description:
- Get inventory hosts from the local virtualbox installation.
- Uses a YAML configuration file that ends with virtualbox.(yml|yaml) or vbox.(yml|yaml).
- The inventory_hostname is always the 'Name' of the virtualbox instance.
extends_documentation_fragment:
- constructed
- inventory_cache
options:
plugin:
description: token that ensures this is a source file for the 'virtualbox' plugin
required: True
choices: ['virtualbox']
running_only:
description: toggles showing all vms vs only those currently running
type: boolean
default: False
settings_password_file:
description: provide a file containing the settings password (equivalent to --settingspwfile)
network_info_path:
description: property path to query for network information (ansible_host)
default: "/VirtualBox/GuestInfo/Net/0/V4/IP"
query:
description: create vars from virtualbox properties
type: dictionary
default: {}
'''
EXAMPLES = '''
# file must be named vbox.yaml or vbox.yml
simple_config_file:
plugin: virtualbox
settings_password_file: /etc/virtulbox/secrets
query:
logged_in_users: /VirtualBox/GuestInfo/OS/LoggedInUsersList
compose:
ansible_connection: ('indows' in vbox_Guest_OS)|ternary('winrm', 'ssh')
'''
import os
from subprocess import Popen, PIPE
from ansible.errors import AnsibleParserError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
from ansible.module_utils.common.process import get_bin_path
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
    ''' Host inventory parser for ansible using local virtualbox. '''

    NAME = 'virtualbox'
    # CLI binary name; resolved to an absolute path via get_bin_path() in parse().
    VBOX = "VBoxManage"

    def __init__(self):
        self._vbox_path = None  # set in parse(); bytes/str path to VBoxManage
        super(InventoryModule, self).__init__()

    def _query_vbox_data(self, host, property_path):
        """Return the guest-property value for `host`, or None on any failure."""
        ret = None
        try:
            cmd = [self._vbox_path, b'guestproperty', b'get',
                   to_bytes(host, errors='surrogate_or_strict'),
                   to_bytes(property_path, errors='surrogate_or_strict')]
            x = Popen(cmd, stdout=PIPE)
            ipinfo = to_text(x.stdout.read(), errors='surrogate_or_strict')
            # Successful lookups print "Value: <data>"; absent properties do not.
            if 'Value' in ipinfo:
                a, ip = ipinfo.split(':', 1)
                ret = ip.strip()
        except Exception:
            # NOTE(review): deliberate best-effort — any failure (missing binary,
            # bad host name) is treated as "no data"; consider logging here.
            pass
        return ret

    def _set_variables(self, hostvars):
        """Push accumulated per-host vars into the inventory, then apply
        constructed vars/groups from the 'compose'/'groups'/'keyed_groups' options."""
        # set vars in inventory from hostvars
        for host in hostvars:
            query = self.get_option('query')
            # create vars from vbox properties
            if query and isinstance(query, MutableMapping):
                for varname in query:
                    hostvars[host][varname] = self._query_vbox_data(host, query[varname])
            strict = self.get_option('strict')
            # create composite vars
            self._set_composite_vars(self.get_option('compose'), hostvars[host], host, strict=strict)
            # actually update inventory
            for key in hostvars[host]:
                self.inventory.set_variable(host, key, hostvars[host][key])
            # constructed groups based on conditionals
            self._add_host_to_composed_groups(self.get_option('groups'), hostvars[host], host, strict=strict)
            # constructed keyed_groups
            self._add_host_to_keyed_groups(self.get_option('keyed_groups'), hostvars[host], host, strict=strict)

    def _populate_from_cache(self, source_data):
        """Rebuild the inventory from a previously cached result structure
        (the dict produced by _populate_from_source)."""
        hostvars = source_data.pop('_meta', {}).get('hostvars', {})
        for group in source_data:
            if group == 'all':
                continue
            else:
                group = self.inventory.add_group(group)
                hosts = source_data[group].get('hosts', [])
                for host in hosts:
                    self._populate_host_vars([host], hostvars.get(host, {}), group)
                self.inventory.add_child('all', group)
        if not source_data:
            # No group info cached: add every host flat.
            for host in hostvars:
                self.inventory.add_host(host)
                self._populate_host_vars([host], hostvars.get(host, {}))

    def _populate_from_source(self, source_data, using_current_cache=False):
        """Parse `VBoxManage list -l` output (a list of lines) into the
        inventory and return a cacheable dict of groups + hostvars.

        When `using_current_cache` is true, `source_data` is the cached dict
        and is replayed via _populate_from_cache instead of being parsed.
        """
        if using_current_cache:
            self._populate_from_cache(source_data)
            return source_data
        cacheable_results = {'_meta': {'hostvars': {}}}
        hostvars = {}
        # prevkey tracks the previous var name so indented continuation lines
        # can be nested under it as a sub-dict.
        prevkey = pref_k = ''
        current_host = None

        # needed to possibly set ansible_host
        netinfo = self.get_option('network_info_path')

        for line in source_data:
            line = to_text(line)
            if ':' not in line:
                continue
            try:
                k, v = line.split(':', 1)
            except Exception:
                # skip non splitable
                continue

            if k.strip() == '':
                # skip empty
                continue

            v = v.strip()
            # found host
            if k.startswith('Name') and ',' not in v:  # some setting strings appear in Name
                current_host = v
                if current_host not in hostvars:
                    hostvars[current_host] = {}
                    self.inventory.add_host(current_host)

                # try to get network info
                netdata = self._query_vbox_data(current_host, netinfo)
                if netdata:
                    self.inventory.set_variable(current_host, 'ansible_host', netdata)

            # found groups
            elif k == 'Groups':
                for group in v.split('/'):
                    if group:
                        group = self.inventory.add_group(group)
                        self.inventory.add_child(group, current_host)
                        if group not in cacheable_results:
                            cacheable_results[group] = {'hosts': []}
                        cacheable_results[group]['hosts'].append(current_host)
                continue

            else:
                # found vars, accumulate in hostvars for clean inventory set
                pref_k = 'vbox_' + k.strip().replace(' ', '_')
                if k.startswith(' '):
                    # Indented line: continuation of the previous key.
                    # NOTE(review): assumes a Name line was seen before any var
                    # line — current_host is None otherwise; confirm with real
                    # VBoxManage output before relying on this.
                    if prevkey not in hostvars[current_host]:
                        hostvars[current_host][prevkey] = {}
                    hostvars[current_host][prevkey][pref_k] = v
                else:
                    if v != '':
                        hostvars[current_host][pref_k] = v
                if self._ungrouped_host(current_host, cacheable_results):
                    if 'ungrouped' not in cacheable_results:
                        cacheable_results['ungrouped'] = {'hosts': []}
                    cacheable_results['ungrouped']['hosts'].append(current_host)
                prevkey = pref_k

        self._set_variables(hostvars)
        for host in hostvars:
            h = self.inventory.get_host(host)
            cacheable_results['_meta']['hostvars'][h.name] = h.vars

        return cacheable_results

    def _ungrouped_host(self, host, inventory):
        """Return True when `host` appears in no group's host list of the
        (possibly nested) `inventory` dict."""
        def find_host(host, inventory):
            for k, v in inventory.items():
                if k == '_meta':
                    continue
                if isinstance(v, dict):
                    yield self._ungrouped_host(host, v)
                elif isinstance(v, list):
                    yield host not in v
            yield True

        return all([found_host for found_host in find_host(host, inventory)])

    def verify_file(self, path):
        """Accept only config files with the documented vbox/virtualbox names."""
        valid = False
        if super(InventoryModule, self).verify_file(path):
            if path.endswith(('virtualbox.yaml', 'virtualbox.yml', 'vbox.yaml', 'vbox.yml')):
                valid = True
        return valid

    def parse(self, inventory, loader, path, cache=True):
        """Entry point: read config, consult the cache, otherwise run
        `VBoxManage list -l` and populate the inventory from its output."""
        try:
            self._vbox_path = get_bin_path(self.VBOX, True)
        except ValueError as e:
            raise AnsibleParserError(e)

        super(InventoryModule, self).parse(inventory, loader, path)

        cache_key = self.get_cache_key(path)

        config_data = self._read_config_data(path)

        # set _options from config data
        self._consume_options(config_data)

        source_data = None
        if cache:
            # The caller-level `cache` flag is gated by the plugin's own option.
            cache = self.get_option('cache')

        update_cache = False
        if cache:
            try:
                source_data = self._cache[cache_key]
            except KeyError:
                update_cache = True

        if not source_data:
            b_pwfile = to_bytes(self.get_option('settings_password_file'), errors='surrogate_or_strict', nonstring='passthru')
            running = self.get_option('running_only')

            # start getting data
            cmd = [self._vbox_path, b'list', b'-l']
            if running:
                cmd.append(b'runningvms')
            else:
                cmd.append(b'vms')

            if b_pwfile and os.path.exists(b_pwfile):
                cmd.append(b'--settingspwfile')
                cmd.append(b_pwfile)

            try:
                p = Popen(cmd, stdout=PIPE)
            except Exception as e:
                raise AnsibleParserError(to_native(e))

            source_data = p.stdout.read().splitlines()

        using_current_cache = cache and not update_cache
        cacheable_results = self._populate_from_source(source_data, using_current_cache)

        if update_cache:
            self._cache[cache_key] = cacheable_results
|
i8run/BigDL-1
|
refs/heads/master
|
pyspark/test/bigdl/keras/test_keras_api.py
|
6
|
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from test.bigdl.test_utils import BigDLTestCase
import bigdl.nn.keras.layer as BLayer
import keras.layers as KLayer
import keras.backend as K
from bigdl.keras.converter import WeightsConverter
from bigdl.dataset.dataset import *
from bigdl.nn.keras.topology import Model as BModel
from bigdl.nn.keras.topology import Sequential as BSequential
from keras.engine import merge as kmerge, Model as KModel
from keras.models import Sequential as KSequential
# Fix the NumPy RNG so the randomly generated inputs/weights below are
# deterministic across test runs.
np.random.seed(1337)  # for reproducibility
class TestKerasAPI(BigDLTestCase):
    """
    Compares BigDL's Keras-style layers/models against the reference Keras
    implementations: each test builds the same layer in both frameworks and
    delegates numeric comparison to BigDLTestCase.compare_newapi.
    """

    def test_embedding(self):
        input_data = np.random.randint(1000, size=(32, 10))
        blayer = BLayer.Embedding(1000, 64, input_shape=(10, ))
        klayer = KLayer.Embedding(1000, 64, input_length=10)
        self.compare_newapi(klayer, blayer, input_data,
                            WeightsConverter.convert_embedding)

    def test_batchnormalization(self):
        # Theano ordering: channels-first input.
        K.set_image_dim_ordering("th")
        input_data = np.random.random_sample([2, 5, 32, 32])
        blayer = BLayer.BatchNormalization(axis=1, input_shape=(5, 32, 32))
        klayer = KLayer.BatchNormalization(axis=1, input_shape=(5, 32, 32))
        self.compare_newapi(klayer, blayer, input_data,
                            WeightsConverter.convert_batchnormalization)
        # TensorFlow ordering: channels-last input.
        K.set_image_dim_ordering("tf")
        input_data2 = np.random.random_sample([2, 32, 32, 4])
        blayer = BLayer.BatchNormalization(axis=-1, dim_ordering="tf", input_shape=(32, 32, 4))
        klayer = KLayer.BatchNormalization(axis=-1, input_shape=(32, 32, 4))
        self.compare_newapi(klayer, blayer, input_data2,
                            WeightsConverter.convert_batchnormalization)

    def test_merge_sum(self):
        b1 = BLayer.InputLayer(input_shape=(3, 5))
        b2 = BLayer.InputLayer(input_shape=(3, 5))
        blayer = BLayer.Merge(layers=[b1, b2], mode="sum")
        k1 = KLayer.InputLayer(input_shape=(3, 5))
        k2 = KLayer.InputLayer(input_shape=(3, 5))
        klayer = KLayer.Merge(layers=[k1, k2], mode="sum")
        input_data = [np.random.random([2, 3, 5]), np.random.random([2, 3, 5])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_merge_mul(self):
        b1 = BLayer.InputLayer(input_shape=(3, 5))
        b2 = BLayer.InputLayer(input_shape=(3, 5))
        blayer = BLayer.Merge(layers=[b1, b2], mode="mul")
        k1 = KLayer.InputLayer(input_shape=(3, 5))
        k2 = KLayer.InputLayer(input_shape=(3, 5))
        klayer = KLayer.Merge(layers=[k1, k2], mode="mul")
        input_data = [np.random.random([2, 3, 5]), np.random.random([2, 3, 5])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_merge_ave(self):
        b1 = BLayer.InputLayer(input_shape=(2, 5, 8))
        b2 = BLayer.InputLayer(input_shape=(2, 5, 8))
        blayer = BLayer.Merge(layers=[b1, b2], mode="ave")
        k1 = KLayer.InputLayer(input_shape=(2, 5, 8))
        k2 = KLayer.InputLayer(input_shape=(2, 5, 8))
        klayer = KLayer.Merge(layers=[k1, k2], mode="ave")
        input_data = [np.random.random([3, 2, 5, 8]), np.random.random([3, 2, 5, 8])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_merge_max(self):
        b1 = BLayer.InputLayer(input_shape=(2, 5, 8))
        b2 = BLayer.InputLayer(input_shape=(2, 5, 8))
        blayer = BLayer.Merge(layers=[b1, b2], mode="max")
        k1 = KLayer.InputLayer(input_shape=(2, 5, 8))
        k2 = KLayer.InputLayer(input_shape=(2, 5, 8))
        klayer = KLayer.Merge(layers=[k1, k2], mode="max")
        input_data = [np.random.random([3, 2, 5, 8]), np.random.random([3, 2, 5, 8])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_merge_concat(self):
        # Concatenation along the last axis: shapes differ only there (11 vs 8).
        b1 = BLayer.InputLayer(input_shape=(2, 5, 11))
        b2 = BLayer.InputLayer(input_shape=(2, 5, 8))
        blayer = BLayer.Merge(layers=[b1, b2], mode="concat")
        k1 = KLayer.InputLayer(input_shape=(2, 5, 11))
        k2 = KLayer.InputLayer(input_shape=(2, 5, 8))
        klayer = KLayer.Merge(layers=[k1, k2], mode="concat")
        input_data = [np.random.random([3, 2, 5, 11]), np.random.random([3, 2, 5, 8])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_merge_dot(self):
        b1 = BLayer.InputLayer(input_shape=(4, ))
        b2 = BLayer.InputLayer(input_shape=(4, ))
        blayer = BLayer.Merge(layers=[b1, b2], mode="dot")
        k1 = KLayer.InputLayer(input_shape=(4, ))
        k2 = KLayer.InputLayer(input_shape=(4, ))
        klayer = KLayer.Merge(layers=[k1, k2], mode="dot")
        input_data = [np.random.random([2, 4]), np.random.random([2, 4])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_merge_cos(self):
        b1 = BLayer.InputLayer(input_shape=(3, ))
        b2 = BLayer.InputLayer(input_shape=(3, ))
        blayer = BLayer.Merge(layers=[b1, b2], mode="cos")
        k1 = KLayer.InputLayer(input_shape=(3, ))
        k2 = KLayer.InputLayer(input_shape=(3, ))
        klayer = KLayer.Merge(layers=[k1, k2], mode="cos")
        input_data = [np.random.random([2, 3]), np.random.random([2, 3])]
        self.compare_newapi(klayer, blayer, input_data)

    def test_lenet_shape(self):
        from bigdl.examples.lenet.lenet import build_model
        model = build_model(10)
        # First dim is the batch dimension, hence the [1:] slices.
        input_shape = model.get_input_shape()
        np.testing.assert_allclose((28, 28, 1), input_shape[1:])
        output_shape = model.get_output_shape()
        np.testing.assert_allclose((10, ), output_shape[1:])

    def test_graph(self):
        x1 = BLayer.Input(shape=(8, ))
        x2 = BLayer.Input(shape=(6, ))
        y1 = BLayer.Dense(10)(x1)
        y2 = BLayer.Dense(10)(x2)
        model = BModel([x1, x2], [y1, y2])
        input_shapes = model.get_input_shape()
        output_shapes = model.get_output_shape()
        np.testing.assert_allclose((8, ), input_shapes[0][1:])
        np.testing.assert_allclose((6, ), input_shapes[1][1:])
        np.testing.assert_allclose((10, ), output_shapes[0][1:])
        np.testing.assert_allclose((10, ), output_shapes[1][1:])

    def test_train(self):
        # Smoke test of the full fit/evaluate/predict cycle on random data.
        x = np.random.random([32, 10])
        y = np.random.random([32, ])
        model = BSequential()
        model.add(BLayer.Dense(5, input_shape=(10, )))
        model.compile(optimizer="sgd", loss="mse", metrics=["accuracy"])
        model.fit(x, y, batch_size=8, nb_epoch=2, validation_data=(x, y))
        model.evaluate(x, y, batch_size=8)
        model.predict(x)

    def test_train_dataset(self):
        # Same smoke test, but feeding an ImageFrame-backed DataSet pipeline.
        images = []
        labels = []
        for i in range(0, 8):
            features = np.random.uniform(0, 1, (200, 200, 3))
            label = np.array([2])
            images.append(features)
            labels.append(label)
        image_frame = DistributedImageFrame(self.sc.parallelize(images),
                                            self.sc.parallelize(labels))

        transformer = Pipeline([BytesToMat(), Resize(256, 256), CenterCrop(224, 224),
                                ChannelNormalize(0.485, 0.456, 0.406, 0.229, 0.224, 0.225),
                                MatToTensor(), ImageFrameToSample(target_keys=['label'])])
        data_set = DataSet.image_frame(image_frame).transform(transformer)

        model = BSequential()
        model.add(BLayer.Convolution2D(1, 5, 5, input_shape=(3, 224, 224)))
        model.add(BLayer.Reshape((1*220*220, )))
        model.add(BLayer.Dense(20, activation="softmax"))
        model.compile(optimizer="sgd", loss="sparse_categorical_crossentropy", metrics=["accuracy"])
        model.fit(data_set, batch_size=8, nb_epoch=2, validation_data=data_set)

    def convert_two_dense_model(self, kmodel, weights):
        # Reorders the two Dense layers' weights (and transposes the kernels)
        # from Keras layout to the order BigDL expects.
        return [weights[2].T, weights[3], weights[0].T, weights[1]]

    def test_merge_method_sum(self):
        bx1 = BLayer.Input(shape=(8, ))
        bx2 = BLayer.Input(shape=(6, ))
        by1 = BLayer.Dense(10)(bx1)
        by2 = BLayer.Dense(10)(bx2)
        bz = BLayer.merge([by1, by2], mode="sum")
        bmodel = BModel([bx1, bx2], bz, name="graph1")

        kx1 = KLayer.Input(shape=(8, ))
        kx2 = KLayer.Input(shape=(6, ))
        ky1 = KLayer.Dense(10)(kx1)
        ky2 = KLayer.Dense(10)(kx2)
        kz = kmerge([ky1, ky2], mode="sum")
        kmodel = KModel([kx1, kx2], kz)

        input_data = [np.random.random([2, 8]), np.random.random([2, 6])]
        self.compare_newapi(kmodel, bmodel, input_data, self.convert_two_dense_model)

    def test_merge_method_model_concat(self):
        bx1 = BLayer.Input(shape=(4, ))
        bx2 = BLayer.Input(shape=(5, ))
        by1 = BLayer.Dense(6, activation="sigmoid")(bx1)
        bbranch1 = BModel(bx1, by1)(bx1)
        bbranch2 = BLayer.Dense(8)(bx2)
        bz = BLayer.merge([bbranch1, bbranch2], mode="concat")
        bmodel = BModel([bx1, bx2], bz)

        kx1 = KLayer.Input(shape=(4, ))
        kx2 = KLayer.Input(shape=(5, ))
        ky1 = KLayer.Dense(6, activation="sigmoid")(kx1)
        kbranch1 = KModel(kx1, ky1)(kx1)
        kbranch2 = KLayer.Dense(8)(kx2)
        kz = KLayer.merge([kbranch1, kbranch2], mode="concat")
        kmodel = KModel([kx1, kx2], kz)

        input_data = [np.random.random([2, 4]), np.random.random([2, 5])]
        self.compare_newapi(kmodel, bmodel, input_data, self.convert_two_dense_model)

    def test_merge_method_seq_concat(self):
        bx1 = BLayer.Input(shape=(10, ))
        bx2 = BLayer.Input(shape=(10, ))
        by1 = BLayer.Dense(12, activation="sigmoid")(bx1)
        bbranch1_node = BModel(bx1, by1)(bx1)
        bbranch2 = BSequential()
        bbranch2.add(BLayer.Dense(12, input_dim=10))
        bbranch2_node = bbranch2(bx2)
        bz = BLayer.merge([bbranch1_node, bbranch2_node], mode="concat")
        bmodel = BModel([bx1, bx2], bz)

        kx1 = KLayer.Input(shape=(10, ))
        kx2 = KLayer.Input(shape=(10, ))
        ky1 = KLayer.Dense(12, activation="sigmoid")(kx1)
        kbranch1_node = KModel(kx1, ky1)(kx1)
        kbranch2 = KSequential()
        kbranch2.add(KLayer.Dense(12, input_dim=10))
        kbranch2_node = kbranch2(kx2)
        kz = KLayer.merge([kbranch1_node, kbranch2_node], mode="concat")
        kmodel = KModel([kx1, kx2], kz)

        input_data = [np.random.random([2, 10]), np.random.random([2, 10])]
        self.compare_newapi(kmodel, bmodel, input_data, self.convert_two_dense_model)
# Allow running this test module directly (outside a pytest invocation).
if __name__ == "__main__":
    pytest.main([__file__])
|
komsas/OpenUpgrade
|
refs/heads/master
|
addons/pad/py_etherpad/__init__.py
|
505
|
"""Module to talk to EtherpadLite API."""
import json
import urllib
import urllib2
class EtherpadLiteClient:
    """Client to talk to EtherpadLite API.

    Python-2 era HTTP client (urllib/urllib2). Each public method maps 1:1 to
    an EtherpadLite HTTP API function; all requests go through call(), which
    appends the API key and decodes the JSON envelope via handleResult().
    """

    API_VERSION = 1  # TODO probably 1.1 sometime soon

    # Result codes defined by the EtherpadLite API envelope.
    CODE_OK = 0
    CODE_INVALID_PARAMETERS = 1
    CODE_INTERNAL_ERROR = 2
    CODE_INVALID_FUNCTION = 3
    CODE_INVALID_API_KEY = 4
    # Seconds before the HTTP request is aborted.
    TIMEOUT = 20

    # Class-level defaults; overridden per-instance in __init__ when provided.
    apiKey = ""
    baseUrl = "http://localhost:9001/api"

    def __init__(self, apiKey=None, baseUrl=None):
        if apiKey:
            self.apiKey = apiKey

        if baseUrl:
            self.baseUrl = baseUrl

    def call(self, function, arguments=None):
        """Create a dictionary of all parameters, perform the HTTP request and
        return the decoded 'data' payload (via handleResult)."""
        url = '%s/%d/%s' % (self.baseUrl, self.API_VERSION, function)

        params = arguments or {}
        params.update({'apikey': self.apiKey})
        data = urllib.urlencode(params, True)

        try:
            opener = urllib2.build_opener()
            request = urllib2.Request(url=url, data=data)
            response = opener.open(request, timeout=self.TIMEOUT)
            result = response.read()
            response.close()
        except urllib2.HTTPError:
            # NOTE(review): re-raised unchanged — the handler only documents
            # that HTTP errors propagate to the caller.
            raise

        result = json.loads(result)
        if result is None:
            # The server answered with literal JSON "null".
            raise ValueError("JSON response could not be decoded")

        return self.handleResult(result)

    def handleResult(self, result):
        """Handle API call result: unwrap the {code, message, data} envelope,
        raising on any non-OK code."""
        if 'code' not in result:
            raise Exception("API response has no code")
        if 'message' not in result:
            raise Exception("API response has no message")

        if 'data' not in result:
            result['data'] = None

        if result['code'] == self.CODE_OK:
            return result['data']
        elif result['code'] == self.CODE_INVALID_PARAMETERS or result['code'] == self.CODE_INVALID_API_KEY:
            raise ValueError(result['message'])
        elif result['code'] == self.CODE_INTERNAL_ERROR:
            raise Exception(result['message'])
        elif result['code'] == self.CODE_INVALID_FUNCTION:
            raise Exception(result['message'])
        else:
            raise Exception("An unexpected error occurred whilst handling the response")

    # GROUPS
    # Pads can belong to a group. There will always be public pads that do not belong to a group (or we give this group the id 0)

    def createGroup(self):
        """creates a new group"""
        return self.call("createGroup")

    def createGroupIfNotExistsFor(self, groupMapper):
        """this functions helps you to map your application group ids to etherpad lite group ids"""
        return self.call("createGroupIfNotExistsFor", {
            "groupMapper": groupMapper
        })

    def deleteGroup(self, groupID):
        """deletes a group"""
        return self.call("deleteGroup", {
            "groupID": groupID
        })

    def listPads(self, groupID):
        """returns all pads of this group"""
        return self.call("listPads", {
            "groupID": groupID
        })

    def createGroupPad(self, groupID, padName, text=''):
        """creates a new pad in this group"""
        params = {
            "groupID": groupID,
            "padName": padName,
        }
        # `text` is optional server-side; only send it when non-empty.
        if text:
            params['text'] = text
        return self.call("createGroupPad", params)

    # AUTHORS
    # Theses authors are bind to the attributes the users choose (color and name).

    def createAuthor(self, name=''):
        """creates a new author"""
        params = {}
        if name:
            params['name'] = name
        return self.call("createAuthor", params)

    def createAuthorIfNotExistsFor(self, authorMapper, name=''):
        """this functions helps you to map your application author ids to etherpad lite author ids"""
        params = {
            'authorMapper': authorMapper
        }
        if name:
            params['name'] = name
        return self.call("createAuthorIfNotExistsFor", params)

    # SESSIONS
    # Sessions can be created between a group and a author. This allows
    # an author to access more than one group. The sessionID will be set as
    # a cookie to the client and is valid until a certain date.

    def createSession(self, groupID, authorID, validUntil):
        """creates a new session"""
        return self.call("createSession", {
            "groupID": groupID,
            "authorID": authorID,
            "validUntil": validUntil
        })

    def deleteSession(self, sessionID):
        """deletes a session"""
        return self.call("deleteSession", {
            "sessionID": sessionID
        })

    def getSessionInfo(self, sessionID):
        """returns informations about a session"""
        return self.call("getSessionInfo", {
            "sessionID": sessionID
        })

    def listSessionsOfGroup(self, groupID):
        """returns all sessions of a group"""
        return self.call("listSessionsOfGroup", {
            "groupID": groupID
        })

    def listSessionsOfAuthor(self, authorID):
        """returns all sessions of an author"""
        return self.call("listSessionsOfAuthor", {
            "authorID": authorID
        })

    # PAD CONTENT
    # Pad content can be updated and retrieved through the API

    def getText(self, padID, rev=None):
        """returns the text of a pad"""
        params = {"padID": padID}
        if rev is not None:
            params['rev'] = rev
        return self.call("getText", params)

    # introduced with pull request merge
    def getHtml(self, padID, rev=None):
        """returns the html of a pad"""
        params = {"padID": padID}
        if rev is not None:
            params['rev'] = rev
        return self.call("getHTML", params)

    def setText(self, padID, text):
        """sets the text of a pad"""
        return self.call("setText", {
            "padID": padID,
            "text": text
        })

    def setHtml(self, padID, html):
        """sets the text of a pad from html"""
        return self.call("setHTML", {
            "padID": padID,
            "html": html
        })

    # PAD
    # Group pads are normal pads, but with the name schema
    # GROUPID$PADNAME. A security manager controls access of them and its
    # forbidden for normal pads to include a in the name.

    def createPad(self, padID, text=''):
        """creates a new pad"""
        params = {
            "padID": padID,
        }
        if text:
            params['text'] = text
        return self.call("createPad", params)

    def getRevisionsCount(self, padID):
        """returns the number of revisions of this pad"""
        return self.call("getRevisionsCount", {
            "padID": padID
        })

    def deletePad(self, padID):
        """deletes a pad"""
        return self.call("deletePad", {
            "padID": padID
        })

    def getReadOnlyID(self, padID):
        """returns the read only link of a pad"""
        return self.call("getReadOnlyID", {
            "padID": padID
        })

    def setPublicStatus(self, padID, publicStatus):
        """sets a boolean for the public status of a pad"""
        return self.call("setPublicStatus", {
            "padID": padID,
            "publicStatus": publicStatus
        })

    def getPublicStatus(self, padID):
        """return true of false"""
        return self.call("getPublicStatus", {
            "padID": padID
        })

    def setPassword(self, padID, password):
        """returns ok or a error message"""
        return self.call("setPassword", {
            "padID": padID,
            "password": password
        })

    def isPasswordProtected(self, padID):
        """returns true or false"""
        return self.call("isPasswordProtected", {
            "padID": padID
        })
|
ordinarybill/react-native
|
refs/heads/master
|
JSCLegacyProfiler/trace_data.py
|
375
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
import unittest
"""
# _-----=> irqs-off
# / _----=> need-resched
# | / _---=> hardirq/softirq
# || / _--=> preempt-depth
# ||| / delay
# TASK-PID CPU# |||| TIMESTAMP FUNCTION
# | | | |||| | |
<idle>-0 [001] ...2 3269.291072: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120
"""
TRACE_LINE_PATTERN = re.compile(
r'^\s*(?P<task>.+)-(?P<pid>\d+)\s+(?:\((?P<tgid>.+)\)\s+)?\[(?P<cpu>\d+)\]\s+(?:(?P<flags>\S{4})\s+)?(?P<timestamp>[0-9.]+):\s+(?P<function>.+)$')
"""
Example lines from custom app traces:
0: B|27295|providerRemove
0: E
tracing_mark_write: S|27311|NNFColdStart<D-7744962>|1112249168
"""
APP_TRACE_LINE_PATTERN = re.compile(
r'^(?P<type>.+?): (?P<args>.+)$')
"""
Example section names:
NNFColdStart
NNFColdStart<0><T7744962>
NNFColdStart<X>
NNFColdStart<T7744962>
"""
DECORATED_SECTION_NAME_PATTERN = re.compile(r'^(?P<section_name>.*?)(?:<0>)?(?:<(?P<command>.)(?P<argument>.*?)>)?$')
SYSTRACE_LINE_TYPES = set(['0', 'tracing_mark_write'])
class TraceLine(object):
    """One parsed line of a kernel/app trace capture.

    A line can be canceled, in which case it stringifies to the empty string
    (i.e. it is dropped when the trace is written back out).
    """

    def __init__(self, task, pid, tgid, cpu, flags, timestamp, function):
        self.canceled = False
        self.task = task
        self.pid = pid
        self.tgid = tgid
        self.cpu = cpu
        self.flags = flags
        self.timestamp = timestamp
        self.function = function

    @property
    def is_app_trace_line(self):
        """True when the function field holds a parsed app-trace payload."""
        return isinstance(self.function, AppTraceFunction)

    def cancel(self):
        """Mark this line as dropped; str() will then return ''."""
        self.canceled = True

    def __str__(self):
        if self.canceled:
            return ""
        # Pick the output template matching the optional fields we have.
        if self.tgid:
            template = "{task:>16s}-{pid:<5d} ({tgid:5s}) [{cpu:03d}] {flags:4s} {timestamp:12f}: {function}\n"
        elif self.flags:
            template = "{task:>16s}-{pid:<5d} [{cpu:03d}] {flags:4s} {timestamp:12f}: {function}\n"
        else:
            template = "{task:>16s}-{pid:<5d} [{cpu:03d}] {timestamp:12.6f}: {function}\n"
        return template.format(**vars(self))
class AppTraceFunction(object):
    """Parsed app-trace payload of the form "type: op|pid|section|..."."""

    def __init__(self, type, args):
        self.type = type
        self.args = args
        self.operation = args[0]
        self.cookie = None
        if len(args) >= 2 and args[1]:
            self.pid = int(args[1])
        if len(args) < 3:
            self._section_name = None
            self.command = None
            self.argument = None
        else:
            # Strip any <command+argument> decoration from the section name
            # and write the cleaned name back into the args list.
            cleaned_name, self.command, self.argument = _parse_section_name(args[2])
            self._section_name = cleaned_name
            args[2] = cleaned_name

    @property
    def section_name(self):
        return self._section_name

    @section_name.setter
    def section_name(self, value):
        # Keep the args list in sync so __str__ reflects the new name.
        self._section_name = value
        self.args[2] = value

    def __str__(self):
        joined = '|'.join(self.args)
        return "{type}: {args}".format(type=self.type, args=joined)
class AsyncTraceFunction(AppTraceFunction):
    """App trace event for async operations (types S/T/F), which carry a
    numeric cookie in the fourth '|'-separated field."""
    def __init__(self, type, args):
        super(AsyncTraceFunction, self).__init__(type, args)
        # Async events always have a cookie; raises if args has < 4 fields.
        self.cookie = int(args[3])
# Maps the operation field (args[0]) to the parser class used for it.
# S(tart)/T(race)/F(inish) async events carry a cookie; everything else
# falls back to the plain AppTraceFunction.
TRACE_TYPE_MAP = {
    'S': AsyncTraceFunction,
    'T': AsyncTraceFunction,
    'F': AsyncTraceFunction,
}
def parse_line(line):
    """Parse one ftrace text line into a TraceLine, or None if it does not
    match the expected format."""
    match = TRACE_LINE_PATTERN.match(line.strip())
    if match is None:
        return None
    fields = match.groupdict()
    function = fields["function"]
    app_trace = _parse_function(function)
    if app_trace:
        function = app_trace
    return TraceLine(
        fields["task"],
        int(fields["pid"]),
        fields["tgid"],
        int(fields["cpu"]),
        fields["flags"],
        float(fields["timestamp"]),
        function,
    )
def parse_dextr_line(line):
    """Convert one Dextr JSON-style trace record (dict) into a TraceLine."""
    function = AppTraceFunction("DextrTrace",
                                [line["ph"], line["pid"], line["name"]])
    # NOTE(review): the record's "tid" is stored in the tgid slot and cpu/flags
    # are absent in this format -- confirm this mapping is intended.
    return TraceLine(
        line["name"],   # task
        line["pid"],
        line["tid"],    # tgid
        None,           # cpu
        None,           # flags
        line["ts"],
        function,
    )
def _parse_function(function):
    """Parse a trace line's function field into an AppTraceFunction.

    Returns None when the field is not an app-level trace event (e.g. a
    kernel event such as sched_switch).
    """
    line_match = APP_TRACE_LINE_PATTERN.match(function)
    if not line_match:
        return None
    type = line_match.group("type")
    if type not in SYSTRACE_LINE_TYPES:
        return None
    args = line_match.group("args").split('|')
    if len(args) == 1 and len(args[0]) == 0:
        # Empty argument payload. Previously this set args to None and then
        # crashed on args[0] below; treat it as a non-app line instead.
        return None
    constructor = TRACE_TYPE_MAP.get(args[0], AppTraceFunction)
    return constructor(type, args)
def _parse_section_name(section_name):
    """Split a decorated section name into (name, command, argument).

    A None input passes through as (None, None, None); undecorated names
    yield None for command and argument.
    """
    if section_name is None:
        return section_name, None, None
    match = DECORATED_SECTION_NAME_PATTERN.match(section_name)
    return (match.group("section_name"),
            match.group("command"),
            match.group("argument"))
def _format_section_name(section_name, command, argument):
if not command:
return section_name
return "{section_name}<{command}{argument}>".format(**vars())
class RoundTripFormattingTests(unittest.TestCase):
    """Round-trip tests: parsing a line/function/section name and formatting
    it back must reproduce the original text byte-for-byte."""
    def testPlainSectionName(self):
        section_name = "SectionName12345-5562342fas"
        self.assertEqual(section_name, _format_section_name(*_parse_section_name(section_name)))
    def testDecoratedSectionName(self):
        section_name = "SectionName12345-5562342fas<D-123456>"
        self.assertEqual(section_name, _format_section_name(*_parse_section_name(section_name)))
    def testSimpleFunction(self):
        function = "0: E"
        self.assertEqual(function, str(_parse_function(function)))
    def testFunctionWithoutCookie(self):
        function = "0: B|27295|providerRemove"
        self.assertEqual(function, str(_parse_function(function)))
    def testFunctionWithCookie(self):
        # 'S' operation routes through AsyncTraceFunction (4th field = cookie).
        function = "0: S|27311|NNFColdStart|1112249168"
        self.assertEqual(function, str(_parse_function(function)))
    def testFunctionWithCookieAndArgs(self):
        function = "0: T|27311|NNFColdStart|1122|Start"
        self.assertEqual(function, str(_parse_function(function)))
    def testFunctionWithArgsButNoPid(self):
        # Empty pid field: AppTraceFunction must tolerate args[1] == "".
        function = "0: E|||foo=bar"
        self.assertEqual(function, str(_parse_function(function)))
    def testKitKatFunction(self):
        # KitKat emits "tracing_mark_write:" instead of the "0:" prefix.
        function = "tracing_mark_write: B|14127|Looper.dispatchMessage|arg=>>>>> Dispatching to Handler (android.os.Handler) {422ae980} null: 0|Java"
        self.assertEqual(function, str(_parse_function(function)))
    def testNonSysTraceFunctionIgnored(self):
        # Kernel events are not app traces and must parse to None.
        function = "sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120"
        self.assertEqual(None, _parse_function(function))
    def testLineWithFlagsAndTGID(self):
        line = " <idle>-0 ( 550) [000] d..2 7953.258473: cpu_idle: state=1 cpu_id=0\n"
        self.assertEqual(line, str(parse_line(line)))
    def testLineWithFlagsAndNoTGID(self):
        # "(-----)" is how the kernel prints an unknown tgid.
        line = " <idle>-0 (-----) [000] d..2 7953.258473: cpu_idle: state=1 cpu_id=0\n"
        self.assertEqual(line, str(parse_line(line)))
    def testLineWithFlags(self):
        line = " <idle>-0 [001] ...2 3269.291072: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120\n"
        self.assertEqual(line, str(parse_line(line)))
    def testLineWithoutFlags(self):
        line = " <idle>-0 [001] 3269.291072: sched_switch: prev_comm=swapper/1 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmcqd/0 next_pid=120 next_prio=120\n"
        self.assertEqual(line, str(parse_line(line)))
|
mdsafwan/Deal-My-Stuff
|
refs/heads/master
|
Lib/site-packages/django/conf/locale/zh_Hans/formats.py
|
634
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y年n月j日'  # 2016年9月5日
TIME_FORMAT = 'H:i'  # 20:45
DATETIME_FORMAT = 'Y年n月j日 H:i'  # 2016年9月5日 20:45
YEAR_MONTH_FORMAT = 'Y年n月'  # 2016年9月
MONTH_DAY_FORMAT = 'm月j日'  # 9月5日
SHORT_DATE_FORMAT = 'Y年n月j日'  # 2016年9月5日
SHORT_DATETIME_FORMAT = 'Y年n月j日 H:i'  # 2016年9月5日 20:45
FIRST_DAY_OF_WEEK = 1  # 星期一 (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# NOTE(review): '%n' and '%j' are not month/day-of-month directives in
# strptime ('%j' is day-of-year), so the 年/月/日 input formats below
# presumably never match -- confirm against Django's date parsing.
DATE_INPUT_FORMATS = (
    '%Y/%m/%d',  # '2016/09/05'
    '%Y-%m-%d',  # '2016-09-05'
    '%Y年%n月%j日',  # '2016年9月5日'
)
TIME_INPUT_FORMATS = (
    '%H:%M',  # '20:45'
    '%H:%M:%S',  # '20:45:29'
    '%H:%M:%S.%f',  # '20:45:29.000200'
)
DATETIME_INPUT_FORMATS = (
    '%Y/%m/%d %H:%M',  # '2016/09/05 20:45'
    '%Y-%m-%d %H:%M',  # '2016-09-05 20:45'
    '%Y年%n月%j日 %H:%M',  # '2016年9月5日 14:45'
    '%Y/%m/%d %H:%M:%S',  # '2016/09/05 20:45:29'
    '%Y-%m-%d %H:%M:%S',  # '2016-09-05 20:45:29'
    '%Y年%n月%j日 %H:%M:%S',  # '2016年9月5日 20:45:29'
    '%Y/%m/%d %H:%M:%S.%f',  # '2016/09/05 20:45:29.000200'
    '%Y-%m-%d %H:%M:%S.%f',  # '2016-09-05 20:45:29.000200'
    # Fixed: the minutes directive was mistyped as '%n' ('%H:%n:%S.%f'),
    # inconsistent with every other time format in this module.
    '%Y年%n月%j日 %H:%M:%S.%f',  # '2016年9月5日 20:45:29.000200'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ''
NUMBER_GROUPING = 4
|
painterjd/python-marconiclient
|
refs/heads/master
|
marconiclient/auth.py
|
1
|
from exceptions import ClientException
from keystoneclient.v2_0 import client as ksclient
from keystoneclient import exceptions
def authenticate(auth_url, user, key, **kwargs):
    """Authenticates against the endpoint to use. The correct
    endpoint to use is looked up in the service catalog. The
    caller can override this lookup by passing the endpoint
    as a parameter.
    :param auth_url: The keystone auth endpoint to use
    :param user: The username to use for auth
    :param key: The apikey to use for authentiation
    :param endpoint: The Marconi endpoint to use. IOW, don't
        look up an endpoint in the service catalog, just use
        this one instead.
    :param tenant_name: The optional tenant-name to use
    :param tenant_id: The optional tenant ID to use
    :param cacert: The cacert PEM file to use
    :param service_type: The service type to look for in
        the service catalog
    :param endpoint_type: The endpoint type to reference in
        the service catalog
    :param region_name: The region to pass for authentication
    :returns: Tuple containing Marconi endpoint and token
    :raises: ClientException
    """
    insecure = kwargs.get('insecure', False)
    endpoint = kwargs.get('endpoint')
    tenant_name = kwargs.get('tenant_name')
    tenant_id = kwargs.get('tenant_id')
    cacert = kwargs.get('cacert')
    try:
        _ksclient = ksclient.Client(username=user,
                                    password=key,
                                    tenant_name=tenant_name,
                                    tenant_id=tenant_id,
                                    cacert=cacert,
                                    auth_url=auth_url,
                                    insecure=insecure)
    except exceptions.Unauthorized:
        # Keystone rejected the credentials themselves.
        raise ClientException('Unauthorized. Check username, password'
                              ' and tenant name/id')
    except exceptions.AuthorizationFailure as err:
        raise ClientException('Authorization Failure. %s' % err)
    if not endpoint:
        # The user did not pass in an endpoint, so we need to
        # look one up on their behalf in the service catalog
        # TODO(jdp): Ensure that this is the correct service_type field
        service_type = kwargs.get('service_type', 'queueing')
        endpoint_type = kwargs.get('endpoint_type', 'publicURL')
        region = kwargs.get('region_name')
        try:
            endpoint = _ksclient.service_catalog.url_for(
                attr='region',
                filter_value=region,
                service_type=service_type,
                endpoint_type=endpoint_type)
        except exceptions.EndpointNotFound:
            raise ClientException('Endpoint not found in service catalog')
    return (endpoint, _ksclient.auth_token)
|
bthirion/scikit-learn
|
refs/heads/master
|
sklearn/ensemble/gradient_boosting.py
|
2
|
"""Gradient Boosted Regression Trees
This module contains methods for fitting gradient boosted regression trees for
both classification and regression.
The module structure is the following:
- The ``BaseGradientBoosting`` base class implements a common ``fit`` method
for all the estimators in the module. Regression and classification
only differ in the concrete ``LossFunction`` used.
- ``GradientBoostingClassifier`` implements gradient boosting for
classification problems.
- ``GradientBoostingRegressor`` implements gradient boosting for
regression problems.
"""
# Authors: Peter Prettenhofer, Scott White, Gilles Louppe, Emanuele Olivetti,
# Arnaud Joly, Jacob Schreiber
# License: BSD 3 clause
from __future__ import print_function
from __future__ import division
from abc import ABCMeta
from abc import abstractmethod
from .base import BaseEnsemble
from ..base import BaseEstimator
from ..base import ClassifierMixin
from ..base import RegressorMixin
from ..externals import six
from ._gradient_boosting import predict_stages
from ._gradient_boosting import predict_stage
from ._gradient_boosting import _random_sample_mask
import numbers
import numpy as np
from scipy import stats
from scipy.misc import logsumexp
from scipy.sparse import csc_matrix
from scipy.sparse import csr_matrix
from scipy.sparse import issparse
from scipy.special import expit
from time import time
from ..tree.tree import DecisionTreeRegressor
from ..tree._tree import DTYPE
from ..tree._tree import TREE_LEAF
from ..utils import check_random_state
from ..utils import check_array
from ..utils import check_X_y
from ..utils import column_or_1d
from ..utils import check_consistent_length
from ..utils.fixes import bincount
from ..utils import deprecated
from ..utils.stats import _weighted_percentile
from ..utils.validation import check_is_fitted
from ..utils.multiclass import check_classification_targets
from ..exceptions import NotFittedError
class QuantileEstimator(object):
    """An estimator predicting the alpha-quantile of the training targets."""
    def __init__(self, alpha=0.9):
        if not 0 < alpha < 1.0:
            raise ValueError("`alpha` must be in (0, 1.0) but was %r" % alpha)
        self.alpha = alpha
    def fit(self, X, y, sample_weight=None):
        """Store the (optionally weighted) alpha-quantile of ``y``."""
        percentile = self.alpha * 100.0
        if sample_weight is None:
            self.quantile = stats.scoreatpercentile(y, percentile)
        else:
            self.quantile = _weighted_percentile(y, sample_weight, percentile)
    def predict(self, X):
        """Return a column vector filled with the fitted quantile."""
        check_is_fitted(self, 'quantile')
        return np.full((X.shape[0], 1), self.quantile, dtype=np.float64)
class MeanEstimator(object):
    """An estimator predicting the mean of the training targets."""
    def fit(self, X, y, sample_weight=None):
        """Store the (optionally weighted) mean of ``y``."""
        self.mean = (np.mean(y) if sample_weight is None
                     else np.average(y, weights=sample_weight))
    def predict(self, X):
        """Return a column vector filled with the fitted mean."""
        check_is_fitted(self, 'mean')
        return np.full((X.shape[0], 1), self.mean, dtype=np.float64)
class LogOddsEstimator(object):
    """An estimator predicting the log odds ratio."""
    scale = 1.0
    def fit(self, X, y, sample_weight=None):
        """Fit the prior as scale * log(pos / neg).

        Pre-condition: positives/negatives are encoded as 1/0 in ``y``.
        Raises ValueError when either class is absent.
        """
        if sample_weight is None:
            pos = np.sum(y)
            neg = y.shape[0] - pos
        else:
            pos = np.sum(sample_weight * y)
            neg = np.sum(sample_weight * (1 - y))
        if pos == 0 or neg == 0:
            raise ValueError('y contains non binary labels.')
        self.prior = self.scale * np.log(pos / neg)
    def predict(self, X):
        """Return a column vector filled with the fitted prior."""
        check_is_fitted(self, 'prior')
        return np.full((X.shape[0], 1), self.prior, dtype=np.float64)
class ScaledLogOddsEstimator(LogOddsEstimator):
    """Log odds ratio scaled by 0.5 -- for exponential loss. """
    # Only the class-level scale differs; fit/predict come from the base.
    scale = 0.5
class PriorProbabilityEstimator(object):
    """An estimator predicting the probability of each
    class in the training data.
    """
    def fit(self, X, y, sample_weight=None):
        """Store the (weighted) empirical class frequencies of ``y``."""
        if sample_weight is None:
            sample_weight = np.ones_like(y, dtype=np.float64)
        counts = bincount(y, weights=sample_weight)
        self.priors = counts / counts.sum()
    def predict(self, X):
        """Return one row of class priors per sample in ``X``."""
        check_is_fitted(self, 'priors')
        return np.tile(self.priors, (X.shape[0], 1))
class ZeroEstimator(object):
    """An estimator that simply predicts zero. """
    def fit(self, X, y, sample_weight=None):
        """Record only the output width: number of classes for integer
        (classification) targets, 1 otherwise."""
        if not np.issubdtype(y.dtype, int):
            # regression targets get a single output column
            self.n_classes = 1
            return
        n_unique = np.unique(y).shape[0]
        # binary classification also uses a single column
        self.n_classes = 1 if n_unique == 2 else n_unique
    def predict(self, X):
        """Return an all-zero (n_samples, n_classes) array."""
        check_is_fitted(self, 'n_classes')
        return np.zeros((X.shape[0], self.n_classes), dtype=np.float64)
class LossFunction(six.with_metaclass(ABCMeta, object)):
    """Abstract base class for various loss functions.
    Attributes
    ----------
    K : int
        The number of regression trees to be induced;
        1 for regression and binary classification;
        ``n_classes`` for multi-class classification.
    """
    is_multi_class = False
    def __init__(self, n_classes):
        self.K = n_classes
    def init_estimator(self):
        """Default ``init`` estimator for loss function. """
        raise NotImplementedError()
    @abstractmethod
    def __call__(self, y, pred, sample_weight=None):
        """Compute the loss of prediction ``pred`` and ``y``. """
    @abstractmethod
    def negative_gradient(self, y, y_pred, **kargs):
        """Compute the negative gradient.
        Parameters
        ---------
        y : np.ndarray, shape=(n,)
            The target labels.
        y_pred : np.ndarray, shape=(n,):
            The predictions.
        """
    def update_terminal_regions(self, tree, X, y, residual, y_pred,
                                sample_weight, sample_mask,
                                learning_rate=1.0, k=0):
        """Update the terminal regions (=leaves) of the given tree and
        updates the current predictions of the model. Traverses tree
        and invokes template method `_update_terminal_region`.
        Parameters
        ----------
        tree : tree.Tree
            The tree object.
        X : ndarray, shape=(n, m)
            The data array.
        y : ndarray, shape=(n,)
            The target labels.
        residual : ndarray, shape=(n,)
            The residuals (usually the negative gradient).
        y_pred : ndarray, shape=(n,)
            The predictions.
        sample_weight : ndarray, shape=(n,)
            The weight of each sample.
        sample_mask : ndarray, shape=(n,)
            The sample mask to be used.
        learning_rate : float, default=0.1
            learning rate shrinks the contribution of each tree by
             ``learning_rate``.
        k : int, default 0
            The index of the estimator being updated.
        """
        # compute leaf for each sample in ``X``.
        terminal_regions = tree.apply(X)
        # mask all which are not in sample mask.
        # Out-of-bag samples get region -1 so no leaf id can match them and
        # they are excluded from the per-leaf line search below.
        masked_terminal_regions = terminal_regions.copy()
        masked_terminal_regions[~sample_mask] = -1
        # update each leaf (= perform line search)
        for leaf in np.where(tree.children_left == TREE_LEAF)[0]:
            self._update_terminal_region(tree, masked_terminal_regions,
                                         leaf, X, y, residual,
                                         y_pred[:, k], sample_weight)
        # update predictions (both in-bag and out-of-bag)
        # Note: uses the unmasked regions here, so every sample's prediction
        # advances by its leaf value.
        y_pred[:, k] += (learning_rate
                         * tree.value[:, 0, 0].take(terminal_regions, axis=0))
    @abstractmethod
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        """Template method for updating terminal regions (=leaves). """
class RegressionLossFunction(six.with_metaclass(ABCMeta, LossFunction)):
    """Base class for regression loss functions. """
    def __init__(self, n_classes):
        # Regression always fits exactly one tree per stage (K == 1).
        if n_classes != 1:
            raise ValueError("``n_classes`` must be 1 for regression but "
                             "was %r" % n_classes)
        super(RegressionLossFunction, self).__init__(n_classes)
class LeastSquaresError(RegressionLossFunction):
    """Loss function for least squares (LS) estimation.
    Terminal regions need not to be updated for least squares. """
    def init_estimator(self):
        return MeanEstimator()
    def __call__(self, y, pred, sample_weight=None):
        """(Weighted) mean squared error between ``y`` and ``pred``."""
        squared = (y - pred.ravel()) ** 2.0
        if sample_weight is None:
            return np.mean(squared)
        return (1.0 / sample_weight.sum() *
                np.sum(sample_weight * squared))
    def negative_gradient(self, y, pred, **kargs):
        """For squared error the negative gradient is just the residual."""
        return y - pred.ravel()
    def update_terminal_regions(self, tree, X, y, residual, y_pred,
                                sample_weight, sample_mask,
                                learning_rate=1.0, k=0):
        """Least squares does not need to update terminal regions.
        But it has to update the predictions.
        """
        y_pred[:, k] += learning_rate * tree.predict(X).ravel()
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        pass
class LeastAbsoluteError(RegressionLossFunction):
    """Loss function for least absolute deviation (LAD) regression. """
    def init_estimator(self):
        # the unweighted median == 0.5-quantile
        return QuantileEstimator(alpha=0.5)
    def __call__(self, y, pred, sample_weight=None):
        """(Weighted) mean absolute error between ``y`` and ``pred``."""
        abs_diff = np.abs(y - pred.ravel())
        if sample_weight is None:
            return abs_diff.mean()
        return (1.0 / sample_weight.sum() *
                np.sum(sample_weight * abs_diff))
    def negative_gradient(self, y, pred, **kargs):
        """1.0 if y - pred > 0.0 else -1.0"""
        return 2.0 * (y - pred.ravel() > 0.0) - 1.0
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        """LAD updates terminal regions to median estimates. """
        region = np.where(terminal_regions == leaf)[0]
        region_weight = sample_weight.take(region, axis=0)
        diff = y.take(region, axis=0) - pred.take(region, axis=0)
        tree.value[leaf, 0, 0] = _weighted_percentile(diff, region_weight,
                                                      percentile=50)
class HuberLossFunction(RegressionLossFunction):
    """Huber loss function for robust regression.
    M-Regression proposed in Friedman 2001.
    References
    ----------
    J. Friedman, Greedy Function Approximation: A Gradient Boosting
    Machine, The Annals of Statistics, Vol. 29, No. 5, 2001.
    """
    def __init__(self, n_classes, alpha=0.9):
        super(HuberLossFunction, self).__init__(n_classes)
        # alpha: fraction of residuals treated as quadratic (inliers);
        # gamma: transition point between quadratic and linear regimes,
        # refreshed on every negative_gradient call.
        self.alpha = alpha
        self.gamma = None
    def init_estimator(self):
        return QuantileEstimator(alpha=0.5)
    def __call__(self, y, pred, sample_weight=None):
        # Quadratic loss inside |diff| <= gamma, linear outside. Reuses the
        # gamma from the last negative_gradient call when available.
        pred = pred.ravel()
        diff = y - pred
        gamma = self.gamma
        if gamma is None:
            if sample_weight is None:
                gamma = stats.scoreatpercentile(np.abs(diff), self.alpha * 100)
            else:
                gamma = _weighted_percentile(np.abs(diff), sample_weight, self.alpha * 100)
        gamma_mask = np.abs(diff) <= gamma
        if sample_weight is None:
            sq_loss = np.sum(0.5 * diff[gamma_mask] ** 2.0)
            lin_loss = np.sum(gamma * (np.abs(diff[~gamma_mask]) - gamma / 2.0))
            loss = (sq_loss + lin_loss) / y.shape[0]
        else:
            sq_loss = np.sum(0.5 * sample_weight[gamma_mask] * diff[gamma_mask] ** 2.0)
            lin_loss = np.sum(gamma * sample_weight[~gamma_mask] *
                              (np.abs(diff[~gamma_mask]) - gamma / 2.0))
            loss = (sq_loss + lin_loss) / sample_weight.sum()
        return loss
    def negative_gradient(self, y, pred, sample_weight=None, **kargs):
        # Residual for inliers, clipped to +/- gamma for outliers.
        # Side effect: stores the freshly computed gamma for __call__ and
        # _update_terminal_region.
        pred = pred.ravel()
        diff = y - pred
        if sample_weight is None:
            gamma = stats.scoreatpercentile(np.abs(diff), self.alpha * 100)
        else:
            gamma = _weighted_percentile(np.abs(diff), sample_weight, self.alpha * 100)
        gamma_mask = np.abs(diff) <= gamma
        residual = np.zeros((y.shape[0],), dtype=np.float64)
        residual[gamma_mask] = diff[gamma_mask]
        residual[~gamma_mask] = gamma * np.sign(diff[~gamma_mask])
        self.gamma = gamma
        return residual
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        # One-step approximation of the leaf-wise M-estimate (Friedman 2001,
        # eq. 22): weighted median plus trimmed mean of deviations.
        terminal_region = np.where(terminal_regions == leaf)[0]
        sample_weight = sample_weight.take(terminal_region, axis=0)
        gamma = self.gamma
        diff = (y.take(terminal_region, axis=0)
                - pred.take(terminal_region, axis=0))
        median = _weighted_percentile(diff, sample_weight, percentile=50)
        diff_minus_median = diff - median
        tree.value[leaf, 0] = median + np.mean(
            np.sign(diff_minus_median) *
            np.minimum(np.abs(diff_minus_median), gamma))
class QuantileLossFunction(RegressionLossFunction):
    """Loss function for quantile regression.
    Quantile regression allows to estimate the percentiles
    of the conditional distribution of the target.
    """
    def __init__(self, n_classes, alpha=0.9):
        super(QuantileLossFunction, self).__init__(n_classes)
        # NOTE(review): validation via assert is stripped under python -O;
        # a ValueError would be safer but changes the raised exception type.
        assert 0 < alpha < 1.0
        self.alpha = alpha
        self.percentile = alpha * 100.0
    def init_estimator(self):
        return QuantileEstimator(self.alpha)
    def __call__(self, y, pred, sample_weight=None):
        # Pinball loss: under-predictions weighted by alpha,
        # over-predictions by (1 - alpha).
        pred = pred.ravel()
        diff = y - pred
        alpha = self.alpha
        mask = y > pred
        if sample_weight is None:
            loss = (alpha * diff[mask].sum() -
                    (1.0 - alpha) * diff[~mask].sum()) / y.shape[0]
        else:
            loss = ((alpha * np.sum(sample_weight[mask] * diff[mask]) -
                    (1.0 - alpha) * np.sum(sample_weight[~mask] * diff[~mask])) /
                    sample_weight.sum())
        return loss
    def negative_gradient(self, y, pred, **kargs):
        # Subgradient of the pinball loss: alpha or -(1 - alpha).
        alpha = self.alpha
        pred = pred.ravel()
        mask = y > pred
        return (alpha * mask) - ((1.0 - alpha) * ~mask)
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        # Leaf value is the weighted alpha-percentile of the residuals.
        terminal_region = np.where(terminal_regions == leaf)[0]
        diff = (y.take(terminal_region, axis=0)
                - pred.take(terminal_region, axis=0))
        sample_weight = sample_weight.take(terminal_region, axis=0)
        val = _weighted_percentile(diff, sample_weight, self.percentile)
        tree.value[leaf, 0] = val
class ClassificationLossFunction(six.with_metaclass(ABCMeta, LossFunction)):
    """Base class for classification loss functions. """
    def _score_to_proba(self, score):
        """Template method to convert scores to probabilities.
        Losses that do not support probabilities leave this default, which
        raises TypeError.
        """
        raise TypeError('%s does not support predict_proba' % type(self).__name__)
    @abstractmethod
    def _score_to_decision(self, score):
        """Template method to convert scores to decisions.
        Returns int arrays.
        """
class BinomialDeviance(ClassificationLossFunction):
    """Binomial deviance loss function for binary classification.
    Binary classification is a special case; here, we only need to
    fit one tree instead of ``n_classes`` trees.
    """
    def __init__(self, n_classes):
        if n_classes != 2:
            raise ValueError("{0:s} requires 2 classes.".format(
                self.__class__.__name__))
        # we only need to fit one tree for binary clf.
        super(BinomialDeviance, self).__init__(1)
    def init_estimator(self):
        return LogOddsEstimator()
    def __call__(self, y, pred, sample_weight=None):
        """Compute the deviance (= 2 * negative log-likelihood). """
        # logaddexp(0, v) == log(1.0 + exp(v))
        pred = pred.ravel()
        if sample_weight is None:
            return -2.0 * np.mean((y * pred) - np.logaddexp(0.0, pred))
        else:
            return (-2.0 / sample_weight.sum() *
                    np.sum(sample_weight * ((y * pred) - np.logaddexp(0.0, pred))))
    def negative_gradient(self, y, pred, **kargs):
        """Compute the residual (= negative gradient). """
        # expit(pred) is the predicted probability of the positive class.
        return y - expit(pred.ravel())
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        """Make a single Newton-Raphson step.
        our node estimate is given by:
            sum(w * (y - prob)) / sum(w * prob * (1 - prob))
        we take advantage that: y - prob = residual
        """
        terminal_region = np.where(terminal_regions == leaf)[0]
        residual = residual.take(terminal_region, axis=0)
        y = y.take(terminal_region, axis=0)
        sample_weight = sample_weight.take(terminal_region, axis=0)
        numerator = np.sum(sample_weight * residual)
        # (y - residual) == prob, so this is sum(w * prob * (1 - prob)).
        denominator = np.sum(sample_weight * (y - residual) * (1 - y + residual))
        # prevents overflow and division by zero
        if abs(denominator) < 1e-150:
            tree.value[leaf, 0, 0] = 0.0
        else:
            tree.value[leaf, 0, 0] = numerator / denominator
    def _score_to_proba(self, score):
        # Column 1 = P(y=1) via the logistic sigmoid; column 0 = complement.
        proba = np.ones((score.shape[0], 2), dtype=np.float64)
        proba[:, 1] = expit(score.ravel())
        proba[:, 0] -= proba[:, 1]
        return proba
    def _score_to_decision(self, score):
        proba = self._score_to_proba(score)
        return np.argmax(proba, axis=1)
class MultinomialDeviance(ClassificationLossFunction):
    """Multinomial deviance loss function for multi-class classification.
    For multi-class classification we need to fit ``n_classes`` trees at
    each stage.
    """
    is_multi_class = True
    def __init__(self, n_classes):
        if n_classes < 3:
            raise ValueError("{0:s} requires more than 2 classes.".format(
                self.__class__.__name__))
        super(MultinomialDeviance, self).__init__(n_classes)
    def init_estimator(self):
        return PriorProbabilityEstimator()
    def __call__(self, y, pred, sample_weight=None):
        # create one-hot label encoding
        Y = np.zeros((y.shape[0], self.K), dtype=np.float64)
        for k in range(self.K):
            Y[:, k] = y == k
        if sample_weight is None:
            return np.sum(-1 * (Y * pred).sum(axis=1) +
                          logsumexp(pred, axis=1))
        else:
            # NOTE(review): sample_weight scales only the label term, not the
            # logsumexp normalizer -- looks inconsistent with a fully weighted
            # deviance; confirm intended behavior.
            return np.sum(-1 * sample_weight * (Y * pred).sum(axis=1) +
                          logsumexp(pred, axis=1))
    def negative_gradient(self, y, pred, k=0, **kwargs):
        """Compute negative gradient for the ``k``-th class. """
        # y - softmax_k(pred); nan_to_num guards exp overflow/underflow.
        return y - np.nan_to_num(np.exp(pred[:, k] -
                                        logsumexp(pred, axis=1)))
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        """Make a single Newton-Raphson step. """
        terminal_region = np.where(terminal_regions == leaf)[0]
        residual = residual.take(terminal_region, axis=0)
        y = y.take(terminal_region, axis=0)
        sample_weight = sample_weight.take(terminal_region, axis=0)
        numerator = np.sum(sample_weight * residual)
        # (K-1)/K factor from Friedman's multi-class gradient boosting.
        numerator *= (self.K - 1) / self.K
        denominator = np.sum(sample_weight * (y - residual) *
                             (1.0 - y + residual))
        # prevents overflow and division by zero
        if abs(denominator) < 1e-150:
            tree.value[leaf, 0, 0] = 0.0
        else:
            tree.value[leaf, 0, 0] = numerator / denominator
    def _score_to_proba(self, score):
        # Row-wise softmax over the K per-class scores.
        return np.nan_to_num(
            np.exp(score - (logsumexp(score, axis=1)[:, np.newaxis])))
    def _score_to_decision(self, score):
        proba = self._score_to_proba(score)
        return np.argmax(proba, axis=1)
class ExponentialLoss(ClassificationLossFunction):
    """Exponential loss function for binary classification.
    Same loss as AdaBoost.
    References
    ----------
    Greg Ridgeway, Generalized Boosted Models: A guide to the gbm package, 2007
    """
    def __init__(self, n_classes):
        if n_classes != 2:
            raise ValueError("{0:s} requires 2 classes.".format(
                self.__class__.__name__))
        # we only need to fit one tree for binary clf.
        super(ExponentialLoss, self).__init__(1)
    def init_estimator(self):
        return ScaledLogOddsEstimator()
    def __call__(self, y, pred, sample_weight=None):
        # Labels are mapped from {0, 1} to {-1, 1} via (2y - 1).
        pred = pred.ravel()
        if sample_weight is None:
            return np.mean(np.exp(-(2. * y - 1.) * pred))
        else:
            return (1.0 / sample_weight.sum() *
                    np.sum(sample_weight * np.exp(-(2 * y - 1) * pred)))
    def negative_gradient(self, y, pred, **kargs):
        # d/dpred of exp(-(2y-1) * pred), negated.
        y_ = -(2. * y - 1.)
        return y_ * np.exp(y_ * pred.ravel())
    def _update_terminal_region(self, tree, terminal_regions, leaf, X, y,
                                residual, pred, sample_weight):
        # Closed-form leaf estimate for exponential loss (as in gbm).
        terminal_region = np.where(terminal_regions == leaf)[0]
        pred = pred.take(terminal_region, axis=0)
        y = y.take(terminal_region, axis=0)
        sample_weight = sample_weight.take(terminal_region, axis=0)
        y_ = 2. * y - 1.
        numerator = np.sum(y_ * sample_weight * np.exp(-y_ * pred))
        denominator = np.sum(sample_weight * np.exp(-y_ * pred))
        # prevents overflow and division by zero
        if abs(denominator) < 1e-150:
            tree.value[leaf, 0, 0] = 0.0
        else:
            tree.value[leaf, 0, 0] = numerator / denominator
    def _score_to_proba(self, score):
        # The factor 2 converts the exponential-loss score to log-odds scale.
        proba = np.ones((score.shape[0], 2), dtype=np.float64)
        proba[:, 1] = expit(2.0 * score.ravel())
        proba[:, 0] -= proba[:, 1]
        return proba
    def _score_to_decision(self, score):
        # NOTE(review): np.int is deprecated in modern NumPy (alias of int).
        return (score.ravel() >= 0.0).astype(np.int)
# Registry mapping the public ``loss`` parameter values to loss classes.
# 'deviance' is resolved later (binomial vs. multinomial) based on the
# number of classes seen in fit.
LOSS_FUNCTIONS = {'ls': LeastSquaresError,
                  'lad': LeastAbsoluteError,
                  'huber': HuberLossFunction,
                  'quantile': QuantileLossFunction,
                  'deviance': None,  # for both, multinomial and binomial
                  'exponential': ExponentialLoss,
                  }
# Registry for string values of the ``init`` parameter.
INIT_ESTIMATORS = {'zero': ZeroEstimator}
class VerboseReporter(object):
    """Reports verbose output to stdout.
    If ``verbose==1`` output is printed once in a while (when iteration mod
    verbose_mod is zero).; if larger than 1 then output is printed for
    each update.
    """
    def __init__(self, verbose):
        self.verbose = verbose
    def init(self, est, begin_at_stage=0):
        """Print the header row and set up the per-iteration format string.

        ``begin_at_stage`` is non-zero when warm-starting from an already
        fitted ensemble.
        """
        # header fields and line format str
        header_fields = ['Iter', 'Train Loss']
        verbose_fmt = ['{iter:>10d}', '{train_score:>16.4f}']
        # do oob?
        # The OOB column only exists when subsampling is active.
        if est.subsample < 1:
            header_fields.append('OOB Improve')
            verbose_fmt.append('{oob_impr:>16.4f}')
        header_fields.append('Remaining Time')
        verbose_fmt.append('{remaining_time:>16s}')
        # print the header line
        print(('%10s ' + '%16s ' *
               (len(header_fields) - 1)) % tuple(header_fields))
        self.verbose_fmt = ' '.join(verbose_fmt)
        # plot verbose info each time i % verbose_mod == 0
        self.verbose_mod = 1
        self.start_time = time()
        self.begin_at_stage = begin_at_stage
    def update(self, j, est):
        """Update reporter with new iteration. """
        do_oob = est.subsample < 1
        # we need to take into account if we fit additional estimators.
        i = j - self.begin_at_stage  # iteration relative to the start iter
        if (i + 1) % self.verbose_mod == 0:
            oob_impr = est.oob_improvement_[j] if do_oob else 0
            # Linear extrapolation of elapsed time over remaining stages.
            remaining_time = ((est.n_estimators - (j + 1)) *
                              (time() - self.start_time) / float(i + 1))
            if remaining_time > 60:
                remaining_time = '{0:.2f}m'.format(remaining_time / 60.0)
            else:
                remaining_time = '{0:.2f}s'.format(remaining_time)
            print(self.verbose_fmt.format(iter=j + 1,
                                          train_score=est.train_score_[j],
                                          oob_impr=oob_impr,
                                          remaining_time=remaining_time))
            if self.verbose == 1 and ((i + 1) // (self.verbose_mod * 10) > 0):
                # adjust verbose frequency (powers of 10)
                self.verbose_mod *= 10
class BaseGradientBoosting(six.with_metaclass(ABCMeta, BaseEnsemble)):
"""Abstract base class for Gradient Boosting. """
@abstractmethod
def __init__(self, loss, learning_rate, n_estimators, criterion,
min_samples_split, min_samples_leaf, min_weight_fraction_leaf,
max_depth, min_impurity_decrease, min_impurity_split,
init, subsample, max_features,
random_state, alpha=0.9, verbose=0, max_leaf_nodes=None,
warm_start=False, presort='auto'):
self.n_estimators = n_estimators
self.learning_rate = learning_rate
self.loss = loss
self.criterion = criterion
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.subsample = subsample
self.max_features = max_features
self.max_depth = max_depth
self.min_impurity_decrease = min_impurity_decrease
self.min_impurity_split = min_impurity_split
self.init = init
self.random_state = random_state
self.alpha = alpha
self.verbose = verbose
self.max_leaf_nodes = max_leaf_nodes
self.warm_start = warm_start
self.presort = presort
    def _fit_stage(self, i, X, y, y_pred, sample_weight, sample_mask,
                   random_state, X_idx_sorted, X_csc=None, X_csr=None):
        """Fit another stage of ``n_classes_`` trees to the boosting model. """
        assert sample_mask.dtype == np.bool
        loss = self.loss_
        original_y = y
        # One regression tree per output dimension (K == 1 except for
        # multi-class classification).
        for k in range(loss.K):
            if loss.is_multi_class:
                # one-vs-rest target for class k
                y = np.array(original_y == k, dtype=np.float64)
            residual = loss.negative_gradient(y, y_pred, k=k,
                                              sample_weight=sample_weight)
            # induce regression tree on residuals
            tree = DecisionTreeRegressor(
                criterion=self.criterion,
                splitter='best',
                max_depth=self.max_depth,
                min_samples_split=self.min_samples_split,
                min_samples_leaf=self.min_samples_leaf,
                min_weight_fraction_leaf=self.min_weight_fraction_leaf,
                min_impurity_decrease=self.min_impurity_decrease,
                min_impurity_split=self.min_impurity_split,
                max_features=self.max_features,
                max_leaf_nodes=self.max_leaf_nodes,
                random_state=random_state,
                presort=self.presort)
            if self.subsample < 1.0:
                # no inplace multiplication!
                # Zero out out-of-bag weights; the 0/1 mask makes repeating
                # this per class a no-op after the first pass.
                sample_weight = sample_weight * sample_mask.astype(np.float64)
            # CSC is the efficient layout for fitting, CSR for prediction.
            if X_csc is not None:
                tree.fit(X_csc, residual, sample_weight=sample_weight,
                         check_input=False, X_idx_sorted=X_idx_sorted)
            else:
                tree.fit(X, residual, sample_weight=sample_weight,
                         check_input=False, X_idx_sorted=X_idx_sorted)
            # update tree leaves
            if X_csr is not None:
                loss.update_terminal_regions(tree.tree_, X_csr, y, residual, y_pred,
                                             sample_weight, sample_mask,
                                             self.learning_rate, k=k)
            else:
                loss.update_terminal_regions(tree.tree_, X, y, residual, y_pred,
                                             sample_weight, sample_mask,
                                             self.learning_rate, k=k)
            # add tree to ensemble
            self.estimators_[i, k] = tree
        return y_pred
    def _check_params(self):
        """Check validity of parameters and raise ValueError if not valid.

        Also resolves derived state as a side effect: ``self.loss_`` is
        instantiated from the loss name and ``self.max_features_`` is
        computed from the ``max_features`` specification.
        """
        if self.n_estimators <= 0:
            raise ValueError("n_estimators must be greater than 0 but "
                             "was %r" % self.n_estimators)
        if self.learning_rate <= 0.0:
            raise ValueError("learning_rate must be greater than 0 but "
                             "was %r" % self.learning_rate)
        if (self.loss not in self._SUPPORTED_LOSS
                or self.loss not in LOSS_FUNCTIONS):
            raise ValueError("Loss '{0:s}' not supported. ".format(self.loss))
        # map the loss name onto a concrete LossFunction class
        if self.loss == 'deviance':
            loss_class = (MultinomialDeviance
                          if len(self.classes_) > 2
                          else BinomialDeviance)
        else:
            loss_class = LOSS_FUNCTIONS[self.loss]
        if self.loss in ('huber', 'quantile'):
            # these losses are additionally parametrized by the alpha quantile
            self.loss_ = loss_class(self.n_classes_, self.alpha)
        else:
            self.loss_ = loss_class(self.n_classes_)
        if not (0.0 < self.subsample <= 1.0):
            raise ValueError("subsample must be in (0,1] but "
                             "was %r" % self.subsample)
        if self.init is not None:
            if isinstance(self.init, six.string_types):
                if self.init not in INIT_ESTIMATORS:
                    raise ValueError('init="%s" is not supported' % self.init)
            else:
                # a custom init estimator must expose the minimal API used
                # by fit(): fit and predict
                if (not hasattr(self.init, 'fit')
                        or not hasattr(self.init, 'predict')):
                    raise ValueError("init=%r must be valid BaseEstimator "
                                     "and support both fit and "
                                     "predict" % self.init)
        if not (0.0 < self.alpha < 1.0):
            raise ValueError("alpha must be in (0.0, 1.0) but "
                             "was %r" % self.alpha)
        if isinstance(self.max_features, six.string_types):
            if self.max_features == "auto":
                # if is_classification
                if self.n_classes_ > 1:
                    max_features = max(1, int(np.sqrt(self.n_features_)))
                else:
                    # is regression
                    max_features = self.n_features_
            elif self.max_features == "sqrt":
                max_features = max(1, int(np.sqrt(self.n_features_)))
            elif self.max_features == "log2":
                max_features = max(1, int(np.log2(self.n_features_)))
            else:
                raise ValueError("Invalid value for max_features: %r. "
                                 "Allowed string values are 'auto', 'sqrt' "
                                 "or 'log2'." % self.max_features)
        elif self.max_features is None:
            max_features = self.n_features_
        elif isinstance(self.max_features, (numbers.Integral, np.integer)):
            max_features = self.max_features
        else:  # float
            if 0. < self.max_features <= 1.:
                max_features = max(int(self.max_features *
                                       self.n_features_), 1)
            else:
                raise ValueError("max_features must be in (0, n_features]")
        self.max_features_ = max_features
    def _init_state(self):
        """Initialize model state and allocate model state data structures. """
        # resolve ``init`` (None, estimator name, or estimator instance)
        # into the estimator providing the initial predictions
        if self.init is None:
            self.init_ = self.loss_.init_estimator()
        elif isinstance(self.init, six.string_types):
            self.init_ = INIT_ESTIMATORS[self.init]()
        else:
            self.init_ = self.init
        # one tree slot per (stage, class); loss_.K is 1 for regression
        # and binary classification
        self.estimators_ = np.empty((self.n_estimators, self.loss_.K),
                                    dtype=np.object)
        self.train_score_ = np.zeros((self.n_estimators,), dtype=np.float64)
        # do oob?
        if self.subsample < 1.0:
            self.oob_improvement_ = np.zeros((self.n_estimators),
                                             dtype=np.float64)
def _clear_state(self):
"""Clear the state of the gradient boosting model. """
if hasattr(self, 'estimators_'):
self.estimators_ = np.empty((0, 0), dtype=np.object)
if hasattr(self, 'train_score_'):
del self.train_score_
if hasattr(self, 'oob_improvement_'):
del self.oob_improvement_
if hasattr(self, 'init_'):
del self.init_
def _resize_state(self):
"""Add additional ``n_estimators`` entries to all attributes. """
# self.n_estimators is the number of additional est to fit
total_n_estimators = self.n_estimators
if total_n_estimators < self.estimators_.shape[0]:
raise ValueError('resize with smaller n_estimators %d < %d' %
(total_n_estimators, self.estimators_[0]))
self.estimators_.resize((total_n_estimators, self.loss_.K))
self.train_score_.resize(total_n_estimators)
if (self.subsample < 1 or hasattr(self, 'oob_improvement_')):
# if do oob resize arrays or create new if not available
if hasattr(self, 'oob_improvement_'):
self.oob_improvement_.resize(total_n_estimators)
else:
self.oob_improvement_ = np.zeros((total_n_estimators,),
dtype=np.float64)
def _is_initialized(self):
return len(getattr(self, 'estimators_', [])) > 0
    def _check_initialized(self):
        """Check that the estimator is initialized, raising an error if not."""
        # delegates to sklearn's standard NotFittedError machinery
        check_is_fitted(self, 'estimators_')
    @property
    @deprecated("Attribute n_features was deprecated in version 0.19 and "
                "will be removed in 0.21.")
    def n_features(self):
        # Deprecated read-only alias kept for backward compatibility;
        # use ``n_features_`` instead.
        return self.n_features_
    def fit(self, X, y, sample_weight=None, monitor=None):
        """Fit the gradient boosting model.
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.
        y : array-like, shape = [n_samples]
            Target values (integers in classification, real numbers in
            regression)
            For classification, labels must correspond to classes.
        sample_weight : array-like, shape = [n_samples] or None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.
        monitor : callable, optional
            The monitor is called after each iteration with the current
            iteration, a reference to the estimator and the local variables of
            ``_fit_stages`` as keyword arguments ``callable(i, self,
            locals())``. If the callable returns ``True`` the fitting procedure
            is stopped. The monitor can be used for various things such as
            computing held-out estimates, early stopping, model introspect, and
            snapshoting.
        Returns
        -------
        self : object
            Returns self.
        """
        # if not warmstart - clear the estimator state
        if not self.warm_start:
            self._clear_state()
        # Check input
        X, y = check_X_y(X, y, accept_sparse=['csr', 'csc', 'coo'], dtype=DTYPE)
        n_samples, self.n_features_ = X.shape
        if sample_weight is None:
            sample_weight = np.ones(n_samples, dtype=np.float32)
        else:
            sample_weight = column_or_1d(sample_weight, warn=True)
            check_consistent_length(X, y, sample_weight)
        y = self._validate_y(y)
        random_state = check_random_state(self.random_state)
        self._check_params()
        if not self._is_initialized():
            # init state
            self._init_state()
            # fit initial model - FIXME make sample_weight optional
            self.init_.fit(X, y, sample_weight)
            # init predictions
            y_pred = self.init_.predict(X)
            begin_at_stage = 0
        else:
            # add more estimators to fitted model
            # invariant: warm_start = True
            if self.n_estimators < self.estimators_.shape[0]:
                raise ValueError('n_estimators=%d must be larger or equal to '
                                 'estimators_.shape[0]=%d when '
                                 'warm_start==True'
                                 % (self.n_estimators,
                                    self.estimators_.shape[0]))
            begin_at_stage = self.estimators_.shape[0]
            # resume from the predictions of the already-fitted stages
            y_pred = self._decision_function(X)
            self._resize_state()
        X_idx_sorted = None
        presort = self.presort
        # Allow presort to be 'auto', which means True if the dataset is dense,
        # otherwise it will be False.
        if presort == 'auto' and issparse(X):
            presort = False
        elif presort == 'auto':
            presort = True
        if presort == True:
            if issparse(X):
                raise ValueError("Presorting is not supported for sparse matrices.")
            else:
                # pre-sorted feature indices shared by every tree
                X_idx_sorted = np.asfortranarray(np.argsort(X, axis=0),
                                                 dtype=np.int32)
        # fit the boosting stages
        n_stages = self._fit_stages(X, y, y_pred, sample_weight, random_state,
                                    begin_at_stage, monitor, X_idx_sorted)
        # change shape of arrays after fit (early-stopping or additional ests)
        if n_stages != self.estimators_.shape[0]:
            self.estimators_ = self.estimators_[:n_stages]
            self.train_score_ = self.train_score_[:n_stages]
            if hasattr(self, 'oob_improvement_'):
                self.oob_improvement_ = self.oob_improvement_[:n_stages]
        return self
    def _fit_stages(self, X, y, y_pred, sample_weight, random_state,
                    begin_at_stage=0, monitor=None, X_idx_sorted=None):
        """Iteratively fits the stages.
        For each stage it computes the progress (OOB, train score)
        and delegates to ``_fit_stage``.
        Returns the number of stages fit; might differ from ``n_estimators``
        due to early stopping.
        """
        n_samples = X.shape[0]
        do_oob = self.subsample < 1.0
        sample_mask = np.ones((n_samples, ), dtype=np.bool)
        n_inbag = max(1, int(self.subsample * n_samples))
        loss_ = self.loss_
        # Set min_weight_leaf from min_weight_fraction_leaf
        if self.min_weight_fraction_leaf != 0. and sample_weight is not None:
            min_weight_leaf = (self.min_weight_fraction_leaf *
                               np.sum(sample_weight))
        else:
            min_weight_leaf = 0.
        if self.verbose:
            verbose_reporter = VerboseReporter(self.verbose)
            verbose_reporter.init(self, begin_at_stage)
        # convert once; every stage reuses these sparse views
        X_csc = csc_matrix(X) if issparse(X) else None
        X_csr = csr_matrix(X) if issparse(X) else None
        # perform boosting iterations
        i = begin_at_stage
        for i in range(begin_at_stage, self.n_estimators):
            # subsampling
            if do_oob:
                sample_mask = _random_sample_mask(n_samples, n_inbag,
                                                  random_state)
                # OOB score before adding this stage
                old_oob_score = loss_(y[~sample_mask],
                                      y_pred[~sample_mask],
                                      sample_weight[~sample_mask])
            # fit next stage of trees
            y_pred = self._fit_stage(i, X, y, y_pred, sample_weight,
                                     sample_mask, random_state, X_idx_sorted,
                                     X_csc, X_csr)
            # track deviance (= loss)
            if do_oob:
                self.train_score_[i] = loss_(y[sample_mask],
                                             y_pred[sample_mask],
                                             sample_weight[sample_mask])
                self.oob_improvement_[i] = (
                    old_oob_score - loss_(y[~sample_mask],
                                          y_pred[~sample_mask],
                                          sample_weight[~sample_mask]))
            else:
                # no need to fancy index w/ no subsampling
                self.train_score_[i] = loss_(y, y_pred, sample_weight)
            if self.verbose > 0:
                verbose_reporter.update(i, self)
            if monitor is not None:
                # a truthy monitor return value requests early stopping
                early_stopping = monitor(i, self, locals())
                if early_stopping:
                    break
        return i + 1
def _make_estimator(self, append=True):
# we don't need _make_estimator
raise NotImplementedError()
    def _init_decision_function(self, X):
        """Check input and compute prediction of ``init``. """
        self._check_initialized()
        # reuse the first tree's input validation (dtype/sparsity checks)
        X = self.estimators_[0, 0]._validate_X_predict(X, check_input=True)
        if X.shape[1] != self.n_features_:
            raise ValueError("X.shape[1] should be {0:d}, not {1:d}.".format(
                self.n_features_, X.shape[1]))
        score = self.init_.predict(X).astype(np.float64)
        return score
    def _decision_function(self, X):
        # for use in inner loop, not raveling the output in single-class case,
        # not doing input validation.
        score = self._init_decision_function(X)
        # accumulate the shrunken contribution of every fitted stage
        predict_stages(self.estimators_, X, self.learning_rate, score)
        return score
    def _staged_decision_function(self, X):
        """Compute decision function of ``X`` for each iteration.
        This method allows monitoring (i.e. determine error on testing set)
        after each stage.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Returns
        -------
        score : generator of array, shape = [n_samples, k]
            The decision function of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
            Regression and binary classification are special cases with
            ``k == 1``, otherwise ``k==n_classes``.
        """
        X = check_array(X, dtype=DTYPE, order="C", accept_sparse='csr')
        score = self._init_decision_function(X)
        for i in range(self.estimators_.shape[0]):
            # add stage i's contribution, then yield a snapshot so callers
            # cannot mutate the running accumulator
            predict_stage(self.estimators_, i, X, self.learning_rate, score)
            yield score.copy()
    @property
    def feature_importances_(self):
        """Return the feature importances (the higher, the more important the
        feature).
        Returns
        -------
        feature_importances_ : array, shape = [n_features]
        """
        self._check_initialized()
        total_sum = np.zeros((self.n_features_, ), dtype=np.float64)
        for stage in self.estimators_:
            # average the tree importances within a stage (over classes)...
            stage_sum = sum(tree.feature_importances_
                            for tree in stage) / len(stage)
            total_sum += stage_sum
        # ...then average over stages
        importances = total_sum / len(self.estimators_)
        return importances
def _validate_y(self, y):
self.n_classes_ = 1
if y.dtype.kind == 'O':
y = y.astype(np.float64)
# Default implementation
return y
    def apply(self, X):
        """Apply trees in the ensemble to X, return leaf indices.
        .. versionadded:: 0.17
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will
            be converted to a sparse ``csr_matrix``.
        Returns
        -------
        X_leaves : array_like, shape = [n_samples, n_estimators, n_classes]
            For each datapoint x in X and for each tree in the ensemble,
            return the index of the leaf x ends up in each estimator.
            In the case of binary classification n_classes is 1.
        """
        self._check_initialized()
        # reuse the first tree's input validation (dtype/sparsity checks)
        X = self.estimators_[0, 0]._validate_X_predict(X, check_input=True)
        # n_classes will be equal to 1 in the binary classification or the
        # regression case.
        n_estimators, n_classes = self.estimators_.shape
        leaves = np.zeros((X.shape[0], n_estimators, n_classes))
        for i in range(n_estimators):
            for j in range(n_classes):
                estimator = self.estimators_[i, j]
                # check_input=False: X was already validated above
                leaves[:, i, j] = estimator.apply(X, check_input=False)
        return leaves
class GradientBoostingClassifier(BaseGradientBoosting, ClassifierMixin):
    """Gradient Boosting for classification.
    GB builds an additive model in a
    forward stage-wise fashion; it allows for the optimization of
    arbitrary differentiable loss functions. In each stage ``n_classes_``
    regression trees are fit on the negative gradient of the
    binomial or multinomial deviance loss function. Binary classification
    is a special case where only a single regression tree is induced.
    Read more in the :ref:`User Guide <gradient_boosting>`.
    Parameters
    ----------
    loss : {'deviance', 'exponential'}, optional (default='deviance')
        loss function to be optimized. 'deviance' refers to
        deviance (= logistic regression) for classification
        with probabilistic outputs. For loss 'exponential' gradient
        boosting recovers the AdaBoost algorithm.
    learning_rate : float, optional (default=0.1)
        learning rate shrinks the contribution of each tree by `learning_rate`.
        There is a trade-off between learning_rate and n_estimators.
    n_estimators : int (default=100)
        The number of boosting stages to perform. Gradient boosting
        is fairly robust to over-fitting so a large number usually
        results in better performance.
    max_depth : integer, optional (default=3)
        maximum depth of the individual regression estimators. The maximum
        depth limits the number of nodes in the tree. Tune this parameter
        for best performance; the best value depends on the interaction
        of the input variables.
    criterion : string, optional (default="friedman_mse")
        The function to measure the quality of a split. Supported criteria
        are "friedman_mse" for the mean squared error with improvement
        score by Friedman, "mse" for mean squared error, and "mae" for
        the mean absolute error. The default value of "friedman_mse" is
        generally the best as it can provide a better approximation in
        some cases.
        .. versionadded:: 0.18
    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
        .. versionchanged:: 0.18
           Added float values for percentages.
    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.
        .. versionchanged:: 0.18
           Added float values for percentages.
    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.
    subsample : float, optional (default=1.0)
        The fraction of samples to be used for fitting the individual base
        learners. If smaller than 1.0 this results in Stochastic Gradient
        Boosting. `subsample` interacts with the parameter `n_estimators`.
        Choosing `subsample < 1.0` leads to a reduction of variance
        and an increase in bias.
    max_features : int, float, string or None, optional (default=None)
        The number of features to consider when looking for the best split:
        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a percentage and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.
        Choosing `max_features < n_features` leads to a reduction of variance
        and an increase in bias.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_split : float,
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.
        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19 and will be removed in 0.21.
           Use ``min_impurity_decrease`` instead.
    min_impurity_decrease : float, optional (default=0.)
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.
        The weighted impurity decrease equation is the following::
            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)
        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.
        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.
        .. versionadded:: 0.19
    init : BaseEstimator, None, optional (default=None)
        An estimator object that is used to compute the initial
        predictions. ``init`` has to provide ``fit`` and ``predict``.
        If None it uses ``loss.init_estimator``.
    verbose : int, default: 0
        Enable verbose output. If 1 then it prints progress and performance
        once in a while (the more trees the lower the frequency). If greater
        than 1 then it prints progress and performance for every tree.
    warm_start : bool, default: False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just erase the
        previous solution.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    presort : bool or 'auto', optional (default='auto')
        Whether to presort the data to speed up the finding of best splits in
        fitting. Auto mode by default will use presorting on dense data and
        default to normal sorting on sparse data. Setting presort to true on
        sparse data will raise an error.
        .. versionadded:: 0.17
           *presort* parameter.
    Attributes
    ----------
    feature_importances_ : array, shape = [n_features]
        The feature importances (the higher, the more important the feature).
    oob_improvement_ : array, shape = [n_estimators]
        The improvement in loss (= deviance) on the out-of-bag samples
        relative to the previous iteration.
        ``oob_improvement_[0]`` is the improvement in
        loss of the first stage over the ``init`` estimator.
    train_score_ : array, shape = [n_estimators]
        The i-th score ``train_score_[i]`` is the deviance (= loss) of the
        model at iteration ``i`` on the in-bag sample.
        If ``subsample == 1`` this is the deviance on the training data.
    loss_ : LossFunction
        The concrete ``LossFunction`` object.
    init : BaseEstimator
        The estimator that provides the initial predictions.
        Set via the ``init`` argument or ``loss.init_estimator``.
    estimators_ : ndarray of DecisionTreeRegressor, shape = [n_estimators, ``loss_.K``]
        The collection of fitted sub-estimators. ``loss_.K`` is 1 for binary
        classification, otherwise n_classes.
    Notes
    -----
    The features are always randomly permuted at each split. Therefore,
    the best found split may vary, even with the same training data and
    ``max_features=n_features``, if the improvement of the criterion is
    identical for several splits enumerated during the search of the best
    split. To obtain a deterministic behaviour during fitting,
    ``random_state`` has to be fixed.
    See also
    --------
    sklearn.tree.DecisionTreeClassifier, RandomForestClassifier
    AdaBoostClassifier
    References
    ----------
    J. Friedman, Greedy Function Approximation: A Gradient Boosting
    Machine, The Annals of Statistics, Vol. 29, No. 5, 2001.
    J. Friedman, Stochastic Gradient Boosting, 1999
    T. Hastie, R. Tibshirani and J. Friedman.
    Elements of Statistical Learning Ed. 2, Springer, 2009.
    """
    _SUPPORTED_LOSS = ('deviance', 'exponential')
    def __init__(self, loss='deviance', learning_rate=0.1, n_estimators=100,
                 subsample=1.0, criterion='friedman_mse', min_samples_split=2,
                 min_samples_leaf=1, min_weight_fraction_leaf=0.,
                 max_depth=3, min_impurity_decrease=0.,
                 min_impurity_split=None, init=None,
                 random_state=None, max_features=None, verbose=0,
                 max_leaf_nodes=None, warm_start=False,
                 presort='auto'):
        super(GradientBoostingClassifier, self).__init__(
            loss=loss, learning_rate=learning_rate, n_estimators=n_estimators,
            criterion=criterion, min_samples_split=min_samples_split,
            min_samples_leaf=min_samples_leaf,
            min_weight_fraction_leaf=min_weight_fraction_leaf,
            max_depth=max_depth, init=init, subsample=subsample,
            max_features=max_features,
            random_state=random_state, verbose=verbose,
            max_leaf_nodes=max_leaf_nodes,
            min_impurity_decrease=min_impurity_decrease,
            min_impurity_split=min_impurity_split,
            warm_start=warm_start,
            presort=presort)
    def _validate_y(self, y):
        # encode labels as contiguous integers and record the class set
        check_classification_targets(y)
        self.classes_, y = np.unique(y, return_inverse=True)
        self.n_classes_ = len(self.classes_)
        return y
    def decision_function(self, X):
        """Compute the decision function of ``X``.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Returns
        -------
        score : array, shape = [n_samples, n_classes] or [n_samples]
            The decision function of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
            Regression and binary classification produce an array of shape
            [n_samples].
        """
        X = check_array(X, dtype=DTYPE, order="C", accept_sparse='csr')
        score = self._decision_function(X)
        if score.shape[1] == 1:
            # binary classification: flatten the single-column score
            return score.ravel()
        return score
    def staged_decision_function(self, X):
        """Compute decision function of ``X`` for each iteration.
        This method allows monitoring (i.e. determine error on testing set)
        after each stage.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Returns
        -------
        score : generator of array, shape = [n_samples, k]
            The decision function of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
            Regression and binary classification are special cases with
            ``k == 1``, otherwise ``k==n_classes``.
        """
        for dec in self._staged_decision_function(X):
            # no yield from in Python2.X
            yield dec
    def predict(self, X):
        """Predict class for X.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Returns
        -------
        y : array of shape = [n_samples]
            The predicted values.
        """
        score = self.decision_function(X)
        decisions = self.loss_._score_to_decision(score)
        return self.classes_.take(decisions, axis=0)
    def staged_predict(self, X):
        """Predict class at each stage for X.
        This method allows monitoring (i.e. determine error on testing set)
        after each stage.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Returns
        -------
        y : generator of array of shape = [n_samples]
            The predicted value of the input samples.
        """
        for score in self._staged_decision_function(X):
            decisions = self.loss_._score_to_decision(score)
            yield self.classes_.take(decisions, axis=0)
    def predict_proba(self, X):
        """Predict class probabilities for X.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Raises
        ------
        AttributeError
            If the ``loss`` does not support probabilities.
        Returns
        -------
        p : array of shape = [n_samples]
            The class probabilities of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
        """
        score = self.decision_function(X)
        try:
            return self.loss_._score_to_proba(score)
        except NotFittedError:
            raise
        except AttributeError:
            # not every loss implements _score_to_proba
            raise AttributeError('loss=%r does not support predict_proba' %
                                 self.loss)
    def predict_log_proba(self, X):
        """Predict class log-probabilities for X.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Raises
        ------
        AttributeError
            If the ``loss`` does not support probabilities.
        Returns
        -------
        p : array of shape = [n_samples]
            The class log-probabilities of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
        """
        proba = self.predict_proba(X)
        return np.log(proba)
    def staged_predict_proba(self, X):
        """Predict class probabilities at each stage for X.
        This method allows monitoring (i.e. determine error on testing set)
        after each stage.
        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.
        Returns
        -------
        y : generator of array of shape = [n_samples]
            The predicted value of the input samples.
        """
        try:
            for score in self._staged_decision_function(X):
                yield self.loss_._score_to_proba(score)
        except NotFittedError:
            raise
        except AttributeError:
            # not every loss implements _score_to_proba
            raise AttributeError('loss=%r does not support predict_proba' %
                                 self.loss)
class GradientBoostingRegressor(BaseGradientBoosting, RegressorMixin):
"""Gradient Boosting for regression.
GB builds an additive model in a forward stage-wise fashion;
it allows for the optimization of arbitrary differentiable loss functions.
In each stage a regression tree is fit on the negative gradient of the
given loss function.
Read more in the :ref:`User Guide <gradient_boosting>`.
Parameters
----------
loss : {'ls', 'lad', 'huber', 'quantile'}, optional (default='ls')
loss function to be optimized. 'ls' refers to least squares
regression. 'lad' (least absolute deviation) is a highly robust
loss function solely based on order information of the input
variables. 'huber' is a combination of the two. 'quantile'
allows quantile regression (use `alpha` to specify the quantile).
learning_rate : float, optional (default=0.1)
learning rate shrinks the contribution of each tree by `learning_rate`.
There is a trade-off between learning_rate and n_estimators.
n_estimators : int (default=100)
The number of boosting stages to perform. Gradient boosting
is fairly robust to over-fitting so a large number usually
results in better performance.
max_depth : integer, optional (default=3)
maximum depth of the individual regression estimators. The maximum
depth limits the number of nodes in the tree. Tune this parameter
for best performance; the best value depends on the interaction
of the input variables.
criterion : string, optional (default="friedman_mse")
The function to measure the quality of a split. Supported criteria
are "friedman_mse" for the mean squared error with improvement
score by Friedman, "mse" for mean squared error, and "mae" for
the mean absolute error. The default value of "friedman_mse" is
generally the best as it can provide a better approximation in
some cases.
.. versionadded:: 0.18
min_samples_split : int, float, optional (default=2)
The minimum number of samples required to split an internal node:
- If int, then consider `min_samples_split` as the minimum number.
- If float, then `min_samples_split` is a percentage and
`ceil(min_samples_split * n_samples)` are the minimum
number of samples for each split.
.. versionchanged:: 0.18
Added float values for percentages.
min_samples_leaf : int, float, optional (default=1)
The minimum number of samples required to be at a leaf node:
- If int, then consider `min_samples_leaf` as the minimum number.
- If float, then `min_samples_leaf` is a percentage and
`ceil(min_samples_leaf * n_samples)` are the minimum
number of samples for each node.
.. versionchanged:: 0.18
Added float values for percentages.
min_weight_fraction_leaf : float, optional (default=0.)
The minimum weighted fraction of the sum total of weights (of all
the input samples) required to be at a leaf node. Samples have
equal weight when sample_weight is not provided.
subsample : float, optional (default=1.0)
The fraction of samples to be used for fitting the individual base
learners. If smaller than 1.0 this results in Stochastic Gradient
Boosting. `subsample` interacts with the parameter `n_estimators`.
Choosing `subsample < 1.0` leads to a reduction of variance
and an increase in bias.
max_features : int, float, string or None, optional (default=None)
The number of features to consider when looking for the best split:
- If int, then consider `max_features` features at each split.
- If float, then `max_features` is a percentage and
`int(max_features * n_features)` features are considered at each
split.
- If "auto", then `max_features=n_features`.
- If "sqrt", then `max_features=sqrt(n_features)`.
- If "log2", then `max_features=log2(n_features)`.
- If None, then `max_features=n_features`.
Choosing `max_features < n_features` leads to a reduction of variance
and an increase in bias.
Note: the search for a split does not stop until at least one
valid partition of the node samples is found, even if it requires to
effectively inspect more than ``max_features`` features.
max_leaf_nodes : int or None, optional (default=None)
Grow trees with ``max_leaf_nodes`` in best-first fashion.
Best nodes are defined as relative reduction in impurity.
If None then unlimited number of leaf nodes.
min_impurity_split : float,
Threshold for early stopping in tree growth. A node will split
if its impurity is above the threshold, otherwise it is a leaf.
.. deprecated:: 0.19
``min_impurity_split`` has been deprecated in favor of
``min_impurity_decrease`` in 0.19 and will be removed in 0.21.
Use ``min_impurity_decrease`` instead.
min_impurity_decrease : float, optional (default=0.)
A node will be split if this split induces a decrease of the impurity
greater than or equal to this value.
The weighted impurity decrease equation is the following::
N_t / N * (impurity - N_t_R / N_t * right_impurity
- N_t_L / N_t * left_impurity)
where ``N`` is the total number of samples, ``N_t`` is the number of
samples at the current node, ``N_t_L`` is the number of samples in the
left child, and ``N_t_R`` is the number of samples in the right child.
``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
if ``sample_weight`` is passed.
.. versionadded:: 0.19
alpha : float (default=0.9)
The alpha-quantile of the huber loss function and the quantile
loss function. Only if ``loss='huber'`` or ``loss='quantile'``.
init : BaseEstimator, None, optional (default=None)
An estimator object that is used to compute the initial
predictions. ``init`` has to provide ``fit`` and ``predict``.
If None it uses ``loss.init_estimator``.
verbose : int, default: 0
Enable verbose output. If 1 then it prints progress and performance
once in a while (the more trees the lower the frequency). If greater
than 1 then it prints progress and performance for every tree.
warm_start : bool, default: False
When set to ``True``, reuse the solution of the previous call to fit
and add more estimators to the ensemble, otherwise, just erase the
previous solution.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
presort : bool or 'auto', optional (default='auto')
Whether to presort the data to speed up the finding of best splits in
fitting. Auto mode by default will use presorting on dense data and
default to normal sorting on sparse data. Setting presort to true on
sparse data will raise an error.
.. versionadded:: 0.17
optional parameter *presort*.
Attributes
----------
feature_importances_ : array, shape = [n_features]
The feature importances (the higher, the more important the feature).
oob_improvement_ : array, shape = [n_estimators]
The improvement in loss (= deviance) on the out-of-bag samples
relative to the previous iteration.
``oob_improvement_[0]`` is the improvement in
loss of the first stage over the ``init`` estimator.
train_score_ : array, shape = [n_estimators]
The i-th score ``train_score_[i]`` is the deviance (= loss) of the
model at iteration ``i`` on the in-bag sample.
If ``subsample == 1`` this is the deviance on the training data.
loss_ : LossFunction
The concrete ``LossFunction`` object.
init : BaseEstimator
The estimator that provides the initial predictions.
Set via the ``init`` argument or ``loss.init_estimator``.
estimators_ : ndarray of DecisionTreeRegressor, shape = [n_estimators, 1]
The collection of fitted sub-estimators.
Notes
-----
The features are always randomly permuted at each split. Therefore,
the best found split may vary, even with the same training data and
``max_features=n_features``, if the improvement of the criterion is
identical for several splits enumerated during the search of the best
split. To obtain a deterministic behaviour during fitting,
``random_state`` has to be fixed.
See also
--------
DecisionTreeRegressor, RandomForestRegressor
References
----------
J. Friedman, Greedy Function Approximation: A Gradient Boosting
Machine, The Annals of Statistics, Vol. 29, No. 5, 2001.
J. Friedman, Stochastic Gradient Boosting, 1999
T. Hastie, R. Tibshirani and J. Friedman.
Elements of Statistical Learning Ed. 2, Springer, 2009.
"""
_SUPPORTED_LOSS = ('ls', 'lad', 'huber', 'quantile')
    def __init__(self, loss='ls', learning_rate=0.1, n_estimators=100,
                 subsample=1.0, criterion='friedman_mse', min_samples_split=2,
                 min_samples_leaf=1, min_weight_fraction_leaf=0.,
                 max_depth=3, min_impurity_decrease=0.,
                 min_impurity_split=None, init=None, random_state=None,
                 max_features=None, alpha=0.9, verbose=0, max_leaf_nodes=None,
                 warm_start=False, presort='auto'):
        # Pure pass-through constructor: every hyper-parameter is forwarded
        # unchanged to the shared gradient-boosting base class, which only
        # stores it; validation is deferred to fit().
        super(GradientBoostingRegressor, self).__init__(
            loss=loss, learning_rate=learning_rate, n_estimators=n_estimators,
            criterion=criterion, min_samples_split=min_samples_split,
            min_samples_leaf=min_samples_leaf,
            min_weight_fraction_leaf=min_weight_fraction_leaf,
            max_depth=max_depth, init=init, subsample=subsample,
            max_features=max_features,
            min_impurity_decrease=min_impurity_decrease,
            min_impurity_split=min_impurity_split,
            random_state=random_state, alpha=alpha, verbose=verbose,
            max_leaf_nodes=max_leaf_nodes, warm_start=warm_start,
            presort=presort)
def predict(self, X):
"""Predict regression target for X.
Parameters
----------
X : array-like or sparse matrix, shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
y : array of shape = [n_samples]
The predicted values.
"""
X = check_array(X, dtype=DTYPE, order="C", accept_sparse='csr')
return self._decision_function(X).ravel()
def staged_predict(self, X):
"""Predict regression target at each stage for X.
This method allows monitoring (i.e. determine error on testing set)
after each stage.
Parameters
----------
X : array-like or sparse matrix, shape = [n_samples, n_features]
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
y : generator of array of shape = [n_samples]
The predicted value of the input samples.
"""
for y in self._staged_decision_function(X):
yield y.ravel()
def apply(self, X):
"""Apply trees in the ensemble to X, return leaf indices.
.. versionadded:: 0.17
Parameters
----------
X : array-like or sparse matrix, shape = [n_samples, n_features]
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will
be converted to a sparse ``csr_matrix``.
Returns
-------
X_leaves : array_like, shape = [n_samples, n_estimators]
For each datapoint x in X and for each tree in the ensemble,
return the index of the leaf x ends up in each estimator.
"""
leaves = super(GradientBoostingRegressor, self).apply(X)
leaves = leaves.reshape(X.shape[0], self.estimators_.shape[0])
return leaves
|
eharney/cinder
|
refs/heads/master
|
cinder/volume/drivers/ibm/ibm_storage/ds8k_helper.py
|
1
|
# Copyright (c) 2016 IBM Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import collections
import copy
import distutils.version as dist_version # pylint: disable=E0611
import eventlet
import math
import os
import six
import string
from oslo_log import log as logging
from cinder import coordination
from cinder import exception
from cinder.i18n import _
from cinder.objects import fields
import cinder.volume.drivers.ibm.ibm_storage as storage
from cinder.volume.drivers.ibm.ibm_storage import cryptish
from cinder.volume.drivers.ibm.ibm_storage import ds8k_restclient as restclient
from cinder.volume.drivers.ibm.ibm_storage import proxy
from cinder.volume.drivers.ibm.ibm_storage import strings
LOG = logging.getLogger(__name__)
# Maximum number of volume slots in one LSS (ids 0x00-0xFF).
LSS_VOL_SLOTS = 0x100
# Upper bound used when scanning for a free LSS id.
LSS_SLOTS = 0xFF
# Host types accepted by the ds8k_host_type option; 'auto' defers the
# choice to the connector's os_type/platform (see _get_host()).
VALID_HOST_TYPES = (
    'auto', 'AMDLinuxRHEL', 'AMDLinuxSuse',
    'AppleOSX', 'Fujitsu', 'Hp', 'HpTru64',
    'HpVms', 'LinuxDT', 'LinuxRF', 'LinuxRHEL',
    'LinuxSuse', 'Novell', 'SGI', 'SVC',
    'SanFsAIX', 'SanFsLinux', 'Sun', 'VMWare',
    'Win2000', 'Win2003', 'Win2008', 'Win2012',
    'iLinux', 'nSeries', 'pLinux', 'pSeries',
    'pSeriesPowerswap', 'zLinux', 'iSeries'
)
def filter_alnum(s):
    """Replace every non-alphanumeric character of *s* with '_'.

    Returns '' for a falsy input (None or empty string).  Used to
    sanitize host names before sending them to the DS8K REST API.
    """
    if not s:
        return ''
    # Hoist the allowed-character set out of the per-character loop: the
    # original rebuilt ascii_letters + digits (and scanned it linearly)
    # for every character; a set gives O(1) membership tests.
    allowed = set(string.ascii_letters + string.digits)
    return ''.join(c if c in allowed else '_' for c in s)
class DS8KCommonHelper(object):
    """Manage the primary backend, it is common class too."""
    # Config keys that may legitimately be absent; _get_value() raises
    # only for missing keys outside this list.
    OPTIONAL_PARAMS = ['ds8k_host_type', 'lss_range_for_cg']
    # if use new REST API, please update the version below
    VALID_REST_VERSION_5_7_MIN = '5.7.51.1047'
    INVALID_STORAGE_VERSION = '8.0.1'
    REST_VERSION_5_7_MIN_PPRC_CG = '5.7.51.1068'
    REST_VERSION_5_8_MIN_PPRC_CG = '5.8.20.1059'
    def __init__(self, conf, HTTPConnectorObject=None):
        """Store the configuration and immediately connect via setup().

        :param conf: driver configuration object (offers ``safe_get`` or a
            plain ``get``; see _get_value()).
        :param HTTPConnectorObject: optional pre-built HTTP connector that
            is handed to the REST scheduler — presumably for connection
            reuse across failover; confirm against the proxy code.
        """
        self.conf = conf
        self._connector_obj = HTTPConnectorObject
        self._storage_pools = None
        self._disable_thin_provision = False
        self._connection_type = self._get_value('connection_type')
        self._existing_lss = None
        self.backend = {}
        self.setup()
@staticmethod
def _gb2b(gb):
return gb * (2 ** 30)
def _get_value(self, key):
if getattr(self.conf, 'safe_get', 'get') == 'get':
value = self.conf.get(key)
else:
value = self.conf.safe_get(key)
if not value and key not in self.OPTIONAL_PARAMS:
raise exception.InvalidParameterValue(
err=(_('Param [%s] should be provided.') % key))
return value
    def get_thin_provision(self):
        # NOTE(review): despite the name, this returns the *disable* flag —
        # True means thin provisioning must NOT be used.
        return self._disable_thin_provision
    def get_storage_pools(self):
        """Return the pool map cached by setup()."""
        return self._storage_pools
    def get_connection_type(self):
        """Return the configured connection_type value."""
        return self._connection_type
def get_pool(self, lss):
node = int(lss, 16) % 2
pids = [
pid for pid, p in self._storage_pools.items() if p['node'] == node]
return pids[0] if pids else None
    def setup(self):
        """Connect to the array and cache backend metadata.

        Populates self.backend (REST version, storage info, host type,
        pool string, CG-reserved LSS ids) and self._storage_pools.
        """
        self._create_client()
        self._get_storage_information()
        self._check_host_type()
        self.backend['pools_str'] = self._get_value('san_clustername')
        self._storage_pools = self.get_pools()
        self.verify_pools(self._storage_pools)
        self.backend['lss_ids_for_cg'] = self._get_lss_ids_for_cg()
        self._verify_rest_version()
    def update_client(self):
        """Drop the current REST connection and establish a fresh one."""
        self._client.close()
        self._create_client()
def _get_certificate(self, host):
cert_file = strings.CERTIFICATES_PATH + host + '.pem'
LOG.debug("certificate file for DS8K %(host)s: %(cert)s",
{'host': host, 'cert': cert_file})
# Use the certificate if it exists, otherwise use the System CA Bundle
if os.path.exists(cert_file):
return cert_file
else:
LOG.debug("certificate file not found.")
return True
    def _create_client(self):
        """Build the REST client and record the server bundle version.

        Raises InvalidParameterValue when the stored password cannot be
        decrypted, and APIException when the array is unreachable.
        """
        san_ip = self._get_value('san_ip')
        try:
            clear_pass = cryptish.decrypt(self._get_value('san_password'))
        except TypeError:
            raise exception.InvalidParameterValue(
                err=_('Param [san_password] is invalid.'))
        verify = self._get_certificate(san_ip)
        try:
            self._client = restclient.RESTScheduler(
                san_ip,
                self._get_value('san_login'),
                clear_pass,
                self._connector_obj,
                verify)
        except restclient.TimeoutException:
            raise restclient.APIException(
                data=(_("Can't connect to %(host)s") % {'host': san_ip}))
        self.backend['rest_version'] = self._get_version()['bundle_version']
        LOG.info("Connection to DS8K storage system %(host)s has been "
                 "established successfully, the version of REST is %(rest)s.",
                 {'host': self._get_value('san_ip'),
                  'rest': self.backend['rest_version']})
    def _get_storage_information(self):
        """Cache the array id, WWNN and firmware release in self.backend."""
        storage_info = self.get_systems()
        self.backend['storage_unit'] = storage_info['id']
        self.backend['storage_wwnn'] = storage_info['wwnn']
        self.backend['storage_version'] = storage_info['release']
    def _get_lss_ids_for_cg(self):
        """Parse lss_range_for_cg ('xx' or 'xx-yy') into a set of LSS ids.

        Returns an empty set when the option is not configured; raises
        InvalidParameterValue when a bound falls outside 00-FF or the
        range is inverted.
        """
        lss_ids_for_cg = set()
        lss_range = self._get_value('lss_range_for_cg')
        if lss_range:
            lss_range = lss_range.replace(' ', '').split('-')
            if len(lss_range) == 1:
                # A single id is treated as a one-element range.
                begin = int(lss_range[0], 16)
                end = begin
            else:
                begin = int(lss_range[0], 16)
                end = int(lss_range[1], 16)
            if begin > 0xFF or end > 0xFF or begin > end:
                raise exception.InvalidParameterValue(
                    err=_('Param [lss_range_for_cg] is invalid, it '
                          'should be within 00-FF.'))
            lss_ids_for_cg = set(
                ('%02x' % i).upper() for i in range(begin, end + 1))
        return lss_ids_for_cg
def _check_host_type(self):
ds8k_host_type = self._get_value('ds8k_host_type')
if (ds8k_host_type and
(ds8k_host_type not in VALID_HOST_TYPES)):
msg = (_("Param [ds8k_host_type] must be one of: %(values)s.")
% {'values': VALID_HOST_TYPES[1:-1]})
LOG.error(msg)
raise exception.InvalidParameterValue(err=msg)
self.backend['host_type_override'] = (
None if ds8k_host_type == 'auto' else ds8k_host_type)
    def _verify_rest_version(self):
        """Reject array firmware / REST levels this driver cannot use."""
        if self.backend['storage_version'] == self.INVALID_STORAGE_VERSION:
            raise exception.VolumeDriverException(
                message=(_("%s does not support bulk deletion of volumes, "
                           "if you want to use this version of driver, "
                           "please upgrade the CCL.")
                         % self.INVALID_STORAGE_VERSION))
        # Only the 5.7 stream has a minimum bundle requirement here.
        if ('5.7' in self.backend['rest_version'] and
                dist_version.LooseVersion(self.backend['rest_version']) <
                dist_version.LooseVersion(self.VALID_REST_VERSION_5_7_MIN)):
            raise exception.VolumeDriverException(
                message=(_("REST version %(invalid)s is lower than "
                           "%(valid)s, please upgrade it in DS8K.")
                         % {'invalid': self.backend['rest_version'],
                            'valid': self.VALID_REST_VERSION_5_7_MIN}))
    def verify_rest_version_for_pprc_cg(self):
        """Ensure the REST level supports PPRC consistency groups."""
        if '8.1' in self.backend['rest_version']:
            raise exception.VolumeDriverException(
                message=_("REST for DS8K 8.1 does not support PPRC "
                          "consistency group, please upgrade the CCL."))
        valid_rest_version = None
        # Each firmware stream has its own minimum bundle for PPRC CG.
        if ('5.7' in self.backend['rest_version'] and
                dist_version.LooseVersion(self.backend['rest_version']) <
                dist_version.LooseVersion(self.REST_VERSION_5_7_MIN_PPRC_CG)):
            valid_rest_version = self.REST_VERSION_5_7_MIN_PPRC_CG
        elif ('5.8' in self.backend['rest_version'] and
              dist_version.LooseVersion(self.backend['rest_version']) <
              dist_version.LooseVersion(self.REST_VERSION_5_8_MIN_PPRC_CG)):
            valid_rest_version = self.REST_VERSION_5_8_MIN_PPRC_CG
        if valid_rest_version:
            raise exception.VolumeDriverException(
                message=(_("REST version %(invalid)s is lower than "
                           "%(valid)s, please upgrade it in DS8K.")
                         % {'invalid': self.backend['rest_version'],
                            'valid': valid_rest_version}))
    def verify_pools(self, storage_pools):
        """Check every pool's storage type matches the connection type."""
        if self._connection_type == storage.XIV_CONNECTION_TYPE_FC:
            ptype = 'fb'
        elif self._connection_type == storage.XIV_CONNECTION_TYPE_FC_ECKD:
            ptype = 'ckd'
        else:
            raise exception.InvalidParameterValue(
                err=_('Param [connection_type] is invalid.'))
        for pid, pool in storage_pools.items():
            if pool['stgtype'] != ptype:
                LOG.error('The stgtype of pool %(pool)s is %(ptype)s.',
                          {'pool': pid, 'ptype': pool['stgtype']})
                raise exception.InvalidParameterValue(
                    err='Param [san_clustername] is invalid.')
    @proxy.logger
    def get_pools(self, specific_pools=None):
        """Fetch pool details and return them ordered by free capacity.

        :param specific_pools: optional comma-separated pool id string;
            defaults to the configured san_clustername pools.
        :returns: OrderedDict of pool id -> attributes, largest
            'capavail' first.
        """
        if specific_pools:
            pools_str = specific_pools.replace(' ', '').upper().split(',')
        else:
            pools_str = self.backend['pools_str'].replace(
                ' ', '').upper().split(',')
        pools = self._get_pools(pools_str)
        unsorted_pools = self._format_pools(pools)
        storage_pools = collections.OrderedDict(sorted(
            unsorted_pools, key=lambda i: i[1]['capavail'], reverse=True))
        return storage_pools
    @proxy.logger
    def update_storage_pools(self, storage_pools):
        """Replace the cached pool map."""
        self._storage_pools = storage_pools
    def _format_pools(self, pools):
        # Generator of (pool_id, attributes) tuples; numeric fields are
        # coerced to int so they sort/compare correctly.
        return ((p['id'], {
            'name': p['name'],
            'node': int(p['node']),
            'stgtype': p['stgtype'],
            'cap': int(p['cap']),
            'capavail': int(p['capavail'])
        }) for p in pools)
    def verify_lss_ids(self, specified_lss_ids):
        """Validate a user-specified comma-separated list of LSS ids.

        Ensures each id is within 00-FF, that no id shares an address
        group (16 consecutive LSSes) with an existing CKD LSS, and that
        none is reserved for consistency groups.  Returns the parsed id
        list, or None when nothing was specified.  Also caches the
        current LSS inventory in self._existing_lss.
        """
        if not specified_lss_ids:
            return None
        lss_ids = specified_lss_ids.upper().replace(' ', '').split(',')
        # verify LSS IDs.
        for lss_id in lss_ids:
            if int(lss_id, 16) > 255:
                raise exception.InvalidParameterValue(
                    _('LSS %s should be within 00-FF.') % lss_id)
        # verify address group
        self._existing_lss = self.get_all_lss()
        ckd_addrgrps = set(int(lss['id'], 16) // 16 for lss in
                           self._existing_lss if lss['type'] == 'ckd')
        fb_addrgrps = set((int(lss, 16) // 16) for lss in lss_ids)
        intersection = ckd_addrgrps & fb_addrgrps
        if intersection:
            raise exception.VolumeDriverException(
                message=_('LSSes in the address group %s are reserved '
                          'for CKD volumes') % list(intersection))
        # verify whether LSSs specified have been reserved for
        # consistency group or not.
        if self.backend['lss_ids_for_cg']:
            for lss_id in lss_ids:
                if lss_id in self.backend['lss_ids_for_cg']:
                    raise exception.InvalidParameterValue(
                        _('LSS %s has been reserved for CG.') % lss_id)
        return lss_ids
    @proxy.logger
    def find_pool_lss_pair(self, pool, find_new_pid, excluded_lss):
        """Return a usable (pool, lss) pair, preferring the given pool.

        Falls back to find_biggest_pool_and_lss() when *pool* has no free
        LSS and find_new_pid allows choosing a different pool.
        """
        if pool:
            node = int(pool[1:], 16) % 2
            lss = self._find_lss(node, excluded_lss)
            if lss:
                return (pool, lss)
            else:
                if not find_new_pid:
                    raise restclient.LssIDExhaustError(
                        message=_('All LSS/LCU IDs for configured pools '
                                  'on storage are exhausted.'))
        # find new pool id and lss for lun
        return self.find_biggest_pool_and_lss(excluded_lss)
    @proxy.logger
    def find_biggest_pool_and_lss(self, excluded_lss, specified_pool_lss=None):
        """Pick the pool with most free space that still has a free LSS."""
        if specified_pool_lss:
            # pool and lss should be verified every time user create volume or
            # snapshot, because they can be changed in extra-sepcs at any time.
            specified_pool_ids, specified_lss_ids = specified_pool_lss
            storage_pools = self.get_pools(specified_pool_ids)
            self.verify_pools(storage_pools)
            storage_lss = self.verify_lss_ids(specified_lss_ids)
        else:
            storage_pools, storage_lss = self._storage_pools, None
        # pools are ordered by capacity
        for pool_id, pool in storage_pools.items():
            lss = self._find_lss(pool['node'], excluded_lss, storage_lss)
            if lss:
                return pool_id, lss
        raise restclient.LssIDExhaustError(
            message=_("All LSS/LCU IDs for configured pools are exhausted."))
    @proxy.logger
    def _find_lss(self, node, excluded_lss, specified_lss_ids=None):
        """Choose an LSS on *node*, skipping ids in *excluded_lss*.

        When specified_lss_ids is given, only those ids are considered and
        a not-yet-existing id is preferred; otherwise CG-reserved LSSes
        are avoided and creating a brand-new LSS is the fallback.
        Returns the chosen LSS id string, or None when everything is full.
        """
        if specified_lss_ids:
            # verify_lss_ids() already cached the inventory for this case.
            existing_lss = self._existing_lss
        else:
            existing_lss = self.get_all_lss()
        LOG.info("Existing LSS IDs are: %s.",
                 ','.join([lss['id'] for lss in existing_lss]))
        saved_existing_lss = copy.copy(existing_lss)
        # exclude LSSs that are full.
        existing_lss = [lss for lss in existing_lss
                        if lss['id'] not in excluded_lss]
        if not existing_lss:
            LOG.info("All LSSs are full.")
            return None
        # user specify LSSs in extra-specs.
        if specified_lss_ids:
            specified_lss_ids = [lss for lss in specified_lss_ids
                                 if lss not in excluded_lss]
            if specified_lss_ids:
                existing_lss = [lss for lss in existing_lss
                                if lss['id'] in specified_lss_ids]
                nonexistent_lss_ids = (set(specified_lss_ids) -
                                       set(lss['id'] for lss in existing_lss))
                lss = None
                # Prefer an id that does not exist yet on the right node.
                for lss_id in nonexistent_lss_ids:
                    if int(lss_id, 16) % 2 == node:
                        lss = lss_id
                        break
                if not lss:
                    lss = self._find_from_existing_lss(
                        node, existing_lss, True)
            else:
                LOG.info("All appropriate LSSs specified are full.")
                return None
        else:
            # exclude LSSs that reserved for CG.
            if self.backend['lss_ids_for_cg']:
                existing_lss_cg, nonexistent_lss_cg = (
                    self._classify_lss_for_cg(existing_lss))
                existing_lss = [lss for lss in existing_lss
                                if lss['id'] not in existing_lss_cg]
            else:
                existing_lss_cg = set()
                nonexistent_lss_cg = set()
            lss = self._find_from_existing_lss(node, existing_lss)
            if not lss:
                lss = self._find_from_nonexistent_lss(node, saved_existing_lss,
                                                      nonexistent_lss_cg)
        return lss
def _classify_lss_for_cg(self, existing_lss):
existing_lss_ids = set(lss['id'] for lss in existing_lss)
existing_lss_cg = existing_lss_ids & self.backend['lss_ids_for_cg']
nonexistent_lss_cg = self.backend['lss_ids_for_cg'] - existing_lss_cg
return existing_lss_cg, nonexistent_lss_cg
    def _find_from_existing_lss(self, node, existing_lss, ignore_pprc=False):
        """Return the emptiest non-full 'fb' LSS on *node*, or None."""
        if not ignore_pprc:
            # exclude LSSs that are used by PPRC paths.
            lss_in_pprc = self.get_lss_in_pprc_paths()
            if lss_in_pprc:
                existing_lss = [lss for lss in existing_lss
                                if lss['id'] not in lss_in_pprc]
        # exclude wrong type of LSSs and those that are not in expected node.
        existing_lss = [lss for lss in existing_lss if lss['type'] == 'fb'
                        and int(lss['group']) == node]
        lss_id = None
        if existing_lss:
            # look for the emptiest lss from existing lss
            lss = sorted(existing_lss, key=lambda k: int(k['configvols']))[0]
            if int(lss['configvols']) < LSS_VOL_SLOTS:
                lss_id = lss['id']
                LOG.info('_find_from_existing_lss: choose %(lss)s. '
                         'now it has %(num)s volumes.',
                         {'lss': lss_id, 'num': lss['configvols']})
        return lss_id
def _find_from_nonexistent_lss(self, node, existing_lss, lss_cg=None):
ckd_addrgrps = set(int(lss['id'], 16) // 16 for lss in existing_lss if
lss['type'] == 'ckd' and int(lss['group']) == node)
full_lss = set(int(lss['id'], 16) for lss in existing_lss if
lss['type'] == 'fb' and int(lss['group']) == node)
cg_lss = set(int(lss, 16) for lss in lss_cg) if lss_cg else set()
# look for an available lss from nonexistent lss
lss_id = None
for lss in range(node, LSS_SLOTS, 2):
addrgrp = lss // 16
if (addrgrp not in ckd_addrgrps and
lss not in full_lss and
lss not in cg_lss):
lss_id = ("%02x" % lss).upper()
break
LOG.info('_find_from_unexisting_lss: choose %s.', lss_id)
return lss_id
    def create_lun(self, lun):
        """Create the backing DS8K volume for *lun* and record its id."""
        volData = {
            'cap': self._gb2b(lun.size),
            'captype': 'bytes',
            'stgtype': 'fb',
            'tp': 'ese' if lun.type_thin else 'none'
        }
        # Default the data type when the caller left it unset.
        lun.data_type = lun.data_type if lun.data_type else 'FB 512'
        if lun.type_os400:
            volData['os400'] = lun.type_os400
        volData['name'] = lun.ds_name
        volData['pool'], volData['lss'] = lun.pool_lss_pair['source']
        lun.ds_id = self._create_lun(volData)
        return lun
def delete_lun(self, luns):
lun_ids = []
luns = [luns] if not isinstance(luns, list) else luns
for lun in luns:
if lun.ds_id is None:
# create_lun must have failed and not returned the id
LOG.error("delete_lun: volume id is None.")
continue
if not self.lun_exists(lun.ds_id):
LOG.error("delete_lun: volume %s not found.", lun.ds_id)
continue
lun_ids.append(lun.ds_id)
# Max 32 volumes could be deleted by specifying ids parameter
while lun_ids:
if len(lun_ids) > 32:
lun_ids_str = ','.join(lun_ids[0:32])
del lun_ids[0:32]
else:
lun_ids_str = ','.join(lun_ids)
lun_ids = []
LOG.info("Deleting volumes: %s.", lun_ids_str)
self._delete_lun(lun_ids_str)
    def get_lss_in_pprc_paths(self):
        """Return the set of source LSS ids already used by PPRC paths."""
        # TODO(Jiamin): when the REST API that get the licenses installed
        # in DS8K is ready, this function should be improved.
        try:
            paths = self.get_pprc_paths()
        except restclient.APIException:
            # Best-effort: treat a query failure as "no paths".
            paths = []
            LOG.exception("Can not get the LSS")
        lss_ids = set(p['source_lss_id'] for p in paths)
        LOG.info('LSS in PPRC paths are: %s.', ','.join(lss_ids))
        return lss_ids
    def wait_flashcopy_finished(self, src_luns, tgt_luns):
        """Poll until every FlashCopy relationship has completed.

        A relationship is done when it disappears from its target volume.
        Any state other than valid/validation_required aborts the wait;
        on any failure every pair is rolled back.  Returns True on
        success.
        """
        finished = False
        try:
            fc_state = [False] * len(tgt_luns)
            while True:
                eventlet.sleep(5)
                for i in range(len(tgt_luns)):
                    if not fc_state[i]:
                        fcs = self.get_flashcopy(tgt_luns[i].ds_id)
                        if not fcs:
                            fc_state[i] = True
                            continue
                        if fcs[0]['state'] not in ('valid',
                                                   'validation_required'):
                            raise restclient.APIException(
                                data=(_('Flashcopy ended up in bad state %s. '
                                        'Rolling back.') % fcs[0]['state']))
                if fc_state.count(False) == 0:
                    break
            finished = True
        finally:
            if not finished:
                for src_lun, tgt_lun in zip(src_luns, tgt_luns):
                    self.delete_flashcopy(src_lun.ds_id, tgt_lun.ds_id)
        return finished
def wait_pprc_copy_finished(self, vol_ids, state, delete=True):
LOG.info("Wait for PPRC pair to enter into state %s", state)
vol_ids = sorted(vol_ids)
min_vol_id = min(vol_ids)
max_vol_id = max(vol_ids)
try:
finished = False
while True:
eventlet.sleep(2)
pairs = self.get_pprc_pairs(min_vol_id, max_vol_id)
pairs = [
p for p in pairs if p['source_volume']['name'] in vol_ids]
finished_pairs = [p for p in pairs if p['state'] == state]
if len(finished_pairs) == len(pairs):
finished = True
break
invalid_states = [
'target_suspended',
'invalid',
'volume_inaccessible'
]
if state == 'full_duplex':
invalid_states.append('suspended')
elif state == 'suspended':
invalid_states.append('valid')
unfinished_pairs = [p for p in pairs if p['state'] != state]
for p in unfinished_pairs:
if p['state'] in invalid_states:
raise restclient.APIException(
data=(_('Metro Mirror pair %(id)s enters into '
'state %(state)s. ')
% {'id': p['id'], 'state': p['state']}))
finally:
if not finished and delete:
pair_ids = {'ids': ','.join([p['id'] for p in pairs])}
self.delete_pprc_pair_by_pair_id(pair_ids)
    def _get_host(self, connector):
        """Map a Cinder connector dict to a DS8K (name, type) host tuple."""
        # DS8K doesn't support hostname which is longer than 32 chars.
        hname = ('OShost:%s' % filter_alnum(connector['host']))[:32]
        os_type = connector.get('os_type')
        platform = connector.get('platform')
        # The configured override wins; otherwise derive the DS8K host
        # type from the connector's OS/platform, defaulting to LinuxRHEL.
        if self.backend['host_type_override']:
            htype = self.backend['host_type_override']
        elif os_type == 'OS400':
            htype = 'iSeries'
        elif os_type == 'AIX':
            htype = 'pSeries'
        elif platform in ('s390', 's390x') and os_type == 'linux2':
            htype = 'zLinux'
        else:
            htype = 'LinuxRHEL'
        return collections.namedtuple('Host', ('name', 'type'))(hname, htype)
    @coordination.synchronized('ibm-ds8k-{connector[host]}')
    def initialize_connection(self, vol_id, connector, **kwargs):
        """Map *vol_id* to the connector's host and return FC target info.

        Creates the host object and any missing host ports on demand.
        Raises APIException when the initiator WWPNs span more than one
        defined host.
        """
        host = self._get_host(connector)
        # Find defined host and undefined host ports
        host_wwpn_set = set(wwpn.upper() for wwpn in connector['wwpns'])
        host_ports = self._get_host_ports(host_wwpn_set)
        LOG.debug("host_ports: %s", host_ports)
        defined_hosts = set(
            hp['host_id'] for hp in host_ports if hp['host_id'])
        unknown_ports = host_wwpn_set - set(
            hp['wwpn'] for hp in host_ports)
        unconfigured_ports = set(
            hp['wwpn'] for hp in host_ports if not hp['host_id'])
        LOG.debug("initialize_connection: defined_hosts: %(defined)s, "
                  "unknown_ports: %(unknown)s, unconfigured_ports: "
                  "%(unconfigured)s.", {"defined": defined_hosts,
                                        "unknown": unknown_ports,
                                        "unconfigured": unconfigured_ports})
        # Create host if it is not defined
        if not defined_hosts:
            host_id = self._create_host(host)['id']
        elif len(defined_hosts) == 1:
            host_id = defined_hosts.pop()
        else:
            raise restclient.APIException(
                message='More than one host defined for requested ports.')
        LOG.info('Volume will be attached to host %s.', host_id)
        # Create missing host ports
        if unknown_ports or unconfigured_ports:
            self._assign_host_port(host_id,
                                   list(unknown_ports | unconfigured_ports))
        # Map the volume to host
        lun_id = self._map_volume_to_host(host_id, vol_id)
        target_ports = [p['wwpn'] for p in self._get_ioports()]
        return {
            'driver_volume_type': 'fibre_channel',
            'data': {
                'target_discovered': False,
                'target_lun': int(lun_id, 16),
                'target_wwn': target_ports,
                'initiator_target_map': {initiator: target_ports for
                                         initiator in host_wwpn_set}
            }
        }
    @coordination.synchronized('ibm-ds8k-{connector[host]}')
    def terminate_connection(self, vol_id, connector, force, **kwargs):
        """Unmap *vol_id* from the connector's host.

        When the last mapping on the host is removed, the host object and
        its unassigned ports are deleted and an initiator-target map is
        returned so the caller can tear down FC zoning.
        """
        host = self._get_host(connector)
        host_wwpn_set = set(wwpn.upper() for wwpn in connector['wwpns'])
        host_ports = self._get_host_ports(host_wwpn_set)
        defined_hosts = set(
            hp['host_id'] for hp in host_ports if hp['host_id'])
        delete_ports = set(
            hp['wwpn'] for hp in host_ports if not hp['host_id'])
        LOG.debug("terminate_connection: host_ports: %(host)s, "
                  "defined_hosts: %(defined)s, delete_ports: %(delete)s.",
                  {"host": host_ports,
                   "defined": defined_hosts,
                   "delete": delete_ports})
        if not defined_hosts:
            LOG.info('Could not find host.')
            return None
        elif len(defined_hosts) > 1:
            raise restclient.APIException(_('More than one host found.'))
        else:
            host_id = defined_hosts.pop()
            mappings = self._get_mappings(host_id)
            lun_ids = [
                m['lunid'] for m in mappings if m['volume']['id'] == vol_id]
            LOG.info('Volumes attached to host %(host)s are %(vols)s.',
                     {'host': host_id, 'vols': ','.join(lun_ids)})
            for lun_id in lun_ids:
                self._delete_mappings(host_id, lun_id)
            if not lun_ids:
                LOG.warning("Volume %(vol)s is already not mapped to "
                            "host %(host)s.",
                            {'vol': vol_id, 'host': host.name})
            # if this host only has volumes that have been detached,
            # remove the host and its ports
            ret_info = {
                'driver_volume_type': 'fibre_channel',
                'data': {}
            }
            if len(mappings) == len(lun_ids):
                for port in delete_ports:
                    self._delete_host_ports(port)
                self._delete_host(host_id)
                target_ports = [p['wwpn'] for p in self._get_ioports()]
                target_map = {initiator.upper(): target_ports
                              for initiator in connector['wwpns']}
                ret_info['data']['initiator_target_map'] = target_map
            return ret_info
    def create_group(self, group):
        """Nothing to create on the array; a group is a Cinder construct."""
        return {'status': fields.GroupStatus.AVAILABLE}
    def delete_group(self, group, src_luns):
        """Delete all volumes of a group and report per-volume status."""
        volumes_model_update = []
        model_update = {'status': fields.GroupStatus.DELETED}
        if src_luns:
            try:
                self.delete_lun(src_luns)
            except restclient.APIException as e:
                model_update['status'] = fields.GroupStatus.ERROR_DELETING
                LOG.exception(
                    "Failed to delete the volumes in group %(group)s, "
                    "Exception = %(ex)s",
                    {'group': group.id, 'ex': e})
            # Every member inherits the group-level outcome.
            for src_lun in src_luns:
                volumes_model_update.append({
                    'id': src_lun.os_id,
                    'status': model_update['status']
                })
        return model_update, volumes_model_update
    def delete_group_snapshot(self, group_snapshot, tgt_luns):
        """Delete the snapshot volumes backing a group snapshot."""
        snapshots_model_update = []
        model_update = {'status': fields.GroupSnapshotStatus.DELETED}
        if tgt_luns:
            try:
                self.delete_lun(tgt_luns)
            except restclient.APIException as e:
                model_update['status'] = (
                    fields.GroupSnapshotStatus.ERROR_DELETING)
                LOG.error("Failed to delete snapshots in group snapshot "
                          "%(gsnapshot)s, Exception = %(ex)s",
                          {'gsnapshot': group_snapshot.id, 'ex': e})
        for tgt_lun in tgt_luns:
            snapshots_model_update.append({
                'id': tgt_lun.os_id,
                'status': model_update['status']
            })
        return model_update, snapshots_model_update
    def _delete_lun(self, lun_ids_str):
        """Bulk-delete volumes; *lun_ids_str* is a comma-separated id list."""
        self._client.send('DELETE', '/volumes',
                          params={'ids': lun_ids_str})
    def delete_lun_by_id(self, lun_id):
        self._client.send('DELETE', '/volumes/%s' % lun_id)
    def _get_version(self):
        # The API root document carries the REST bundle version.
        return self._client.fetchone('GET', '')
    @proxy.logger
    def _create_lun(self, volData):
        """Create a volume and return its new id."""
        return self._client.fetchid('POST', '/volumes', volData)
    def _get_pools(self, pools_str):
        """Fetch the raw attribute record of each pool id in *pools_str*."""
        return [self._client.fetchone('GET', '/pools/%s' % pid,
                fields=['id', 'name', 'node', 'stgtype', 'cap', 'capavail'])
                for pid in pools_str]
    def start_flashcopy(self, vol_pairs, freeze=False):
        """Start FlashCopy for the source/target pairs; *freeze* requests
        a consistency freeze of the source LSSes (see unfreeze_lss)."""
        options = [
            "permit_space_efficient_target",
            "fail_space_efficient_target_out_of_space"
        ]
        if freeze:
            options.append("freeze_consistency")
        self._client.send('POST', '/cs/flashcopies', {
            "volume_pairs": vol_pairs,
            "options": options
        })
    def get_pprc_paths(self, specific_lss=None):
        """List PPRC paths, optionally restricted to one source LSS."""
        if specific_lss:
            lss_range = {
                'source_lss_id_from': specific_lss,
                'source_lss_id_to': specific_lss
            }
        else:
            # get all of PPRC paths between source DS8K and target DS8K.
            lss_range = {
                'source_lss_id_from': '00',
                'source_lss_id_to': 'FF'
            }
        return self._client.fetchall('GET', '/cs/pprcs/paths',
                                     params=lss_range)
    def get_flashcopy(self, vol_id):
        """Return the FlashCopy relationships involving volume *vol_id*."""
        return self._client.fetchall('GET', '/volumes/%s/flashcopy' % vol_id)
    def delete_flashcopy(self, src_lun_id, tgt_lun_id):
        # no exception if failed
        self._client.statusok(
            'DELETE', '/cs/flashcopies/%s:%s' % (src_lun_id, tgt_lun_id))
    def _get_host_ports(self, host_wwpn_set):
        """Look up host-port records for the given WWPNs, any login state."""
        return self._client.fetchall(
            'GET', '/host_ports',
            params={
                'wwpns': ",".join(host_wwpn_set),
                'state': 'logged in,logged out'
            },
            fields=['host_id', 'wwpn'])
    def _create_host(self, host):
        """Create a DS8K host object from a (name, type) tuple."""
        return self._client.fetchone(
            'POST', '/hosts', {'name': host.name, 'hosttype': host.type})
    def _assign_host_port(self, host_id, ports):
        self._client.send('POST', '/host_ports/assign', {
            'host_id': host_id, 'host_port_wwpns': ports})
    def _map_volume_to_host(self, host_id, vol_id):
        # %5B / %5D are the URL-encoded '[' and ']' of the hosts filter.
        return self._client.fetchid(
            'POST', '/hosts%5Bid=' + host_id + '%5D/mappings',
            {'volumes': [vol_id]})
    def _get_mappings(self, host_id):
        return self._client.fetchall(
            'GET', '/hosts%5Bid=' + host_id + '%5D/mappings')
    def _delete_mappings(self, host_id, lun_id):
        self._client.send(
            'DELETE', '/hosts%5Bid=' + host_id + '%5D/mappings/' + lun_id)
    def _delete_host_ports(self, port):
        self._client.send('DELETE', '/host_ports/%s' % port)
    def _delete_host(self, host_id):
        # delete the host will delete all of the ports belong to it
        self._client.send('DELETE', '/hosts%5Bid=' + host_id + '%5D')
    def _get_ioports(self):
        return self._client.fetchall('GET', '/ioports', fields=['wwpn'])
    def unfreeze_lss(self, lss_ids):
        """Thaw LSSes previously frozen by a consistency FlashCopy."""
        self._client.send(
            'POST', '/cs/flashcopies/unfreeze', {"lss_ids": lss_ids})
    def get_all_lss(self, fields=None):
        """Return all LSSes; defaults to id/type/group/configvols fields."""
        fields = (fields if fields else
                  ['id', 'type', 'group', 'configvols'])
        return self._client.fetchall('GET', '/lss', fields=fields)
    def lun_exists(self, lun_id):
        """Return True when the volume id resolves on the array."""
        return self._client.statusok('GET', '/volumes/%s' % lun_id)
    def get_lun_pool(self, lun_id):
        """Return the pool record of the given volume."""
        return self._client.fetchone(
            'GET', '/volumes/%s' % lun_id, fields=['pool'])['pool']
    def change_lun(self, lun_id, param):
        self._client.send('PUT', '/volumes/%s' % lun_id, param)
    def get_physical_links(self, target_id):
        # NOTE(review): these lss ids are the bare integer literal 00
        # (== 0), unlike the '00' strings used elsewhere — presumably the
        # REST layer stringifies them; verify against the API.
        return self._client.fetchall(
            'GET', '/cs/pprcs/physical_links',
            params={
                'target_system_wwnn': target_id,
                'source_lss_id': 00,
                'target_lss_id': 00
            })
    def get_systems(self):
        """Return id/wwnn/release of the storage system."""
        return self._client.fetchone(
            'GET', '/systems', fields=['id', 'wwnn', 'release'])
    def get_lun_number_in_lss(self, lss_id):
        """Return how many volumes are configured in the given LSS."""
        return int(self._client.fetchone(
            'GET', '/lss/%s' % lss_id,
            fields=['configvols'])['configvols'])
    def create_pprc_path(self, pathData):
        self._client.send('POST', '/cs/pprcs/paths', pathData)
    def get_pprc_path(self, path_id):
        return self._client.fetchone(
            'GET', '/cs/pprcs/paths/%s' % path_id,
            fields=['port_pairs'])
    def delete_pprc_path(self, path_id):
        self._client.send('DELETE', '/cs/pprcs/paths/%s' % path_id)
    def create_pprc_pair(self, pair_data):
        self._client.send('POST', '/cs/pprcs', pair_data)
    def delete_pprc_pair_by_pair_id(self, pids):
        # statusok: best-effort delete, no exception on failure.
        self._client.statusok('DELETE', '/cs/pprcs', params=pids)
    def do_failback(self, pair_data):
        self._client.send('POST', '/cs/pprcs/resume', pair_data)
    def get_pprc_pairs(self, min_vol_id, max_vol_id):
        """List PPRC pairs whose source volume id is within the range."""
        return self._client.fetchall(
            'GET', '/cs/pprcs',
            params={
                'volume_id_from': min_vol_id,
                'volume_id_to': max_vol_id
            })
    def delete_pprc_pair(self, vol_id):
        """Remove the PPRC pair of *vol_id*, issued from the source side."""
        # check pprc pairs exist or not.
        if not self.get_pprc_pairs(vol_id, vol_id):
            return None
        # don't use pprc pair ID to delete it, because it may have
        # communication issues.
        pair_data = {
            'volume_full_ids': [{
                'volume_id': vol_id,
                'system_id': self.backend['storage_unit']
            }],
            'options': ['unconditional', 'issue_source']
        }
        self._client.send('POST', '/cs/pprcs/delete', pair_data)
    def pause_pprc_pairs(self, pprc_pair_ids):
        pair_data = {'pprc_ids': pprc_pair_ids}
        self._client.send('POST', '/cs/pprcs/pause', pair_data)
    def resume_pprc_pairs(self, pprc_pair_ids):
        """Resume paused pairs as metro-mirror, tolerating out-of-sync."""
        pair_data = {
            'pprc_ids': pprc_pair_ids,
            'type': 'metro_mirror',
            'options': ['permit_space_efficient_target',
                        'initial_copy_out_of_sync']
        }
        self._client.send('POST', '/cs/pprcs/resume', pair_data)
class DS8KReplicationSourceHelper(DS8KCommonHelper):
    """Manage source storage for replication."""
    @proxy.logger
    def find_pool_and_lss(self, excluded_lss=None):
        """Return the first (pool, lss) usable for a replicated volume."""
        for pool_id, pool in self._storage_pools.items():
            lss = self._find_lss_for_type_replication(pool['node'],
                                                      excluded_lss)
            if lss:
                return pool_id, lss
        raise restclient.LssIDExhaustError(
            message=_("All LSS/LCU IDs for configured pools are exhausted."))
    @proxy.logger
    def _find_lss_for_type_replication(self, node, excluded_lss):
        """Pick an LSS for replication, preferring a not-yet-created one."""
        # prefer to choose non-existing one first.
        existing_lss = self.get_all_lss()
        LOG.info("existing LSS IDs are %s",
                 ','.join([lss['id'] for lss in existing_lss]))
        existing_lss_cg, nonexistent_lss_cg = (
            self._classify_lss_for_cg(existing_lss))
        lss_id = self._find_from_nonexistent_lss(node, existing_lss,
                                                 nonexistent_lss_cg)
        if not lss_id:
            if excluded_lss:
                existing_lss = [lss for lss in existing_lss
                                if lss['id'] not in excluded_lss]
            # CG-reserved LSSes are never used for plain replication.
            candidates = [lss for lss in existing_lss
                          if lss['id'] not in existing_lss_cg]
            lss_id = self._find_from_existing_lss(node, candidates)
        return lss_id
class DS8KReplicationTargetHelper(DS8KReplicationSourceHelper):
    """Manage target storage for replication."""
    OPTIONAL_PARAMS = ['ds8k_host_type', 'port_pairs', 'lss_range_for_cg']
    def setup(self):
        """Like the base setup, but also collects replication settings.

        Note the target pool option uses '_' as separator and is
        normalized here to the comma-separated form get_pools() expects.
        """
        self._create_client()
        self._get_storage_information()
        self._get_replication_information()
        self._check_host_type()
        self.backend['lss_ids_for_cg'] = self._get_lss_ids_for_cg()
        self.backend['pools_str'] = self._get_value(
            'san_clustername').replace('_', ',')
        self._storage_pools = self.get_pools()
        self.verify_pools(self._storage_pools)
        self._verify_rest_version()
    def _get_replication_information(self):
        """Parse 'src-tgt[;src-tgt...]' port pairs and the backend id."""
        port_pairs = []
        pairs = self._get_value('port_pairs')
        if pairs:
            for pair in pairs.replace(' ', '').upper().split(';'):
                pair = pair.split('-')
                port_pair = {
                    'source_port_id': pair[0],
                    'target_port_id': pair[1]
                }
                port_pairs.append(port_pair)
        self.backend['port_pairs'] = port_pairs
        self.backend['id'] = self._get_value('backend_id')
    def create_lun(self, lun):
        """Create the replica volume and record its id per backend id."""
        volData = {
            'cap': self._gb2b(lun.size),
            'captype': 'bytes',
            'stgtype': 'fb',
            'tp': 'ese' if lun.type_thin else 'none'
        }
        lun.data_type = lun.data_type if lun.data_type else 'FB 512'
        if lun.type_os400:
            volData['os400'] = lun.type_os400
        # Replica volumes use the replica name and the target pool/lss.
        volData['name'] = lun.replica_ds_name
        volData['pool'], volData['lss'] = lun.pool_lss_pair['target']
        volID = self._create_lun(volData)
        lun.replication_driver_data.update(
            {self.backend['id']: {'vol_hex_id': volID}})
        return lun
    def delete_pprc_pair(self, vol_id):
        """Remove the PPRC pair, issued from the target side."""
        if not self.get_pprc_pairs(vol_id, vol_id):
            return None
        pair_data = {
            'volume_full_ids': [{
                'volume_id': vol_id,
                'system_id': self.backend['storage_unit']
            }],
            # 'issue_target' is the only difference from the source-side
            # implementation in DS8KCommonHelper.
            'options': ['unconditional', 'issue_target']
        }
        self._client.send('POST', '/cs/pprcs/delete', pair_data)
class DS8KECKDHelper(DS8KCommonHelper):
    """Manage ECKD volume."""
    OPTIONAL_PARAMS = ['ds8k_host_type', 'port_pairs', 'ds8k_ssid_prefix',
                       'lss_range_for_cg']
    # if use new REST API, please update the version below
    VALID_REST_VERSION_5_7_MIN = '5.7.51.1068'
    VALID_REST_VERSION_5_8_MIN = '5.8.20.1059'
    MIN_VALID_STORAGE_VERSION = '8.1'
    INVALID_STORAGE_VERSION = '8.0.1'
    @staticmethod
    def _gb2cyl(gb):
        """Convert GiB to 3390 cylinders; raise beyond the 65520 limit."""
        # now only support 3390, no 3380 or 3390-A
        cyl = int(math.ceil(gb * 1263.28))
        if cyl > 65520:
            raise exception.VolumeDriverException(
                message=(_("For 3390 volume, capacity can be in the range "
                           "1-65520(849KiB to 55.68GiB) cylinders, now it "
                           "is %(gb)d GiB, equals to %(cyl)d cylinders.")
                         % {'gb': gb, 'cyl': cyl}))
        return cyl
    @staticmethod
    def _cyl2b(cyl):
        """Convert 3390 cylinders to bytes (849960 bytes per cylinder)."""
        return cyl * 849960
    def _get_cula(self, lcu):
        """Return the devadd (CU address) mapped to the given LCU id."""
        return self.backend['device_mapping'][lcu]
    def disable_thin_provision(self):
        """Force thick provisioning for subsequently created volumes."""
        self._disable_thin_provision = True
    def setup(self):
        """Connect to the backend and cache pools, SSID prefix and mapping."""
        self._create_client()
        self._get_storage_information()
        self._check_host_type()
        self.backend['lss_ids_for_cg'] = self._get_lss_ids_for_cg()
        self.backend['pools_str'] = self._get_value('san_clustername')
        self._storage_pools = self.get_pools()
        self.verify_pools(self._storage_pools)
        ssid_prefix = self._get_value('ds8k_ssid_prefix')
        # 'FF' is the default subsystem-id prefix when none is configured.
        self.backend['ssid_prefix'] = ssid_prefix if ssid_prefix else 'FF'
        self.backend['device_mapping'] = self._get_device_mapping()
        self._verify_rest_version()
    def _verify_rest_version(self):
        """Reject storage/REST versions too old for ECKD operation."""
        if self.backend['storage_version'] == self.INVALID_STORAGE_VERSION:
            raise exception.VolumeDriverException(
                message=(_("%s does not support bulk deletion of volumes, "
                           "if you want to use this version of driver, "
                           "please upgrade the CCL.")
                         % self.INVALID_STORAGE_VERSION))
        # DS8K supports ECKD ESE volume from 8.1
        if (dist_version.LooseVersion(self.backend['storage_version']) <
           dist_version.LooseVersion(self.MIN_VALID_STORAGE_VERSION)):
            self._disable_thin_provision = True
        # Each REST stream (5.7.x / 5.8.x) has its own minimum version.
        if (('5.7' in self.backend['rest_version'] and
           dist_version.LooseVersion(self.backend['rest_version']) <
           dist_version.LooseVersion(self.VALID_REST_VERSION_5_7_MIN)) or
           ('5.8' in self.backend['rest_version'] and
           dist_version.LooseVersion(self.backend['rest_version']) <
           dist_version.LooseVersion(self.VALID_REST_VERSION_5_8_MIN))):
            raise exception.VolumeDriverException(
                message=(_("REST version %(invalid)s is lower than "
                           "%(valid)s, please upgrade it in DS8K.")
                         % {'invalid': self.backend['rest_version'],
                            'valid': (self.VALID_REST_VERSION_5_7_MIN if '5.7'
                                      in self.backend['rest_version'] else
                                      self.VALID_REST_VERSION_5_8_MIN)}))
    @proxy.logger
    def _get_device_mapping(self):
        """Parse ds8k_devadd_unitadd_mapping ("devadd-lcu;...") to a dict."""
        map_str = self._get_value('ds8k_devadd_unitadd_mapping')
        mappings = map_str.replace(' ', '').upper().split(';')
        pairs = [m.split('-') for m in mappings]
        self.verify_lss_ids(','.join([p[1] for p in pairs]))
        # Map LCU id -> devadd parsed as hexadecimal.
        return {p[1]: int(p[0], 16) for p in pairs}
    @proxy.logger
    def verify_lss_ids(self, specified_lcu_ids):
        """Validate configured LCU ids and create any that do not exist."""
        if not specified_lcu_ids:
            return None
        lcu_ids = specified_lcu_ids.upper().replace(' ', '').split(',')
        # verify the LCU ID.
        for lcu in lcu_ids:
            if int(lcu, 16) > 255:
                raise exception.InvalidParameterValue(
                    err=_('LCU %s should be within 00-FF.') % lcu)
        # verify address group
        self._existing_lss = self.get_all_lss()
        fb_addrgrps = set(int(lss['id'], 16) // 16 for lss in
                          self._existing_lss if lss['type'] == 'fb')
        ckd_addrgrps = set((int(lcu, 16) // 16) for lcu in lcu_ids)
        intersection = ckd_addrgrps & fb_addrgrps
        if intersection:
            raise exception.VolumeDriverException(
                message=_('LCUs in the address group %s are reserved '
                          'for FB volumes') % list(intersection))
        # create LCU that doesn't exist
        nonexistent_lcu = set(lcu_ids) - set(
            lss['id'] for lss in self._existing_lss if lss['type'] == 'ckd')
        if nonexistent_lcu:
            LOG.info('LCUs %s do not exist in DS8K, they will be '
                     'created.', ','.join(nonexistent_lcu))
            for lcu in nonexistent_lcu:
                try:
                    self._create_lcu(self.backend['ssid_prefix'], lcu)
                except restclient.APIException as e:
                    raise exception.VolumeDriverException(
                        message=(_('Can not create lcu %(lcu)s, '
                                   'Exception = %(e)s.')
                                 % {'lcu': lcu, 'e': six.text_type(e)}))
        return lcu_ids
    def _format_pools(self, pools):
        """Normalize pool records, converting cylinder capacities to bytes."""
        return ((p['id'], {
            'name': p['name'],
            'node': int(p['node']),
            'stgtype': p['stgtype'],
            'cap': self._cyl2b(int(p['cap'])),
            'capavail': self._cyl2b(int(p['capavail']))
        }) for p in pools)
    @proxy.logger
    def find_pool_and_lss(self, excluded_lss=None):
        """ECKD always uses the biggest-pool selection strategy."""
        return self.find_biggest_pool_and_lss(excluded_lss)
    @proxy.logger
    def _find_lss(self, node, excluded_lcu, specified_lcu_ids=None):
        """Return the id of the emptiest usable LCU, or None when all full."""
        # all LCUs have existed, unlike LSS.
        if specified_lcu_ids:
            for lcu_id in specified_lcu_ids:
                if lcu_id not in self.backend['device_mapping'].keys():
                    raise exception.InvalidParameterValue(
                        err=_("LCU %s is not in parameter "
                              "ds8k_devadd_unitadd_mapping, "
                              "Please specify LCU in it, otherwise "
                              "driver can not attach volume.") % lcu_id)
            all_lss = self._existing_lss
        else:
            all_lss = self.get_all_lss()
        # Keep only CKD LCUs that are mapped and live on the requested node.
        existing_lcu = [lcu for lcu in all_lss if
                        lcu['type'] == 'ckd' and
                        lcu['id'] in self.backend['device_mapping'].keys() and
                        lcu['group'] == six.text_type(node)]
        LOG.info("All appropriate LCUs are %s.",
                 ','.join([lcu['id'] for lcu in existing_lcu]))
        # exclude full LCUs.
        if excluded_lcu:
            existing_lcu = [lcu for lcu in existing_lcu if
                            lcu['id'] not in excluded_lcu]
            if not existing_lcu:
                LOG.info("All appropriate LCUs are full.")
                return None
        ignore_pprc = False
        if specified_lcu_ids:
            # user specify LCUs in extra-specs.
            existing_lcu = [lcu for lcu in existing_lcu
                            if lcu['id'] in specified_lcu_ids]
            ignore_pprc = True
        # exclude LCUs reserved for CG.
        existing_lcu = [lcu for lcu in existing_lcu if lcu['id']
                        not in self.backend['lss_ids_for_cg']]
        if not existing_lcu:
            LOG.info("All appropriate LCUs have been reserved for "
                     "for consistency group.")
            return None
        if not ignore_pprc:
            # prefer to use LCU that is not in PPRC path first.
            lcu_pprc = self.get_lss_in_pprc_paths() & set(
                self.backend['device_mapping'].keys())
            if lcu_pprc:
                lcu_non_pprc = [
                    lcu for lcu in existing_lcu if lcu['id'] not in lcu_pprc]
                if lcu_non_pprc:
                    existing_lcu = lcu_non_pprc
        # return LCU which has max number of empty slots.
        emptiest_lcu = sorted(
            existing_lcu, key=lambda i: int(i['configvols']))[0]
        if int(emptiest_lcu['configvols']) == LSS_VOL_SLOTS:
            return None
        else:
            return emptiest_lcu['id']
    def _create_lcu(self, ssid_prefix, lcu):
        """Create an LCU (CKD LSS) with subsystem id = prefix + lcu id."""
        self._client.send('POST', '/lss', {
            'id': lcu,
            'type': 'ckd',
            'sub_system_identifier': ssid_prefix + lcu
        })
    def create_lun(self, lun):
        """Create a 3390 CKD volume for *lun* on the source backend."""
        volData = {
            'cap': self._gb2cyl(lun.size),
            'captype': 'cyl',
            'stgtype': 'ckd',
            'tp': 'ese' if lun.type_thin else 'none'
        }
        lun.data_type = '3390'
        volData['name'] = lun.ds_name
        volData['pool'], volData['lss'] = lun.pool_lss_pair['source']
        lun.ds_id = self._create_lun(volData)
        return lun
    def initialize_connection(self, vol_id, connector, **kwargs):
        """Return ECKD connection info; cula/unit address come from vol_id."""
        return {
            'driver_volume_type': 'fibre_channel_eckd',
            'data': {
                'target_discovered': True,
                'cula': self._get_cula(vol_id[0:2]),
                'unit_address': int(vol_id[2:4], 16),
                'discard': False
            }
        }
    def terminate_connection(self, vol_id, connector, force, **kwargs):
        """Nothing to clean up for ECKD attachments."""
        return None
class DS8KReplicationTargetECKDHelper(DS8KECKDHelper,
                                      DS8KReplicationTargetHelper):
    """Manage ECKD volume in replication target."""
    def setup(self):
        """Connect to the target backend and cache ECKD configuration."""
        self._create_client()
        self._get_storage_information()
        self._get_replication_information()
        self._check_host_type()
        self.backend['lss_ids_for_cg'] = self._get_lss_ids_for_cg()
        # Target pool names are '_'-separated; REST expects commas.
        self.backend['pools_str'] = self._get_value(
            'san_clustername').replace('_', ',')
        self._storage_pools = self.get_pools()
        self.verify_pools(self._storage_pools)
        ssid_prefix = self._get_value('ds8k_ssid_prefix')
        self.backend['ssid_prefix'] = ssid_prefix if ssid_prefix else 'FF'
        self.backend['device_mapping'] = self._get_device_mapping()
        self._verify_rest_version()
    def create_lun(self, lun):
        """Create the 3390 CKD replica volume for *lun* on the target."""
        volData = {
            'cap': self._gb2cyl(lun.size),
            'captype': 'cyl',
            'stgtype': 'ckd',
            'tp': 'ese' if lun.type_thin else 'none'
        }
        lun.data_type = '3390'
        volData['name'] = lun.replica_ds_name
        volData['pool'], volData['lss'] = lun.pool_lss_pair['target']
        volID = self._create_lun(volData)
        # Record the replica's id under this backend's id so the source
        # side can locate it.
        lun.replication_driver_data.update(
            {self.backend['id']: {'vol_hex_id': volID}})
        return lun
|
askhl/ase
|
refs/heads/master
|
ase/gui/graphs.py
|
6
|
from math import sqrt
import gtk
from gettext import gettext as _
from ase.gui.widgets import pack, help
# Pango-markup help text shown by the help button next to the expression
# entry in the Graphs window (see Graphs.__init__ below).
graph_help_text = _("""\
Help for plot ...
Symbols:
<c>e</c>:\t\t\t\ttotal energy
<c>epot</c>:\t\t\tpotential energy
<c>ekin</c>:\t\t\tkinetic energy
<c>fmax</c>:\t\t\tmaximum force
<c>fave</c>:\t\t\taverage force
<c>R[n,0-2]</c>:\t\t\tposition of atom number <c>n</c>
<c>d(n<sub>1</sub>,n<sub>2</sub>)</c>:\t\t\tdistance between two atoms <c>n<sub>1</sub></c> and <c>n<sub>2</sub></c>
<c>i</c>:\t\t\t\tcurrent image number
<c>E[i]</c>:\t\t\t\tenergy of image number <c>i</c>
<c>F[n,0-2]</c>:\t\t\tforce on atom number <c>n</c>
<c>V[n,0-2]</c>:\t\t\tvelocity of atom number <c>n</c>
<c>M[n]</c>:\t\t\tmagnetic moment of atom number <c>n</c>
<c>A[0-2,0-2]</c>:\t\tunit-cell basis vectors
<c>s</c>:\t\t\t\tpath length
<c>a(n1,n2,n3)</c>:\t\tangle between atoms <c>n<sub>1</sub></c>, <c>n<sub>2</sub></c> and <c>n<sub>3</sub></c>, centered on <c>n<sub>2</sub></c>
<c>dih(n1,n2,n3,n4)</c>:\tdihedral angle between <c>n<sub>1</sub></c>, <c>n<sub>2</sub></c>, <c>n<sub>3</sub></c> and <c>n<sub>4</sub></c>
<c>T</c>:\t\t\t\ttemperature (K)\
""")
class Graphs(gtk.Window):
    """Window for plotting per-image quantities (energy, forces, ...)
    evaluated from a symbol expression (see graph_help_text)."""
    def __init__(self, gui):
        gtk.Window.__init__(self)
        #self.window.set_position(gtk.WIN_POS_CENTER)
        #self.window.connect("destroy", lambda w: gtk.main_quit())
        #self.window.connect('delete_event', self.exit)
        self.set_title('Graphs')
        vbox = gtk.VBox()
        # Expression entry plus a help button showing graph_help_text.
        self.expr = pack(vbox, [gtk.Entry(64),
                                help(graph_help_text)])[0]
        self.expr.connect('activate', self.plot)
        # Autocompletion seeded with a few example expressions.
        completion = gtk.EntryCompletion()
        self.liststore = gtk.ListStore(str)
        for s in ['fmax', 's, e-E[0]', 'i, d(0,1)']:
            self.liststore.append([s])
        completion.set_model(self.liststore)
        self.expr.set_completion(completion)
        completion.set_text_column(0)
        button = pack(vbox, [gtk.Button(_('Plot')),
                             gtk.Label(' x, y1, y2, ...')])[0]
        button.connect('clicked', self.plot, 'xy')
        button = pack(vbox, [gtk.Button(_('Plot')),
                             gtk.Label(' y1, y2, ...')])[0]
        button.connect('clicked', self.plot, 'y')
        save_button = gtk.Button(stock=gtk.STOCK_SAVE)
        save_button.connect('clicked',self.save)
        clear_button = gtk.Button(_('clear'))
        clear_button.connect('clicked', self.clear)
        pack(vbox, [save_button,clear_button])
        self.add(vbox)
        vbox.show()
        self.show()
        self.gui = gui
    def plot(self, button=None, type=None, expr=None):
        """Evaluate *expr* over the images and plot it with pylab.

        type: 'y' plots each data row against the image index; 'xy' uses
        the first row as abscissa.  When None it is guessed from the
        number of rows.
        """
        if expr is None:
            expr = self.expr.get_text()
        else:
            self.expr.set_text(expr)
        # Remember new expressions in the autocompletion list.
        if expr not in [row[0] for row in self.liststore]:
            self.liststore.append([expr])
        data = self.gui.images.graph(expr)
        import matplotlib
        matplotlib.interactive(True)
        matplotlib.use('GTKAgg')
        #matplotlib.use('GTK', warn=False)# Not avail. in 0.91 (it is in 0.98)
        import pylab
        pylab.ion()
        x = 2.5
        self.gui.graphs.append(pylab.figure(figsize=(x * 2.5**0.5, x)))
        i = self.gui.frame
        m = len(data)
        if type is None:
            if m == 1:
                type = 'y'
            else:
                type = 'xy'
        if type == 'y':
            for j in range(m):
                pylab.plot(data[j])
                # Mark the current frame with a dot.
                pylab.plot([i], [data[j, i]], 'o')
        else:
            for j in range(1, m):
                pylab.plot(data[0], data[j])
                pylab.plot([data[0, i]], [data[j, i]], 'o')
        pylab.title(expr)
        #pylab.show()
    # Alias -- presumably for scripted access; TODO(review): confirm
    # callers before renaming.
    python = plot
    def save(self, filename):
        """Ask for a file name and dump the current expression's data.

        NOTE(review): despite its name, *filename* receives the clicked
        button widget ('clicked' handler); the real file name is taken
        from the chooser dialog below.
        """
        chooser = gtk.FileChooserDialog(
            _('Save data to file ... '), None, gtk.FILE_CHOOSER_ACTION_SAVE,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
             gtk.STOCK_SAVE, gtk.RESPONSE_OK))
        save = chooser.run()
        if save == gtk.RESPONSE_OK:
            filename = chooser.get_filename()
            expr = self.expr.get_text()
            data = self.gui.images.graph(expr)
            expr = '# '+expr
            fd = open(filename,'w')
            fd.write("%s \n" % (expr))
            # One row per sample, one tab-separated column per data series.
            for s in range(len(data[0])):
                for i in range(len(data)):
                    val = data[i,s]
                    fd.write("%12.8e\t" % (val))
                fd.write("\n")
            fd.close()
        chooser.destroy()
    def clear(self, button):
        """Close all plot windows opened from this GUI."""
        import pylab
        for graph in self.gui.graphs:
            pylab.close(graph)
        self.gui.graphs = []
|
javachengwc/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/django/contrib/admindocs/tests/test_fields.py
|
111
|
from __future__ import absolute_import, unicode_literals
from django.contrib.admindocs import views
from django.db import models
from django.db.models import fields
from django.utils import unittest
from django.utils.translation import ugettext as _
class CustomField(models.Field):
    """Field subclass with an explicit description (shown by admindocs)."""
    description = "A custom field type"
class DescriptionLackingField(models.Field):
    """Field subclass without a description; admindocs must fall back."""
    pass
class TestFieldType(unittest.TestCase):
    """Tests for admindocs' readable descriptions of model field types."""
    # NOTE: the previous no-op setUp (containing only ``pass``) was removed;
    # unittest inherits a do-nothing setUp from TestCase already.
    def test_field_name(self):
        """A non-field object has no introspectable name attribute."""
        self.assertRaises(AttributeError,
            views.get_readable_field_data_type, "NotAField"
        )
    def test_builtin_fields(self):
        """Built-in fields resolve to their translated description."""
        self.assertEqual(
            views.get_readable_field_data_type(fields.BooleanField()),
            _('Boolean (Either True or False)')
        )
    def test_custom_fields(self):
        """Custom fields use their own description; fields without one
        fall back to the generic 'Field of type: ...' message."""
        self.assertEqual(
            views.get_readable_field_data_type(CustomField()),
            'A custom field type'
        )
        self.assertEqual(
            views.get_readable_field_data_type(DescriptionLackingField()),
            _('Field of type: %(field_type)s') % {
                'field_type': 'DescriptionLackingField'
            }
        )
|
esacosta/u-mooc
|
refs/heads/master
|
tests/functional/model_student_work.py
|
7
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functional tests for models/review.py."""
__author__ = [
'johncox@google.com (John Cox)',
]
from models import entities
from models import models
from models import student_work
from tests.functional import actions
from google.appengine.ext import db
class ReferencedModel(entities.BaseEntity):
    """Minimal entity used as the target of key references in these tests."""
    pass
class UnvalidatedReference(entities.BaseEntity):
    """Entity whose key property does not validate the referenced kind."""
    referenced_model_key = student_work.KeyProperty()
class ValidatedReference(entities.BaseEntity):
    """Entity whose key property only accepts ReferencedModel keys."""
    referenced_model_key = student_work.KeyProperty(kind=ReferencedModel.kind())
class KeyPropertyTest(actions.TestBase):
    """Tests KeyProperty."""
    def setUp(self): # From superclass. pylint: disable-msg=g-bad-name
        super(KeyPropertyTest, self).setUp()
        # Key of an entity the test entities will reference.
        self.referenced_model_key = ReferencedModel().put()
    def test_validation_and_datastore_round_trip_of_keys_succeeds(self):
        """Tests happy path for both validation and (de)serialization."""
        model_with_reference = ValidatedReference(
            referenced_model_key=self.referenced_model_key)
        model_with_reference_key = model_with_reference.put()
        model_with_reference_from_datastore = db.get(model_with_reference_key)
        self.assertEqual(
            self.referenced_model_key,
            model_with_reference_from_datastore.referenced_model_key)
        custom_model_from_datastore = db.get(
            model_with_reference_from_datastore.referenced_model_key)
        self.assertEqual(
            self.referenced_model_key, custom_model_from_datastore.key())
        # Round-tripping must yield a real db.Key, not a string.
        self.assertTrue(isinstance(
            model_with_reference_from_datastore.referenced_model_key,
            db.Key))
    def test_type_not_validated_if_kind_not_passed(self):
        """Without kind=..., any db.Key is accepted."""
        model_key = db.Model().put()
        unvalidated = UnvalidatedReference(referenced_model_key=model_key)
        self.assertEqual(model_key, unvalidated.referenced_model_key)
    def test_validation_fails(self):
        """Non-keys and keys of the wrong kind are both rejected."""
        model_key = db.Model().put()
        self.assertRaises(
            db.BadValueError, ValidatedReference,
            referenced_model_key='not_a_key')
        self.assertRaises(
            db.BadValueError, ValidatedReference,
            referenced_model_key=model_key)
class ReviewTest(actions.TestBase):
    """Tests for the Review entity's key naming."""
    def test_constructor_sets_key_name(self):
        """Tests construction of key_name, put of entity with key_name set."""
        unit_id = 'unit_id'
        reviewer_key = models.Student(key_name='reviewer@example.com').put()
        review_key = student_work.Review(
            reviewer_key=reviewer_key, unit_id=unit_id).put()
        self.assertEqual(
            student_work.Review.key_name(unit_id, reviewer_key),
            review_key.name())
class SubmissionTest(actions.TestBase):
    """Tests for the Submission entity's key naming."""
    def test_constructor_sets_key_name(self):
        """Tests construction of key_name, put of entity with key_name set."""
        unit_id = 'unit_id'
        reviewee_key = models.Student(key_name='reviewee@example.com').put()
        review_key = student_work.Submission(
            reviewee_key=reviewee_key, unit_id=unit_id).put()
        self.assertEqual(
            student_work.Submission.key_name(unit_id, reviewee_key),
            review_key.name())
|
alekseik1/python_mipt_study
|
refs/heads/master
|
1sem/contest_5/C.py
|
2
|
def convert(timestr):
    """Convert a 12-hour time like '9:05 a.m.' to 24-hour 'HH:MM'.

    Fixes two defects of the original script:
    - '12:xx a.m.' (midnight) now maps to '00:xx' instead of '12:xx';
    - an already-zero-padded hour such as '09' no longer becomes '009'
      (zfill pads only when needed).
    Assumes well-formed input of the form 'H:MM a.m.' or 'H:MM p.m.'.
    """
    clock, suffix = timestr.split()
    hour, minute = clock.split(':')
    if suffix == "a.m.":
        # Midnight is 12 a.m. -> hour 0 in 24-hour notation.
        hour = 0 if int(hour) == 12 else int(hour)
    elif suffix == "p.m.":
        # Noon stays 12; every other p.m. hour gets +12.
        hour = 12 if int(hour) == 12 else int(hour) + 12
    return "%02d:%s" % (hour, minute.zfill(2))


if __name__ == "__main__":
    print(convert(input()))
|
bhaskar24/ns-3_RARED
|
refs/heads/master
|
src/visualizer/visualizer/core.py
|
88
|
# -*- Mode: python; coding: utf-8 -*-
from __future__ import division
#from __future__ import with_statement
LAYOUT_ALGORITHM = 'neato' # ['neato'|'dot'|'twopi'|'circo'|'fdp'|'nop']
REPRESENT_CHANNELS_AS_NODES = 1
DEFAULT_NODE_SIZE = 3.0 # default node size in meters
DEFAULT_TRANSMISSIONS_MEMORY = 5 # default number of past intervals whose transmissions are remembered
BITRATE_FONT_SIZE = 10
# internal constants, normally not meant to be changed
SAMPLE_PERIOD = 0.1 # seconds of simulated time per update step
PRIORITY_UPDATE_MODEL = -100 # glib idle priority used for model updates (see gobject.idle_add below)
PRIORITY_UPDATE_VIEW = 200 # counterpart priority for view updates (usage not shown here)
import platform
if platform.system() == "Windows":
    SHELL_FONT = "Lucida Console 9"
else:
    SHELL_FONT = "Luxi Mono 10"
import ns.core
import ns.network
import ns.visualizer
import ns.internet
import ns.mobility
import math
import os
import sys
import gobject
import time
try:
import pygraphviz
import gtk
import pango
import goocanvas
import cairo
import threading
import hud
#import time
import cairo
from higcontainer import HIGContainer
gobject.threads_init()
try:
import svgitem
except ImportError:
svgitem = None
except ImportError, _import_error:
import dummy_threading as threading
else:
_import_error = None
try:
import ipython_view
except ImportError:
ipython_view = None
from base import InformationWindow, PyVizObject, Link, lookup_netdevice_traits, PIXELS_PER_METER
from base import transform_distance_simulation_to_canvas, transform_point_simulation_to_canvas
from base import transform_distance_canvas_to_simulation, transform_point_canvas_to_simulation
from base import load_plugins, register_plugin, plugins
PI_OVER_2 = math.pi/2 # precomputed pi/2
PI_TIMES_2 = math.pi*2 # precomputed 2*pi
class Node(PyVizObject):
    """Canvas representation of one ns-3 node: an ellipse plus an
    optional SVG icon and an optional text label."""
    __gsignals__ = {
        # signal emitted whenever a tooltip is about to be shown for the node
        # the first signal parameter is a python list of strings, to which information can be appended
        'query-extra-tooltip-info': (gobject.SIGNAL_RUN_LAST, None, (object,)),
    }
    def __init__(self, visualizer, node_index):
        """Create the canvas item for ns-3 node number *node_index*."""
        super(Node, self).__init__()
        self.visualizer = visualizer
        self.node_index = node_index
        self.canvas_item = goocanvas.Ellipse()
        # Back-pointer so canvas hit-testing can find this wrapper object.
        self.canvas_item.set_data("pyviz-object", self)
        self.links = []
        self._has_mobility = None
        self._selected = False
        self._highlighted = False
        self._color = 0x808080ff
        self._size = DEFAULT_NODE_SIZE
        self.canvas_item.connect("enter-notify-event", self.on_enter_notify_event)
        self.canvas_item.connect("leave-notify-event", self.on_leave_notify_event)
        self.menu = None
        self.svg_item = None
        self.svg_align_x = None
        self.svg_align_y = None
        self._label = None
        self._label_canvas_item = None
        self._update_appearance() # call this last
    def set_svg_icon(self, file_base_name, width=None, height=None, align_x=0.5, align_y=0.5):
        """
        Set a background SVG icon for the node.
        @param file_base_name: base file name, including .svg
        extension, of the svg file.  Place the file in the folder
        src/contrib/visualizer/resource.
        @param width: scale to the specified width, in meters
        @param height: scale to the specified height, in meters
        @param align_x: horizontal alignment of the icon relative to
        the node position, from 0 (icon fully to the left of the node)
        to 1.0 (icon fully to the right of the node)
        @param align_y: vertical alignment of the icon relative to the
        node position, from 0 (icon fully to the top of the node) to
        1.0 (icon fully to the bottom of the node)
        """
        if width is None and height is None:
            raise ValueError("either width or height must be given")
        rsvg_handle = svgitem.rsvg_handle_factory(file_base_name)
        x = self.canvas_item.props.center_x
        y = self.canvas_item.props.center_y
        self.svg_item = svgitem.SvgItem(x, y, rsvg_handle)
        self.svg_item.props.parent = self.visualizer.canvas.get_root_item()
        self.svg_item.props.pointer_events = 0
        self.svg_item.lower(None)
        self.svg_item.props.visibility = goocanvas.ITEM_VISIBLE_ABOVE_THRESHOLD
        if width is not None:
            self.svg_item.props.width = transform_distance_simulation_to_canvas(width)
        if height is not None:
            self.svg_item.props.height = transform_distance_simulation_to_canvas(height)
        #threshold1 = 10.0/self.svg_item.props.height
        #threshold2 = 10.0/self.svg_item.props.width
        #self.svg_item.props.visibility_threshold = min(threshold1, threshold2)
        self.svg_align_x = align_x
        self.svg_align_y = align_y
        self._update_svg_position(x, y)
        self._update_appearance()
    def set_label(self, label):
        """Set the node's text label."""
        assert isinstance(label, basestring)
        self._label = label
        self._update_appearance()
    def _update_svg_position(self, x, y):
        """Place the SVG icon relative to (x, y) per the alignment factors."""
        w = self.svg_item.width
        h = self.svg_item.height
        self.svg_item.set_properties(x=(x - (1-self.svg_align_x)*w),
                                     y=(y - (1-self.svg_align_y)*h))
    def tooltip_query(self, tooltip):
        """Fill *tooltip* with node/device/address markup.

        Acquires the simulation lock while introspecting ns-3 objects.
        """
        self.visualizer.simulation.lock.acquire()
        try:
            ns3_node = ns.network.NodeList.GetNode(self.node_index)
            ipv4 = ns3_node.GetObject(ns.internet.Ipv4.GetTypeId())
            ipv6 = ns3_node.GetObject(ns.internet.Ipv6.GetTypeId())
            name = '<b><u>Node %i</u></b>' % self.node_index
            node_name = ns.core.Names.FindName (ns3_node)
            if len(node_name)!=0:
                name += ' <b>(' + node_name + ')</b>'
            lines = [name]
            lines.append('')
            # Let plugins append extra lines before the standard info.
            self.emit("query-extra-tooltip-info", lines)
            mob = ns3_node.GetObject(ns.mobility.MobilityModel.GetTypeId())
            if mob is not None:
                lines.append(' <b>Mobility Model</b>: %s' % mob.GetInstanceTypeId().GetName())
            for devI in range(ns3_node.GetNDevices()):
                lines.append('')
                lines.append(' <u>NetDevice %i:</u>' % devI)
                dev = ns3_node.GetDevice(devI)
                name = ns.core.Names.FindName(dev)
                if name:
                    lines.append(' <b>Name:</b> %s' % name)
                devname = dev.GetInstanceTypeId().GetName()
                lines.append(' <b>Type:</b> %s' % devname)
                if ipv4 is not None:
                    ipv4_idx = ipv4.GetInterfaceForDevice(dev)
                    if ipv4_idx != -1:
                        addresses = [
                            '%s/%s' % (ipv4.GetAddress(ipv4_idx, i).GetLocal(),
                                       ipv4.GetAddress(ipv4_idx, i).GetMask())
                            for i in range(ipv4.GetNAddresses(ipv4_idx))]
                        lines.append(' <b>IPv4 Addresses:</b> %s' % '; '.join(addresses))
                if ipv6 is not None:
                    ipv6_idx = ipv6.GetInterfaceForDevice(dev)
                    if ipv6_idx != -1:
                        addresses = [
                            '%s/%s' % (ipv6.GetAddress(ipv6_idx, i).GetAddress(),
                                       ipv6.GetAddress(ipv6_idx, i).GetPrefix())
                            for i in range(ipv6.GetNAddresses(ipv6_idx))]
                        lines.append(' <b>IPv6 Addresses:</b> %s' % '; '.join(addresses))
                lines.append(' <b>MAC Address:</b> %s' % (dev.GetAddress(),))
            tooltip.set_markup('\n'.join(lines))
        finally:
            self.visualizer.simulation.lock.release()
    def on_enter_notify_event(self, view, target, event):
        """Highlight the node when the pointer enters it."""
        self.highlighted = True
    def on_leave_notify_event(self, view, target, event):
        """Remove the highlight when the pointer leaves."""
        self.highlighted = False
    def _set_selected(self, value):
        self._selected = value
        self._update_appearance()
    def _get_selected(self):
        return self._selected
    selected = property(_get_selected, _set_selected) # selection state (bool)
    def _set_highlighted(self, value):
        self._highlighted = value
        self._update_appearance()
    def _get_highlighted(self):
        return self._highlighted
    highlighted = property(_get_highlighted, _set_highlighted) # hover state (bool)
    def set_size(self, size):
        """Set the node radius, in simulation meters."""
        self._size = size
        self._update_appearance()
    def _update_appearance(self):
        """Update the node aspect to reflect the selected/highlighted state"""
        size = transform_distance_simulation_to_canvas(self._size)
        if self.svg_item is not None:
            # With an icon, the ellipse is drawn semi-transparent.
            alpha = 0x80
        else:
            alpha = 0xff
        fill_color_rgba = (self._color & 0xffffff00) | alpha
        self.canvas_item.set_properties(radius_x=size, radius_y=size,
                                        fill_color_rgba=fill_color_rgba)
        if self._selected:
            line_width = size*.3
        else:
            line_width = size*.15
        if self.highlighted:
            stroke_color = 'yellow'
        else:
            stroke_color = 'black'
        self.canvas_item.set_properties(line_width=line_width, stroke_color=stroke_color)
        if self._label is not None:
            if self._label_canvas_item is None:
                self._label_canvas_item = goocanvas.Text(visibility_threshold=0.5,
                                                         font="Sans Serif 10",
                                                         fill_color_rgba=0x808080ff,
                                                         alignment=pango.ALIGN_CENTER,
                                                         anchor=gtk.ANCHOR_N,
                                                         parent=self.visualizer.canvas.get_root_item(),
                                                         pointer_events=0)
                self._label_canvas_item.lower(None)
            self._label_canvas_item.set_properties(visibility=goocanvas.ITEM_VISIBLE_ABOVE_THRESHOLD,
                                                   text=self._label)
            self._update_position()
    def set_position(self, x, y):
        """Move the node (and its icon, links, and label) to canvas (x, y)."""
        self.canvas_item.set_property("center_x", x)
        self.canvas_item.set_property("center_y", y)
        if self.svg_item is not None:
            self._update_svg_position(x, y)
        for link in self.links:
            link.update_points()
        if self._label_canvas_item is not None:
            self._label_canvas_item.set_properties(x=x, y=(y+self._size*3))
    def get_position(self):
        """Return the node's canvas position as (x, y)."""
        return (self.canvas_item.get_property("center_x"), self.canvas_item.get_property("center_y"))
    def _update_position(self):
        """Reapply the current position (refreshes icon/label/links)."""
        x, y = self.get_position()
        self.set_position(x, y)
    def set_color(self, color):
        """Set the fill color; accepts a GDK color string or 0xRRGGBBAA int."""
        if isinstance(color, str):
            color = gtk.gdk.color_parse(color)
            # Pack 16-bit GDK channels into a 0xRRGGBBAA integer (alpha ff).
            color = ((color.red>>8) << 24) | ((color.green>>8) << 16) | ((color.blue>>8) << 8) | 0xff
        self._color = color
        self._update_appearance()
    def add_link(self, link):
        """Attach *link* so it follows this node's position."""
        assert isinstance(link, Link)
        self.links.append(link)
    def remove_link(self, link):
        """Detach *link* from this node."""
        assert isinstance(link, Link)
        self.links.remove(link)
    @property
    def has_mobility(self):
        """Whether the ns-3 node has a mobility model (cached lookup)."""
        if self._has_mobility is None:
            node = ns.network.NodeList.GetNode(self.node_index)
            mobility = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
            self._has_mobility = (mobility is not None)
        return self._has_mobility
class Channel(PyVizObject):
    """Canvas representation of a shared channel (dashed grey circle)."""
    def __init__(self, channel):
        self.channel = channel
        self.canvas_item = goocanvas.Ellipse(radius_x=30, radius_y=30,
                                             fill_color="white",
                                             stroke_color="grey", line_width=2.0,
                                             line_dash=goocanvas.LineDash([10.0, 10.0 ]),
                                             visibility=goocanvas.ITEM_VISIBLE)
        # Back-pointer so canvas hit-testing can find this wrapper object.
        self.canvas_item.set_data("pyviz-object", self)
        self.links = []
    def set_position(self, x, y):
        """Move the channel and update all attached link endpoints."""
        self.canvas_item.set_property("center_x", x)
        self.canvas_item.set_property("center_y", y)
        for link in self.links:
            link.update_points()
    def get_position(self):
        """Return the channel's canvas position as (x, y)."""
        return (self.canvas_item.get_property("center_x"), self.canvas_item.get_property("center_y"))
class WiredLink(Link):
    """Straight line linking a node to another node or to a channel."""
    def __init__(self, node1, node2):
        assert isinstance(node1, Node)
        assert isinstance(node2, (Node, Channel))
        self.node1 = node1
        self.node2 = node2
        self.canvas_item = goocanvas.Path(line_width=1.0, stroke_color="black")
        self.canvas_item.set_data("pyviz-object", self)
        # Register with both endpoints so moves trigger update_points().
        self.node1.links.append(self)
        self.node2.links.append(self)
    def update_points(self):
        """Redraw the line between the two endpoints' current positions."""
        pos1_x, pos1_y = self.node1.get_position()
        pos2_x, pos2_y = self.node2.get_position()
        self.canvas_item.set_property("data", "M %r %r L %r %r" % (pos1_x, pos1_y, pos2_x, pos2_y))
class SimulationThread(threading.Thread):
    """Background thread that advances the ns-3 simulation in steps.

    The view thread sets ``target_time`` and fires the ``go`` event; this
    thread then runs the simulator up to that time (holding ``lock``) and
    schedules a model update on the GUI main loop.

    Change from the original: a dead ``if 0:`` block was removed -- it
    referenced the undefined name ``ns3`` (instead of ``ns``) and would
    have raised NameError if ever enabled.
    """
    def __init__(self, viz):
        super(SimulationThread, self).__init__()
        assert isinstance(viz, Visualizer)
        self.viz = viz # Visualizer object
        self.lock = threading.Lock()  # guards access to the simulator state
        self.go = threading.Event()   # set by the view thread to request a step
        self.go.clear()
        self.target_time = 0 # in seconds
        self.quit = False
        self.sim_helper = ns.visualizer.PyViz()
        self.pause_messages = []
    def set_nodes_of_interest(self, nodes):
        """Tell the PyViz helper which nodes to track (thread-safe)."""
        self.lock.acquire()
        try:
            self.sim_helper.SetNodesOfInterest(nodes)
        finally:
            self.lock.release()
    def run(self):
        """Main loop: wait for 'go', advance to target_time, repeat."""
        while not self.quit:
            self.go.wait() # wait until the main (view) thread gives us the go signal
            self.go.clear()
            if self.quit:
                break
            self.lock.acquire()
            try:
                self.sim_helper.SimulatorRunUntil(ns.core.Seconds(self.target_time))
                self.pause_messages.extend(self.sim_helper.GetPauseMessages())
                # Hand control back to the GUI thread to refresh the model.
                gobject.idle_add(self.viz.update_model, priority=PRIORITY_UPDATE_MODEL)
            finally:
                self.lock.release()
# enumeration
class ShowTransmissionsMode(object):
    """Sentinel-style enumeration of transmission display modes
    (ALL / NONE / SELECTED), each a distinct instance of this class."""
    __slots__ = []
for _mode_name in ('ALL', 'NONE', 'SELECTED'):
    setattr(ShowTransmissionsMode, _mode_name, ShowTransmissionsMode())
del _mode_name
class Visualizer(gobject.GObject):
INSTANCE = None
if _import_error is None:
__gsignals__ = {
# signal emitted whenever a right-click-on-node popup menu is being constructed
'populate-node-menu': (gobject.SIGNAL_RUN_LAST, None, (object, gtk.Menu,)),
# signal emitted after every simulation period (SAMPLE_PERIOD seconds of simulated time)
# the simulation lock is acquired while the signal is emitted
'simulation-periodic-update': (gobject.SIGNAL_RUN_LAST, None, ()),
# signal emitted right after the topology is scanned
'topology-scanned': (gobject.SIGNAL_RUN_LAST, None, ()),
# signal emitted when it's time to update the view objects
'update-view': (gobject.SIGNAL_RUN_LAST, None, ()),
}
def __init__(self):
assert Visualizer.INSTANCE is None
Visualizer.INSTANCE = self
super(Visualizer, self).__init__()
self.nodes = {} # node index -> Node
self.channels = {} # id(ns3.Channel) -> Channel
self.window = None # toplevel window
self.canvas = None # goocanvas.Canvas
self.time_label = None # gtk.Label
self.play_button = None # gtk.ToggleButton
self.zoom = None # gtk.Adjustment
self._scrolled_window = None # gtk.ScrolledWindow
self.links_group = goocanvas.Group()
self.channels_group = goocanvas.Group()
self.nodes_group = goocanvas.Group()
self._update_timeout_id = None
self.simulation = SimulationThread(self)
self.selected_node = None # node currently selected
self.speed = 1.0
self.information_windows = []
self._transmission_arrows = []
self._last_transmissions = []
self._drop_arrows = []
self._last_drops = []
self._show_transmissions_mode = None
self.set_show_transmissions_mode(ShowTransmissionsMode.ALL)
self._panning_state = None
self.node_size_adjustment = None
self.transmissions_smoothing_adjustment = None
self.sample_period = SAMPLE_PERIOD
self.node_drag_state = None
self.follow_node = None
self.shell_window = None
self.create_gui()
for plugin in plugins:
plugin(self)
def set_show_transmissions_mode(self, mode):
assert isinstance(mode, ShowTransmissionsMode)
self._show_transmissions_mode = mode
if self._show_transmissions_mode == ShowTransmissionsMode.ALL:
self.simulation.set_nodes_of_interest(range(ns.network.NodeList.GetNNodes()))
elif self._show_transmissions_mode == ShowTransmissionsMode.NONE:
self.simulation.set_nodes_of_interest([])
elif self._show_transmissions_mode == ShowTransmissionsMode.SELECTED:
if self.selected_node is None:
self.simulation.set_nodes_of_interest([])
else:
self.simulation.set_nodes_of_interest([self.selected_node.node_index])
def _create_advanced_controls(self):
expander = gtk.Expander("Advanced")
expander.show()
main_vbox = gobject.new(gtk.VBox, border_width=8, visible=True)
expander.add(main_vbox)
main_hbox1 = gobject.new(gtk.HBox, border_width=8, visible=True)
main_vbox.pack_start(main_hbox1)
show_transmissions_group = HIGContainer("Show transmissions")
show_transmissions_group.show()
main_hbox1.pack_start(show_transmissions_group, False, False, 8)
vbox = gtk.VBox(True, 4)
vbox.show()
show_transmissions_group.add(vbox)
all_nodes = gtk.RadioButton(None)
all_nodes.set_label("All nodes")
all_nodes.set_active(True)
all_nodes.show()
vbox.add(all_nodes)
selected_node = gtk.RadioButton(all_nodes)
selected_node.show()
selected_node.set_label("Selected node")
selected_node.set_active(False)
vbox.add(selected_node)
no_node = gtk.RadioButton(all_nodes)
no_node.show()
no_node.set_label("Disabled")
no_node.set_active(False)
vbox.add(no_node)
def toggled(radio):
if radio.get_active():
self.set_show_transmissions_mode(ShowTransmissionsMode.ALL)
all_nodes.connect("toggled", toggled)
def toggled(radio):
if radio.get_active():
self.set_show_transmissions_mode(ShowTransmissionsMode.NONE)
no_node.connect("toggled", toggled)
def toggled(radio):
if radio.get_active():
self.set_show_transmissions_mode(ShowTransmissionsMode.SELECTED)
selected_node.connect("toggled", toggled)
# -- misc settings
misc_settings_group = HIGContainer("Misc Settings")
misc_settings_group.show()
main_hbox1.pack_start(misc_settings_group, False, False, 8)
settings_hbox = gobject.new(gtk.HBox, border_width=8, visible=True)
misc_settings_group.add(settings_hbox)
# --> node size
vbox = gobject.new(gtk.VBox, border_width=0, visible=True)
scale = gobject.new(gtk.HScale, visible=True, digits=2)
vbox.pack_start(scale, True, True, 0)
vbox.pack_start(gobject.new(gtk.Label, label="Node Size", visible=True), True, True, 0)
settings_hbox.pack_start(vbox, False, False, 6)
self.node_size_adjustment = scale.get_adjustment()
def node_size_changed(adj):
for node in self.nodes.itervalues():
node.set_size(adj.value)
self.node_size_adjustment.connect("value-changed", node_size_changed)
self.node_size_adjustment.set_all(DEFAULT_NODE_SIZE, 0.01, 20, 0.1)
# --> transmissions smooth factor
vbox = gobject.new(gtk.VBox, border_width=0, visible=True)
scale = gobject.new(gtk.HScale, visible=True, digits=1)
vbox.pack_start(scale, True, True, 0)
vbox.pack_start(gobject.new(gtk.Label, label="Tx. Smooth Factor (s)", visible=True), True, True, 0)
settings_hbox.pack_start(vbox, False, False, 6)
self.transmissions_smoothing_adjustment = scale.get_adjustment()
self.transmissions_smoothing_adjustment.set_all(DEFAULT_TRANSMISSIONS_MEMORY*0.1, 0.1, 10, 0.1)
return expander
class _PanningState(object):
__slots__ = ['initial_mouse_pos', 'initial_canvas_pos', 'motion_signal']
def _begin_panning(self, widget, event):
self.canvas.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.FLEUR))
self._panning_state = self._PanningState()
x, y, dummy = widget.window.get_pointer()
self._panning_state.initial_mouse_pos = (x, y)
x = self._scrolled_window.get_hadjustment().value
y = self._scrolled_window.get_vadjustment().value
self._panning_state.initial_canvas_pos = (x, y)
self._panning_state.motion_signal = self.canvas.connect("motion-notify-event", self._panning_motion)
def _end_panning(self, event):
if self._panning_state is None:
return
self.canvas.window.set_cursor(None)
self.canvas.disconnect(self._panning_state.motion_signal)
self._panning_state = None
def _panning_motion(self, widget, event):
assert self._panning_state is not None
if event.is_hint:
x, y, dummy = widget.window.get_pointer()
else:
x, y = event.x, event.y
hadj = self._scrolled_window.get_hadjustment()
vadj = self._scrolled_window.get_vadjustment()
mx0, my0 = self._panning_state.initial_mouse_pos
cx0, cy0 = self._panning_state.initial_canvas_pos
dx = x - mx0
dy = y - my0
hadj.value = cx0 - dx
vadj.value = cy0 - dy
return True
def _canvas_button_press(self, widget, event):
if event.button == 2:
self._begin_panning(widget, event)
return True
return False
def _canvas_button_release(self, dummy_widget, event):
if event.button == 2:
self._end_panning(event)
return True
return False
def _canvas_scroll_event(self, dummy_widget, event):
if event.direction == gtk.gdk.SCROLL_UP:
self.zoom.value *= 1.25
return True
elif event.direction == gtk.gdk.SCROLL_DOWN:
self.zoom.value /= 1.25
return True
return False
    def get_hadjustment(self):
        """Return the horizontal gtk.Adjustment of the canvas scrolled window."""
        return self._scrolled_window.get_hadjustment()
    def get_vadjustment(self):
        """Return the vertical gtk.Adjustment of the canvas scrolled window."""
        return self._scrolled_window.get_vadjustment()
def create_gui(self):
self.window = gtk.Window()
vbox = gtk.VBox(); vbox.show()
self.window.add(vbox)
# canvas
self.canvas = goocanvas.Canvas()
self.canvas.connect_after("button-press-event", self._canvas_button_press)
self.canvas.connect_after("button-release-event", self._canvas_button_release)
self.canvas.connect("scroll-event", self._canvas_scroll_event)
self.canvas.props.has_tooltip = True
self.canvas.connect("query-tooltip", self._canvas_tooltip_cb)
self.canvas.show()
sw = gtk.ScrolledWindow(); sw.show()
self._scrolled_window = sw
sw.add(self.canvas)
vbox.pack_start(sw, True, True, 4)
self.canvas.set_size_request(600, 450)
self.canvas.set_bounds(-10000, -10000, 10000, 10000)
self.canvas.scroll_to(0, 0)
self.canvas.get_root_item().add_child(self.links_group)
self.links_group.set_property("visibility", goocanvas.ITEM_VISIBLE)
self.canvas.get_root_item().add_child(self.channels_group)
self.channels_group.set_property("visibility", goocanvas.ITEM_VISIBLE)
self.channels_group.raise_(self.links_group)
self.canvas.get_root_item().add_child(self.nodes_group)
self.nodes_group.set_property("visibility", goocanvas.ITEM_VISIBLE)
self.nodes_group.raise_(self.channels_group)
self.hud = hud.Axes(self)
hbox = gtk.HBox(); hbox.show()
vbox.pack_start(hbox, False, False, 4)
# zoom
zoom_adj = gtk.Adjustment(1.0, 0.01, 10.0, 0.02, 1.0, 0)
self.zoom = zoom_adj
def _zoom_changed(adj):
self.canvas.set_scale(adj.value)
zoom_adj.connect("value-changed", _zoom_changed)
zoom = gtk.SpinButton(zoom_adj)
zoom.set_digits(3)
zoom.show()
hbox.pack_start(gobject.new(gtk.Label, label=" Zoom:", visible=True), False, False, 4)
hbox.pack_start(zoom, False, False, 4)
_zoom_changed(zoom_adj)
# speed
speed_adj = gtk.Adjustment(1.0, 0.01, 10.0, 0.02, 1.0, 0)
def _speed_changed(adj):
self.speed = adj.value
self.sample_period = SAMPLE_PERIOD*adj.value
self._start_update_timer()
speed_adj.connect("value-changed", _speed_changed)
speed = gtk.SpinButton(speed_adj)
speed.set_digits(3)
speed.show()
hbox.pack_start(gobject.new(gtk.Label, label=" Speed:", visible=True), False, False, 4)
hbox.pack_start(speed, False, False, 4)
_speed_changed(speed_adj)
# Current time
self.time_label = gobject.new(gtk.Label, label=" Speed:", visible=True)
self.time_label.set_width_chars(20)
hbox.pack_start(self.time_label, False, False, 4)
# Screenshot button
screenshot_button = gobject.new(gtk.Button,
label="Snapshot",
relief=gtk.RELIEF_NONE, focus_on_click=False,
visible=True)
hbox.pack_start(screenshot_button, False, False, 4)
def load_button_icon(button, icon_name):
try:
import gnomedesktop
except ImportError:
sys.stderr.write("Could not load icon %s due to missing gnomedesktop Python module\n" % icon_name)
else:
icon = gnomedesktop.find_icon(gtk.icon_theme_get_default(), icon_name, 16, 0)
if icon is not None:
button.props.image = gobject.new(gtk.Image, file=icon, visible=True)
load_button_icon(screenshot_button, "applets-screenshooter")
screenshot_button.connect("clicked", self._take_screenshot)
# Shell button
if ipython_view is not None:
shell_button = gobject.new(gtk.Button,
label="Shell",
relief=gtk.RELIEF_NONE, focus_on_click=False,
visible=True)
hbox.pack_start(shell_button, False, False, 4)
load_button_icon(shell_button, "gnome-terminal")
shell_button.connect("clicked", self._start_shell)
# Play button
self.play_button = gobject.new(gtk.ToggleButton,
image=gobject.new(gtk.Image, stock=gtk.STOCK_MEDIA_PLAY, visible=True),
label="Simulate (F3)",
relief=gtk.RELIEF_NONE, focus_on_click=False,
use_stock=True, visible=True)
accel_group = gtk.AccelGroup()
self.window.add_accel_group(accel_group)
self.play_button.add_accelerator("clicked", accel_group,
gtk.keysyms.F3, 0, gtk.ACCEL_VISIBLE)
self.play_button.connect("toggled", self._on_play_button_toggled)
hbox.pack_start(self.play_button, False, False, 4)
self.canvas.get_root_item().connect("button-press-event", self.on_root_button_press_event)
vbox.pack_start(self._create_advanced_controls(), False, False, 4)
self.window.show()
def scan_topology(self):
print "scanning topology: %i nodes..." % (ns.network.NodeList.GetNNodes(),)
graph = pygraphviz.AGraph()
seen_nodes = 0
for nodeI in range(ns.network.NodeList.GetNNodes()):
seen_nodes += 1
if seen_nodes == 100:
print "scan topology... %i nodes visited (%.1f%%)" % (nodeI, 100*nodeI/ns.network.NodeList.GetNNodes())
seen_nodes = 0
node = ns.network.NodeList.GetNode(nodeI)
node_name = "Node %i" % nodeI
node_view = self.get_node(nodeI)
mobility = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
if mobility is not None:
node_view.set_color("red")
pos = mobility.GetPosition()
node_view.set_position(*transform_point_simulation_to_canvas(pos.x, pos.y))
#print "node has mobility position -> ", "%f,%f" % (pos.x, pos.y)
else:
graph.add_node(node_name)
for devI in range(node.GetNDevices()):
device = node.GetDevice(devI)
device_traits = lookup_netdevice_traits(type(device))
if device_traits.is_wireless:
continue
if device_traits.is_virtual:
continue
channel = device.GetChannel()
if channel.GetNDevices() > 2:
if REPRESENT_CHANNELS_AS_NODES:
# represent channels as white nodes
if mobility is None:
channel_name = "Channel %s" % id(channel)
graph.add_edge(node_name, channel_name)
self.get_channel(channel)
self.create_link(self.get_node(nodeI), self.get_channel(channel))
else:
# don't represent channels, just add links between nodes in the same channel
for otherDevI in range(channel.GetNDevices()):
otherDev = channel.GetDevice(otherDevI)
otherNode = otherDev.GetNode()
otherNodeView = self.get_node(otherNode.GetId())
if otherNode is not node:
if mobility is None and not otherNodeView.has_mobility:
other_node_name = "Node %i" % otherNode.GetId()
graph.add_edge(node_name, other_node_name)
self.create_link(self.get_node(nodeI), otherNodeView)
else:
for otherDevI in range(channel.GetNDevices()):
otherDev = channel.GetDevice(otherDevI)
otherNode = otherDev.GetNode()
otherNodeView = self.get_node(otherNode.GetId())
if otherNode is not node:
if mobility is None and not otherNodeView.has_mobility:
other_node_name = "Node %i" % otherNode.GetId()
graph.add_edge(node_name, other_node_name)
self.create_link(self.get_node(nodeI), otherNodeView)
print "scanning topology: calling graphviz layout"
graph.layout(LAYOUT_ALGORITHM)
for node in graph.iternodes():
#print node, "=>", node.attr['pos']
node_type, node_id = node.split(' ')
pos_x, pos_y = [float(s) for s in node.attr['pos'].split(',')]
if node_type == 'Node':
obj = self.nodes[int(node_id)]
elif node_type == 'Channel':
obj = self.channels[int(node_id)]
obj.set_position(pos_x, pos_y)
print "scanning topology: all done."
self.emit("topology-scanned")
def get_node(self, index):
try:
return self.nodes[index]
except KeyError:
node = Node(self, index)
self.nodes[index] = node
self.nodes_group.add_child(node.canvas_item)
node.canvas_item.connect("button-press-event", self.on_node_button_press_event, node)
node.canvas_item.connect("button-release-event", self.on_node_button_release_event, node)
return node
def get_channel(self, ns3_channel):
try:
return self.channels[id(ns3_channel)]
except KeyError:
channel = Channel(ns3_channel)
self.channels[id(ns3_channel)] = channel
self.channels_group.add_child(channel.canvas_item)
return channel
    def create_link(self, node, node_or_channel):
        """Create a WiredLink view between *node* and another node or
        channel, stacking it below all other canvas items."""
        link = WiredLink(node, node_or_channel)
        self.links_group.add_child(link.canvas_item)
        link.canvas_item.lower(None)
def update_view(self):
#print "update_view"
self.time_label.set_text("Time: %f s" % ns.core.Simulator.Now().GetSeconds())
self._update_node_positions()
# Update information
for info_win in self.information_windows:
info_win.update()
self._update_transmissions_view()
self._update_drops_view()
self.emit("update-view")
def _update_node_positions(self):
for node in self.nodes.itervalues():
if node.has_mobility:
ns3_node = ns.network.NodeList.GetNode(node.node_index)
mobility = ns3_node.GetObject(ns.mobility.MobilityModel.GetTypeId())
if mobility is not None:
pos = mobility.GetPosition()
x, y = transform_point_simulation_to_canvas(pos.x, pos.y)
node.set_position(x, y)
if node is self.follow_node:
hadj = self._scrolled_window.get_hadjustment()
vadj = self._scrolled_window.get_vadjustment()
px, py = self.canvas.convert_to_pixels(x, y)
hadj.value = px - hadj.page_size/2
vadj.value = py - vadj.page_size/2
def center_on_node(self, node):
if isinstance(node, ns.network.Node):
node = self.nodes[node.GetId()]
elif isinstance(node, (int, long)):
node = self.nodes[node]
elif isinstance(node, Node):
pass
else:
raise TypeError("expected int, viz.Node or ns.network.Node, not %r" % node)
x, y = node.get_position()
hadj = self._scrolled_window.get_hadjustment()
vadj = self._scrolled_window.get_vadjustment()
px, py = self.canvas.convert_to_pixels(x, y)
hadj.value = px - hadj.page_size/2
vadj.value = py - vadj.page_size/2
    def update_model(self):
        """Emit the 'simulation-periodic-update' signal while holding the
        simulation lock, so that handlers can safely access ns-3 state."""
        self.simulation.lock.acquire()
        try:
            self.emit("simulation-periodic-update")
        finally:
            self.simulation.lock.release()
def do_simulation_periodic_update(self):
smooth_factor = int(self.transmissions_smoothing_adjustment.value*10)
transmissions = self.simulation.sim_helper.GetTransmissionSamples()
self._last_transmissions.append(transmissions)
while len(self._last_transmissions) > smooth_factor:
self._last_transmissions.pop(0)
drops = self.simulation.sim_helper.GetPacketDropSamples()
self._last_drops.append(drops)
while len(self._last_drops) > smooth_factor:
self._last_drops.pop(0)
    def _get_label_over_line_position(self, pos1_x, pos1_y, pos2_x, pos2_y):
        """Return the (x, y) canvas point where a label for the line
        pos1->pos2 should go: the midpoint of the part of the line that
        is currently visible on screen."""
        hadj = self._scrolled_window.get_hadjustment()
        vadj = self._scrolled_window.get_vadjustment()
        # visible rectangle of the canvas, in canvas coordinates
        bounds_x1, bounds_y1 = self.canvas.convert_from_pixels(hadj.value, vadj.value)
        bounds_x2, bounds_y2 = self.canvas.convert_from_pixels(hadj.value + hadj.page_size,
                                                               vadj.value + vadj.page_size)
        # clip the line against the visible rectangle before taking the midpoint
        pos1_x, pos1_y, pos2_x, pos2_y = ns.visualizer.PyViz.LineClipping(bounds_x1, bounds_y1,
                                                                          bounds_x2, bounds_y2,
                                                                          pos1_x, pos1_y,
                                                                          pos2_x, pos2_y)
        return (pos1_x + pos2_x)/2, (pos1_y + pos2_y)/2
def _update_transmissions_view(self):
transmissions_average = {}
for transmission_set in self._last_transmissions:
for transmission in transmission_set:
key = (transmission.transmitter.GetId(), transmission.receiver.GetId())
rx_bytes, count = transmissions_average.get(key, (0, 0))
rx_bytes += transmission.bytes
count += 1
transmissions_average[key] = rx_bytes, count
old_arrows = self._transmission_arrows
for arrow, label in old_arrows:
arrow.set_property("visibility", goocanvas.ITEM_HIDDEN)
label.set_property("visibility", goocanvas.ITEM_HIDDEN)
new_arrows = []
k = self.node_size_adjustment.value/5
for (transmitter_id, receiver_id), (rx_bytes, rx_count) in transmissions_average.iteritems():
transmitter = self.get_node(transmitter_id)
receiver = self.get_node(receiver_id)
try:
arrow, label = old_arrows.pop()
except IndexError:
arrow = goocanvas.Polyline(line_width=2.0, stroke_color_rgba=0x00C000C0, close_path=False, end_arrow=True)
arrow.set_property("parent", self.canvas.get_root_item())
arrow.props.pointer_events = 0
arrow.raise_(None)
label = goocanvas.Text(parent=self.canvas.get_root_item(), pointer_events=0)
label.raise_(None)
arrow.set_property("visibility", goocanvas.ITEM_VISIBLE)
line_width = max(0.1, math.log(float(rx_bytes)/rx_count/self.sample_period)*k)
arrow.set_property("line-width", line_width)
pos1_x, pos1_y = transmitter.get_position()
pos2_x, pos2_y = receiver.get_position()
points = goocanvas.Points([(pos1_x, pos1_y), (pos2_x, pos2_y)])
arrow.set_property("points", points)
kbps = float(rx_bytes*8)/1e3/rx_count/self.sample_period
label.set_properties(visibility=goocanvas.ITEM_VISIBLE_ABOVE_THRESHOLD,
visibility_threshold=0.5,
font=("Sans Serif %f" % int(1+BITRATE_FONT_SIZE*k)))
angle = math.atan2((pos2_y - pos1_y), (pos2_x - pos1_x))
if -PI_OVER_2 <= angle <= PI_OVER_2:
label.set_properties(text=("%.2f kbit/s →" % (kbps,)),
alignment=pango.ALIGN_CENTER,
anchor=gtk.ANCHOR_S,
x=0, y=-line_width/2)
M = cairo.Matrix()
M.translate(*self._get_label_over_line_position(pos1_x, pos1_y, pos2_x, pos2_y))
M.rotate(angle)
label.set_transform(M)
else:
label.set_properties(text=("← %.2f kbit/s" % (kbps,)),
alignment=pango.ALIGN_CENTER,
anchor=gtk.ANCHOR_N,
x=0, y=line_width/2)
M = cairo.Matrix()
M.translate(*self._get_label_over_line_position(pos1_x, pos1_y, pos2_x, pos2_y))
M.rotate(angle)
M.scale(-1, -1)
label.set_transform(M)
new_arrows.append((arrow, label))
self._transmission_arrows = new_arrows + old_arrows
def _update_drops_view(self):
drops_average = {}
for drop_set in self._last_drops:
for drop in drop_set:
key = drop.transmitter.GetId()
drop_bytes, count = drops_average.get(key, (0, 0))
drop_bytes += drop.bytes
count += 1
drops_average[key] = drop_bytes, count
old_arrows = self._drop_arrows
for arrow, label in old_arrows:
arrow.set_property("visibility", goocanvas.ITEM_HIDDEN)
label.set_property("visibility", goocanvas.ITEM_HIDDEN)
new_arrows = []
# get the coordinates for the edge of screen
vadjustment = self._scrolled_window.get_vadjustment()
bottom_y = vadjustment.value + vadjustment.page_size
dummy, edge_y = self.canvas.convert_from_pixels(0, bottom_y)
k = self.node_size_adjustment.value/5
for transmitter_id, (drop_bytes, drop_count) in drops_average.iteritems():
transmitter = self.get_node(transmitter_id)
try:
arrow, label = old_arrows.pop()
except IndexError:
arrow = goocanvas.Polyline(line_width=2.0, stroke_color_rgba=0xC00000C0, close_path=False, end_arrow=True)
arrow.props.pointer_events = 0
arrow.set_property("parent", self.canvas.get_root_item())
arrow.raise_(None)
label = goocanvas.Text()#, fill_color_rgba=0x00C000C0)
label.props.pointer_events = 0
label.set_property("parent", self.canvas.get_root_item())
label.raise_(None)
arrow.set_property("visibility", goocanvas.ITEM_VISIBLE)
arrow.set_property("line-width", max(0.1, math.log(float(drop_bytes)/drop_count/self.sample_period)*k))
pos1_x, pos1_y = transmitter.get_position()
pos2_x, pos2_y = pos1_x, edge_y
points = goocanvas.Points([(pos1_x, pos1_y), (pos2_x, pos2_y)])
arrow.set_property("points", points)
label.set_properties(visibility=goocanvas.ITEM_VISIBLE_ABOVE_THRESHOLD,
visibility_threshold=0.5,
font=("Sans Serif %i" % int(1+BITRATE_FONT_SIZE*k)),
text=("%.2f kbit/s" % (float(drop_bytes*8)/1e3/drop_count/self.sample_period,)),
alignment=pango.ALIGN_CENTER,
x=(pos1_x + pos2_x)/2,
y=(pos1_y + pos2_y)/2)
new_arrows.append((arrow, label))
self._drop_arrows = new_arrows + old_arrows
def update_view_timeout(self):
#print "view: update_view_timeout called at real time ", time.time()
# while the simulator is busy, run the gtk event loop
while not self.simulation.lock.acquire(False):
while gtk.events_pending():
gtk.main_iteration()
pause_messages = self.simulation.pause_messages
self.simulation.pause_messages = []
try:
self.update_view()
self.simulation.target_time = ns.core.Simulator.Now ().GetSeconds () + self.sample_period
#print "view: target time set to %f" % self.simulation.target_time
finally:
self.simulation.lock.release()
if pause_messages:
#print pause_messages
dialog = gtk.MessageDialog(parent=self.window, flags=0, type=gtk.MESSAGE_WARNING, buttons=gtk.BUTTONS_OK,
message_format='\n'.join(pause_messages))
dialog.connect("response", lambda d, r: d.destroy())
dialog.show()
self.play_button.set_active(False)
# if we're paused, stop the update timer
if not self.play_button.get_active():
self._update_timeout_id = None
return False
#print "view: self.simulation.go.set()"
self.simulation.go.set()
#print "view: done."
return True
def _start_update_timer(self):
if self._update_timeout_id is not None:
gobject.source_remove(self._update_timeout_id)
#print "start_update_timer"
self._update_timeout_id = gobject.timeout_add(int(SAMPLE_PERIOD/min(self.speed, 1)*1e3),
self.update_view_timeout,
priority=PRIORITY_UPDATE_VIEW)
def _on_play_button_toggled(self, button):
if button.get_active():
self._start_update_timer()
else:
if self._update_timeout_id is not None:
gobject.source_remove(self._update_timeout_id)
    def _quit(self, *dummy_args):
        """Window delete-event handler: stop the update timer, shut down
        the simulation thread, and quit the GTK main loop."""
        if self._update_timeout_id is not None:
            gobject.source_remove(self._update_timeout_id)
            self._update_timeout_id = None
        self.simulation.quit = True
        self.simulation.go.set()  # wake the simulation thread so it can exit
        self.simulation.join()
        gtk.main_quit()
def _monkey_patch_ipython(self):
# The user may want to access the NS 3 simulation state, but
# NS 3 is not thread safe, so it could cause serious problems.
# To work around this, monkey-patch IPython to automatically
# acquire and release the simulation lock around each code
# that is executed.
original_runcode = self.ipython.runcode
def runcode(ip, *args):
#print "lock"
self.simulation.lock.acquire()
try:
return original_runcode(*args)
finally:
#print "unlock"
self.simulation.lock.release()
import types
self.ipython.runcode = types.MethodType(runcode, self.ipython)
def autoscale_view(self):
if not self.nodes:
return
self._update_node_positions()
positions = [node.get_position() for node in self.nodes.itervalues()]
min_x, min_y = min(x for (x,y) in positions), min(y for (x,y) in positions)
max_x, max_y = max(x for (x,y) in positions), max(y for (x,y) in positions)
min_x_px, min_y_px = self.canvas.convert_to_pixels(min_x, min_y)
max_x_px, max_y_px = self.canvas.convert_to_pixels(max_x, max_y)
dx = max_x - min_x
dy = max_y - min_y
dx_px = max_x_px - min_x_px
dy_px = max_y_px - min_y_px
hadj = self._scrolled_window.get_hadjustment()
vadj = self._scrolled_window.get_vadjustment()
new_dx, new_dy = 1.5*dx_px, 1.5*dy_px
if new_dx == 0 or new_dy == 0:
return
self.zoom.value = min(hadj.page_size/new_dx, vadj.page_size/new_dy)
x1, y1 = self.canvas.convert_from_pixels(hadj.value, vadj.value)
x2, y2 = self.canvas.convert_from_pixels(hadj.value+hadj.page_size, vadj.value+vadj.page_size)
width = x2 - x1
height = y2 - y1
center_x = (min_x + max_x) / 2
center_y = (min_y + max_y) / 2
self.canvas.scroll_to(center_x - width/2, center_y - height/2)
return False
def start(self):
self.scan_topology()
self.window.connect("delete-event", self._quit)
#self._start_update_timer()
gobject.timeout_add(200, self.autoscale_view)
self.simulation.start()
try:
__IPYTHON__
except NameError:
pass
else:
self._monkey_patch_ipython()
gtk.main()
def on_root_button_press_event(self, view, target, event):
if event.button == 1:
self.select_node(None)
return True
def on_node_button_press_event(self, view, target, event, node):
if event.button == 1:
self.select_node(node)
return True
elif event.button == 3:
self.popup_node_menu(node, event)
return True
elif event.button == 2:
self.begin_node_drag(node)
return True
return False
def on_node_button_release_event(self, view, target, event, node):
if event.button == 2:
self.end_node_drag(node)
return True
return False
class NodeDragState(object):
def __init__(self, canvas_x0, canvas_y0, sim_x0, sim_y0):
self.canvas_x0 = canvas_x0
self.canvas_y0 = canvas_y0
self.sim_x0 = sim_x0
self.sim_y0 = sim_y0
self.motion_signal = None
def begin_node_drag(self, node):
self.simulation.lock.acquire()
try:
ns3_node = ns.network.NodeList.GetNode(node.node_index)
mob = ns3_node.GetObject(ns.mobility.MobilityModel.GetTypeId())
if mob is None:
return
if self.node_drag_state is not None:
return
pos = mob.GetPosition()
finally:
self.simulation.lock.release()
x, y, dummy = self.canvas.window.get_pointer()
x0, y0 = self.canvas.convert_from_pixels(x, y)
self.node_drag_state = self.NodeDragState(x0, y0, pos.x, pos.y)
self.node_drag_state.motion_signal = node.canvas_item.connect("motion-notify-event", self.node_drag_motion, node)
def node_drag_motion(self, item, targe_item, event, node):
self.simulation.lock.acquire()
try:
ns3_node = ns.network.NodeList.GetNode(node.node_index)
mob = ns3_node.GetObject(ns.mobility.MobilityModel.GetTypeId())
if mob is None:
return False
if self.node_drag_state is None:
return False
x, y, dummy = self.canvas.window.get_pointer()
canvas_x, canvas_y = self.canvas.convert_from_pixels(x, y)
dx = (canvas_x - self.node_drag_state.canvas_x0)
dy = (canvas_y - self.node_drag_state.canvas_y0)
pos = mob.GetPosition()
pos.x = self.node_drag_state.sim_x0 + transform_distance_canvas_to_simulation(dx)
pos.y = self.node_drag_state.sim_y0 + transform_distance_canvas_to_simulation(dy)
#print "SetPosition(%G, %G)" % (pos.x, pos.y)
mob.SetPosition(pos)
node.set_position(*transform_point_simulation_to_canvas(pos.x, pos.y))
finally:
self.simulation.lock.release()
return True
    def end_node_drag(self, node):
        """Finish a middle-button node drag: disconnect the motion handler
        installed by begin_node_drag() and clear the drag state."""
        if self.node_drag_state is None:
            return
        node.canvas_item.disconnect(self.node_drag_state.motion_signal)
        self.node_drag_state = None
def popup_node_menu(self, node, event):
menu = gtk.Menu()
self.emit("populate-node-menu", node, menu)
menu.popup(None, None, None, event.button, event.time)
def _update_ipython_selected_node(self):
# If we are running under ipython -gthread, make this new
# selected node available as a global 'selected_node'
# variable.
try:
__IPYTHON__
except NameError:
pass
else:
if self.selected_node is None:
ns3_node = None
else:
self.simulation.lock.acquire()
try:
ns3_node = ns.network.NodeList.GetNode(self.selected_node.node_index)
finally:
self.simulation.lock.release()
self.ipython.updateNamespace({'selected_node': ns3_node})
def select_node(self, node):
if isinstance(node, ns.network.Node):
node = self.nodes[node.GetId()]
elif isinstance(node, (int, long)):
node = self.nodes[node]
elif isinstance(node, Node):
pass
elif node is None:
pass
else:
raise TypeError("expected None, int, viz.Node or ns.network.Node, not %r" % node)
if node is self.selected_node:
return
if self.selected_node is not None:
self.selected_node.selected = False
self.selected_node = node
if self.selected_node is not None:
self.selected_node.selected = True
if self._show_transmissions_mode == ShowTransmissionsMode.SELECTED:
if self.selected_node is None:
self.simulation.set_nodes_of_interest([])
else:
self.simulation.set_nodes_of_interest([self.selected_node.node_index])
self._update_ipython_selected_node()
def add_information_window(self, info_win):
self.information_windows.append(info_win)
self.simulation.lock.acquire()
try:
info_win.update()
finally:
self.simulation.lock.release()
def remove_information_window(self, info_win):
self.information_windows.remove(info_win)
def _canvas_tooltip_cb(self, canvas, x, y, keyboard_mode, tooltip):
#print "tooltip query: ", x, y
hadj = self._scrolled_window.get_hadjustment()
vadj = self._scrolled_window.get_vadjustment()
x, y = self.canvas.convert_from_pixels(hadj.value + x, vadj.value + y)
item = self.canvas.get_item_at(x, y, True)
#print "items at (%f, %f): %r | keyboard_mode=%r" % (x, y, item, keyboard_mode)
if not item:
return False
while item is not None:
obj = item.get_data("pyviz-object")
if obj is not None:
obj.tooltip_query(tooltip)
return True
item = item.props.parent
return False
def _get_export_file_name(self):
sel = gtk.FileChooserDialog("Save...", self.canvas.get_toplevel(),
gtk.FILE_CHOOSER_ACTION_SAVE,
(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_SAVE, gtk.RESPONSE_OK))
sel.set_default_response(gtk.RESPONSE_OK)
sel.set_local_only(True)
sel.set_do_overwrite_confirmation(True)
sel.set_current_name("Unnamed.pdf")
filter = gtk.FileFilter()
filter.set_name("Embedded PostScript")
filter.add_mime_type("image/x-eps")
sel.add_filter(filter)
filter = gtk.FileFilter()
filter.set_name("Portable Document Graphics")
filter.add_mime_type("application/pdf")
sel.add_filter(filter)
filter = gtk.FileFilter()
filter.set_name("Scalable Vector Graphics")
filter.add_mime_type("image/svg+xml")
sel.add_filter(filter)
resp = sel.run()
if resp != gtk.RESPONSE_OK:
sel.destroy()
return None
file_name = sel.get_filename()
sel.destroy()
return file_name
def _take_screenshot(self, dummy_button):
#print "Cheese!"
file_name = self._get_export_file_name()
if file_name is None:
return
# figure out the correct bounding box for what is visible on screen
x1 = self._scrolled_window.get_hadjustment().value
y1 = self._scrolled_window.get_vadjustment().value
x2 = x1 + self._scrolled_window.get_hadjustment().page_size
y2 = y1 + self._scrolled_window.get_vadjustment().page_size
bounds = goocanvas.Bounds()
bounds.x1, bounds.y1 = self.canvas.convert_from_pixels(x1, y1)
bounds.x2, bounds.y2 = self.canvas.convert_from_pixels(x2, y2)
dest_width = bounds.x2 - bounds.x1
dest_height = bounds.y2 - bounds.y1
#print bounds.x1, bounds.y1, " -> ", bounds.x2, bounds.y2
dummy, extension = os.path.splitext(file_name)
extension = extension.lower()
if extension == '.eps':
surface = cairo.PSSurface(file_name, dest_width, dest_height)
elif extension == '.pdf':
surface = cairo.PDFSurface(file_name, dest_width, dest_height)
elif extension == '.svg':
surface = cairo.SVGSurface(file_name, dest_width, dest_height)
else:
dialog = gtk.MessageDialog(parent = self.canvas.get_toplevel(),
flags = gtk.DIALOG_DESTROY_WITH_PARENT,
type = gtk.MESSAGE_ERROR,
buttons = gtk.BUTTONS_OK,
message_format = "Unknown extension '%s' (valid extensions are '.eps', '.svg', and '.pdf')"
% (extension,))
dialog.run()
dialog.destroy()
return
# draw the canvas to a printing context
cr = cairo.Context(surface)
cr.translate(-bounds.x1, -bounds.y1)
self.canvas.render(cr, bounds, self.zoom.value)
cr.show_page()
surface.finish()
def set_follow_node(self, node):
if isinstance(node, ns.network.Node):
node = self.nodes[node.GetId()]
self.follow_node = node
def _start_shell(self, dummy_button):
if self.shell_window is not None:
self.shell_window.present()
return
self.shell_window = gtk.Window()
self.shell_window.set_size_request(750,550)
self.shell_window.set_resizable(True)
scrolled_window = gtk.ScrolledWindow()
scrolled_window.set_policy(gtk.POLICY_AUTOMATIC,gtk.POLICY_AUTOMATIC)
self.ipython = ipython_view.IPythonView()
self.ipython.modify_font(pango.FontDescription(SHELL_FONT))
self.ipython.set_wrap_mode(gtk.WRAP_CHAR)
self.ipython.show()
scrolled_window.add(self.ipython)
scrolled_window.show()
self.shell_window.add(scrolled_window)
self.shell_window.show()
self.shell_window.connect('destroy', self._on_shell_window_destroy)
self._update_ipython_selected_node()
self.ipython.updateNamespace({'viz': self})
def _on_shell_window_destroy(self, window):
self.shell_window = None
# Callbacks (hook, args) to invoke once the visualizer is up; see start().
initialization_hooks = []
def add_initialization_hook(hook, *args):
    """Register *hook* to be called after the visualizer is initialized.

    The callback will be invoked as ``hook(visualizer, *args)`` from the
    GTK main loop (scheduled with idle_add by start()).
    """
    # No 'global' statement needed: the module-level list is mutated in
    # place, never rebound.
    initialization_hooks.append((hook, args))
def set_bounds(x1, y1, x2, y2):
    """Set the canvas bounds, given in simulation-space coordinates.

    The actual change is deferred via an initialization hook, so this may
    be called before the visualizer exists.
    """
    assert x2>x1
    assert y2>y1
    def hook(viz):
        # convert simulation coordinates to canvas coordinates at apply time
        cx1, cy1 = transform_point_simulation_to_canvas(x1, y1)
        cx2, cy2 = transform_point_simulation_to_canvas(x2, y2)
        viz.canvas.set_bounds(cx1, cy1, cx2, cy2)
    add_initialization_hook(hook)
def start():
    """Create the Visualizer singleton, schedule all registered
    initialization hooks on the GTK idle loop, and run the GUI.

    If the GUI modules failed to import, falls back to a plain
    ns.core.Simulator.Run() with a warning on stderr.
    """
    assert Visualizer.INSTANCE is None
    if _import_error is not None:
        import sys
        print >> sys.stderr, "No visualization support (%s)." % (str(_import_error),)
        ns.core.Simulator.Run()
        return
    load_plugins()
    viz = Visualizer()
    for hook, args in initialization_hooks:
        gobject.idle_add(hook, viz, *args)
    ns.network.Packet.EnablePrinting()
    viz.start()
|
gauribhoite/personfinder
|
refs/heads/master
|
env/google_appengine/lib/django-1.3/django/contrib/staticfiles/management/commands/findstatic.py
|
244
|
import os
from optparse import make_option
from django.core.management.base import LabelCommand
from django.utils.encoding import smart_str, smart_unicode
from django.contrib.staticfiles import finders
class Command(LabelCommand):
    """Management command that prints the absolute path(s) at which the
    configured staticfiles finders locate the given static file(s)."""
    help = "Finds the absolute paths for the given static file(s)."
    args = "[file ...]"
    label = 'static file'
    option_list = LabelCommand.option_list + (
        make_option('--first', action='store_false', dest='all', default=True,
            help="Only return the first match for each static file."),
    )

    def handle_label(self, path, **options):
        """Look up *path* with the staticfiles finders and report every
        match (or only the first when --first was given)."""
        verbosity = int(options.get('verbosity', 1))
        result = finders.find(path, all=options['all'])
        path = smart_unicode(path)
        if result:
            if not isinstance(result, (list, tuple)):
                result = [result]
            # Use a distinct loop variable: the original shadowed ``path``
            # inside the generator expression, which obscured which name
            # the surrounding message refers to.
            output = u'\n  '.join(
                (smart_unicode(os.path.realpath(matched_path))
                 for matched_path in result))
            self.stdout.write(
                smart_str(u"Found '%s' here:\n  %s\n" % (path, output)))
        else:
            if verbosity >= 1:
                self.stderr.write(
                    smart_str("No matching file found for '%s'.\n" % path))
|
lightcn/odoo
|
refs/heads/8.0
|
addons/purchase/wizard/purchase_line_invoice.py
|
177
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class purchase_line_invoice(osv.osv_memory):

    """ To create invoice for purchase order line"""

    _name = 'purchase.order.line_invoice'
    _description = 'Purchase Order Line Make Invoice'

    def _make_invoice_by_partner(self, cr, uid, partner, orders, lines_ids, context=None):
        """
            create a new invoice for one supplier
            @param cr : Cursor
            @param uid : Id of current user
            @param partner : The object partner
            @param orders : The set of orders to add in the invoice
            @param lines_ids : The list of invoice line ids to attach
        """
        purchase_obj = self.pool.get('purchase.order')
        account_jrnl_obj = self.pool.get('account.journal')
        invoice_obj = self.pool.get('account.invoice')
        name = orders and orders[0].name or ''
        # First purchase journal found is used for the supplier invoice.
        journal_id = account_jrnl_obj\
            .search(cr, uid, [('type', '=', 'purchase')], context=None)
        journal_id = journal_id and journal_id[0] or False
        a = partner.property_account_payable.id
        inv = {
            'name': name,
            'origin': name,
            'type': 'in_invoice',
            'journal_id': journal_id,
            'reference': partner.ref,
            'account_id': a,
            'partner_id': partner.id,
            # (6, 0, ids) replaces the invoice's line set with lines_ids.
            'invoice_line': [(6, 0, lines_ids)],
            'currency_id': orders[0].currency_id.id,
            'comment': " \n".join([order.notes for order in orders if order.notes]),
            'payment_term': orders[0].payment_term_id.id,
            'fiscal_position': partner.property_account_position.id
        }
        inv_id = invoice_obj.create(cr, uid, inv, context=context)
        # (4, id) links the new invoice to every source purchase order.
        purchase_obj.write(cr, uid, [order.id for order in orders], {'invoice_ids': [(4, inv_id)]}, context=context)
        return inv_id

    def makeInvoices(self, cr, uid, ids, context=None):
        """
         To get Purchase Order line and create Invoice
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param context: A standard dictionary
         @return : retrun view of Invoice
        """
        if context is None:
            context={}
        record_ids = context.get('active_ids',[])
        if record_ids:
            res = False
            # invoices maps partner id -> [(purchase line, invoice line id)]
            invoices = {}
            purchase_obj = self.pool.get('purchase.order')
            purchase_line_obj = self.pool.get('purchase.order.line')
            invoice_line_obj = self.pool.get('account.invoice.line')
            for line in purchase_line_obj.browse(cr, uid, record_ids, context=context):
                # Skip lines already invoiced and draft/cancelled ones.
                if (not line.invoiced) and (line.state not in ('draft', 'cancel')):
                    if not line.partner_id.id in invoices:
                        invoices[line.partner_id.id] = []
                    acc_id = purchase_obj._choose_account_from_po_line(cr, uid, line, context=context)
                    inv_line_data = purchase_obj._prepare_inv_line(cr, uid, acc_id, line, context=context)
                    inv_line_data.update({'origin': line.order_id.name})
                    # NOTE: this creates an invoice *line*, not an invoice.
                    inv_id = invoice_line_obj.create(cr, uid, inv_line_data, context=context)
                    purchase_line_obj.write(cr, uid, [line.id], {'invoiced': True, 'invoice_lines': [(4, inv_id)]})
                    invoices[line.partner_id.id].append((line,inv_id))
            res = []
            # One supplier invoice per partner, grouping all its orders.
            for result in invoices.values():
                il = map(lambda x: x[1], result)
                orders = list(set(map(lambda x : x[0].order_id, result)))
                res.append(self._make_invoice_by_partner(cr, uid, orders[0].partner_id, orders, il, context=context))
        # NOTE(review): when 'active_ids' is empty, ``res`` is never bound
        # and this raises NameError -- confirm callers always pass active_ids.
        return {
            'domain': "[('id','in', ["+','.join(map(str,res))+"])]",
            'name': _('Supplier Invoices'),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'account.invoice',
            'view_id': False,
            'context': "{'type':'in_invoice', 'journal_type': 'purchase'}",
            'type': 'ir.actions.act_window'
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
daniboy/feedmuncher
|
refs/heads/master
|
app/migrations/0005_munchedfeed_cache_mime_type.py
|
1
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds a nullable 'cache_mime_type' text column to the MunchedFeed model.

    dependencies = [
        ('app', '0004_auto_20150406_1204'),
    ]

    operations = [
        migrations.AddField(
            model_name='munchedfeed',
            name='cache_mime_type',
            field=models.TextField(null=True),
        ),
    ]
|
AlexCaranha/Wox
|
refs/heads/master
|
PythonHome/Lib/site-packages/chardet/compat.py
|
2942
|
######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
# ``base_str`` names the native string-like types for the running
# interpreter: (str, unicode) on Python 2, (bytes, str) on Python 3.
if sys.version_info < (3, 0):
    base_str = (str, unicode)
else:
    base_str = (bytes, str)


def wrap_ord(a):
    """Return the integer ordinal of *a* on Python 2 when it is a
    single-character string; otherwise (Python 3, or non-strings)
    return *a* unchanged."""
    needs_ord = sys.version_info < (3, 0) and isinstance(a, base_str)
    return ord(a) if needs_ord else a
|
lneuhaus/pyrpl
|
refs/heads/master
|
setup.py
|
1
|
# note to the developer
# do not forget to make source distribution with
# python setup.py sdist
# much of the code here is from
# https://jeffknupp.com/blog/2013/08/16/open-sourcing-a-python-project-the-right-way/
#! /usr/bin/env python
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from distutils.core import setup
import io
import codecs
import os
import sys
# path to the directory that contains the setup.py script
SETUP_PATH = os.path.dirname(os.path.abspath(__file__))


def read(fname):
    """Return the text contents of *fname*, resolved relative to the
    directory that contains setup.py."""
    # Use a context manager so the file handle is closed promptly; the
    # original left closing to the garbage collector.
    with open(os.path.join(SETUP_PATH, fname)) as f:
        return f.read()
# Version info -- read without importing
_locals = {}
exec(read(os.path.join('pyrpl', '_version.py')), None, _locals)
version = _locals['__version__']

# # read requirements
# # from http://stackoverflow.com/questions/14399534/how-can-i-reference-requirements-txt-for-the-install-requires-kwarg-in-setuptool
# requirements = []
# here = os.path.abspath(os.path.dirname(__file__))
# with open(os.path.join(here, 'readthedocs_requirements.txt')) as f:
#     lines = f.readlines()
# for line in lines:
#     line = line.strip()
#     if '#' not in line and line:
#         requirements.append(line.strip())

# Hard-coded runtime requirements (file-based variant is disabled above).
requirements = ['scp',
                #'matplotlib', # optional requirementm, not needed for core
                'scipy',
                'pyyaml',
                #'ruamel.yaml' # temporarily disabled
                'pandas',
                'pyqtgraph',
                'numpy>=1.9',
                'paramiko>=2.0',
                'nose>=1.0',
                #'PyQt5', # cannot be installed with pip
                'qtpy',
                'nbconvert',
                'jupyter-client']
if sys.version_info >= (3,4): # python version dependencies
    requirements += ['quamash']
else: # python 2.7
    requirements += ['futures', 'mock'] # mock is now a full dependency
# CI-specific extra requirements.
if os.environ.get('TRAVIS') == 'true':
    requirements += ['pandoc']
if os.environ.get('READTHEDOCS') == 'True':
    requirements += ['pandoc', 'sphinx', 'sphinx_bootstrap_theme'] # mock is needed on readthedocs.io to mock PyQt5
    # remove a few of the mocked modules
    def rtd_included(r):
        # True when requirement *r* should remain on readthedocs builds.
        for rr in ['numpy', 'scipy', 'pandas', 'scp', 'paramiko', 'nose',
                   'quamash', 'qtpy', 'asyncio', 'pyqtgraph']:
            if r.startswith(rr):
                return False
        return True
    requirements = [r for r in requirements if rtd_included(r)]

# cannot install pyQt4 with pip:
# http://stackoverflow.com/questions/4628519/is-it-possible-to-require-pyqt-from-setuptools-setup-py
# PyQt4

# Long description: prefer README.rst, then a pandoc conversion of
# README.md, then raw README.md.
try:
    long_description = read('README.rst')
except:
    try:
        import pypandoc
        long_description = pypandoc.convert_file('README.md', 'rst')
    except:
        long_description = read('README.md')
def find_packages():
    """Discover every python package beneath the ``pyrpl`` folder.

    A directory counts as a package when it contains ``__init__.py``;
    its path relative to setup.py is turned into a dotted module name.
    """
    package_root = os.path.join(SETUP_PATH, "pyrpl")
    package_dirs = [
        os.path.relpath(dirpath, SETUP_PATH)
        for dirpath, _, filenames in os.walk(package_root)
        if "__init__.py" in filenames
    ]
    return [directory.replace(os.sep, ".") for directory in package_dirs]
class PyTest(TestCommand):
    """``python setup.py test`` command that delegates to pytest."""
    # user_options = [('pytest-args=', 'a', "192.168.1.100")] #not yet working

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Imported here because pytest may not be installed at setup time.
        import pytest
        errcode = pytest.main(self.test_args)
        sys.exit(errcode)
def compile_fpga(): #vivado 2015.4 must be installed for this to work
    """Build the FPGA bitfile by running ``make`` inside pyrpl//fpga,
    restoring the original working directory afterwards."""
    previous_dir = os.getcwd()
    os.chdir("pyrpl//fpga")
    try:
        os.system("make")
    finally:
        os.chdir(previous_dir)
def compile_server(): #gcc crosscompiler must be installed for this to work
    """Rebuild the monitor_server binary (``make clean`` then ``make``),
    restoring the original working directory afterwards."""
    previous_dir = os.getcwd()
    os.chdir("pyrpl//monitor_server")
    try:
        for target in ("make clean", "make"):
            os.system(target)
    finally:
        os.chdir(previous_dir)
# Single packaging entry point: metadata, discovered packages, data files
# and the custom test/fpga/server commands are all wired up here.
setup(name='pyrpl',
      version=version,
      description='DSP servo controller for quantum optics with the RedPitaya',
      long_description=long_description,
      author='Leonhard Neuhaus',
      author_email='neuhaus@lkb.upmc.fr',
      url='http://lneuhaus.github.io/pyrpl/',
      license='GPLv3',
      classifiers=['Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3.4',
                   'Programming Language :: Python :: 3.5',
                   'Programming Language :: Python :: 3.6',
                   'Programming Language :: C',
                   'Natural Language :: English',
                   'Development Status :: 4 - Beta',
                   'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
                   'Topic :: Scientific/Engineering :: Human Machine Interfaces',
                   'Topic :: Scientific/Engineering :: Physics'],
      keywords='RedPitaya DSP FPGA IIR PDH synchronous detection filter PID '
               'control lockbox servo feedback lock quantum optics',
      platforms='any',
      packages=find_packages(), #['pyrpl'],
      package_data={'pyrpl': ['fpga/*',
                              'monitor_server/*',
                              'config/*',
                              'widgets/images/*']},
      install_requires=requirements,
      # what were the others for? dont remember..
      #setup_requires=requirements,
      #requires=requirements,
      # stuff for unitary test with pytest
      tests_require=['nose>=1.0'],
      # extras_require={'testing': ['pytest']},
      test_suite='nose.collector',
      # install options
      # NOTE(review): 'fpga' and 'server' map to plain functions rather than
      # distutils Command classes -- confirm these cmdclass entries work.
      cmdclass={'test': PyTest,
                'fpga': compile_fpga,
                'server': compile_server}
      )
|
praekelt/django-atlas
|
refs/heads/develop
|
atlas/forms.py
|
1
|
from django import forms
from django.contrib.gis.geos import fromstr
from atlas.models import Country, Region, City
from atlas.fields import LonLatWidget, CoordinateFormField
from atlas.utils import get_city
class SelectLocationForm(forms.Form):
    """Form that lets a user pick their location on a map and stores the
    resolved city/position in the session."""
    location = CoordinateFormField(
        required = True,
        help_text="Select your location on the map",
    )
    # URL to return to after the location is chosen.
    origin = forms.CharField(widget=forms.HiddenInput)
    required_css_class = 'required'

    def __init__(self, *args, **kwargs):
        # The view must pass request=... so the form can read GET/session.
        self.request = kwargs.pop('request', None)
        super(SelectLocationForm, self).__init__(*args, **kwargs)
        self.fields['origin'].initial = self.request.GET.get('view', '/')
        # set location initial value to either GPS coords or closest city
        if 'location' in self.request.session:
            location = self.request.session['location']
            self.fields['location'].initial = location['position'] \
                if 'position' in location else location['city'].coordinates

    def save(self):
        """Resolve the submitted coordinates to a city and persist both in
        the session.  Not a ModelForm save: nothing is written to the DB."""
        position = fromstr(self.cleaned_data['location'], srid=4326)
        '''if 'REMOTE_ADDR' in self.request.META:
            city = get_city(ip=self.request.META['REMOTE_ADDR'], position=position)
        else:'''
        city = get_city(position=position)
        self.request.session['location'] = {'city': city, 'position': position}

    def as_div(self):
        # Render each field wrapped in <div class="field"> instead of the
        # default table/ul layouts.
        return self._html_output(
            normal_row=u'<div class="field"><div %(html_class_attr)s>%(label)s %(errors)s <div class="helptext">%(help_text)s</div> %(field)s</div></div>',
            error_row=u'%s',
            row_ender='</div>',
            help_text_html=u'%s',
            errors_on_separate_row=False
        )
|
seecr/meresco-rdf
|
refs/heads/master
|
meresco/rdf/graph/rdfparser.py
|
1
|
## begin license ##
#
# Meresco RDF contains components to handle RDF data.
#
# Copyright (C) 2014-2016 Seecr (Seek You Too B.V.) http://seecr.nl
# Copyright (C) 2014 Stichting Bibliotheek.nl (BNL) http://www.bibliotheek.nl
# Copyright (C) 2015 Drents Archief http://www.drentsarchief.nl
# Copyright (C) 2015 Koninklijke Bibliotheek (KB) http://www.kb.nl
#
# This file is part of "Meresco RDF"
#
# "Meresco RDF" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco RDF" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco RDF"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from urlparse import urljoin as urijoin
from lxml.etree import Element
from meresco.xml.namespaces import curieToTag, curieToUri
from .uri import Uri
from .bnode import BNode
from .literal import Literal
from .graph import Graph
# lxml-modification of code taken from http://infomesh.net/2003/rdfparser/rdfxml.py.txt by Sean B. Palmer
class RDFParser(object):
    """Please note that this is currently by no means a feature complete RDF/XML parser!

    Particularly the following RDF/XML constructs are not supported:
    - rdf:datatype (!)
    - rdf:parseType "Literal" and "Collection"
    - rdf:li (generated node ids)
    - rdf:bagID
    - rdf:aboutEach
    - rdf:aboutEachPrefix
    - implicit base

    Input is not validated in any way.
    How incorrect RDF/XML (or input with unsupported constructs) is parsed into a graph remains undefined.
    """
    # Class-level cache: lxml tag -> plain URI (shared by all parsers).
    _element2uri = {}

    def __init__(self, sink=None):
        # Triples are pushed into *sink* (defaults to a fresh Graph).
        self._sink = sink or Graph()
        self.addTriple = self._sink.addTriple

    def parse(self, lxmlNode):
        """Parse an lxml tree (or element) into the sink and return it."""
        root = lxmlNode
        if hasattr(lxmlNode, 'getroot'):
            root = lxmlNode.getroot()
        if root.tag == rdf_RDF_tag:
            for child in root.iterchildren(tag=Element):
                self.nodeElement(child)
        else:
            self.nodeElement(root)
        return self._sink

    def bNode(self, nodeID=None):
        """Return a blank node; a given nodeID is prefixed to '_:<id>'."""
        if not nodeID is None:
            if not nodeID[0].isalpha(): nodeID = 'b' + nodeID
            return BNode('_:' + nodeID)
        return BNode()

    @classmethod
    def uriForTag(cls, tag):
        """Convert an lxml '{ns}local' tag to a URI, with caching."""
        try:
            uri = cls._element2uri[tag]
        except KeyError:
            uri = cls._element2uri[tag] = ''.join(tag[1:].split('}'))
        return uri

    def nodeElement(self, e):
        """Emit triples for a node element and return its subject."""
        # *Only* call with an Element
        if rdf_about_tag in e.attrib:
            subj = Uri(urijoin(e.base, e.attrib[rdf_about_tag]))
        elif rdf_ID_tag in e.attrib:
            subj = Uri(urijoin(e.base, '#' + e.attrib[rdf_ID_tag]))
        else:
            nodeID = None
            if rdf_nodeID_tag in e.attrib:
                nodeID = e.attrib[rdf_nodeID_tag]
            subj = self.bNode(nodeID=nodeID)
        # A non-rdf:Description element name doubles as the rdf:type.
        if e.tag != rdf_Description_tag:
            self.addTriple(subj.value, rdf_type_uri, Uri(self.uriForTag(e.tag)))
        if rdf_type_tag in e.attrib:
            self.addTriple(subj.value, rdf_type_uri, Uri(e.attrib[rdf_type_tag]))
        # Remaining (allowed) attributes become literal property triples.
        for attr, value in e.attrib.items():
            if attr not in DISALLOWED and attr != rdf_type_tag:
                objt = Literal(value, lang=e.attrib.get(x_lang_tag))
                self.addTriple(subj.value, self.uriForTag(attr), objt)
        for element in e.iterchildren(tag=Element):
            self.propertyElt(subj.value, element)
        return subj

    def propertyElt(self, subj, e):
        """Dispatch a property element to the matching specialised handler."""
        children = [c for c in e.iterchildren(tag=Element)]
        eText = getText(e)
        if len(children) == 0 and eText:
            self.literalPropertyElt(subj, e, eText)
        elif len(children) == 1 and not rdf_parseType_tag in e.attrib:
            self.resourcePropertyElt(subj, e, children[0])
        elif 'Resource' == e.attrib.get(rdf_parseType_tag):
            self.parseTypeResourcePropertyElt(subj, e, children)
        elif not eText:
            self.emptyPropertyElt(subj, e)

    def emptyPropertyElt(self, subj, e):
        """Handle a property element with no child elements and no text."""
        uri = self.uriForTag(e.tag)
        # Only rdf:ID (or nothing) present: the property value is a literal.
        if sum(1 for k in e.attrib.keys() if not k == rdf_ID_tag) == 0:
            obj = Literal(e.text or '', lang=e.attrib.get(x_lang_tag))
        else:
            resource = e.attrib.get(rdf_resource_tag)
            if not resource is None:
                obj = Uri(urijoin(e.base, resource))
            else:
                obj = self.bNode(nodeID=e.attrib.get(rdf_nodeID_tag))
            # Remaining attributes become properties of the new object node.
            for attrib, value in filter(lambda (k, v): k not in DISALLOWED, e.attrib.items()):
                if attrib != rdf_type_tag:
                    self.addTriple(obj.value, self.uriForTag(attrib), Literal(value, lang=e.attrib.get(x_lang_tag)))
                else:
                    self.addTriple(obj.value, rdf_type_uri, Uri(value))
        self.addTriple(subj, uri, obj)
        rdfID = e.attrib.get(rdf_ID_tag)
        if not rdfID is None:
            self.reify(subj, uri, obj, e.base, rdfID)

    def resourcePropertyElt(self, subj, e, n):
        """Handle a property element whose value is a nested node element."""
        uri = self.uriForTag(e.tag)
        childSubj = self.nodeElement(n)
        self.addTriple(subj, uri, childSubj)
        rdfID = e.attrib.get(rdf_ID_tag)
        if not rdfID is None:
            self.reify(subj, uri, childSubj, e.base, rdfID)

    def literalPropertyElt(self, subj, e, eText):
        """Handle a property element whose value is plain text."""
        uri = self.uriForTag(e.tag)
        o = Literal(eText, lang=e.attrib.get(x_lang_tag)) # TODO: process datatype with e.attrib.get(rdf_datatype_tag
        self.addTriple(subj, uri, o)
        rdfID = e.attrib.get(rdf_ID_tag)
        if not rdfID is None:
            self.reify(subj, uri, o, e.base, rdfID)

    def parseTypeResourcePropertyElt(self, subj, e, children):
        """Handle rdf:parseType="Resource": an implicit blank node value."""
        uri = self.uriForTag(e.tag)
        node = self.bNode()
        self.addTriple(subj, uri, node)
        rdfID = e.attrib.get(rdf_ID_tag)
        if not rdfID is None:
            self.reify(subj, uri, node, e.base, rdfID)
        for child in children:
            self.propertyElt(node.value, child)

    def reify(self, s, p, o, base, rdfID):
        """Emit an rdf:Statement describing triple (s, p, o), named by rdfID."""
        r = urijoin(base, '#' + rdfID)
        self.addTriple(r, rdf_subject_uri, BNode(s) if s.startswith('_:') else Uri(s))
        self.addTriple(r, rdf_predicate_uri, Uri(p))
        self.addTriple(r, rdf_object_uri, o)
        self.addTriple(r, rdf_type_uri, Uri(rdf_Statement_uri))
def getText(node):
    """Collect the direct text of *node*: its own .text plus every
    child's .tail.  Returns None when there is no text at all.
    *Only* call with an Element."""
    fragments = [node.text or '']
    fragments.extend(child.tail for child in node.getchildren() if child.tail)
    combined = ''.join(fragments)
    return combined or None
# lxml tags for the RDF/XML vocabulary, resolved once at import time.
x_lang_tag = curieToTag("xml:lang")
rdf_RDF_tag = curieToTag("rdf:RDF")
rdf_ID_tag = curieToTag("rdf:ID")
rdf_about_tag = curieToTag("rdf:about")
rdf_aboutEach_tag = curieToTag("rdf:aboutEach")
rdf_aboutEachPrefix_tag = curieToTag("rdf:aboutEachPrefix")
rdf_type_tag = curieToTag("rdf:type")
rdf_resource_tag = curieToTag("rdf:resource")
rdf_Description_tag = curieToTag("rdf:Description")
rdf_bagID_tag = curieToTag("rdf:bagID")
rdf_parseType_tag = curieToTag("rdf:parseType")
rdf_nodeID_tag = curieToTag("rdf:nodeID")
rdf_datatype_tag = curieToTag("rdf:datatype")
rdf_li_tag = curieToTag("rdf:li")

# Plain URIs used when emitting reified statements and type triples.
rdf_Statement_uri = curieToUri('rdf:Statement')
rdf_type_uri = curieToUri('rdf:type')
rdf_subject_uri = curieToUri('rdf:subject')
rdf_predicate_uri = curieToUri('rdf:predicate')
rdf_object_uri = curieToUri('rdf:object')

# Syntax attributes that never become property triples themselves.
DISALLOWED = set([rdf_RDF_tag, rdf_ID_tag, rdf_about_tag, rdf_bagID_tag,
    rdf_parseType_tag, rdf_resource_tag, rdf_nodeID_tag, rdf_datatype_tag,
    rdf_li_tag, rdf_aboutEach_tag, rdf_aboutEachPrefix_tag])
|
bartoldeman/easybuild-framework
|
refs/heads/master
|
easybuild/tools/toolchain/options.py
|
1
|
# #
# Copyright 2012-2018 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
The toolchain options module contains the ToolchainOptions class
These are the options that can be passed to the toolchain through the easyconfig files
Map values can be string with named templates
By default following named options is filled
%(opt)s : option name
%(value)s : option value
:author: Stijn De Weirdt (Ghent University)
:author: Kenneth Hoste (Ghent University)
"""
from vsc.utils import fancylogger
from easybuild.tools.build_log import EasyBuildError
class ToolchainOptions(dict):
def __init__(self):
self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
self.options_map = {} # map between options name and value
self.description = {} # short description of the options
def add_options(self, options=None, options_map=None):
"""Add
@options: dict with options : tuple option_name and option_description
@options_map: dict with a mapping between and option and a value
"""
if not options is None:
self._add_options(options)
if not options_map is None:
self._add_options_map(options_map)
def _add_options(self, options):
"""Add actual options dict to self"""
self.log.debug("Using toolchain options %s", options)
for name, value in options.items():
if not isinstance(value, (list, tuple,)) and len(value) == 2:
raise EasyBuildError("_add_options: option name %s has to be 2 element list (%s)", name, value)
if name in self:
self.log.devel("_add_options: redefining previous name %s (previous value %s)", name, self.get(name))
self.__setitem__(name, value[0])
self.description.__setitem__(name, value[1])
def _add_options_map(self, options_map):
"""Add map dict between options and values
map names starting with _opt_ are allowed without corresponding option
"""
for name in options_map.keys():
if not name in self:
if name.startswith('_opt_'):
self.log.devel("_add_options_map: no option with name %s defined, but allowed", name)
else:
raise EasyBuildError("No toolchain option with name %s defined", name)
self.options_map.update(options_map)
def option(self, name, templatedict=None):
"""Return option value"""
value = self.get(name, None)
if value is None and name not in self.options_map:
self.log.warning("option: option with name %s returns None" % name)
res = None
elif name in self.options_map:
res = self.options_map[name]
if templatedict is None:
templatedict = {}
templatedict.update({
'opt':name,
'value':value,
})
if isinstance(res, basestring):
# allow for template
res = self.options_map[name] % templatedict
elif isinstance(res, (list, tuple,)):
# allow for template per element
res = self.options_map[name]
for i in xrange(0, len(res)):
res[i] = res[i] % templatedict
else:
# check if True?
res = self.options_map[name]
else:
res = value
return res
|
khhhh/RIOT
|
refs/heads/master
|
dist/tools/pyterm/testbeds/testbeds.py
|
100
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Philipp Rosenkranz <philipp.rosenkranz@fu-berlin.de>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import os, re, datetime
from subprocess import call, Popen, PIPE
class Testbed():
    """Base class for a testbed: subclasses implement flashing, log
    handling and pyterm start/stop for a concrete set of nodes."""
    log_dir_name = 'log'

    def __init__(self):
        pass

    def initCleanWithFlash(self):
        """Restart the testbed from scratch, reflashing all nodes."""
        self.stop()
        self.cleanLogs()
        self.flashNodes()
        self.start()

    def initClean(self):
        """Restart the testbed without reflashing."""
        self.cleanLogs()
        self.start()

    def flashNodes(self):
        raise NotImplementedError("Inherit from Testbed and implement flashNodes")

    def cleanLogs(self):
        # BUGFIX: the messages below used to all say "implement flashNodes"
        # (copy-paste); each now names the method that must be overridden.
        raise NotImplementedError("Inherit from Testbed and implement cleanLogs")

    def archiveLogs(self, experiment = None):
        raise NotImplementedError("Inherit from Testbed and implement archiveLogs")

    def start(self):
        raise NotImplementedError("Inherit from Testbed and implement start")

    def stop(self):
        raise NotImplementedError("Inherit from Testbed and implement stop")

    def defaultArchivePostfix(self, experimentName = None):
        """Return '-<experiment>_<timestamp>' used to name log archives."""
        if not experimentName:
            experimentName = "unknown"
        time = datetime.datetime.now().strftime("%Y-%m-%d_%H_%M_%S")
        postfix = "-" + experimentName +"_" + time
        return postfix

    def printAndCall(self, cmdString):
        """Echo *cmdString* and run it through the shell."""
        print(cmdString)
        call(cmdString, shell=True)
class DESTestbed(Testbed):
    """Testbed driver for the DES distributed testbed: nodes are reached
    over ssh (parallel-ssh) using a host file."""
    def __init__(self, serverHost = None, serverPort=None, userName = None, flasher = None,
                 hexfilePath = None, pyterm = None, logFilePath = None, hostFile = None):
        self.serverHost = serverHost
        self.serverPort = str(serverPort)
        self.userName = userName
        self.flasher = flasher
        self.hexFilePath = hexfilePath
        self.pyterm = pyterm
        self.logFilePath = logFilePath
        self.hostFile = hostFile

    def flashNodes(self):
        # Run the flasher script on every host in parallel.
        self.printAndCall("parallel-ssh -h %s -l %s 'python %s'" % (self.hostFile, self.userName, self.flasher))

    def cleanLogs(self):
        self.printAndCall("rm -rf %s/*.log" % (self.logFilePath))

    def archiveLogs(self, postfix = None):
        postfix = self.defaultArchivePostfix(postfix)
        logDir = self.logFilePath.split("/")[-1]
        self.printAndCall("cd %s/..; tar -cjf archived_logs%s.tar.bz2 %s/*.log" % (self.logFilePath, postfix, logDir))

    def start(self):
        # Start pyterm in a detached screen session on every host.
        self.printAndCall("parallel-ssh -h %s -l %s 'screen -S pyterm -d -m python %s -ln %s'" % (self.hostFile, self.userName, self.pyterm, self.log_dir_name))

    def stop(self):
        self.printAndCall("parallel-ssh -h %s -l %s 'screen -X -S pyterm quit'" % (self.hostFile, self.userName))
class LocalTestbed(Testbed):
    """Testbed driver for boards attached locally via USB serial ports."""
    def __init__(self, serverHost = None, serverPort=None, flasher = None, hexfilePath = None, pyterm = None, logFilePath = None):
        self.serverHost = serverHost
        self.serverPort = str(serverPort)
        self.flasher = flasher
        self.hexFilePath = hexfilePath
        self.pyterm = pyterm
        self.logFilePath = logFilePath

    def findPorts(self):
        # All /dev/ttyUSB* devices, sorted for a stable node ordering.
        devlist = os.listdir("/dev/")
        regex = re.compile('^ttyUSB')
        return sorted([port for port in devlist if regex.match(port)])

    def flashNodes(self):
        self.printAndCall("python %s %s" % (self.flasher, self.hexFilePath))

    def cleanLogs(self):
        self.printAndCall("rm -rf %s/*.log" % (self.logFilePath))

    def archiveLogs(self, postfix = None):
        postfix = self.defaultArchivePostfix(postfix)
        logDir = self.logFilePath.split("/")[-1]
        self.printAndCall("cd %s/..; tar -cjf archived_logs%s.tar.bz2 %s/*.log" % (self.logFilePath, postfix, logDir))

    def start(self):
        # One detached screen + pyterm instance per serial port.
        portList = self.findPorts()
        for port in portList:
            self.printAndCall("screen -S pyterm-%s -d -m python %s -H %s -rn %s -p /dev/%s -ln %s" % (port, self.pyterm, port, port, port, self.log_dir_name))

    def stop(self):
        portList = self.findPorts()
        for port in portList:
            self.printAndCall("screen -X -S pyterm-%s quit" % (port))
class DesVirtTestbed(Testbed):
    """Testbed driver for a virtual (desvirt) network of RIOT native nodes."""
    def __init__(self, serverHost = None, serverPort=None, desvirtPath = None, topologyName = None, pyterm = None, logFilePath = None):
        self.serverHost = serverHost
        self.serverPort = str(serverPort)
        self.desvirtPath = desvirtPath
        self.topologyName = topologyName
        self.pyterm = pyterm
        self.logFilePath = logFilePath
        # list of (node_name, port) tuples, filled by startDesVirtNetwork()
        self.namePortList = []

    def findPorts(self):
        return self.namePortList

    def startDesVirtNetwork(self):
        # Define and start the topology, then parse the riotnative command
        # lines printed on stderr to learn each node's port.
        print "executing: " + "./vnet --start --name " + self.topologyName + " in: " + self.desvirtPath
        call("sh -c \"./vnet --define --name " + self.topologyName + "\"", cwd=self.desvirtPath, shell=True)
        stream = Popen("sh -c \"./vnet --start --name " + self.topologyName + "\"", cwd=self.desvirtPath, shell=True, stderr=PIPE).stderr
        pats = r'.*riotnative.*\.elf (\S+) -t (\S+)'
        pattern = re.compile(pats)
        for line in stream:
            match = pattern.match(line)
            if(match):
                # NOTE(review): 'tuple' shadows the builtin of the same name.
                tuple = match.groups()
                self.namePortList.append((tuple[0], int(tuple[1])))
        self.namePortList = sorted(self.namePortList)
        for tuple in self.namePortList:
            print "name: " + tuple[0] + " port: " + str(tuple[1])

    def stopDesVirtNetwork(self):
        call("sh -c \"./vnet --stop --name " + self.topologyName + "\"", cwd=self.desvirtPath, shell=True)

    def flashNodes(self):
        # Nothing to flash for virtual native nodes.
        pass

    def cleanLogs(self):
        self.printAndCall("rm -rf %s/*.log" % (self.logFilePath))

    def archiveLogs(self, postfix = None):
        postfix = self.defaultArchivePostfix(postfix)
        logDir = self.logFilePath.split("/")[-1]
        self.printAndCall("cd %s/..; tar -cjf archived_logs%s.tar.bz2 %s/*.log" % (self.logFilePath, postfix, logDir))

    def start(self):
        for node in self.namePortList:
            self.printAndCall("screen -S pyterm-%s -d -m python %s -H %s -rn %s -ts %s -ln %s" % (node[0], self.pyterm, node[0], node[0], node[1], self.log_dir_name))

    def stop(self):
        print "stop called"
        for node in self.namePortList:
            self.printAndCall("screen -X -S pyterm-%s quit" % (node[0]))
        self.stopDesVirtNetwork()
|
andrew-szymanski/gae_django
|
refs/heads/master
|
django/contrib/sitemaps/__init__.py
|
291
|
from django.contrib.sites.models import Site, get_current_site
from django.core import urlresolvers, paginator
from django.core.exceptions import ImproperlyConfigured
import urllib
PING_URL = "http://www.google.com/webmasters/tools/ping"
class SitemapNotFound(Exception):
    """Raised by ping_google() when no sitemap URL can be determined."""
    pass
def ping_google(sitemap_url=None, ping_url=PING_URL):
    """
    Alerts Google that the sitemap for the current site has been updated.
    If sitemap_url is provided, it should be an absolute path to the sitemap
    for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
    function will attempt to deduce it by using urlresolvers.reverse().

    Raises SitemapNotFound when no URL is provided and none can be reversed.
    """
    if sitemap_url is None:
        try:
            # First, try to get the "index" sitemap URL.
            sitemap_url = urlresolvers.reverse('django.contrib.sitemaps.views.index')
        except urlresolvers.NoReverseMatch:
            try:
                # Next, try for the "global" sitemap URL.
                sitemap_url = urlresolvers.reverse('django.contrib.sitemaps.views.sitemap')
            except urlresolvers.NoReverseMatch:
                pass

    if sitemap_url is None:
        raise SitemapNotFound("You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected.")

    from django.contrib.sites.models import Site
    current_site = Site.objects.get_current()
    url = "http://%s%s" % (current_site.domain, sitemap_url)
    params = urllib.urlencode({'sitemap':url})
    # Fire-and-forget GET; the response body is ignored.
    urllib.urlopen("%s?%s" % (ping_url, params))
class Sitemap(object):
    """
    Base class for sitemap sections. Subclasses override items() (and
    optionally location/lastmod/changefreq/priority, each of which may be
    either an attribute or a callable taking the item).
    """
    # This limit is defined by Google. See the index documentation at
    # http://sitemaps.org/protocol.php#index.
    limit = 50000

    def __get(self, name, obj, default=None):
        # Resolve `name` on self; call it with `obj` if it's callable,
        # return it directly otherwise, or `default` when absent.
        try:
            attr = getattr(self, name)
        except AttributeError:
            return default
        if callable(attr):
            return attr(obj)
        return attr

    def items(self):
        """Return the iterable of objects in this sitemap; subclasses override."""
        return []

    def location(self, obj):
        """Return the URL path for `obj`; defaults to its get_absolute_url()."""
        return obj.get_absolute_url()

    def _get_paginator(self):
        # Cache the paginator so items() is only evaluated once.
        if not hasattr(self, "_paginator"):
            self._paginator = paginator.Paginator(self.items(), self.limit)
        return self._paginator
    paginator = property(_get_paginator)

    def get_urls(self, page=1, site=None):
        """Return a list of url-info dicts for `page` of this sitemap."""
        if site is None:
            if Site._meta.installed:
                try:
                    site = Site.objects.get_current()
                except Site.DoesNotExist:
                    pass
            if site is None:
                raise ImproperlyConfigured("In order to use Sitemaps you must either use the sites framework or pass in a Site or RequestSite object in your view code.")
        urls = []
        for item in self.paginator.page(page).object_list:
            loc = "http://%s%s" % (site.domain, self.__get('location', item))
            priority = self.__get('priority', item, None)
            url_info = {
                'location': loc,
                'lastmod': self.__get('lastmod', item, None),
                'changefreq': self.__get('changefreq', item, None),
                # BUG FIX: the old `priority is not None and priority or ''`
                # idiom rendered a valid priority of 0 as '' because 0 is
                # falsy; use a real conditional expression instead.
                'priority': str(priority) if priority is not None else '',
            }
            urls.append(url_info)
        return urls
class FlatPageSitemap(Sitemap):
    """Sitemap of the current site's public flatpages (no registration required)."""
    def items(self):
        current_site = Site.objects.get_current()
        return current_site.flatpage_set.filter(registration_required=False)
class GenericSitemap(Sitemap):
    """Sitemap driven by a generic-views style info_dict.

    `info_dict` must contain 'queryset' and may contain 'date_field',
    which names the attribute used for each item's lastmod.
    """
    priority = None
    changefreq = None

    def __init__(self, info_dict, priority=None, changefreq=None):
        # 'queryset' is required; 'date_field' is optional.
        self.queryset = info_dict['queryset']
        self.date_field = info_dict.get('date_field')
        self.priority = priority
        self.changefreq = changefreq

    def items(self):
        # Return a clone so the queryset is not evaluated prematurely.
        return self.queryset.filter()

    def lastmod(self, item):
        # Only meaningful when a date_field was configured.
        if self.date_field is None:
            return None
        return getattr(item, self.date_field)
|
tobycoder/webshop
|
refs/heads/master
|
bits/forms.py
|
1
|
from django import forms
from .models import ContactModel, bp_products, bp_users
MATEN = (('s', 'S - 15cm-16cm'),('m', 'M - 16.5cm-18cm'), ('l', 'L - 18cm-20cm'), ('xl', 'XL - 20cm-22cm'))
class ContactForm(forms.ModelForm):
    """Contact form backed by ContactModel (name, email, subject, message)."""
    class Meta:
        model = ContactModel
        fields = ('voornaam', 'email', 'onderwerp', 'text')
class ShoppingForm(forms.ModelForm):
    """Add-to-cart form: quantity and size, plus hidden product price/name."""
    quantity = forms.IntegerField()
    # Size choice; options come from the MATEN tuple defined above.
    maat = forms.ChoiceField(choices=MATEN)
    # Product price and name travel along with the submission as hidden fields.
    pr_prijs = forms.CharField(max_length=200, widget=forms.HiddenInput())
    pr_naam = forms.CharField(max_length=200, widget=forms.HiddenInput())
    class Meta:
        model = bp_products
        fields = ['pr_prijs', 'pr_naam']
class Login(forms.ModelForm):
    """Login form over bp_users; renders the password as a masked input.

    NOTE(review): building an auth form directly on the user model suggests
    plain-text password comparison in the view -- verify passwords are
    hashed/checked properly elsewhere.
    """
    class Meta:
        model = bp_users
        fields = ['username', 'password']
        widgets = {
            'password': forms.PasswordInput(),
        }
class Registreren(forms.ModelForm):
    """Registration form exposing every bp_users field, password masked.

    NOTE(review): "__all__" exposes every model field to the client --
    confirm no privileged fields (flags, ids) exist on bp_users.
    """
    class Meta:
        model = bp_users
        fields = "__all__"
        widgets = {
            'password': forms.PasswordInput(),
        }
|
guillaume-philippon/aquilon
|
refs/heads/master
|
sbin/aqd.py
|
1
|
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
# -- begin path_setup --
# Make the sibling lib/ directory importable regardless of where the
# script is installed, so the aquilon packages below can be resolved.
BINDIR = os.path.dirname(os.path.realpath(sys.argv[0]))
LIBDIR = os.path.join(BINDIR, "..", "lib")
if LIBDIR not in sys.path:
    sys.path.append(LIBDIR)
# -- end path_setup --
# Imported purely for side effects (hence the unused-import suppressions);
# presumably they set up/verify third-party dependencies -- TODO confirm.
import aquilon.aqdb.depends  # pylint: disable=W0611
import aquilon.worker.depends  # pylint: disable=W0611
from twisted.scripts import twistd
from aquilon.twisted_patches import updated_application_run
# Monkey-patch twistd's application runner with aqd's patched run()
# before handing control to the stock twistd entry point.
twistd._SomeApplicationRunner.run = updated_application_run
twistd.run()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.