repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
cisco-openstack/networking-cisco | refs/heads/staging/libertyplus | networking_cisco/tests/unit/cisco/cfg_agent/__init__.py | 12133432 | |
dprince/tripleo-heat-templates | refs/heads/master | tools/merge-new-params-nic-config-script.py | 1 | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import datetime
import os
import re
import shutil
import subprocess
import sys
import yaml
from tempfile import mkdtemp
DEFAULT_THT_DIR = '/usr/share/openstack-tripleo-heat-templates'
NIC_CONFIG_REFERENCE = 'single-nic-vlans'


def parse_opts(argv):
    """Parse command line options.

    :param argv: the full argument vector (``sys.argv``); element 0 is the
        program name and is skipped.
    :returns: an ``argparse.Namespace`` with the parsed options.
    """
    parser = argparse.ArgumentParser(
        description='Merge new NIC config template parameters into '
                    'existing NIC config template.')
    parser.add_argument('-r', '--roles-data', metavar='ROLES_DATA',
                        help="Relative path to the roles_data.yaml file.",
                        default=('%s/roles_data.yaml') % DEFAULT_THT_DIR)
    parser.add_argument('-n', '--network-data', metavar='NETWORK_DATA',
                        help="Relative path to the network_data.yaml file.",
                        default=('%s/network_data.yaml') % DEFAULT_THT_DIR)
    parser.add_argument('--role-name', metavar='ROLE-NAME',
                        help="Name of the role the NIC config is used for.",
                        required=True)
    parser.add_argument('-t', '--template', metavar='TEMPLATE_FILE',
                        help=("Existing NIC config template to merge "
                              "parameter too."),
                        required=True)
    parser.add_argument('--tht-dir', metavar='THT_DIR',
                        help=("Path to tripleo-heat-templates (THT) "
                              "directory"),
                        default=DEFAULT_THT_DIR)
    # BUG FIX: this option previously took a string value
    # (metavar=..., default=False), so any argument -- including the
    # literal string "False" -- evaluated truthy and discarded comments.
    # A boolean switch matches how the rest of the script tests it.
    parser.add_argument('--discard-comments', action='store_true',
                        default=False,
                        help="Discard comments from the template. (The "
                             "scripts functions to keep YAML file comments "
                             "in place, does not work in all scenarios.)")
    opts = parser.parse_args(argv[1:])
    return opts
# FIXME: This duplicates code from tools/yaml-nic-config-2-script.py, we should
# refactor to share the common code
def to_commented_yaml(filename):
    """Convert '#' comments in *filename* into parseable YAML entries.

    Each full-line comment becomes a ``comment<n>_<indent>: '<text>'``
    mapping entry and each trailing (inline) comment becomes an
    ``inline_comment<n>: '<text>'`` entry, so that comment text survives
    a YAML load/dump round trip.  ``to_normal_yaml`` is the inverse.
    Blank lines are dropped.  The file is rewritten in place and the
    converted text is also returned.
    """
    out_str = ''
    last_non_comment_spaces = ''
    with open(filename, 'r') as f:
        comment_count = 0
        for line in f:
            # skip blank line
            if line.isspace():
                continue
            char_count = 0
            spaces = ''
            for char in line:
                char_count += 1
                if char == ' ':
                    # accumulate leading indentation for this line
                    spaces += ' '
                    # NOTE(review): bare ``next`` is a no-op expression
                    # (just references the builtin); ``continue`` was
                    # probably intended, but control falls through to the
                    # next iteration safely anyway.
                    next
                elif char == '#':
                    # whole-line comment: encode its indent width into the
                    # key so to_normal_yaml can restore the indentation
                    comment_count += 1
                    comment = line[char_count:-1]
                    last_non_comment_spaces = spaces
                    out_str += "%scomment%i_%i: '%s'\n" % (
                        last_non_comment_spaces, comment_count, len(spaces),
                        comment)
                    break
                else:
                    # first real content character: emit the line verbatim
                    last_non_comment_spaces = spaces
                    out_str += line
                    # inline comments check
                    m = re.match(".*:.*#(.*)", line)
                    if m:
                        comment_count += 1
                        out_str += "%s inline_comment%i: '%s'\n" % (
                            last_non_comment_spaces, comment_count, m.group(1))
                    break
    with open(filename, 'w') as f:
        f.write(out_str)
    return out_str
# FIXME: This duplicates code from tools/yaml-nic-config-2-script.py, we should
# refactor to share the common code
def to_normal_yaml(filename):
    """Convert ``comment<n>_<indent>`` entries back to '#' comments.

    Reverse of ``to_commented_yaml``: rewrites *filename* in place,
    restoring full-line and inline comments, and returns the converted
    text.  Also strips ``get_input:`` wrappers, which the
    run-os-net-config.sh script does not support.
    """
    with open(filename, 'r') as f:
        data = f.read()
    out_str = ''
    next_line_break = False
    for line in data.split('\n'):
        # get_input not supported by run-os-net-config.sh script
        line = line.replace('get_input: ', '')
        # Normal comments
        m = re.match(" +comment[0-9]+_([0-9]+): '(.*)'.*", line)
        # Inline comments
        i = re.match(" +inline_comment[0-9]+: '(.*)'.*", line)
        if m:
            if next_line_break:
                out_str += '\n'
                next_line_break = False
            # group(1) carries the comment's original indent width
            for x in range(0, int(m.group(1))):
                out_str += " "
            out_str += "#%s\n" % m.group(2)
        elif i:
            # append the inline comment to the pending (unterminated) line
            out_str += " #%s\n" % i.group(1)
            next_line_break = False
        else:
            if next_line_break:
                out_str += '\n'
            out_str += line
            # defer the newline so a following inline_comment entry can be
            # glued onto this line
            next_line_break = True
    if next_line_break:
        out_str += '\n'
    with open(filename, 'w') as f:
        f.write(out_str)
    return out_str
# FIXME: Some of this duplicates code from build_endpoint_map.py, we should
# refactor to share the common code
class TemplateDumper(yaml.SafeDumper):
    """YAML dumper that keeps OrderedDict ordering and folds long strings."""

    def represent_ordered_dict(self, data):
        # Emit OrderedDict items in insertion order as a plain mapping.
        return self.represent_dict(data.items())

    def description_presenter(self, data):
        # Strings of 80 chars or less stay plain; longer ones use the
        # folded scalar style ('>') for readability.
        if not len(data) > 80:
            return self.represent_scalar('tag:yaml.org,2002:str', data)
        return self.represent_scalar('tag:yaml.org,2002:str', data, style='>')
# FIXME: This duplicates code from tools/yaml-nic-config-2-script.py, we should
# refactor to share the common code
# We load mappings into OrderedDict to preserve their order
class TemplateLoader(yaml.SafeLoader):
    """YAML loader that returns mappings as OrderedDict, preserving order."""

    def construct_mapping(self, node):
        # flatten_mapping resolves merge keys (<<) before pair construction.
        self.flatten_mapping(node)
        return collections.OrderedDict(self.construct_pairs(node))
# Register string representers so long descriptions are folded by
# description_presenter.  On Python 3 both str and bytes are covered;
# the else branch is the legacy Python 2 path (``unicode`` only exists
# there, so it is never evaluated under Python 3).
if sys.version_info.major >= 3:
    TemplateDumper.add_representer(str, TemplateDumper.description_presenter)
    TemplateDumper.add_representer(bytes,
                                   TemplateDumper.description_presenter)
else:
    TemplateDumper.add_representer(str, TemplateDumper.description_presenter)
    TemplateDumper.add_representer(unicode,
                                   TemplateDumper.description_presenter)

# Dump OrderedDicts as ordinary mappings, keeping their order.
TemplateDumper.add_representer(collections.OrderedDict,
                               TemplateDumper.represent_ordered_dict)

# Load every mapping through construct_mapping (returns OrderedDict).
TemplateLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                               TemplateLoader.construct_mapping)
# FIXME: This duplicates code from tools/yaml-nic-config-2-script.py, we should
# refactor to share the common code
def write_template(template, filename=None):
    """Serialize *template* to *filename* using TemplateDumper.

    Block style is forced (default_flow_style=False) with a 120 column
    width to match THT formatting conventions.
    """
    with open(filename, 'w') as f:
        yaml.dump(template, f, TemplateDumper, width=120,
                  default_flow_style=False)
def process_templates_and_get_reference_parameters():
    """Render the THT templates and return the reference NIC parameters.

    Runs tools/process-templates.py to render the jinja2 templates into a
    temporary directory, then loads the ``parameters`` section of the
    reference NIC config (single-nic-vlans) for the requested role.  If
    the role sets ``deprecated_nic_config_name`` that file name is used.

    :returns: dict mapping parameter name -> parameter definition
    :raises RuntimeError: if template processing fails or the role is not
        present in the roles data file.
    """
    temp_dir = mkdtemp(dir='/tmp')
    try:
        executable = OPTS.tht_dir + '/tools/process-templates.py'
        cmd = [executable,
               '--roles-data ' + OPTS.roles_data,
               '--base_path ' + OPTS.tht_dir,
               '--network-data ' + OPTS.network_data,
               '--output-dir ' + temp_dir]
        child = subprocess.Popen(' '.join(cmd), shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 universal_newlines=True)
        out, err = child.communicate()
        if child.returncode != 0:
            raise RuntimeError('Error processing templates: %s' % err)
        # If deprecated_nic_config_names is set for role the deprecated name
        # must be used when loading the reference file.
        with open(OPTS.roles_data) as roles_data_file:
            roles_data = yaml.safe_load(roles_data_file)
        try:
            nic_config_name = next(x.get('deprecated_nic_config_name',
                                         OPTS.role_name.lower() + '.yaml')
                                   for x in roles_data
                                   if x['name'] == OPTS.role_name)
        except StopIteration:
            raise RuntimeError(
                'The role: {role_name} is not defined in roles '
                'data file: {roles_data_file}'.format(
                    role_name=OPTS.role_name,
                    roles_data_file=OPTS.roles_data))
        # Fixed typo: the local variable was spelled ``refernce_file``.
        reference_file = '/'.join([temp_dir, 'network/config',
                                   NIC_CONFIG_REFERENCE, nic_config_name])
        with open(reference_file) as reference:
            reference_template = yaml.safe_load(reference)
        return reference_template['parameters']
    finally:
        # BUG FIX: clean up the temporary render directory even when an
        # error is raised (it previously leaked on every failure path).
        shutil.rmtree(temp_dir)
def validate_template():
    """Validate that OPTS.template exists and is a regular file.

    :raises RuntimeError: if the path does not exist or is not a file.
    """
    if not os.path.exists(OPTS.template):
        # BUG FIX: message previously said "Template not provided." even
        # though the option was given but points to a missing path.
        raise RuntimeError('Template %s does not exist.' % OPTS.template)
    if not os.path.isfile(OPTS.template):
        # BUG FIX: the original message contained a %s placeholder but the
        # path was never interpolated.
        raise RuntimeError('Template %s is not a file.' % OPTS.template)
def backup_template():
    """Save a timestamped copy of the template next to the original.

    The backup is named ``<realpath(template)>.<YYYYmmddHHMMSS>``.

    :raises RuntimeError: if a backup with the same timestamp already
        exists, to avoid silently overwriting a previous backup.
    """
    extension = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    backup_filename = os.path.realpath(OPTS.template) + '.' + extension
    if os.path.exists(backup_filename):
        # Fixed typo in the error message: "Backupe" -> "Backup".
        raise RuntimeError('Backup file: %s already exists. Aborting!'
                           % backup_filename)
    shutil.copyfile(OPTS.template, backup_filename)
    print('The original template was saved as: %s' % backup_filename)
def merge_from_processed(reference_params):
    """Merge parameters from the reference template into OPTS.template.

    Only parameters missing from the existing template are added; existing
    parameter definitions are left untouched.  The template is rewritten
    in place via write_template (which preserves mapping order).

    :param reference_params: dict of parameter name -> definition loaded
        from the rendered reference template.
    """
    # BUG FIX: use a context manager so the file handle is closed
    # deterministically (the handle returned by open() was never closed).
    with open(OPTS.template) as f:
        template = yaml.load(f.read(), Loader=TemplateLoader)
    for param in reference_params:
        if param not in template['parameters']:
            template['parameters'][param] = reference_params[param]
    write_template(template, filename=OPTS.template)
    # Fixed typo in the message: "update" -> "updated".
    print('The updated template was saved as: %s' % OPTS.template)
# -- Script entry point -------------------------------------------------------
# Parse options, sanity-check and back up the target template, then merge
# in any parameters the rendered reference template defines that the
# existing template lacks.
OPTS = parse_opts(sys.argv)
validate_template()
backup_template()
if not OPTS.discard_comments:
    # Convert comments '# ...' into 'comments<num>: ...' YAML so that the info
    # is not lost when loading the data.
    to_commented_yaml(OPTS.template)
reference_params = process_templates_and_get_reference_parameters()
merge_from_processed(reference_params)
if not OPTS.discard_comments:
    # Convert previously converted comments, 'comments<num>: ...' YAML back to
    # normal #commented YAML
    to_normal_yaml(OPTS.template)
|
aarestad/advent-of-code-2015 | refs/heads/master | 2015/12.py | 1 | import json
def parse_as_dict(d):
    """Recurse into a JSON object, skipping the whole object when any of
    its direct values equals 'red' (Advent of Code 2015, day 12 part 2)."""
    # reject dict if it has a 'red' value
    if 'red' in d.values():
        return
    for child in d.values():
        parse_object(child)

total_sum = 0

def parse_object(o):
    """Add every integer reachable from *o* into the module-level
    ``total_sum`` accumulator."""
    global total_sum
    if isinstance(o, dict):
        parse_as_dict(o)
    elif isinstance(o, list):
        for element in o:
            parse_object(element)
    elif isinstance(o, int):
        total_sum += o
# Sum every number in the puzzle input, ignoring objects containing 'red'.
parse_object(json.load(open('input_12.json')))
print(total_sum)
# with open('input_12.json') as input_file:
# json_in = input_file.readline()
#
# import re
# int_strings = re.finditer(r'-?[0-9]+', json_in)
#
# total_sum = 0
#
# for s in int_strings:
# total_sum += int(s.group(0))
#
# print total_sum
|
nlalevee/spark | refs/heads/master | python/pyspark/accumulators.py | 77 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
>>> from pyspark.context import SparkContext
>>> sc = SparkContext('local', 'test')
>>> a = sc.accumulator(1)
>>> a.value
1
>>> a.value = 2
>>> a.value
2
>>> a += 5
>>> a.value
7
>>> sc.accumulator(1.0).value
1.0
>>> sc.accumulator(1j).value
1j
>>> rdd = sc.parallelize([1,2,3])
>>> def f(x):
... global a
... a += x
>>> rdd.foreach(f)
>>> a.value
13
>>> b = sc.accumulator(0)
>>> def g(x):
... b.add(x)
>>> rdd.foreach(g)
>>> b.value
6
>>> from pyspark.accumulators import AccumulatorParam
>>> class VectorAccumulatorParam(AccumulatorParam):
... def zero(self, value):
... return [0.0] * len(value)
... def addInPlace(self, val1, val2):
... for i in range(len(val1)):
... val1[i] += val2[i]
... return val1
>>> va = sc.accumulator([1.0, 2.0, 3.0], VectorAccumulatorParam())
>>> va.value
[1.0, 2.0, 3.0]
>>> def g(x):
... global va
... va += [x] * 3
>>> rdd.foreach(g)
>>> va.value
[7.0, 8.0, 9.0]
>>> rdd.map(lambda x: a.value).collect() # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
Py4JJavaError:...
>>> def h(x):
... global a
... a.value = 7
>>> rdd.foreach(h) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
Py4JJavaError:...
>>> sc.accumulator([1.0, 2.0, 3.0]) # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
TypeError:...
"""
import sys
import select
import struct
if sys.version < '3':
import SocketServer
else:
import socketserver as SocketServer
import threading
from pyspark.cloudpickle import CloudPickler
from pyspark.serializers import read_int, PickleSerializer
__all__ = ['Accumulator', 'AccumulatorParam']


# Serializer used to decode (accumulator id, update) pairs sent by workers.
pickleSer = PickleSerializer()

# Holds accumulators registered on the current machine, keyed by ID. This is then used to send
# the local accumulator updates back to the driver program at the end of a task.
_accumulatorRegistry = {}
def _deserialize_accumulator(aid, zero_value, accum_param):
    """Rebuild an Accumulator on a worker from its pickled form.

    The rebuilt accumulator starts from the zero value and is flagged as
    deserialized, so reading ``.value`` on the worker raises.  It is also
    registered locally so task updates can be shipped back to the driver.
    """
    from pyspark.accumulators import _accumulatorRegistry
    accum = Accumulator(aid, zero_value, accum_param)
    accum._deserialized = True
    _accumulatorRegistry[aid] = accum
    return accum
class Accumulator(object):

    """
    A shared variable that can be accumulated, i.e., has a commutative and associative "add"
    operation. Worker tasks on a Spark cluster can add values to an Accumulator with the C{+=}
    operator, but only the driver program is allowed to access its value, using C{value}.
    Updates from the workers get propagated automatically to the driver program.

    While C{SparkContext} supports accumulators for primitive data types like C{int} and
    C{float}, users can also define accumulators for custom types by providing a custom
    L{AccumulatorParam} object. Refer to the doctest of this module for an example.
    """

    def __init__(self, aid, value, accum_param):
        """Create a new Accumulator with a given initial value and AccumulatorParam object"""
        from pyspark.accumulators import _accumulatorRegistry
        self.aid = aid
        self.accum_param = accum_param
        self._value = value
        self._deserialized = False
        _accumulatorRegistry[aid] = self

    def __reduce__(self):
        """Custom serialization; saves the zero value from our AccumulatorParam"""
        param = self.accum_param
        zero = param.zero(self._value)
        return _deserialize_accumulator, (self.aid, zero, param)

    def _assert_driver(self):
        # Value access is only legal on the driver; deserialized copies
        # live on workers.
        if self._deserialized:
            raise Exception("Accumulator.value cannot be accessed inside tasks")

    @property
    def value(self):
        """Get the accumulator's value; only usable in driver program"""
        self._assert_driver()
        return self._value

    @value.setter
    def value(self, value):
        """Sets the accumulator's value; only usable in driver program"""
        self._assert_driver()
        self._value = value

    def add(self, term):
        """Adds a term to this accumulator's value"""
        self._value = self.accum_param.addInPlace(self._value, term)

    def __iadd__(self, term):
        """The += operator; adds a term to this accumulator's value"""
        self.add(term)
        return self

    def __str__(self):
        return str(self._value)

    def __repr__(self):
        return "Accumulator<id=%i, value=%s>" % (self.aid, self._value)
class AccumulatorParam(object):

    """
    Helper object that defines how to accumulate values of a given type.

    Subclasses must implement both operations: producing a "zero" element
    and combining two values.
    """

    def zero(self, value):
        """
        Provide a "zero value" for the type, compatible in dimensions with the
        provided C{value} (e.g., a zero vector)
        """
        raise NotImplementedError

    def addInPlace(self, value1, value2):
        """
        Add two values of the accumulator's data type, returning a new value;
        for efficiency, can also update C{value1} in place and return it.
        """
        raise NotImplementedError
class AddingAccumulatorParam(AccumulatorParam):

    """
    An AccumulatorParam that uses the + operators to add values. Designed for simple types
    such as integers, floats, and lists. Requires the zero value for the underlying type
    as a parameter.
    """

    def __init__(self, zero_value):
        # The neutral element returned by zero(), whatever value is passed in.
        self.zero_value = zero_value

    def zero(self, value):
        # The supplied value is ignored; the configured zero is always used.
        return self.zero_value

    def addInPlace(self, value1, value2):
        # ``+=`` intentionally mutates mutable types (e.g. lists) in place.
        value1 += value2
        return value1


# Singleton accumulator params for some standard types
INT_ACCUMULATOR_PARAM = AddingAccumulatorParam(0)
FLOAT_ACCUMULATOR_PARAM = AddingAccumulatorParam(0.0)
COMPLEX_ACCUMULATOR_PARAM = AddingAccumulatorParam(0.0j)
class _UpdateRequestHandler(SocketServer.StreamRequestHandler):

    """
    This handler will keep polling updates from the same socket until the
    server is shutdown.
    """

    def handle(self):
        # Applies accumulator updates streamed over the socket until the
        # owning AccumulatorServer sets server_shutdown.
        from pyspark.accumulators import _accumulatorRegistry
        while not self.server.server_shutdown:
            # Poll every 1 second for new data -- don't block in case of shutdown.
            r, _, _ = select.select([self.rfile], [], [], 1)
            if self.rfile in r:
                num_updates = read_int(self.rfile)
                for _ in range(num_updates):
                    # Each update is a pickled (accumulator id, value) pair.
                    (aid, update) = pickleSer._read_with_length(self.rfile)
                    _accumulatorRegistry[aid] += update
                # Write a byte in acknowledgement
                self.wfile.write(struct.pack("!b", 1))
class AccumulatorServer(SocketServer.TCPServer):

    """
    A simple TCP server that intercepts shutdown() in order to interrupt
    our continuous polling on the handler.
    """

    # Checked by _UpdateRequestHandler.handle() on every poll iteration.
    server_shutdown = False

    def shutdown(self):
        # Signal the handler loop to exit, then perform the normal
        # TCPServer shutdown and release the listening socket.
        self.server_shutdown = True
        SocketServer.TCPServer.shutdown(self)
        self.server_close()
def _start_update_server():
    """Start a TCP server to receive accumulator updates in a daemon thread, and returns it"""
    # Port 0 lets the OS pick a free port; callers can read it back from
    # server.server_address.
    server = AccumulatorServer(("localhost", 0), _UpdateRequestHandler)
    thread = threading.Thread(target=server.serve_forever)
    thread.daemon = True  # don't keep the interpreter alive on exit
    thread.start()
    return server
if __name__ == "__main__":
    # Run the module doctests (they require a working SparkContext).
    import doctest
    (failure_count, test_count) = doctest.testmod()
    if failure_count:
        # Non-zero exit status so CI notices doctest failures.
        exit(-1)
|
CoDEmanX/ArangoDB | refs/heads/devel | 3rdParty/V8-4.3.61/build/gyp/test/win/gyptest-midl-rules.py | 141 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Handle default .idl build rules.
"""
import TestGyp
import sys
# MIDL rules only exist on Windows; elsewhere this test is a silent no-op
# (test.pass_test() is never reached).
if sys.platform == 'win32':
    test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

    CHDIR = 'idl-rules'
    test.run_gyp('basic-idl.gyp', chdir=CHDIR)
    # Build the default .idl rule for both architectures.
    for platform in ['Win32', 'x64']:
        test.set_configuration('Debug|%s' % platform)
        test.build('basic-idl.gyp', test.ALL, chdir=CHDIR)

    test.pass_test()
|
chaffra/sympy | refs/heads/master | sympy/conftest.py | 89 | from __future__ import print_function, division
import sys
sys._running_pytest = True
from distutils.version import LooseVersion as V
import pytest
from sympy.core.cache import clear_cache
import re
# Matches "--split a/b": group 1 is the 1-based slice index, group 2 the
# total slice count (b must be a positive integer).
sp = re.compile(r'([0-9]+)/([1-9][0-9]*)')


def process_split(session, config, items):
    """Keep only the i-th of t equal slices of *items*, in place.

    Reads the ``--split`` option ("i/t").  Does nothing when the option
    is unset or empty.

    :raises ValueError: for malformed values or when i is outside 1..t.
    """
    split = config.getoption("--split")
    if not split:
        return
    m = sp.match(split)
    if not m:
        raise ValueError("split must be a string of the form a/b "
                         "where a and b are ints.")
    i, t = map(int, m.groups())
    # BUG FIX: the original silently produced an empty or wrong selection
    # for i == 0 or i > t; reject out-of-range indices explicitly.
    if not 1 <= i <= t:
        raise ValueError("split index %d is out of range 1..%d" % (i, t))
    start, end = (i - 1) * len(items) // t, i * len(items) // t
    if i < t:
        # remove elements from end of list first
        del items[end:]
    del items[:start]
def pytest_report_header(config):
    """pytest hook: report architecture, cache and ground-type settings."""
    from sympy.utilities.misc import ARCH
    s = "architecture: %s\n" % ARCH
    from sympy.core.cache import USE_CACHE
    s += "cache: %s\n" % USE_CACHE
    from sympy.core.compatibility import GROUND_TYPES, HAS_GMPY
    version = ''
    if GROUND_TYPES == 'gmpy':
        # HAS_GMPY distinguishes the gmpy 1.x and gmpy2 bindings.
        if HAS_GMPY == 1:
            import gmpy
        elif HAS_GMPY == 2:
            import gmpy2 as gmpy
        version = gmpy.version()
    s += "ground types: %s %s\n" % (GROUND_TYPES, version)
    return s
def pytest_terminal_summary(terminalreporter):
    """pytest hook: print a loud warning when the run had errors/failures."""
    if (terminalreporter.stats.get('error', None) or
            terminalreporter.stats.get('failed', None)):
        terminalreporter.write_sep(
            ' ', 'DO *NOT* COMMIT!', red=True, bold=True)
def pytest_addoption(parser):
    """pytest hook: register the --split option (e.g. "1/4")."""
    parser.addoption("--split", action="store", default="",
                     help="split tests")
def pytest_collection_modifyitems(session, config, items):
    """pytest hook: trim the collected items according to --split."""
    # handle splits
    process_split(session, config, items)
@pytest.fixture(autouse=True, scope='module')
def file_clear_cache():
    # Start every test module with a cold sympy cache for reproducibility.
    clear_cache()


@pytest.fixture(autouse=True, scope='module')
def check_disabled(request):
    # Honour module-level ``disabled``/``ipython`` flags set by test files.
    if getattr(request.module, 'disabled', False):
        pytest.skip("test requirements not met.")
    elif getattr(request.module, 'ipython', False):
        # need to check version and options for ipython tests
        if (V(pytest.__version__) < '2.6.3' and
                pytest.config.getvalue('-s') != 'no'):
            pytest.skip("run py.test with -s or upgrade to newer version.")
|
cnelsonsic/cardscript | refs/heads/master | cardscript/cards/__init__.py | 105 | from . import *
|
Celedhrim/persomov | refs/heads/master | libs/suds/mx/core.py | 211 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides I{marshaller} core classes.
"""
from logging import getLogger
from suds import *
from suds.mx import *
from suds.mx.appender import ContentAppender
from suds.sax.element import Element
from suds.sax.document import Document
from suds.sudsobject import Property
log = getLogger(__name__)
class Core:
    """
    An I{abstract} marshaller. This class implement the core
    functionality of the marshaller.

    Subclasses customize behavior by overriding the hook methods
    (node, start, suspend, resume, end, setnil, setdefault, optional);
    the default implementations here are mostly no-ops.

    @ivar appender: A content appender.
    @type appender: L{ContentAppender}
    """

    def __init__(self):
        """
        """
        self.appender = ContentAppender(self)

    def process(self, content):
        """
        Process (marshal) the tag with the specified value using the
        optional type information.
        @param content: The content to process.
        @type content: L{Object}
        """
        log.debug('processing:\n%s', content)
        self.reset()
        if content.tag is None:
            # Default the tag to the value's class name.
            content.tag = content.value.__class__.__name__
        document = Document()
        if isinstance(content.value, Property):
            # NOTE(review): ``root`` is never used afterwards; the call to
            # self.node() is kept because subclasses override node() and
            # may rely on its side effects -- confirm before removing.
            root = self.node(content)
            self.append(document, content)
        else:
            self.append(document, content)
        return document.root()

    def append(self, parent, content):
        """
        Append the specified L{content} to the I{parent}.
        @param parent: The parent node to append to.
        @type parent: L{Element}
        @param content: The content to append.
        @type content: L{Object}
        """
        log.debug('appending parent:\n%s\ncontent:\n%s', parent, content)
        # start() may veto the append (returns False to skip).
        if self.start(content):
            self.appender.append(parent, content)
            self.end(parent, content)

    def reset(self):
        """
        Reset the marshaller.
        """
        pass

    def node(self, content):
        """
        Create and return an XML node.
        @param content: The content for which proccessing has been suspended.
        @type content: L{Object}
        @return: An element.
        @rtype: L{Element}
        """
        return Element(content.tag)

    def start(self, content):
        """
        Appending this content has started.
        @param content: The content for which proccessing has started.
        @type content: L{Content}
        @return: True to continue appending
        @rtype: boolean
        """
        return True

    def suspend(self, content):
        """
        Appending this content has suspended.
        @param content: The content for which proccessing has been suspended.
        @type content: L{Content}
        """
        pass

    def resume(self, content):
        """
        Appending this content has resumed.
        @param content: The content for which proccessing has been resumed.
        @type content: L{Content}
        """
        pass

    def end(self, parent, content):
        """
        Appending this content has ended.
        @param parent: The parent node ending.
        @type parent: L{Element}
        @param content: The content for which proccessing has ended.
        @type content: L{Content}
        """
        pass

    def setnil(self, node, content):
        """
        Set the value of the I{node} to nill.
        @param node: A I{nil} node.
        @type node: L{Element}
        @param content: The content to set nil.
        @type content: L{Content}
        """
        pass

    def setdefault(self, node, content):
        """
        Set the value of the I{node} to a default value.
        @param node: A I{nil} node.
        @type node: L{Element}
        @param content: The content to set the default value.
        @type content: L{Content}
        @return: The default.
        """
        pass

    def optional(self, content):
        """
        Get whether the specified content is optional.
        @param content: The content which to check.
        @type content: L{Content}
        """
        return False
|
bartTC/django-wakawaka | refs/heads/master | wakawaka/tests/test_page_list.py | 1 | from django.urls import reverse
from wakawaka.tests.base import BaseTestCase
class PageListTestCase(BaseTestCase):
    """
    The Revision List displays all Pages.
    """

    def test_pagelist(self):
        """The page list view lists every created wiki page by name."""
        # Create a couple of Wiki pages
        self.create_wikipage('WikiIndex', 'Some content')
        self.create_wikipage('CarrotCake', 'Some content')
        self.create_wikipage('BeanSoup', 'Some content')

        response = self.client.get(reverse('wakawaka_page_list'))
        self.assertContains(response, 'WikiIndex')
        self.assertContains(response, 'CarrotCake')
        self.assertContains(response, 'BeanSoup')
|
gausspy/gausspy | refs/heads/master | docs/conf.py | 1 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
#
# Astropy documentation build configuration file.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this file.
#
# All configuration values have a default. Some values are defined in
# the global Astropy configuration which is loaded here before anything else.
# See astropy.sphinx.conf for which values are set there.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('..'))
# IMPORTANT: the above commented section was generated by sphinx-quickstart, but
# is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
# commented out with this explanation to make it clear why this should not be
# done. If the sys.path entry above is added, when the astropy.sphinx.conf
# import occurs, it will import the *source* version of astropy instead of the
# version installed (if invoked as "make html" or directly with sphinx), or the
# version in the build directory (if "python setup.py build_sphinx" is used).
# Thus, any C-extensions that are needed to build the documentation will *not*
# be accessible, and the documentation will not build correctly.
import os
import sys
import datetime
from importlib import import_module
# Pull in the shared astropy Sphinx configuration; fail fast with a clear
# message when the sphinx-astropy helper package is missing.
try:
    from sphinx_astropy.conf.v1 import *  # noqa
except ImportError:
    print('ERROR: the documentation requires the sphinx-astropy package to be installed')
    sys.exit(1)

# Get configuration information from setup.cfg
from configparser import ConfigParser
conf = ConfigParser()
conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
# All [metadata] keys (name, author, ...) become available as a plain dict.
setup_cfg = dict(conf.items('metadata'))
# -- General configuration ----------------------------------------------------
# By default, highlight as Python 3.
highlight_language = 'python3'
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.2'
# To perform a Sphinx version check that needs to be more specific than
# major.minor, call `check_sphinx_version("x.y.z")` here.
# check_sphinx_version("1.2.1")
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns.append('_templates')
# This is added to the end of RST files - a good place to put substitutions to
# be used globally.
rst_epilog += """
"""
# -- Project information ------------------------------------------------------

# This does not *have* to match the package name, but typically does
project = setup_cfg['name']
author = setup_cfg['author']
copyright = '{0}, {1}'.format(
    datetime.datetime.now().year, setup_cfg['author'])

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.

# Import the package so its __version__ attribute can be read below.
import_module(setup_cfg['name'])
package = sys.modules[setup_cfg['name']]
# The short X.Y version.
version = package.__version__.split('-', 1)[0]
# The full version, including alpha/beta/rc tags.
release = package.__version__
# -- Options for HTML output --------------------------------------------------
# A NOTE ON HTML THEMES
# The global astropy configuration uses a custom theme, 'bootstrap-astropy',
# which is installed along with astropy. A different theme can be used or
# the options for this theme can be modified by overriding some of the
# variables set in the global configuration. The variables set in the
# global configuration are listed below, commented out.
# Add any paths that contain custom themes here, relative to this directory.
# To use a different custom theme, add the directory containing the theme.
#html_theme_path = []
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. To override the custom theme, set this to the
# name of a builtin theme or the name of a custom theme in html_theme_path.
#html_theme = None
html_theme_options = {
'logotext1': 'GaussPy', # white, semi-bold
'logotext2': '', # orange, light
'logotext3': ':docs' # white, light
}
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = ''
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = ''
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = ''
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} v{1}'.format(project, release)
# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'
# -- Options for LaTeX output -------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [('index', project + '.tex', project + u' Documentation',
author, 'manual')]
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), project + u' Documentation',
[author], 1)]
# -- Options for the edit_on_github extension ---------------------------------

# NOTE(review): eval() on a config string is risky and raises TypeError when
# the 'edit_on_github' key is missing (eval(None)); comparing against the
# string 'True' would be safer.  Left as-is to preserve behavior.
if eval(setup_cfg.get('edit_on_github')):
    extensions += ['sphinx_astropy.ext.edit_on_github']

    # Link "Edit on GitHub" to the released tag, or master for dev builds.
    versionmod = __import__(setup_cfg['package_name'] + '.version')
    edit_on_github_project = setup_cfg['github_project']
    if versionmod.version.release:
        edit_on_github_branch = "v" + versionmod.version.version
    else:
        edit_on_github_branch = "master"

    edit_on_github_source_root = ""
    edit_on_github_doc_root = "docs"
# -- Resolving issue number to links in changelog -----------------------------
github_issues_url = 'https://github.com/{0}/issues/'.format(setup_cfg['github_project'])
# -- Turn on nitpicky mode for sphinx (to warn about references not found) ----
#
# nitpicky = True
# nitpick_ignore = []
#
# Some warnings are impossible to suppress, and you can list specific references
# that should be ignored in a nitpick-exceptions file which should be inside
# the docs/ directory. The format of the file should be:
#
# <type> <class>
#
# for example:
#
# py:class astropy.io.votable.tree.Element
# py:class astropy.io.votable.tree.SimpleElement
# py:class astropy.io.votable.tree.SimpleElementWithContent
#
# Uncomment the following lines to enable the exceptions:
#
# for line in open('nitpick-exceptions'):
# if line.strip() == "" or line.startswith("#"):
# continue
# dtype, target = line.split(None, 1)
# target = target.strip()
# nitpick_ignore.append((dtype, six.u(target)))
|
debasishm89/OpenXMolar | refs/heads/master | ExtDepLibs/winappdbg/breakpoint.py | 1 | #!/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2016, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Breakpoints.
@group Breakpoints:
Breakpoint, CodeBreakpoint, PageBreakpoint, HardwareBreakpoint,
BufferWatch, Hook, ApiHook
@group Warnings:
BreakpointWarning, BreakpointCallbackWarning
"""
__all__ = [
# Base class for breakpoints
'Breakpoint',
# Breakpoint implementations
'CodeBreakpoint',
'PageBreakpoint',
'HardwareBreakpoint',
# Hooks and watches
'Hook',
'ApiHook',
'BufferWatch',
# Warnings
'BreakpointWarning',
'BreakpointCallbackWarning',
]
import win32
from process import Process, Thread
from util import DebugRegister, MemoryAddresses
from textio import HexDump
import ctypes
import warnings
import traceback
# Cygwin compatibility.
try:
WindowsError
except NameError:
from winappdbg.win32 import WindowsError
#==============================================================================
class BreakpointWarning (UserWarning):
    """
    Warning category used to report non-fatal, breakpoint-related
    errors (for example, overlapping or overwritten breakpoints).
    """
class BreakpointCallbackWarning (RuntimeWarning):
    """
    Warning category used to report uncaught exceptions raised by a
    breakpoint's user-defined condition or action callback.
    """
#==============================================================================
class Breakpoint (object):
    """
    Base class for breakpoints.
    Here's the breakpoints state machine.
    @see: L{CodeBreakpoint}, L{PageBreakpoint}, L{HardwareBreakpoint}
    @group Breakpoint states:
        DISABLED, ENABLED, ONESHOT, RUNNING
    @group State machine:
        hit, disable, enable, one_shot, running,
        is_disabled, is_enabled, is_one_shot, is_running,
        get_state, get_state_name
    @group Information:
        get_address, get_size, get_span, is_here
    @group Conditional breakpoints:
        is_conditional, is_unconditional,
        get_condition, set_condition, eval_condition
    @group Automatic breakpoints:
        is_automatic, is_interactive,
        get_action, set_action, run_action
    @cvar DISABLED: I{Disabled} S{->} Enabled, OneShot
    @cvar ENABLED: I{Enabled} S{->} I{Running}, Disabled
    @cvar ONESHOT: I{OneShot} S{->} I{Disabled}
    @cvar RUNNING: I{Running} S{->} I{Enabled}, Disabled
    @type DISABLED: int
    @type ENABLED: int
    @type ONESHOT: int
    @type RUNNING: int
    @type stateNames: dict E{lb} int S{->} str E{rb}
    @cvar stateNames: User-friendly names for each breakpoint state.
    @type typeName: str
    @cvar typeName: User friendly breakpoint type string.
    """

    # I don't think transitions Enabled <-> OneShot should be allowed... plus
    # it would require special handling to avoid setting the same bp twice
    DISABLED = 0
    ENABLED  = 1
    ONESHOT  = 2
    RUNNING  = 3

    typeName = 'breakpoint'

    stateNames = {
        DISABLED : 'disabled',
        ENABLED  : 'enabled',
        ONESHOT  : 'one shot',
        RUNNING  : 'running',
    }

    def __init__(self, address, size = 1, condition = True, action = None):
        """
        Breakpoint object.
        @type address: int
        @param address: Memory address for breakpoint.
        @type size: int
        @param size: Size of breakpoint in bytes (defaults to 1).
        @type condition: function
        @param condition: (Optional) Condition callback function.
            The callback signature is::
                def condition_callback(event):
                    return True     # returns True or False
            Where B{event} is an L{Event} object,
            and the return value is a boolean
            (C{True} to dispatch the event, C{False} otherwise).
        @type action: function
        @param action: (Optional) Action callback function.
            If specified, the event is handled by this callback instead of
            being dispatched normally.
            The callback signature is::
                def action_callback(event):
                    pass        # no return value
            Where B{event} is an L{Event} object.
        """
        self.__address = address
        self.__size    = size
        self.__state   = self.DISABLED      # all breakpoints start disabled
        self.set_condition(condition)
        self.set_action(action)

    def __repr__(self):
        if self.is_disabled():
            state = 'Disabled'
        else:
            state = 'Active (%s)' % self.get_state_name()
        if self.is_conditional():
            condition = 'conditional'
        else:
            condition = 'unconditional'
        name = self.typeName
        size = self.get_size()
        if size == 1:
            address = HexDump.address( self.get_address() )
        else:
            begin   = self.get_address()
            end     = begin + size
            begin   = HexDump.address(begin)
            end     = HexDump.address(end)
            address = "range %s-%s" % (begin, end)
        msg = "<%s %s %s at remote address %s>"
        msg = msg % (state, condition, name, address)
        return msg

#------------------------------------------------------------------------------

    def is_disabled(self):
        """
        @rtype: bool
        @return: C{True} if the breakpoint is in L{DISABLED} state.
        """
        return self.get_state() == self.DISABLED

    def is_enabled(self):
        """
        @rtype: bool
        @return: C{True} if the breakpoint is in L{ENABLED} state.
        """
        return self.get_state() == self.ENABLED

    def is_one_shot(self):
        """
        @rtype: bool
        @return: C{True} if the breakpoint is in L{ONESHOT} state.
        """
        return self.get_state() == self.ONESHOT

    def is_running(self):
        """
        @rtype: bool
        @return: C{True} if the breakpoint is in L{RUNNING} state.
        """
        return self.get_state() == self.RUNNING

    def is_here(self, address):
        """
        @rtype: bool
        @return: C{True} if the address is within the range of the breakpoint.
        """
        begin = self.get_address()
        end   = begin + self.get_size()
        return begin <= address < end

    def get_address(self):
        """
        @rtype: int
        @return: The target memory address for the breakpoint.
        """
        return self.__address

    def get_size(self):
        """
        @rtype: int
        @return: The size in bytes of the breakpoint.
        """
        return self.__size

    def get_span(self):
        """
        @rtype: tuple( int, int )
        @return:
            Starting and ending address of the memory range
            covered by the breakpoint.
        """
        address = self.get_address()
        size    = self.get_size()
        return ( address, address + size )

    def get_state(self):
        """
        @rtype: int
        @return: The current state of the breakpoint
            (L{DISABLED}, L{ENABLED}, L{ONESHOT}, L{RUNNING}).
        """
        return self.__state

    def get_state_name(self):
        """
        @rtype: str
        @return: The name of the current state of the breakpoint.
        """
        return self.stateNames[ self.get_state() ]

#------------------------------------------------------------------------------

    def is_conditional(self):
        """
        @see: L{__init__}
        @rtype: bool
        @return: C{True} if the breakpoint has a condition callback defined.
        """
        # Do not evaluate as boolean! Test for identity with True instead.
        return self.__condition is not True

    def is_unconditional(self):
        """
        @rtype: bool
        @return: C{True} if the breakpoint doesn't have a condition callback defined.
        """
        # Do not evaluate as boolean! Test for identity with True instead.
        return self.__condition is True

    def get_condition(self):
        """
        @rtype: bool, function
        @return: Returns the condition callback for conditional breakpoints.
            Returns C{True} for unconditional breakpoints.
        """
        return self.__condition

    def set_condition(self, condition = True):
        """
        Sets a new condition callback for the breakpoint.
        @see: L{__init__}
        @type condition: function
        @param condition: (Optional) Condition callback function.
        """
        # None is normalized to True so get_condition() always returns
        # either a callable or the True singleton.
        if condition is None:
            self.__condition = True
        else:
            self.__condition = condition

    def eval_condition(self, event):
        """
        Evaluates the breakpoint condition, if any was set.
        @type event: L{Event}
        @param event: Debug event triggered by the breakpoint.
        @rtype: bool
        @return: C{True} to dispatch the event, C{False} otherwise.
        """
        condition = self.get_condition()
        if condition is True:   # shortcut for unconditional breakpoints
            return True
        if callable(condition):
            try:
                return bool( condition(event) )
            except Exception:
                # A bug in the user's callback must not kill the debugger.
                # Report it as a warning and refuse to dispatch the event.
                # Note: traceback.format_exc() takes no exception argument;
                # it formats whatever exception is currently being handled.
                msg = ("Breakpoint condition callback %r"
                       " raised an exception: %s")
                msg = msg % (condition, traceback.format_exc())
                warnings.warn(msg, BreakpointCallbackWarning)
                return False
        return bool( condition )    # force evaluation now

#------------------------------------------------------------------------------

    def is_automatic(self):
        """
        @rtype: bool
        @return: C{True} if the breakpoint has an action callback defined.
        """
        return self.__action is not None

    def is_interactive(self):
        """
        @rtype: bool
        @return:
            C{True} if the breakpoint doesn't have an action callback defined.
        """
        return self.__action is None

    def get_action(self):
        """
        @rtype: bool, function
        @return: Returns the action callback for automatic breakpoints.
            Returns C{None} for interactive breakpoints.
        """
        return self.__action

    def set_action(self, action = None):
        """
        Sets a new action callback for the breakpoint.
        @type action: function
        @param action: (Optional) Action callback function.
        """
        self.__action = action

    def run_action(self, event):
        """
        Executes the breakpoint action callback, if any was set.
        @type event: L{Event}
        @param event: Debug event triggered by the breakpoint.
        """
        action = self.get_action()
        if action is not None:
            try:
                return bool( action(event) )
            except Exception:
                # Same policy as eval_condition(): warn, don't crash.
                msg = ("Breakpoint action callback %r"
                       " raised an exception: %s")
                msg = msg % (action, traceback.format_exc())
                warnings.warn(msg, BreakpointCallbackWarning)
                return False
        return True

#------------------------------------------------------------------------------

    def __bad_transition(self, state):
        """
        Raises an C{AssertionError} exception for an invalid state transition.
        @see: L{stateNames}
        @type state: int
        @param state: Intended breakpoint state.
        @raise Exception: Always.
        """
        statemsg = ""
        oldState = self.stateNames[ self.get_state() ]
        newState = self.stateNames[ state ]
        msg = "Invalid state transition (%s -> %s)" \
              " for breakpoint at address %s"
        msg = msg % (oldState, newState, HexDump.address(self.get_address()))
        raise AssertionError(msg)

    def disable(self, aProcess, aThread):
        """
        Transition to L{DISABLED} state.
          - When hit: OneShot S{->} Disabled
          - Forced by user: Enabled, OneShot, Running S{->} Disabled
          - Transition from running state may require special handling
            by the breakpoint implementation class.
        @type aProcess: L{Process}
        @param aProcess: Process object.
        @type aThread: L{Thread}
        @param aThread: Thread object.
        """
        self.__state = self.DISABLED

    def enable(self, aProcess, aThread):
        """
        Transition to L{ENABLED} state.
          - When hit: Running S{->} Enabled
          - Forced by user: Disabled, Running S{->} Enabled
          - Transition from running state may require special handling
            by the breakpoint implementation class.
        @type aProcess: L{Process}
        @param aProcess: Process object.
        @type aThread: L{Thread}
        @param aThread: Thread object.
        """
        self.__state = self.ENABLED

    def one_shot(self, aProcess, aThread):
        """
        Transition to L{ONESHOT} state.
          - Forced by user: Disabled S{->} OneShot
        @type aProcess: L{Process}
        @param aProcess: Process object.
        @type aThread: L{Thread}
        @param aThread: Thread object.
        """
        self.__state = self.ONESHOT

    def running(self, aProcess, aThread):
        """
        Transition to L{RUNNING} state.
          - When hit: Enabled S{->} Running
        @type aProcess: L{Process}
        @param aProcess: Process object.
        @type aThread: L{Thread}
        @param aThread: Thread object.
        """
        # This is the only transition that's still asserted, since hitting
        # a breakpoint that isn't enabled indicates a bookkeeping bug.
        if self.__state != self.ENABLED:
            self.__bad_transition(self.RUNNING)
        self.__state = self.RUNNING

    def hit(self, event):
        """
        Notify a breakpoint that it's been hit.
        This triggers the corresponding state transition and sets the
        C{breakpoint} property of the given L{Event} object.
        @see: L{disable}, L{enable}, L{one_shot}, L{running}
        @type event: L{Event}
        @param event: Debug event to handle (depends on the breakpoint type).
        @raise AssertionError: Disabled breakpoints can't be hit.
        """
        aProcess = event.get_process()
        aThread  = event.get_thread()
        state    = self.get_state()

        event.breakpoint = self

        if state == self.ENABLED:
            self.running(aProcess, aThread)

        elif state == self.RUNNING:
            self.enable(aProcess, aThread)

        elif state == self.ONESHOT:
            self.disable(aProcess, aThread)

        elif state == self.DISABLED:
            # this should not happen
            msg = "Hit a disabled breakpoint at address %s"
            msg = msg % HexDump.address( self.get_address() )
            warnings.warn(msg, BreakpointWarning)
#==============================================================================
# XXX TODO
# Check if the user is trying to set a code breakpoint on a memory mapped file,
# so we don't end up writing the int3 instruction in the file by accident.
class CodeBreakpoint (Breakpoint):
    """
    Code execution breakpoints (using an int3 opcode).
    @see: L{Debug.break_at}
    @type bpInstruction: str
    @cvar bpInstruction: Breakpoint instruction for the current processor.
    """

    typeName = 'code breakpoint'

    # The single-byte int3 opcode is only defined for x86 and amd64.
    if win32.arch in (win32.ARCH_I386, win32.ARCH_AMD64):
        bpInstruction = '\xCC'      # int 3

    def __init__(self, address, condition = True, action = None):
        """
        Code breakpoint object.
        @see: L{Breakpoint.__init__}
        @type address: int
        @param address: Memory address for breakpoint.
        @type condition: function
        @param condition: (Optional) Condition callback function.
        @type action: function
        @param action: (Optional) Action callback function.
        @raise NotImplementedError: Code breakpoints are only implemented
            for the x86 and amd64 architectures.
        """
        if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64):
            msg = "Code breakpoints not supported for %s" % win32.arch
            raise NotImplementedError(msg)
        # Breakpoint size is the length of the int3 instruction (1 byte).
        Breakpoint.__init__(self, address, len(self.bpInstruction),
                            condition, action)
        # Holds the original byte overwritten by the int3 opcode, so it can
        # be restored later. Initialized to the opcode itself as a sentinel.
        self.__previousValue = self.bpInstruction

    def __set_bp(self, aProcess):
        """
        Writes a breakpoint instruction at the target address.
        Saves the original byte first so L{__clear_bp} can restore it.
        @type aProcess: L{Process}
        @param aProcess: Process object.
        """
        address = self.get_address()
        # Read before writing: remember what we're about to overwrite.
        self.__previousValue = aProcess.read(address, len(self.bpInstruction))
        if self.__previousValue == self.bpInstruction:
            # An int3 is already there: most likely another breakpoint.
            msg = "Possible overlapping code breakpoints at %s"
            msg = msg % HexDump.address(address)
            warnings.warn(msg, BreakpointWarning)
        aProcess.write(address, self.bpInstruction)

    def __clear_bp(self, aProcess):
        """
        Restores the original byte at the target address.
        @type aProcess: L{Process}
        @param aProcess: Process object.
        """
        address = self.get_address()
        currentValue = aProcess.read(address, len(self.bpInstruction))
        if currentValue == self.bpInstruction:
            # Only restore the previous value if the int3 is still there.
            aProcess.write(self.get_address(), self.__previousValue)
        else:
            # Someone else (the debuggee?) modified our breakpoint byte.
            # Keep their value instead of clobbering it with ours.
            self.__previousValue = currentValue
            msg = "Overwritten code breakpoint at %s"
            msg = msg % HexDump.address(address)
            warnings.warn(msg, BreakpointWarning)

    def disable(self, aProcess, aThread):
        # Only remove the int3 if it's actually present; in RUNNING state
        # it was already removed by running().
        if not self.is_disabled() and not self.is_running():
            self.__clear_bp(aProcess)
        super(CodeBreakpoint, self).disable(aProcess, aThread)

    def enable(self, aProcess, aThread):
        # Don't write the int3 twice if it's already set.
        if not self.is_enabled() and not self.is_one_shot():
            self.__set_bp(aProcess)
        super(CodeBreakpoint, self).enable(aProcess, aThread)

    def one_shot(self, aProcess, aThread):
        # Same guard as enable(): the int3 may already be in place.
        if not self.is_enabled() and not self.is_one_shot():
            self.__set_bp(aProcess)
        super(CodeBreakpoint, self).one_shot(aProcess, aThread)

    # FIXME race condition here (however unlikely)
    # If another thread runs on over the target address while
    # the breakpoint is in RUNNING state, we'll miss it. There
    # is a solution to this but it's somewhat complicated, so
    # I'm leaving it for another version of the debugger. :(
    def running(self, aProcess, aThread):
        # Temporarily remove the int3 and single-step over the original
        # instruction; the breakpoint is restored when the step completes.
        if self.is_enabled():
            self.__clear_bp(aProcess)
            aThread.set_tf()
        super(CodeBreakpoint, self).running(aProcess, aThread)
#==============================================================================
# TODO:
# * If the original page was already a guard page, the exception should be
# passed to the debugee instead of being handled by the debugger.
# * If the original page was already a guard page, it should NOT be converted
# to a no-access page when disabling the breakpoint.
# * If the page permissions were modified after the breakpoint was enabled,
# no change should be done on them when disabling the breakpoint. For this
# we need to remember the original page permissions instead of blindly
# setting and clearing the guard page bit on them.
# * Some pages seem to be "magic" and resist all attempts at changing their
# protect bits (for example the pages where the PEB and TEB reside). Maybe
# a more descriptive error message could be shown in this case.
class PageBreakpoint (Breakpoint):
    """
    Page access breakpoint (using guard pages).
    @see: L{Debug.watch_buffer}
    @group Information:
        get_size_in_pages
    """

    typeName = 'page breakpoint'

#------------------------------------------------------------------------------

    def __init__(self, address, pages = 1, condition = True, action = None):
        """
        Page breakpoint object.
        @see: L{Breakpoint.__init__}
        @type address: int
        @param address: Memory address for breakpoint. Must be aligned to
            a page size boundary.
        @type pages: int
        @param pages: Size of breakpoint in pages.
        @type condition: function
        @param condition: (Optional) Condition callback function.
        @type action: function
        @param action: (Optional) Action callback function.
        @raise ValueError: The address is not page aligned.
        """
        # Validate the alignment before initializing the base class.
        # The modulo test is exact for integers of any size, unlike the
        # previous check based on float division, which could give false
        # positives for large (64 bit) addresses because floats only have
        # 53 bits of mantissa precision.
        if address % MemoryAddresses.pageSize != 0:
            msg = "Address of page breakpoint " \
                  "must be aligned to a page size boundary " \
                  "(value %s received)" % HexDump.address(address)
            raise ValueError(msg)
        Breakpoint.__init__(self, address, pages * MemoryAddresses.pageSize,
                            condition, action)

    def get_size_in_pages(self):
        """
        @rtype: int
        @return: The size in pages of the breakpoint.
        """
        # The size is always a multiple of the page size.
        return self.get_size() // MemoryAddresses.pageSize

    def __set_bp(self, aProcess):
        """
        Sets the target pages as guard pages.
        @type aProcess: L{Process}
        @param aProcess: Process object.
        """
        lpAddress    = self.get_address()
        dwSize       = self.get_size()
        flNewProtect = aProcess.mquery(lpAddress).Protect
        # Add the guard bit to the current page protection flags.
        flNewProtect = flNewProtect | win32.PAGE_GUARD
        aProcess.mprotect(lpAddress, dwSize, flNewProtect)

    def __clear_bp(self, aProcess):
        """
        Restores the original permissions of the target pages.
        @type aProcess: L{Process}
        @param aProcess: Process object.
        """
        lpAddress    = self.get_address()
        flNewProtect = aProcess.mquery(lpAddress).Protect
        # Strip the guard bit, keeping the remaining protection flags.
        flNewProtect = flNewProtect & (0xFFFFFFFF ^ win32.PAGE_GUARD)  # DWORD
        aProcess.mprotect(lpAddress, self.get_size(), flNewProtect)

    def disable(self, aProcess, aThread):
        if not self.is_disabled():
            self.__clear_bp(aProcess)
        super(PageBreakpoint, self).disable(aProcess, aThread)

    def enable(self, aProcess, aThread):
        # Reenabling guard pages after the exception is only implemented
        # for x86/amd64; other architectures support one-shot only.
        if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64):
            msg = "Only one-shot page breakpoints are supported for %s"
            raise NotImplementedError(msg % win32.arch)
        if not self.is_enabled() and not self.is_one_shot():
            self.__set_bp(aProcess)
        super(PageBreakpoint, self).enable(aProcess, aThread)

    def one_shot(self, aProcess, aThread):
        if not self.is_enabled() and not self.is_one_shot():
            self.__set_bp(aProcess)
        super(PageBreakpoint, self).one_shot(aProcess, aThread)

    def running(self, aProcess, aThread):
        # Guard page exceptions clear the guard bit automatically, so we
        # only need to single-step to restore the breakpoint afterwards.
        aThread.set_tf()
        super(PageBreakpoint, self).running(aProcess, aThread)
#==============================================================================
class HardwareBreakpoint (Breakpoint):
    """
    Hardware breakpoint (using debug registers).
    @see: L{Debug.watch_variable}
    @group Information:
        get_slot, get_trigger, get_watch
    @group Trigger flags:
        BREAK_ON_EXECUTION, BREAK_ON_WRITE, BREAK_ON_ACCESS
    @group Watch size flags:
        WATCH_BYTE, WATCH_WORD, WATCH_DWORD, WATCH_QWORD
    @type BREAK_ON_EXECUTION: int
    @cvar BREAK_ON_EXECUTION: Break on execution.
    @type BREAK_ON_WRITE: int
    @cvar BREAK_ON_WRITE: Break on write.
    @type BREAK_ON_ACCESS: int
    @cvar BREAK_ON_ACCESS: Break on read or write.
    @type WATCH_BYTE: int
    @cvar WATCH_BYTE: Watch a byte.
    @type WATCH_WORD: int
    @cvar WATCH_WORD: Watch a word (2 bytes).
    @type WATCH_DWORD: int
    @cvar WATCH_DWORD: Watch a double word (4 bytes).
    @type WATCH_QWORD: int
    @cvar WATCH_QWORD: Watch one quad word (8 bytes).
    @type validTriggers: tuple
    @cvar validTriggers: Valid trigger flag values.
    @type validWatchSizes: tuple
    @cvar validWatchSizes: Valid watch flag values.
    """

    typeName = 'hardware breakpoint'

    # Flag values are aliases of the DebugRegister constants.
    BREAK_ON_EXECUTION  = DebugRegister.BREAK_ON_EXECUTION
    BREAK_ON_WRITE      = DebugRegister.BREAK_ON_WRITE
    BREAK_ON_ACCESS     = DebugRegister.BREAK_ON_ACCESS

    WATCH_BYTE  = DebugRegister.WATCH_BYTE
    WATCH_WORD  = DebugRegister.WATCH_WORD
    WATCH_DWORD = DebugRegister.WATCH_DWORD
    WATCH_QWORD = DebugRegister.WATCH_QWORD

    validTriggers = (
        BREAK_ON_EXECUTION,
        BREAK_ON_WRITE,
        BREAK_ON_ACCESS,
    )

    validWatchSizes = (
        WATCH_BYTE,
        WATCH_WORD,
        WATCH_DWORD,
        WATCH_QWORD,
    )

    def __init__(self, address,                 triggerFlag = BREAK_ON_ACCESS,
                                                   sizeFlag = WATCH_DWORD,
                                                  condition = True,
                                                     action = None):
        """
        Hardware breakpoint object.
        @see: L{Breakpoint.__init__}
        @type address: int
        @param address: Memory address for breakpoint.
        @type triggerFlag: int
        @param triggerFlag: Trigger of breakpoint. Must be one of the following:
             - L{BREAK_ON_EXECUTION}
               Break on code execution.
             - L{BREAK_ON_WRITE}
               Break on memory write.
             - L{BREAK_ON_ACCESS}
               Break on memory read or write.
        @type sizeFlag: int
        @param sizeFlag: Size of breakpoint. Must be one of the following:
             - L{WATCH_BYTE}
               One (1) byte in size.
             - L{WATCH_WORD}
               Two (2) bytes in size.
             - L{WATCH_DWORD}
               Four (4) bytes in size.
             - L{WATCH_QWORD}
               Eight (8) bytes in size.
        @type condition: function
        @param condition: (Optional) Condition callback function.
        @type action: function
        @param action: (Optional) Action callback function.
        @raise NotImplementedError: Hardware breakpoints are only implemented
            for the x86 and amd64 architectures (debug registers DR0-DR7).
        @raise ValueError: An invalid trigger or watch size flag was given.
        """
        if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64):
            msg = "Hardware breakpoints not supported for %s" % win32.arch
            raise NotImplementedError(msg)
        # Translate the watch size flag into a size in bytes.
        if sizeFlag == self.WATCH_BYTE:
            size = 1
        elif sizeFlag == self.WATCH_WORD:
            size = 2
        elif sizeFlag == self.WATCH_DWORD:
            size = 4
        elif sizeFlag == self.WATCH_QWORD:
            size = 8
        else:
            msg = "Invalid size flag for hardware breakpoint (%s)"
            msg = msg % repr(sizeFlag)
            raise ValueError(msg)

        if triggerFlag not in self.validTriggers:
            msg = "Invalid trigger flag for hardware breakpoint (%s)"
            msg = msg % repr(triggerFlag)
            raise ValueError(msg)

        Breakpoint.__init__(self, address, size, condition, action)
        self.__trigger  = triggerFlag
        self.__watch    = sizeFlag
        # Debug register slot currently in use (None while inactive).
        self.__slot     = None

    def __clear_bp(self, aThread):
        """
        Clears this breakpoint from the debug registers.
        The thread is suspended while its context is being modified.
        @type aThread: L{Thread}
        @param aThread: Thread object.
        """
        if self.__slot is not None:
            aThread.suspend()
            try:
                ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS)
                DebugRegister.clear_bp(ctx, self.__slot)
                aThread.set_context(ctx)
                self.__slot = None
            finally:
                aThread.resume()

    def __set_bp(self, aThread):
        """
        Sets this breakpoint in the debug registers.
        The thread is suspended while its context is being modified.
        @type aThread: L{Thread}
        @param aThread: Thread object.
        @raise RuntimeError: All four hardware breakpoint slots of the
            thread are already in use.
        """
        if self.__slot is None:
            aThread.suspend()
            try:
                ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS)
                self.__slot = DebugRegister.find_slot(ctx)
                if self.__slot is None:
                    msg = "No available hardware breakpoint slots for thread ID %d"
                    msg = msg % aThread.get_tid()
                    raise RuntimeError(msg)
                DebugRegister.set_bp(ctx, self.__slot, self.get_address(),
                                     self.__trigger, self.__watch)
                aThread.set_context(ctx)
            finally:
                aThread.resume()

    def get_slot(self):
        """
        @rtype: int
        @return: The debug register number used by this breakpoint,
            or C{None} if the breakpoint is not active.
        """
        return self.__slot

    def get_trigger(self):
        """
        @see: L{validTriggers}
        @rtype: int
        @return: The breakpoint trigger flag.
        """
        return self.__trigger

    def get_watch(self):
        """
        @see: L{validWatchSizes}
        @rtype: int
        @return: The breakpoint watch flag.
        """
        return self.__watch

    def disable(self, aProcess, aThread):
        if not self.is_disabled():
            self.__clear_bp(aThread)
        super(HardwareBreakpoint, self).disable(aProcess, aThread)

    def enable(self, aProcess, aThread):
        if not self.is_enabled() and not self.is_one_shot():
            self.__set_bp(aThread)
        super(HardwareBreakpoint, self).enable(aProcess, aThread)

    def one_shot(self, aProcess, aThread):
        if not self.is_enabled() and not self.is_one_shot():
            self.__set_bp(aThread)
        super(HardwareBreakpoint, self).one_shot(aProcess, aThread)

    def running(self, aProcess, aThread):
        # Remove the breakpoint from the debug registers and single-step
        # over the instruction; it's restored when the step completes.
        self.__clear_bp(aThread)
        super(HardwareBreakpoint, self).running(aProcess, aThread)
        aThread.set_tf()
#==============================================================================
# XXX FIXME
#
# The implementation of function hooks is very simple. A breakpoint is set at
# the entry point. Each time it's hit the "pre" callback is executed. If a
# "post" callback was defined, a one-shot breakpoint is set at the return
# address - and when that breakpoint hits, the "post" callback is executed.
#
# Functions hooks, as they are implemented now, don't work correctly for
# recursive functions. The problem is we don't know when to remove the
# breakpoint at the return address. Also there could be more than one return
# address.
#
# One possible solution would involve a dictionary of lists, where the key
# would be the thread ID and the value a stack of return addresses. But we
# still don't know what to do if the "wrong" return address is hit for some
# reason (maybe check the stack pointer?). Or if both a code and a hardware
# breakpoint are hit simultaneously.
#
# For now, the workaround for the user is to set only the "pre" callback for
# functions that are known to be recursive.
#
# If an exception is thrown by a hooked function and caught by one of it's
# parent functions, the "post" callback won't be called and weird stuff may
# happen. A possible solution is to put a breakpoint in the system call that
# unwinds the stack, to detect this case and remove the "post" breakpoint.
#
# Hooks may also behave oddly if the return address is overwritten by a buffer
# overflow bug (this is similar to the exception problem). But it's probably a
# minor issue since when you're fuzzing a function for overflows you're usually
# not interested in the return value anyway.
# TODO: an API to modify the hooked function's arguments
class Hook (object):
"""
Factory class to produce hook objects. Used by L{Debug.hook_function} and
L{Debug.stalk_function}.
When you try to instance this class, one of the architecture specific
implementations is returned instead.
Instances act as an action callback for code breakpoints set at the
beginning of a function. It automatically retrieves the parameters from
the stack, sets a breakpoint at the return address and retrieves the
return value from the function call.
@see: L{_Hook_i386}, L{_Hook_amd64}
@type useHardwareBreakpoints: bool
@cvar useHardwareBreakpoints: C{True} to try to use hardware breakpoints,
C{False} otherwise.
"""
# This is a factory class that returns
# the architecture specific implementation.
def __new__(cls, *argv, **argd):
try:
arch = argd['arch']
del argd['arch']
except KeyError:
try:
arch = argv[4]
argv = argv[:4] + argv[5:]
except IndexError:
raise TypeError("Missing 'arch' argument!")
if arch is None:
arch = win32.arch
if arch == win32.ARCH_I386:
return _Hook_i386(*argv, **argd)
if arch == win32.ARCH_AMD64:
return _Hook_amd64(*argv, **argd)
return object.__new__(cls, *argv, **argd)
# XXX FIXME
#
# Hardware breakpoints don't work correctly (or al all) in old VirtualBox
# versions (3.0 and below).
#
# Maybe there should be a way to autodetect the buggy VirtualBox versions
# and tell Hook objects not to use hardware breakpoints?
#
# For now the workaround is to manually set this variable to True when
# WinAppDbg is installed on a physical machine.
#
useHardwareBreakpoints = False
def __init__(self, preCB = None, postCB = None,
paramCount = None, signature = None,
arch = None):
"""
@type preCB: function
@param preCB: (Optional) Callback triggered on function entry.
The signature for the callback should be something like this::
def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags):
# return address
ra = params[0]
# function arguments start from here...
szFilename = event.get_process().peek_string(lpFilename)
# (...)
Note that all pointer types are treated like void pointers, so your
callback won't get the string or structure pointed to by it, but
the remote memory address instead. This is so to prevent the ctypes
library from being "too helpful" and trying to dereference the
pointer. To get the actual data being pointed to, use one of the
L{Process.read} methods.
@type postCB: function
@param postCB: (Optional) Callback triggered on function exit.
The signature for the callback should be something like this::
def post_LoadLibraryEx(event, return_value):
# (...)
@type paramCount: int
@param paramCount:
(Optional) Number of parameters for the C{preCB} callback,
not counting the return address. Parameters are read from
the stack and assumed to be DWORDs in 32 bits and QWORDs in 64.
This is a faster way to pull stack parameters in 32 bits, but in 64
bits (or with some odd APIs in 32 bits) it won't be useful, since
not all arguments to the hooked function will be of the same size.
For a more reliable and cross-platform way of hooking use the
C{signature} argument instead.
@type signature: tuple
@param signature:
(Optional) Tuple of C{ctypes} data types that constitute the
hooked function signature. When the function is called, this will
be used to parse the arguments from the stack. Overrides the
C{paramCount} argument.
@type arch: str
@param arch: (Optional) Target architecture. Defaults to the current
architecture. See: L{win32.arch}
"""
self.__preCB = preCB
self.__postCB = postCB
self.__paramStack = dict() # tid -> list of tuple( arg, arg, arg... )
self._paramCount = paramCount
if win32.arch != win32.ARCH_I386:
self.useHardwareBreakpoints = False
if win32.bits == 64 and paramCount and not signature:
signature = (win32.QWORD,) * paramCount
if signature:
self._signature = self._calc_signature(signature)
else:
self._signature = None
def _cast_signature_pointers_to_void(self, signature):
c_void_p = ctypes.c_void_p
c_char_p = ctypes.c_char_p
c_wchar_p = ctypes.c_wchar_p
_Pointer = ctypes._Pointer
cast = ctypes.cast
for i in xrange(len(signature)):
t = signature[i]
if t is not c_void_p and (issubclass(t, _Pointer) \
or t in [c_char_p, c_wchar_p]):
signature[i] = cast(t, c_void_p)
def _calc_signature(self, signature):
raise NotImplementedError(
"Hook signatures are not supported for architecture: %s" \
% win32.arch)
def _get_return_address(self, aProcess, aThread):
return None
def _get_function_arguments(self, aProcess, aThread):
if self._signature or self._paramCount:
raise NotImplementedError(
"Hook signatures are not supported for architecture: %s" \
% win32.arch)
return ()
def _get_return_value(self, aThread):
return None
# By using break_at() to set a process-wide breakpoint on the function's
# return address, we might hit a race condition when more than one thread
# is being debugged.
#
# Hardware breakpoints should be used instead. But since a thread can run
# out of those, we need to fall back to this method when needed.
def __call__(self, event):
"""
Handles the breakpoint event on entry of the function.
@type event: L{ExceptionEvent}
@param event: Breakpoint hit event.
@raise WindowsError: An error occured.
"""
debug = event.debug
dwProcessId = event.get_pid()
dwThreadId = event.get_tid()
aProcess = event.get_process()
aThread = event.get_thread()
# Get the return address and function arguments.
ra = self._get_return_address(aProcess, aThread)
params = self._get_function_arguments(aProcess, aThread)
# Keep the function arguments for later use.
self.__push_params(dwThreadId, params)
# If we need to hook the return from the function...
bHookedReturn = False
if ra is not None and self.__postCB is not None:
# Try to set a one shot hardware breakpoint at the return address.
useHardwareBreakpoints = self.useHardwareBreakpoints
if useHardwareBreakpoints:
try:
debug.define_hardware_breakpoint(
dwThreadId,
ra,
event.debug.BP_BREAK_ON_EXECUTION,
event.debug.BP_WATCH_BYTE,
True,
self.__postCallAction_hwbp
)
debug.enable_one_shot_hardware_breakpoint(dwThreadId, ra)
bHookedReturn = True
except Exception, e:
useHardwareBreakpoints = False
msg = ("Failed to set hardware breakpoint"
" at address %s for thread ID %d")
msg = msg % (HexDump.address(ra), dwThreadId)
warnings.warn(msg, BreakpointWarning)
# If not possible, set a code breakpoint instead.
if not useHardwareBreakpoints:
try:
debug.break_at(dwProcessId, ra,
self.__postCallAction_codebp)
bHookedReturn = True
except Exception, e:
msg = ("Failed to set code breakpoint"
" at address %s for process ID %d")
msg = msg % (HexDump.address(ra), dwProcessId)
warnings.warn(msg, BreakpointWarning)
# Call the "pre" callback.
try:
self.__callHandler(self.__preCB, event, ra, *params)
# If no "post" callback is defined, forget the function arguments.
finally:
if not bHookedReturn:
self.__pop_params(dwThreadId)
def __postCallAction_hwbp(self, event):
    """
    Handles hardware breakpoint events on return from the function.

    @type  event: L{ExceptionEvent}
    @param event: Single step event.
    """
    # The one shot hardware breakpoint at the return address
    # is no longer needed, so get rid of it.
    address = event.breakpoint.get_address()
    tid     = event.get_tid()
    event.debug.erase_hardware_breakpoint(tid, address)

    # Invoke the "post" callback, then drop the saved arguments
    # regardless of what the callback did.
    try:
        self.__postCallAction(event)
    finally:
        self.__pop_params(tid)
def __postCallAction_codebp(self, event):
    """
    Handles code breakpoint events on return from the function.

    @type  event: L{ExceptionEvent}
    @param event: Breakpoint hit event.
    """
    # Another thread may hit this breakpoint by accident. If so, there
    # are no saved parameters for it, so hand the event back to the
    # debugger instead of running the "post" callback.
    #
    # XXX FIXME:
    # I suppose this check will fail under some weird conditions...
    #
    tid = event.get_tid()
    if tid not in self.__paramStack:
        return True

    # The code breakpoint at the return address is no longer needed.
    pid     = event.get_pid()
    address = event.breakpoint.get_address()
    event.debug.dont_break_at(pid, address)

    # Invoke the "post" callback, then drop the saved arguments
    # regardless of what the callback did.
    try:
        self.__postCallAction(event)
    finally:
        self.__pop_params(tid)
def __postCallAction(self, event):
    """
    Calls the "post" callback.

    @type  event: L{ExceptionEvent}
    @param event: Breakpoint hit event.
    """
    retval = self._get_return_value( event.get_thread() )
    self.__callHandler(self.__postCB, event, retval)
def __callHandler(self, callback, event, *params):
    """
    Calls a "pre" or "post" handler, if set.

    @type  callback: function
    @param callback: Callback function to call.

    @type  event: L{ExceptionEvent}
    @param event: Breakpoint hit event.

    @type  params: tuple
    @param params: Parameters for the callback function.
    """
    # Nothing to do when no handler was defined for this hook.
    if callback is None:
        return
    # Let the callback reach this hook object through the event.
    event.hook = self
    callback(event, *params)
def __push_params(self, tid, params):
    """
    Remembers the arguments tuple for the last call to the hooked function
    from this thread.

    @type  tid: int
    @param tid: Thread global ID.

    @type  params: tuple( arg, arg, arg... )
    @param params: Tuple of arguments.
    """
    # Each thread keeps its own stack of argument tuples,
    # created on first use.
    self.__paramStack.setdefault(tid, []).append(params)
def __pop_params(self, tid):
    """
    Forgets the arguments tuple for the last call to the hooked function
    from this thread.

    @type  tid: int
    @param tid: Thread global ID.
    """
    entries = self.__paramStack[tid]
    entries.pop()
    # Drop the whole per-thread entry once its stack is empty.
    if not entries:
        self.__paramStack.pop(tid)
def get_params(self, tid):
    """
    Returns the parameters found in the stack when the hooked function
    was last called by this thread.

    @type  tid: int
    @param tid: Thread global ID.

    @rtype:  tuple( arg, arg, arg... )
    @return: Tuple of arguments.
    """
    stack = self.get_params_stack(tid)
    if not stack:
        msg = "Hooked function called from thread %d already returned"
        raise IndexError(msg % tid)
    return stack[-1]
def get_params_stack(self, tid):
    """
    Returns the parameters found in the stack each time the hooked function
    was called by this thread and hasn't returned yet.

    @type  tid: int
    @param tid: Thread global ID.

    @rtype:  list of tuple( arg, arg, arg... )
    @return: List of argument tuples.
    """
    if tid not in self.__paramStack:
        msg = "Hooked function was not called from thread %d"
        raise KeyError(msg % tid)
    return self.__paramStack[tid]
def hook(self, debug, pid, address):
    """
    Installs the function hook at a given process and address.

    @see: L{unhook}

    @warning: Do not call from an function hook callback.

    @type  debug: L{Debug}
    @param debug: Debug object.

    @type  pid: int
    @param pid: Process ID.

    @type  address: int
    @param address: Function address.
    """
    # This hook object acts as its own action callback for the
    # entry breakpoint.
    return debug.break_at(pid, address, self)
def unhook(self, debug, pid, address):
    """
    Removes the function hook at a given process and address.

    @see: L{hook}

    @warning: Do not call from an function hook callback.

    @type  debug: L{Debug}
    @param debug: Debug object.

    @type  pid: int
    @param pid: Process ID.

    @type  address: int
    @param address: Function address.
    """
    # Removing the entry breakpoint disables the hook.
    return debug.dont_break_at(pid, address)
class _Hook_i386 (Hook):
    """
    Implementation details for L{Hook} on the L{win32.ARCH_I386} architecture.
    """

    # We don't want to inherit the parent class __new__ method.
    __new__ = object.__new__

    def _calc_signature(self, signature):
        self._cast_signature_pointers_to_void(signature)

        class Arguments (ctypes.Structure):
            # Pack the structure so 64 bit values are not aligned
            # to 64 bit boundaries.
            _pack_   = ctypes.sizeof(ctypes.c_void_p)
            _fields_ = [("arg_%s" % i, t) for (i, t) in enumerate(signature)]
        return Arguments

    def _get_return_address(self, aProcess, aThread):
        # The return address sits at the top of the stack.
        return aProcess.read_pointer( aThread.get_sp() )

    def _get_function_arguments(self, aProcess, aThread):
        # Arguments are on the stack, right above the return address.
        offset = win32.sizeof(win32.LPVOID)
        if self._signature:
            return aThread.read_stack_structure(self._signature,
                                                offset = offset)
        if self._paramCount:
            return aThread.read_stack_dwords(self._paramCount,
                                             offset = offset)
        return ()

    def _get_return_value(self, aThread):
        # Integer return values are found in the EAX register.
        return aThread.get_context(win32.CONTEXT_INTEGER)['Eax']
class _Hook_amd64 (Hook):
    """
    Implementation details for L{Hook} on the L{win32.ARCH_AMD64} architecture.
    """

    # We don't want to inherit the parent class __new__ method.
    __new__ = object.__new__

    # Make a list of floating point types.
    __float_types = (
        ctypes.c_double,
        ctypes.c_float,
    )
    # Long doubles are not supported in old versions of ctypes!
    try:
        __float_types += (ctypes.c_longdouble,)
    except AttributeError:
        pass

    def _calc_signature(self, signature):
        self._cast_signature_pointers_to_void(signature)

        float_types = self.__float_types
        c_sizeof    = ctypes.sizeof
        reg_size    = c_sizeof(ctypes.c_size_t)

        # Sort the arguments into integer registers, floating point
        # registers and the stack (first four go in registers).
        reg_int_sig   = []
        reg_float_sig = []
        stack_sig     = []
        for index, arg in enumerate(signature):
            name = "arg_%d" % index
            stack_sig.insert( 0, (name, arg) )
            if index < 4:
                if type(arg) in float_types:
                    reg_float_sig.append( (name, arg) )
                elif c_sizeof(arg) <= reg_size:
                    reg_int_sig.append( (name, arg) )
                else:
                    msg = ("Hook signatures don't support structures"
                           " within the first 4 arguments of a function"
                           " for the %s architecture") % win32.arch
                    raise NotImplementedError(msg)

        # Build one ctypes structure per argument class, or None
        # when that class is empty.
        if reg_int_sig:
            class RegisterArguments (ctypes.Structure):
                _fields_ = reg_int_sig
        else:
            RegisterArguments = None

        if reg_float_sig:
            class FloatArguments (ctypes.Structure):
                _fields_ = reg_float_sig
        else:
            FloatArguments = None

        if stack_sig:
            class StackArguments (ctypes.Structure):
                _fields_ = stack_sig
        else:
            StackArguments = None

        return (len(signature),
                RegisterArguments,
                FloatArguments,
                StackArguments)

    def _get_return_address(self, aProcess, aThread):
        # The return address sits at the top of the stack.
        return aProcess.read_pointer( aThread.get_sp() )

    def _get_function_arguments(self, aProcess, aThread):
        if not self._signature:
            return ()

        (args_count,
         RegisterArguments,
         FloatArguments,
         StackArguments) = self._signature
        arguments = {}

        # Stack arguments live right above the return address.
        if StackArguments:
            address      = aThread.get_sp() + win32.sizeof(win32.LPVOID)
            stack_struct = aProcess.read_structure(address,
                                                   StackArguments)
            for (name, type) in stack_struct._fields_:
                arguments[name] = stack_struct.__getattribute__(name)

        # The first four arguments are passed in registers.
        flags = 0
        if RegisterArguments:
            flags = flags | win32.CONTEXT_INTEGER
        if FloatArguments:
            flags = flags | win32.CONTEXT_MMX_REGISTERS
        if flags:
            ctx = aThread.get_context(flags)
            if RegisterArguments:
                buffer = (win32.QWORD * 4)(ctx['Rcx'], ctx['Rdx'],
                                           ctx['R8'], ctx['R9'])
                arguments.update(
                    self._get_arguments_from_buffer(buffer,
                                                    RegisterArguments) )
            if FloatArguments:
                buffer = (win32.M128A * 4)(ctx['XMM0'], ctx['XMM1'],
                                           ctx['XMM2'], ctx['XMM3'])
                arguments.update(
                    self._get_arguments_from_buffer(buffer,
                                                    FloatArguments) )

        return tuple( [ arguments["arg_%d" % i]
                        for i in xrange(args_count) ] )

    def _get_arguments_from_buffer(self, buffer, structure):
        # Reinterpret the raw register buffer as the argument structure.
        b_ptr  = ctypes.pointer(buffer)
        v_ptr  = ctypes.cast(b_ptr, ctypes.c_void_p)
        s_ptr  = ctypes.cast(v_ptr, ctypes.POINTER(structure))
        struct = s_ptr.contents
        return dict( (name, struct.__getattribute__(name))
                     for (name, type) in struct._fields_ )

    def _get_return_value(self, aThread):
        # Integer return values are found in the RAX register.
        return aThread.get_context(win32.CONTEXT_INTEGER)['Rax']
#------------------------------------------------------------------------------
# This class acts as a factory of Hook objects, one per target process.
# Said objects are deleted by the unhook() method.
class ApiHook (object):
    """
    Used by L{EventHandler}.

    This class acts as an action callback for code breakpoints set at the
    beginning of a function. It automatically retrieves the parameters from
    the stack, sets a breakpoint at the return address and retrieves the
    return value from the function call.

    @see: L{EventHandler.apiHooks}

    @type modName: str
    @ivar modName: Module name.

    @type procName: str
    @ivar procName: Procedure name.
    """

    def __init__(self, eventHandler, modName, procName, paramCount = None,
                 signature = None):
        """
        @type  eventHandler: L{EventHandler}
        @param eventHandler: Event handler instance where the optional hook
            callbacks are defined. They are deduced from the procedure name:
            for "LoadLibraryEx" they would be "pre_LoadLibraryEx" (called on
            entry) and "post_LoadLibraryEx" (called on return)::

                def pre_LoadLibraryEx(self, event, ra, lpFilename, hFile, dwFlags):
                    szFilename = event.get_process().peek_string(lpFilename)
                    # (...)

                def post_LoadLibraryEx(self, event, return_value):
                    # (...)

            All pointer types are treated like void pointers, so the
            callbacks receive remote memory addresses rather than the data
            pointed to; use one of the L{Process.read} methods to retrieve
            the actual contents.

        @type  modName: str
        @param modName: Module name.

        @type  procName: str
        @param procName: Procedure name.

        @type  paramCount: int
        @param paramCount:
            (Optional) Number of parameters for the C{preCB} callback, not
            counting the return address. They are read from the stack
            assuming DWORDs in 32 bits and QWORDs in 64 bits, so this only
            works when all arguments are of the same size. Prefer the
            C{signature} argument for reliability.

        @type  signature: tuple
        @param signature:
            (Optional) Tuple of C{ctypes} data types describing the hooked
            function's arguments. Overrides the C{paramCount} argument.
        """
        self.__modName    = modName
        self.__procName   = procName
        self.__paramCount = paramCount
        self.__signature  = signature
        self.__preCB      = getattr(eventHandler, 'pre_%s'  % procName, None)
        self.__postCB     = getattr(eventHandler, 'post_%s' % procName, None)
        # One Hook object is lazily created per target process.
        self.__hook       = dict()

    def __call__(self, event):
        """
        Handles the breakpoint event on entry of the function.

        @type  event: L{ExceptionEvent}
        @param event: Breakpoint hit event.

        @raise WindowsError: An error occured.
        """
        pid = event.get_pid()
        if pid not in self.__hook:
            self.__hook[pid] = Hook(self.__preCB, self.__postCB,
                                    self.__paramCount, self.__signature,
                                    event.get_process().get_arch() )
        return self.__hook[pid](event)

    @property
    def modName(self):
        return self.__modName

    @property
    def procName(self):
        return self.__procName

    def hook(self, debug, pid):
        """
        Installs the API hook on a given process and module.

        @warning: Do not call from an API hook callback.

        @type  debug: L{Debug}
        @param debug: Debug object.

        @type  pid: int
        @param pid: Process ID.
        """
        label = "%s!%s" % (self.__modName, self.__procName)
        if pid not in self.__hook:
            try:
                aProcess = debug.system.get_process(pid)
            except KeyError:
                aProcess = Process(pid)
            self.__hook[pid] = Hook(self.__preCB, self.__postCB,
                                    self.__paramCount, self.__signature,
                                    aProcess.get_arch() )
        self.__hook[pid].hook(debug, pid, label)

    def unhook(self, debug, pid):
        """
        Removes the API hook from the given process and module.

        @warning: Do not call from an API hook callback.

        @type  debug: L{Debug}
        @param debug: Debug object.

        @type  pid: int
        @param pid: Process ID.
        """
        if pid in self.__hook:
            label = "%s!%s" % (self.__modName, self.__procName)
            self.__hook[pid].unhook(debug, pid, label)
            del self.__hook[pid]
#==============================================================================
class BufferWatch (object):
    """
    Returned by L{Debug.watch_buffer}.

    This object uniquely references a buffer being watched, even if there are
    multiple watches set on the exact memory region.

    @type pid: int
    @ivar pid: Process ID.

    @type start: int
    @ivar start: Memory address of the start of the buffer.

    @type end: int
    @ivar end: Memory address of the end of the buffer.

    @type action: callable
    @ivar action: Action callback.

    @type oneshot: bool
    @ivar oneshot: C{True} for one shot breakpoints, C{False} otherwise.
    """

    def __init__(self, pid, start, end, action = None, oneshot = False):
        self.__pid     = pid
        self.__start   = start
        self.__end     = end
        self.__action  = action
        self.__oneshot = oneshot

    # Read only access to the watch parameters.

    @property
    def pid(self):
        return self.__pid

    @property
    def start(self):
        return self.__start

    @property
    def end(self):
        return self.__end

    @property
    def action(self):
        return self.__action

    @property
    def oneshot(self):
        return self.__oneshot

    def match(self, address):
        """
        Determine if the given memory address lies within the watched buffer.

        @rtype:  bool
        @return: C{True} if the given memory address lies within the watched
            buffer, C{False} otherwise.
        """
        # The range is inclusive at the start and exclusive at the end.
        return (self.__start <= address) and (address < self.__end)
#==============================================================================
class _BufferWatchCondition (object):
"""
Used by L{Debug.watch_buffer}.
This class acts as a condition callback for page breakpoints.
It emulates page breakpoints that can overlap and/or take up less
than a page's size.
"""
def __init__(self):
self.__ranges = list() # list of BufferWatch in definition order
def add(self, bw):
"""
Adds a buffer watch identifier.
@type bw: L{BufferWatch}
@param bw:
Buffer watch identifier.
"""
self.__ranges.append(bw)
def remove(self, bw):
"""
Removes a buffer watch identifier.
@type bw: L{BufferWatch}
@param bw:
Buffer watch identifier.
@raise KeyError: The buffer watch identifier was already removed.
"""
try:
self.__ranges.remove(bw)
except KeyError:
if not bw.oneshot:
raise
def remove_last_match(self, address, size):
"""
Removes the last buffer from the watch object
to match the given address and size.
@type address: int
@param address: Memory address of buffer to stop watching.
@type size: int
@param size: Size in bytes of buffer to stop watching.
@rtype: int
@return: Number of matching elements found. Only the last one to be
added is actually deleted upon calling this method.
This counter allows you to know if there are more matching elements
and how many.
"""
count = 0
start = address
end = address + size - 1
matched = None
for item in self.__ranges:
if item.match(start) and item.match(end):
matched = item
count += 1
self.__ranges.remove(matched)
return count
def count(self):
"""
@rtype: int
@return: Number of buffers being watched.
"""
return len(self.__ranges)
def __call__(self, event):
"""
Breakpoint condition callback.
This method will also call the action callbacks for each
buffer being watched.
@type event: L{ExceptionEvent}
@param event: Guard page exception event.
@rtype: bool
@return: C{True} if the address being accessed belongs
to at least one of the buffers that was being watched
and had no action callback.
"""
address = event.get_exception_information(1)
bCondition = False
for bw in self.__ranges:
bMatched = bw.match(address)
try:
action = bw.action
if bMatched and action is not None:
try:
action(event)
except Exception, e:
msg = ("Breakpoint action callback %r"
" raised an exception: %s")
msg = msg % (action, traceback.format_exc(e))
warnings.warn(msg, BreakpointCallbackWarning)
else:
bCondition = bCondition or bMatched
finally:
if bMatched and bw.oneshot:
event.debug.dont_watch_buffer(bw)
return bCondition
#==============================================================================
class _BreakpointContainer (object):
"""
Encapsulates the capability to contain Breakpoint objects.
@group Breakpoints:
break_at, watch_variable, watch_buffer, hook_function,
dont_break_at, dont_watch_variable, dont_watch_buffer,
dont_hook_function, unhook_function,
break_on_error, dont_break_on_error
@group Stalking:
stalk_at, stalk_variable, stalk_buffer, stalk_function,
dont_stalk_at, dont_stalk_variable, dont_stalk_buffer,
dont_stalk_function
@group Tracing:
is_tracing, get_traced_tids,
start_tracing, stop_tracing,
start_tracing_process, stop_tracing_process,
start_tracing_all, stop_tracing_all
@group Symbols:
resolve_label, resolve_exported_function
@group Advanced breakpoint use:
define_code_breakpoint,
define_page_breakpoint,
define_hardware_breakpoint,
has_code_breakpoint,
has_page_breakpoint,
has_hardware_breakpoint,
get_code_breakpoint,
get_page_breakpoint,
get_hardware_breakpoint,
erase_code_breakpoint,
erase_page_breakpoint,
erase_hardware_breakpoint,
enable_code_breakpoint,
enable_page_breakpoint,
enable_hardware_breakpoint,
enable_one_shot_code_breakpoint,
enable_one_shot_page_breakpoint,
enable_one_shot_hardware_breakpoint,
disable_code_breakpoint,
disable_page_breakpoint,
disable_hardware_breakpoint
@group Listing breakpoints:
get_all_breakpoints,
get_all_code_breakpoints,
get_all_page_breakpoints,
get_all_hardware_breakpoints,
get_process_breakpoints,
get_process_code_breakpoints,
get_process_page_breakpoints,
get_process_hardware_breakpoints,
get_thread_hardware_breakpoints,
get_all_deferred_code_breakpoints,
get_process_deferred_code_breakpoints
@group Batch operations on breakpoints:
enable_all_breakpoints,
enable_one_shot_all_breakpoints,
disable_all_breakpoints,
erase_all_breakpoints,
enable_process_breakpoints,
enable_one_shot_process_breakpoints,
disable_process_breakpoints,
erase_process_breakpoints
@group Breakpoint types:
BP_TYPE_ANY, BP_TYPE_CODE, BP_TYPE_PAGE, BP_TYPE_HARDWARE
@group Breakpoint states:
BP_STATE_DISABLED, BP_STATE_ENABLED, BP_STATE_ONESHOT, BP_STATE_RUNNING
@group Memory breakpoint trigger flags:
BP_BREAK_ON_EXECUTION, BP_BREAK_ON_WRITE, BP_BREAK_ON_ACCESS
@group Memory breakpoint size flags:
BP_WATCH_BYTE, BP_WATCH_WORD, BP_WATCH_DWORD, BP_WATCH_QWORD
@type BP_TYPE_ANY: int
@cvar BP_TYPE_ANY: To get all breakpoints
@type BP_TYPE_CODE: int
@cvar BP_TYPE_CODE: To get code breakpoints only
@type BP_TYPE_PAGE: int
@cvar BP_TYPE_PAGE: To get page breakpoints only
@type BP_TYPE_HARDWARE: int
@cvar BP_TYPE_HARDWARE: To get hardware breakpoints only
@type BP_STATE_DISABLED: int
@cvar BP_STATE_DISABLED: Breakpoint is disabled.
@type BP_STATE_ENABLED: int
@cvar BP_STATE_ENABLED: Breakpoint is enabled.
@type BP_STATE_ONESHOT: int
@cvar BP_STATE_ONESHOT: Breakpoint is enabled for one shot.
@type BP_STATE_RUNNING: int
@cvar BP_STATE_RUNNING: Breakpoint is running (recently hit).
@type BP_BREAK_ON_EXECUTION: int
@cvar BP_BREAK_ON_EXECUTION: Break on code execution.
@type BP_BREAK_ON_WRITE: int
@cvar BP_BREAK_ON_WRITE: Break on memory write.
@type BP_BREAK_ON_ACCESS: int
@cvar BP_BREAK_ON_ACCESS: Break on memory read or write.
"""
# Breakpoint types.
BP_TYPE_ANY             = 0     # to get all breakpoints
BP_TYPE_CODE            = 1
BP_TYPE_PAGE            = 2
BP_TYPE_HARDWARE        = 3

# Breakpoint states, aliased from the Breakpoint state machine.
BP_STATE_DISABLED       = Breakpoint.DISABLED
BP_STATE_ENABLED        = Breakpoint.ENABLED
BP_STATE_ONESHOT        = Breakpoint.ONESHOT
BP_STATE_RUNNING        = Breakpoint.RUNNING

# Memory breakpoint trigger flags, aliased from HardwareBreakpoint.
BP_BREAK_ON_EXECUTION   = HardwareBreakpoint.BREAK_ON_EXECUTION
BP_BREAK_ON_WRITE       = HardwareBreakpoint.BREAK_ON_WRITE
BP_BREAK_ON_ACCESS      = HardwareBreakpoint.BREAK_ON_ACCESS

# Memory breakpoint size flags, aliased from HardwareBreakpoint.
BP_WATCH_BYTE           = HardwareBreakpoint.WATCH_BYTE
BP_WATCH_WORD           = HardwareBreakpoint.WATCH_WORD
BP_WATCH_QWORD          = HardwareBreakpoint.WATCH_QWORD
BP_WATCH_DWORD          = HardwareBreakpoint.WATCH_DWORD
def __init__(self):
    # Breakpoints are indexed by where they were set:
    self.__codeBP     = {}      # (pid, address) -> CodeBreakpoint
    self.__pageBP     = {}      # (pid, address) -> PageBreakpoint
    self.__hardwareBP = {}      # tid -> [ HardwareBreakpoint ]
    self.__runningBP  = {}      # tid -> set( Breakpoint )
    self.__deferredBP = {}      # pid -> label -> (action, oneshot)
    self.__tracing    = set()   # set( tid )
#------------------------------------------------------------------------------
# This operates on the dictionary of running breakpoints.
# Since the bps are meant to stay alive no cleanup is done here.
def __get_running_bp_set(self, tid):
    "Auxiliary method."
    try:
        return self.__runningBP[tid]
    except KeyError:
        # No running breakpoints for this thread yet.
        return ()
def __add_running_bp(self, tid, bp):
    "Auxiliary method."
    # Create the per-thread set on first use.
    self.__runningBP.setdefault(tid, set()).add(bp)
def __del_running_bp(self, tid, bp):
    "Auxiliary method."
    bpset = self.__runningBP[tid]
    bpset.remove(bp)
    # Drop the per-thread entry once its set is empty.
    if not bpset:
        del self.__runningBP[tid]
def __del_running_bp_from_all_threads(self, bp):
    "Auxiliary method."
    for (tid, bpset) in self.__runningBP.iteritems():
        if bp in bpset:
            bpset.discard(bp)
            # The thread is no longer single stepping for this bp.
            self.system.get_thread(tid).clear_tf()
#------------------------------------------------------------------------------
# This is the cleanup code. Mostly called on response to exit/unload debug
# events. If possible it shouldn't raise exceptions on runtime errors.
# The main goal here is to avoid memory or handle leaks.
def __cleanup_breakpoint(self, event, bp):
    "Auxiliary method."
    try:
        # Clear the debug registers and/or the trap flag.
        bp.disable( event.get_process(), event.get_thread() )
    except Exception:
        # Best effort cleanup; the target may already be gone.
        pass
    # Break possible circular references.
    bp.set_condition(True)
    bp.set_action(None)
def __cleanup_thread(self, event):
    """
    Auxiliary method for L{_notify_exit_thread}
    and L{_notify_exit_process}.
    """
    tid = event.get_tid()

    # Cleanup running breakpoints.
    if tid in self.__runningBP:
        for bp in self.__runningBP[tid]:
            self.__cleanup_breakpoint(event, bp)
        del self.__runningBP[tid]

    # Cleanup hardware breakpoints.
    if tid in self.__hardwareBP:
        for bp in self.__hardwareBP[tid]:
            self.__cleanup_breakpoint(event, bp)
        del self.__hardwareBP[tid]

    # Cleanup set of threads being traced.
    self.__tracing.discard(tid)
def __cleanup_process(self, event):
    """
    Auxiliary method for L{_notify_exit_process}.

    Erases all code and page breakpoints defined for the exiting process,
    as well as its deferred code breakpoints.
    """
    pid     = event.get_pid()
    # NOTE(review): "process" is unused below; get_process() may have side
    # effects on the snapshot, so the call is kept — confirm before removing.
    process = event.get_process()

    # Cleanup code breakpoints.
    # FIX: iterate over an explicit copy of the keys, since entries are
    # deleted inside the loop. Python 2's keys() happened to return a
    # copy; list() makes the intent explicit and keeps this correct
    # under Python 3 as well.
    for (bp_pid, bp_address) in list(self.__codeBP.keys()):
        if bp_pid == pid:
            bp = self.__codeBP[ (bp_pid, bp_address) ]
            self.__cleanup_breakpoint(event, bp)
            del self.__codeBP[ (bp_pid, bp_address) ]

    # Cleanup page breakpoints (same copy-then-delete pattern).
    for (bp_pid, bp_address) in list(self.__pageBP.keys()):
        if bp_pid == pid:
            bp = self.__pageBP[ (bp_pid, bp_address) ]
            self.__cleanup_breakpoint(event, bp)
            del self.__pageBP[ (bp_pid, bp_address) ]

    # Cleanup deferred code breakpoints.
    self.__deferredBP.pop(pid, None)
def __cleanup_module(self, event):
    """
    Auxiliary method for L{_notify_unload_dll}.

    Erases all breakpoints set on addresses belonging to the module
    being unloaded.
    """
    pid     = event.get_pid()
    process = event.get_process()
    module  = event.get_module()

    # Cleanup thread breakpoints on this module.
    # NOTE: the original also fetched the Thread object per tid but never
    # used it; that redundant lookup was removed.
    for tid in process.iter_thread_ids():

        # Running breakpoints.
        if tid in self.__runningBP:
            bplist = list(self.__runningBP[tid])
            for bp in bplist:
                bp_address = bp.get_address()
                if process.get_module_at_address(bp_address) == module:
                    self.__cleanup_breakpoint(event, bp)
                    self.__runningBP[tid].remove(bp)

        # Hardware breakpoints.
        if tid in self.__hardwareBP:
            bplist = list(self.__hardwareBP[tid])
            for bp in bplist:
                bp_address = bp.get_address()
                if process.get_module_at_address(bp_address) == module:
                    self.__cleanup_breakpoint(event, bp)
                    self.__hardwareBP[tid].remove(bp)

    # Cleanup code breakpoints on this module.
    # FIX: iterate over an explicit copy of the keys, since entries are
    # deleted inside the loop (also keeps this correct under Python 3).
    for (bp_pid, bp_address) in list(self.__codeBP.keys()):
        if bp_pid == pid:
            if process.get_module_at_address(bp_address) == module:
                bp = self.__codeBP[ (bp_pid, bp_address) ]
                self.__cleanup_breakpoint(event, bp)
                del self.__codeBP[ (bp_pid, bp_address) ]

    # Cleanup page breakpoints on this module (same pattern).
    for (bp_pid, bp_address) in list(self.__pageBP.keys()):
        if bp_pid == pid:
            if process.get_module_at_address(bp_address) == module:
                bp = self.__pageBP[ (bp_pid, bp_address) ]
                self.__cleanup_breakpoint(event, bp)
                del self.__pageBP[ (bp_pid, bp_address) ]
#------------------------------------------------------------------------------
# Defining breakpoints.
# Code breakpoints.
def define_code_breakpoint(self, dwProcessId, address, condition = True,
                                                       action = None):
    """
    Creates a disabled code breakpoint at the given address.

    @see:
        L{has_code_breakpoint},
        L{get_code_breakpoint},
        L{enable_code_breakpoint},
        L{enable_one_shot_code_breakpoint},
        L{disable_code_breakpoint},
        L{erase_code_breakpoint}

    @type  dwProcessId: int
    @param dwProcessId: Process global ID.

    @type  address: int
    @param address: Memory address of the code instruction to break at.

    @type  condition: function
    @param condition: (Optional) Condition callback function.

        The callback signature is::

            def condition_callback(event):
                return True     # returns True or False

        Where B{event} is an L{Event} object, and the return value is a
        boolean (C{True} to dispatch the event, C{False} otherwise).

    @type  action: function
    @param action: (Optional) Action callback function.
        If specified, the event is handled by this callback instead of
        being dispatched normally.

        The callback signature is::

            def action_callback(event):
                pass        # no return value

        Where B{event} is an L{Event} object.

    @rtype:  L{CodeBreakpoint}
    @return: The code breakpoint object.
    """
    # Validates the process ID (raises KeyError when unknown).
    self.system.get_process(dwProcessId)

    bp  = CodeBreakpoint(address, condition, action)
    key = (dwProcessId, bp.get_address())
    if key in self.__codeBP:
        msg = "Already exists (PID %d) : %r"
        raise KeyError(msg % (dwProcessId, self.__codeBP[key]))
    self.__codeBP[key] = bp
    return bp
# Page breakpoints.
def define_page_breakpoint(self, dwProcessId, address, pages = 1,
                                               condition = True,
                                               action = None):
    """
    Creates a disabled page breakpoint at the given address.

    @see:
        L{has_page_breakpoint},
        L{get_page_breakpoint},
        L{enable_page_breakpoint},
        L{enable_one_shot_page_breakpoint},
        L{disable_page_breakpoint},
        L{erase_page_breakpoint}

    @type  dwProcessId: int
    @param dwProcessId: Process global ID.

    @type  address: int
    @param address: Memory address of the first page to watch.

    @type  pages: int
    @param pages: Number of pages to watch.

    @type  condition: function
    @param condition: (Optional) Condition callback function.

        The callback signature is::

            def condition_callback(event):
                return True     # returns True or False

        Where B{event} is an L{Event} object, and the return value is a
        boolean (C{True} to dispatch the event, C{False} otherwise).

    @type  action: function
    @param action: (Optional) Action callback function.
        If specified, the event is handled by this callback instead of
        being dispatched normally.

        The callback signature is::

            def action_callback(event):
                pass        # no return value

        Where B{event} is an L{Event} object.

    @rtype:  L{PageBreakpoint}
    @return: The page breakpoint object.
    """
    # Validates the process ID (raises KeyError when unknown).
    self.system.get_process(dwProcessId)

    bp       = PageBreakpoint(address, pages, condition, action)
    begin    = bp.get_address()
    end      = begin + bp.get_size()
    pageSize = MemoryAddresses.pageSize

    # First pass: fail if any page in the range is already being watched.
    page_addr = begin
    while page_addr < end:
        key = (dwProcessId, page_addr)
        if key in self.__pageBP:
            msg = "Already exists (PID %d) : %r"
            msg = msg % (dwProcessId, self.__pageBP[key])
            raise KeyError(msg)
        page_addr = page_addr + pageSize

    # Second pass: map every page in the range to the breakpoint object.
    page_addr = begin
    while page_addr < end:
        self.__pageBP[ (dwProcessId, page_addr) ] = bp
        page_addr = page_addr + pageSize
    return bp
# Hardware breakpoints.
def define_hardware_breakpoint(self, dwThreadId, address,
                               triggerFlag = BP_BREAK_ON_ACCESS,
                               sizeFlag = BP_WATCH_DWORD,
                               condition = True,
                               action = None):
    """
    Creates a disabled hardware breakpoint at the given address.

    @see:
        L{has_hardware_breakpoint},
        L{get_hardware_breakpoint},
        L{enable_hardware_breakpoint},
        L{enable_one_shot_hardware_breakpoint},
        L{disable_hardware_breakpoint},
        L{erase_hardware_breakpoint}

    @note:
        Hardware breakpoints do not seem to work properly on VirtualBox.
        See U{http://www.virtualbox.org/ticket/477}.

    @type  dwThreadId: int
    @param dwThreadId: Thread global ID.

    @type  address: int
    @param address: Memory address to watch.

    @type  triggerFlag: int
    @param triggerFlag: Trigger of breakpoint. Must be one of the following:

        - L{BP_BREAK_ON_EXECUTION}

          Break on code execution.

        - L{BP_BREAK_ON_WRITE}

          Break on memory write.

        - L{BP_BREAK_ON_ACCESS}

          Break on memory read or write.

    @type  sizeFlag: int
    @param sizeFlag: Size of breakpoint. Must be one of the following:

        - L{BP_WATCH_BYTE}

          One (1) byte in size.

        - L{BP_WATCH_WORD}

          Two (2) bytes in size.

        - L{BP_WATCH_DWORD}

          Four (4) bytes in size.

        - L{BP_WATCH_QWORD}

          Eight (8) bytes in size.

    @type  condition: function
    @param condition: (Optional) Condition callback function.

        The callback signature is::

            def condition_callback(event):
                return True     # returns True or False

        Where B{event} is an L{Event} object,
        and the return value is a boolean
        (C{True} to dispatch the event, C{False} otherwise).

    @type  action: function
    @param action: (Optional) Action callback function.
        If specified, the event is handled by this callback instead of
        being dispatched normally.

        The callback signature is::

            def action_callback(event):
                pass        # no return value

        Where B{event} is an L{Event} object,
        and the return value is a boolean
        (C{True} to dispatch the event, C{False} otherwise).

    @rtype:  L{HardwareBreakpoint}
    @return: The hardware breakpoint object.
    """
    # Validates the thread ID (raises KeyError when unknown).
    thread = self.system.get_thread(dwThreadId)
    bp = HardwareBreakpoint(address, triggerFlag, sizeFlag, condition,
                            action)
    begin = bp.get_address()
    end = begin + bp.get_size()
    # Refuse overlapping hardware breakpoints on the same thread.
    if dwThreadId in self.__hardwareBP:
        bpSet = self.__hardwareBP[dwThreadId]
        for oldbp in bpSet:
            old_begin = oldbp.get_address()
            old_end = old_begin + oldbp.get_size()
            if MemoryAddresses.do_ranges_intersect(begin, end, old_begin,
                                                   old_end):
                msg = "Already exists (TID %d) : %r" % (dwThreadId, oldbp)
                raise KeyError(msg)
    else:
        bpSet = set()
        self.__hardwareBP[dwThreadId] = bpSet
    bpSet.add(bp)
    return bp
#------------------------------------------------------------------------------
# Checking breakpoint definitions.
def has_code_breakpoint(self, dwProcessId, address):
    """
    Checks if a code breakpoint is defined at the given address.

    @see:
        L{define_code_breakpoint},
        L{get_code_breakpoint},
        L{erase_code_breakpoint},
        L{enable_code_breakpoint},
        L{enable_one_shot_code_breakpoint},
        L{disable_code_breakpoint}

    @type  dwProcessId: int
    @param dwProcessId: Process global ID.

    @type  address: int
    @param address: Memory address of breakpoint.

    @rtype:  bool
    @return: C{True} if the breakpoint is defined, C{False} otherwise.
    """
    key = (dwProcessId, address)
    return key in self.__codeBP
def has_page_breakpoint(self, dwProcessId, address):
    """
    Checks if a page breakpoint is defined at the given address.

    @see:
        L{define_page_breakpoint},
        L{get_page_breakpoint},
        L{erase_page_breakpoint},
        L{enable_page_breakpoint},
        L{enable_one_shot_page_breakpoint},
        L{disable_page_breakpoint}

    @type  dwProcessId: int
    @param dwProcessId: Process global ID.

    @type  address: int
    @param address: Memory address of breakpoint.

    @rtype:  bool
    @return: C{True} if the breakpoint is defined, C{False} otherwise.
    """
    key = (dwProcessId, address)
    return key in self.__pageBP
def has_hardware_breakpoint(self, dwThreadId, address):
    """
    Checks if a hardware breakpoint is defined at the given address.

    @see:
        L{define_hardware_breakpoint},
        L{get_hardware_breakpoint},
        L{erase_hardware_breakpoint},
        L{enable_hardware_breakpoint},
        L{enable_one_shot_hardware_breakpoint},
        L{disable_hardware_breakpoint}

    @type  dwThreadId: int
    @param dwThreadId: Thread global ID.

    @type  address: int
    @param address: Memory address of breakpoint.

    @rtype:  bool
    @return: C{True} if the breakpoint is defined, C{False} otherwise.
    """
    bpSet = self.__hardwareBP.get(dwThreadId, ())
    return any( bp.get_address() == address for bp in bpSet )
#------------------------------------------------------------------------------
# Getting breakpoints.
def get_code_breakpoint(self, dwProcessId, address):
"""
Returns the internally used breakpoint object,
for the code breakpoint defined at the given address.
@warning: It's usually best to call the L{Debug} methods
instead of accessing the breakpoint objects directly.
@see:
L{define_code_breakpoint},
L{has_code_breakpoint},
L{enable_code_breakpoint},
L{enable_one_shot_code_breakpoint},
L{disable_code_breakpoint},
L{erase_code_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address where the breakpoint is defined.
@rtype: L{CodeBreakpoint}
@return: The code breakpoint object.
"""
key = (dwProcessId, address)
if key not in self.__codeBP:
msg = "No breakpoint at process %d, address %s"
address = HexDump.address(address)
raise KeyError(msg % (dwProcessId, address))
return self.__codeBP[key]
def get_page_breakpoint(self, dwProcessId, address):
"""
Returns the internally used breakpoint object,
for the page breakpoint defined at the given address.
@warning: It's usually best to call the L{Debug} methods
instead of accessing the breakpoint objects directly.
@see:
L{define_page_breakpoint},
L{has_page_breakpoint},
L{enable_page_breakpoint},
L{enable_one_shot_page_breakpoint},
L{disable_page_breakpoint},
L{erase_page_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address where the breakpoint is defined.
@rtype: L{PageBreakpoint}
@return: The page breakpoint object.
"""
key = (dwProcessId, address)
if key not in self.__pageBP:
msg = "No breakpoint at process %d, address %s"
address = HexDump.addresS(address)
raise KeyError(msg % (dwProcessId, address))
return self.__pageBP[key]
def get_hardware_breakpoint(self, dwThreadId, address):
"""
Returns the internally used breakpoint object,
for the code breakpoint defined at the given address.
@warning: It's usually best to call the L{Debug} methods
instead of accessing the breakpoint objects directly.
@see:
L{define_hardware_breakpoint},
L{has_hardware_breakpoint},
L{get_code_breakpoint},
L{enable_hardware_breakpoint},
L{enable_one_shot_hardware_breakpoint},
L{disable_hardware_breakpoint},
L{erase_hardware_breakpoint}
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@type address: int
@param address: Memory address where the breakpoint is defined.
@rtype: L{HardwareBreakpoint}
@return: The hardware breakpoint object.
"""
if dwThreadId not in self.__hardwareBP:
msg = "No hardware breakpoints set for thread %d"
raise KeyError(msg % dwThreadId)
for bp in self.__hardwareBP[dwThreadId]:
if bp.is_here(address):
return bp
msg = "No hardware breakpoint at thread %d, address %s"
raise KeyError(msg % (dwThreadId, HexDump.address(address)))
#------------------------------------------------------------------------------
# Enabling and disabling breakpoints.
def enable_code_breakpoint(self, dwProcessId, address):
"""
Enables the code breakpoint at the given address.
@see:
L{define_code_breakpoint},
L{has_code_breakpoint},
L{enable_one_shot_code_breakpoint},
L{disable_code_breakpoint}
L{erase_code_breakpoint},
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
p = self.system.get_process(dwProcessId)
bp = self.get_code_breakpoint(dwProcessId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.enable(p, None) # XXX HACK thread is not used
def enable_page_breakpoint(self, dwProcessId, address):
"""
Enables the page breakpoint at the given address.
@see:
L{define_page_breakpoint},
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_one_shot_page_breakpoint},
L{disable_page_breakpoint}
L{erase_page_breakpoint},
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
p = self.system.get_process(dwProcessId)
bp = self.get_page_breakpoint(dwProcessId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.enable(p, None) # XXX HACK thread is not used
def enable_hardware_breakpoint(self, dwThreadId, address):
"""
Enables the hardware breakpoint at the given address.
@see:
L{define_hardware_breakpoint},
L{has_hardware_breakpoint},
L{get_hardware_breakpoint},
L{enable_one_shot_hardware_breakpoint},
L{disable_hardware_breakpoint}
L{erase_hardware_breakpoint},
@note: Do not set hardware breakpoints while processing the system
breakpoint event.
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
t = self.system.get_thread(dwThreadId)
bp = self.get_hardware_breakpoint(dwThreadId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.enable(None, t) # XXX HACK process is not used
def enable_one_shot_code_breakpoint(self, dwProcessId, address):
"""
Enables the code breakpoint at the given address for only one shot.
@see:
L{define_code_breakpoint},
L{has_code_breakpoint},
L{get_code_breakpoint},
L{enable_code_breakpoint},
L{disable_code_breakpoint}
L{erase_code_breakpoint},
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
p = self.system.get_process(dwProcessId)
bp = self.get_code_breakpoint(dwProcessId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.one_shot(p, None) # XXX HACK thread is not used
def enable_one_shot_page_breakpoint(self, dwProcessId, address):
"""
Enables the page breakpoint at the given address for only one shot.
@see:
L{define_page_breakpoint},
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_page_breakpoint},
L{disable_page_breakpoint}
L{erase_page_breakpoint},
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
p = self.system.get_process(dwProcessId)
bp = self.get_page_breakpoint(dwProcessId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.one_shot(p, None) # XXX HACK thread is not used
def enable_one_shot_hardware_breakpoint(self, dwThreadId, address):
"""
Enables the hardware breakpoint at the given address for only one shot.
@see:
L{define_hardware_breakpoint},
L{has_hardware_breakpoint},
L{get_hardware_breakpoint},
L{enable_hardware_breakpoint},
L{disable_hardware_breakpoint}
L{erase_hardware_breakpoint},
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
t = self.system.get_thread(dwThreadId)
bp = self.get_hardware_breakpoint(dwThreadId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.one_shot(None, t) # XXX HACK process is not used
def disable_code_breakpoint(self, dwProcessId, address):
"""
Disables the code breakpoint at the given address.
@see:
L{define_code_breakpoint},
L{has_code_breakpoint},
L{get_code_breakpoint},
L{enable_code_breakpoint}
L{enable_one_shot_code_breakpoint},
L{erase_code_breakpoint},
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
p = self.system.get_process(dwProcessId)
bp = self.get_code_breakpoint(dwProcessId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.disable(p, None) # XXX HACK thread is not used
def disable_page_breakpoint(self, dwProcessId, address):
"""
Disables the page breakpoint at the given address.
@see:
L{define_page_breakpoint},
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_page_breakpoint}
L{enable_one_shot_page_breakpoint},
L{erase_page_breakpoint},
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
p = self.system.get_process(dwProcessId)
bp = self.get_page_breakpoint(dwProcessId, address)
if bp.is_running():
self.__del_running_bp_from_all_threads(bp)
bp.disable(p, None) # XXX HACK thread is not used
def disable_hardware_breakpoint(self, dwThreadId, address):
"""
Disables the hardware breakpoint at the given address.
@see:
L{define_hardware_breakpoint},
L{has_hardware_breakpoint},
L{get_hardware_breakpoint},
L{enable_hardware_breakpoint}
L{enable_one_shot_hardware_breakpoint},
L{erase_hardware_breakpoint},
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
t = self.system.get_thread(dwThreadId)
p = t.get_process()
bp = self.get_hardware_breakpoint(dwThreadId, address)
if bp.is_running():
self.__del_running_bp(dwThreadId, bp)
bp.disable(p, t)
#------------------------------------------------------------------------------
# Undefining (erasing) breakpoints.
def erase_code_breakpoint(self, dwProcessId, address):
"""
Erases the code breakpoint at the given address.
@see:
L{define_code_breakpoint},
L{has_code_breakpoint},
L{get_code_breakpoint},
L{enable_code_breakpoint},
L{enable_one_shot_code_breakpoint},
L{disable_code_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
bp = self.get_code_breakpoint(dwProcessId, address)
if not bp.is_disabled():
self.disable_code_breakpoint(dwProcessId, address)
del self.__codeBP[ (dwProcessId, address) ]
def erase_page_breakpoint(self, dwProcessId, address):
"""
Erases the page breakpoint at the given address.
@see:
L{define_page_breakpoint},
L{has_page_breakpoint},
L{get_page_breakpoint},
L{enable_page_breakpoint},
L{enable_one_shot_page_breakpoint},
L{disable_page_breakpoint}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
bp = self.get_page_breakpoint(dwProcessId, address)
begin = bp.get_address()
end = begin + bp.get_size()
if not bp.is_disabled():
self.disable_page_breakpoint(dwProcessId, address)
address = begin
pageSize = MemoryAddresses.pageSize
while address < end:
del self.__pageBP[ (dwProcessId, address) ]
address = address + pageSize
def erase_hardware_breakpoint(self, dwThreadId, address):
"""
Erases the hardware breakpoint at the given address.
@see:
L{define_hardware_breakpoint},
L{has_hardware_breakpoint},
L{get_hardware_breakpoint},
L{enable_hardware_breakpoint},
L{enable_one_shot_hardware_breakpoint},
L{disable_hardware_breakpoint}
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@type address: int
@param address: Memory address of breakpoint.
"""
bp = self.get_hardware_breakpoint(dwThreadId, address)
if not bp.is_disabled():
self.disable_hardware_breakpoint(dwThreadId, address)
bpSet = self.__hardwareBP[dwThreadId]
bpSet.remove(bp)
if not bpSet:
del self.__hardwareBP[dwThreadId]
#------------------------------------------------------------------------------
# Listing breakpoints.
def get_all_breakpoints(self):
"""
Returns all breakpoint objects as a list of tuples.
Each tuple contains:
- Process global ID to which the breakpoint applies.
- Thread global ID to which the breakpoint applies, or C{None}.
- The L{Breakpoint} object itself.
@note: If you're only interested in a specific breakpoint type, or in
breakpoints for a specific process or thread, it's probably faster
to call one of the following methods:
- L{get_all_code_breakpoints}
- L{get_all_page_breakpoints}
- L{get_all_hardware_breakpoints}
- L{get_process_code_breakpoints}
- L{get_process_page_breakpoints}
- L{get_process_hardware_breakpoints}
- L{get_thread_hardware_breakpoints}
@rtype: list of tuple( pid, tid, bp )
@return: List of all breakpoints.
"""
bplist = list()
# Get the code breakpoints.
for (pid, bp) in self.get_all_code_breakpoints():
bplist.append( (pid, None, bp) )
# Get the page breakpoints.
for (pid, bp) in self.get_all_page_breakpoints():
bplist.append( (pid, None, bp) )
# Get the hardware breakpoints.
for (tid, bp) in self.get_all_hardware_breakpoints():
pid = self.system.get_thread(tid).get_pid()
bplist.append( (pid, tid, bp) )
# Return the list of breakpoints.
return bplist
def get_all_code_breakpoints(self):
"""
@rtype: list of tuple( int, L{CodeBreakpoint} )
@return: All code breakpoints as a list of tuples (pid, bp).
"""
return [ (pid, bp) for ((pid, address), bp) in self.__codeBP.iteritems() ]
def get_all_page_breakpoints(self):
"""
@rtype: list of tuple( int, L{PageBreakpoint} )
@return: All page breakpoints as a list of tuples (pid, bp).
"""
## return list( set( [ (pid, bp) for ((pid, address), bp) in self.__pageBP.iteritems() ] ) )
result = set()
for ((pid, address), bp) in self.__pageBP.iteritems():
result.add( (pid, bp) )
return list(result)
def get_all_hardware_breakpoints(self):
"""
@rtype: list of tuple( int, L{HardwareBreakpoint} )
@return: All hardware breakpoints as a list of tuples (tid, bp).
"""
result = list()
for (tid, bplist) in self.__hardwareBP.iteritems():
for bp in bplist:
result.append( (tid, bp) )
return result
def get_process_breakpoints(self, dwProcessId):
"""
Returns all breakpoint objects for the given process as a list of tuples.
Each tuple contains:
- Process global ID to which the breakpoint applies.
- Thread global ID to which the breakpoint applies, or C{None}.
- The L{Breakpoint} object itself.
@note: If you're only interested in a specific breakpoint type, or in
breakpoints for a specific process or thread, it's probably faster
to call one of the following methods:
- L{get_all_code_breakpoints}
- L{get_all_page_breakpoints}
- L{get_all_hardware_breakpoints}
- L{get_process_code_breakpoints}
- L{get_process_page_breakpoints}
- L{get_process_hardware_breakpoints}
- L{get_thread_hardware_breakpoints}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@rtype: list of tuple( pid, tid, bp )
@return: List of all breakpoints for the given process.
"""
bplist = list()
# Get the code breakpoints.
for bp in self.get_process_code_breakpoints(dwProcessId):
bplist.append( (dwProcessId, None, bp) )
# Get the page breakpoints.
for bp in self.get_process_page_breakpoints(dwProcessId):
bplist.append( (dwProcessId, None, bp) )
# Get the hardware breakpoints.
for (tid, bp) in self.get_process_hardware_breakpoints(dwProcessId):
pid = self.system.get_thread(tid).get_pid()
bplist.append( (dwProcessId, tid, bp) )
# Return the list of breakpoints.
return bplist
def get_process_code_breakpoints(self, dwProcessId):
"""
@type dwProcessId: int
@param dwProcessId: Process global ID.
@rtype: list of L{CodeBreakpoint}
@return: All code breakpoints for the given process.
"""
return [ bp for ((pid, address), bp) in self.__codeBP.iteritems() \
if pid == dwProcessId ]
def get_process_page_breakpoints(self, dwProcessId):
"""
@type dwProcessId: int
@param dwProcessId: Process global ID.
@rtype: list of L{PageBreakpoint}
@return: All page breakpoints for the given process.
"""
return [ bp for ((pid, address), bp) in self.__pageBP.iteritems() \
if pid == dwProcessId ]
def get_thread_hardware_breakpoints(self, dwThreadId):
"""
@see: L{get_process_hardware_breakpoints}
@type dwThreadId: int
@param dwThreadId: Thread global ID.
@rtype: list of L{HardwareBreakpoint}
@return: All hardware breakpoints for the given thread.
"""
result = list()
for (tid, bplist) in self.__hardwareBP.iteritems():
if tid == dwThreadId:
for bp in bplist:
result.append(bp)
return result
def get_process_hardware_breakpoints(self, dwProcessId):
"""
@see: L{get_thread_hardware_breakpoints}
@type dwProcessId: int
@param dwProcessId: Process global ID.
@rtype: list of tuple( int, L{HardwareBreakpoint} )
@return: All hardware breakpoints for each thread in the given process
as a list of tuples (tid, bp).
"""
result = list()
aProcess = self.system.get_process(dwProcessId)
for dwThreadId in aProcess.iter_thread_ids():
if dwThreadId in self.__hardwareBP:
bplist = self.__hardwareBP[dwThreadId]
for bp in bplist:
result.append( (dwThreadId, bp) )
return result
## def get_all_hooks(self):
## """
## @see: L{get_process_hooks}
##
## @rtype: list of tuple( int, int, L{Hook} )
## @return: All defined hooks as a list of tuples (pid, address, hook).
## """
## return [ (pid, address, hook) \
## for ((pid, address), hook) in self.__hook_objects ]
##
## def get_process_hooks(self, dwProcessId):
## """
## @see: L{get_all_hooks}
##
## @type dwProcessId: int
## @param dwProcessId: Process global ID.
##
## @rtype: list of tuple( int, int, L{Hook} )
## @return: All hooks for the given process as a list of tuples
## (pid, address, hook).
## """
## return [ (pid, address, hook) \
## for ((pid, address), hook) in self.__hook_objects \
## if pid == dwProcessId ]
#------------------------------------------------------------------------------
# Batch operations on all breakpoints.
    def enable_all_breakpoints(self):
        """
        Enables all disabled breakpoints in all processes.

        @see:
            enable_code_breakpoint,
            enable_page_breakpoint,
            enable_hardware_breakpoint
        """
        # enable code breakpoints
        # (the comments previously said "disable", but the code enables)
        for (pid, bp) in self.get_all_code_breakpoints():
            if bp.is_disabled():
                self.enable_code_breakpoint(pid, bp.get_address())
        # enable page breakpoints
        for (pid, bp) in self.get_all_page_breakpoints():
            if bp.is_disabled():
                self.enable_page_breakpoint(pid, bp.get_address())
        # enable hardware breakpoints
        for (tid, bp) in self.get_all_hardware_breakpoints():
            if bp.is_disabled():
                self.enable_hardware_breakpoint(tid, bp.get_address())
    def enable_one_shot_all_breakpoints(self):
        """
        Enables for one shot all disabled breakpoints in all processes.

        @see:
            enable_one_shot_code_breakpoint,
            enable_one_shot_page_breakpoint,
            enable_one_shot_hardware_breakpoint
        """
        # enable code breakpoints for one shot
        # (the comments previously said "disable", but the code enables)
        for (pid, bp) in self.get_all_code_breakpoints():
            if bp.is_disabled():
                self.enable_one_shot_code_breakpoint(pid, bp.get_address())
        # enable page breakpoints for one shot
        for (pid, bp) in self.get_all_page_breakpoints():
            if bp.is_disabled():
                self.enable_one_shot_page_breakpoint(pid, bp.get_address())
        # enable hardware breakpoints for one shot
        for (tid, bp) in self.get_all_hardware_breakpoints():
            if bp.is_disabled():
                self.enable_one_shot_hardware_breakpoint(tid, bp.get_address())
def disable_all_breakpoints(self):
"""
Disables all breakpoints in all processes.
@see:
disable_code_breakpoint,
disable_page_breakpoint,
disable_hardware_breakpoint
"""
# disable code breakpoints
for (pid, bp) in self.get_all_code_breakpoints():
self.disable_code_breakpoint(pid, bp.get_address())
# disable page breakpoints
for (pid, bp) in self.get_all_page_breakpoints():
self.disable_page_breakpoint(pid, bp.get_address())
# disable hardware breakpoints
for (tid, bp) in self.get_all_hardware_breakpoints():
self.disable_hardware_breakpoint(tid, bp.get_address())
def erase_all_breakpoints(self):
"""
Erases all breakpoints in all processes.
@see:
erase_code_breakpoint,
erase_page_breakpoint,
erase_hardware_breakpoint
"""
# This should be faster but let's not trust the GC so much :P
# self.disable_all_breakpoints()
# self.__codeBP = dict()
# self.__pageBP = dict()
# self.__hardwareBP = dict()
# self.__runningBP = dict()
# self.__hook_objects = dict()
## # erase hooks
## for (pid, address, hook) in self.get_all_hooks():
## self.dont_hook_function(pid, address)
# erase code breakpoints
for (pid, bp) in self.get_all_code_breakpoints():
self.erase_code_breakpoint(pid, bp.get_address())
# erase page breakpoints
for (pid, bp) in self.get_all_page_breakpoints():
self.erase_page_breakpoint(pid, bp.get_address())
# erase hardware breakpoints
for (tid, bp) in self.get_all_hardware_breakpoints():
self.erase_hardware_breakpoint(tid, bp.get_address())
#------------------------------------------------------------------------------
# Batch operations on breakpoints per process.
def enable_process_breakpoints(self, dwProcessId):
"""
Enables all disabled breakpoints for the given process.
@type dwProcessId: int
@param dwProcessId: Process global ID.
"""
# enable code breakpoints
for bp in self.get_process_code_breakpoints(dwProcessId):
if bp.is_disabled():
self.enable_code_breakpoint(dwProcessId, bp.get_address())
# enable page breakpoints
for bp in self.get_process_page_breakpoints(dwProcessId):
if bp.is_disabled():
self.enable_page_breakpoint(dwProcessId, bp.get_address())
# enable hardware breakpoints
if self.system.has_process(dwProcessId):
aProcess = self.system.get_process(dwProcessId)
else:
aProcess = Process(dwProcessId)
aProcess.scan_threads()
for aThread in aProcess.iter_threads():
dwThreadId = aThread.get_tid()
for bp in self.get_thread_hardware_breakpoints(dwThreadId):
if bp.is_disabled():
self.enable_hardware_breakpoint(dwThreadId, bp.get_address())
def enable_one_shot_process_breakpoints(self, dwProcessId):
"""
Enables for one shot all disabled breakpoints for the given process.
@type dwProcessId: int
@param dwProcessId: Process global ID.
"""
# enable code breakpoints for one shot
for bp in self.get_process_code_breakpoints(dwProcessId):
if bp.is_disabled():
self.enable_one_shot_code_breakpoint(dwProcessId, bp.get_address())
# enable page breakpoints for one shot
for bp in self.get_process_page_breakpoints(dwProcessId):
if bp.is_disabled():
self.enable_one_shot_page_breakpoint(dwProcessId, bp.get_address())
# enable hardware breakpoints for one shot
if self.system.has_process(dwProcessId):
aProcess = self.system.get_process(dwProcessId)
else:
aProcess = Process(dwProcessId)
aProcess.scan_threads()
for aThread in aProcess.iter_threads():
dwThreadId = aThread.get_tid()
for bp in self.get_thread_hardware_breakpoints(dwThreadId):
if bp.is_disabled():
self.enable_one_shot_hardware_breakpoint(dwThreadId, bp.get_address())
def disable_process_breakpoints(self, dwProcessId):
"""
Disables all breakpoints for the given process.
@type dwProcessId: int
@param dwProcessId: Process global ID.
"""
# disable code breakpoints
for bp in self.get_process_code_breakpoints(dwProcessId):
self.disable_code_breakpoint(dwProcessId, bp.get_address())
# disable page breakpoints
for bp in self.get_process_page_breakpoints(dwProcessId):
self.disable_page_breakpoint(dwProcessId, bp.get_address())
# disable hardware breakpoints
if self.system.has_process(dwProcessId):
aProcess = self.system.get_process(dwProcessId)
else:
aProcess = Process(dwProcessId)
aProcess.scan_threads()
for aThread in aProcess.iter_threads():
dwThreadId = aThread.get_tid()
for bp in self.get_thread_hardware_breakpoints(dwThreadId):
self.disable_hardware_breakpoint(dwThreadId, bp.get_address())
def erase_process_breakpoints(self, dwProcessId):
"""
Erases all breakpoints for the given process.
@type dwProcessId: int
@param dwProcessId: Process global ID.
"""
# disable breakpoints first
# if an error occurs, no breakpoint is erased
self.disable_process_breakpoints(dwProcessId)
## # erase hooks
## for address, hook in self.get_process_hooks(dwProcessId):
## self.dont_hook_function(dwProcessId, address)
# erase code breakpoints
for bp in self.get_process_code_breakpoints(dwProcessId):
self.erase_code_breakpoint(dwProcessId, bp.get_address())
# erase page breakpoints
for bp in self.get_process_page_breakpoints(dwProcessId):
self.erase_page_breakpoint(dwProcessId, bp.get_address())
# erase hardware breakpoints
if self.system.has_process(dwProcessId):
aProcess = self.system.get_process(dwProcessId)
else:
aProcess = Process(dwProcessId)
aProcess.scan_threads()
for aThread in aProcess.iter_threads():
dwThreadId = aThread.get_tid()
for bp in self.get_thread_hardware_breakpoints(dwThreadId):
self.erase_hardware_breakpoint(dwThreadId, bp.get_address())
#------------------------------------------------------------------------------
# Internal handlers of debug events.
    def _notify_guard_page(self, event):
        """
        Notify breakpoints of a guard page exception event.

        Dispatches the exception to the page breakpoint covering the
        faulting page, if one exists and is armed; otherwise the exception
        is passed through to the debugee untouched.

        @type  event: L{ExceptionEvent}
        @param event: Guard page exception event.

        @rtype:  bool
        @return: C{True} to call the user-defined handle, C{False} otherwise.
        """
        address = event.get_fault_address()
        pid = event.get_pid()
        bCallHandler = True
        # Align address to page boundary.
        # Page breakpoints are keyed by page-aligned addresses.
        mask = ~(MemoryAddresses.pageSize - 1)
        address = address & mask
        # Do we have an active page breakpoint there?
        key = (pid, address)
        if key in self.__pageBP:
            bp = self.__pageBP[key]
            if bp.is_enabled() or bp.is_one_shot():
                # Breakpoint is ours. Swallow the exception.
                event.continueStatus = win32.DBG_CONTINUE
##                event.continueStatus = win32.DBG_EXCEPTION_HANDLED
                # Hit the breakpoint.
                bp.hit(event)
                # Remember breakpoints in RUNNING state so the matching
                # single-step event can re-arm them later.
                if bp.is_running():
                    tid = event.get_tid()
                    self.__add_running_bp(tid, bp)
                # Evaluate the breakpoint condition.
                bCondition = bp.eval_condition(event)
                # If the breakpoint is automatic, run the action.
                # If not, notify the user.
                if bCondition and bp.is_automatic():
                    bp.run_action(event)
                    bCallHandler = False
                else:
                    bCallHandler = bCondition
        # If we don't have a breakpoint here pass the exception to the debugee.
        # This is a normally occurring exception so we shouldn't swallow it.
        else:
            event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED
        return bCallHandler
    def _notify_breakpoint(self, event):
        """
        Notify breakpoints of a breakpoint exception event.

        Dispatches the exception to the code breakpoint at the exception
        address, handles the system-defined initial breakpoint, and decides
        whether unknown breakpoint exceptions are swallowed (normal mode)
        or passed to the debugee (hostile mode).

        @type  event: L{ExceptionEvent}
        @param event: Breakpoint exception event.

        @rtype:  bool
        @return: C{True} to call the user-defined handle, C{False} otherwise.
        """
        address = event.get_exception_address()
        pid = event.get_pid()
        bCallHandler = True
        # Do we have an active code breakpoint there?
        key = (pid, address)
        if key in self.__codeBP:
            bp = self.__codeBP[key]
            if not bp.is_disabled():
                # Change the program counter (PC) to the exception address.
                # This accounts for the change in PC caused by
                # executing the breakpoint instruction, no matter
                # the size of it.
                aThread = event.get_thread()
                aThread.set_pc(address)
                # Swallow the exception.
                event.continueStatus = win32.DBG_CONTINUE
                # Hit the breakpoint.
                bp.hit(event)
                # Remember breakpoints in RUNNING state so the matching
                # single-step event can re-arm them later.
                if bp.is_running():
                    tid = event.get_tid()
                    self.__add_running_bp(tid, bp)
                # Evaluate the breakpoint condition.
                bCondition = bp.eval_condition(event)
                # If the breakpoint is automatic, run the action.
                # If not, notify the user.
                if bCondition and bp.is_automatic():
                    bCallHandler = bp.run_action(event)
                else:
                    bCallHandler = bCondition
        # Handle the system breakpoint.
        # TODO: examine the stack trace to figure out if it's really a
        # system breakpoint or an antidebug trick. The caller should be
        # inside ntdll if it's legit.
        elif event.get_process().is_system_defined_breakpoint(address):
            event.continueStatus = win32.DBG_CONTINUE
        # In hostile mode, if we don't have a breakpoint here pass the
        # exception to the debugee. In normal mode assume all breakpoint
        # exceptions are to be handled by the debugger.
        else:
            if self.in_hostile_mode():
                event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED
            else:
                event.continueStatus = win32.DBG_CONTINUE
        return bCallHandler
    def _notify_single_step(self, event):
        """
        Notify breakpoints of a single step exception event.

        Handles, in order: antidebug trap-flag tricks (hostile mode only),
        thread tracing mode, breakpoints left in RUNNING state, and hardware
        breakpoint hits reported through the Dr6 debug register.

        @type  event: L{ExceptionEvent}
        @param event: Single step exception event.

        @rtype:  bool
        @return: C{True} to call the user-defined handle, C{False} otherwise.
        """
        pid = event.get_pid()
        tid = event.get_tid()
        aThread = event.get_thread()
        aProcess = event.get_process()
        bCallHandler = True
        bIsOurs = False
        # In hostile mode set the default to pass the exception to the debugee.
        # If we later determine the exception is ours, hide it instead.
        old_continueStatus = event.continueStatus
        try:
            if self.in_hostile_mode():
                event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED
            # Single step support is implemented on x86/x64 architectures only.
            if self.system.arch not in (win32.ARCH_I386, win32.ARCH_AMD64):
                return bCallHandler
            # In hostile mode, read the last executed bytes to try to detect
            # some antidebug tricks. Skip this check in normal mode because
            # it'd slow things down.
            #
            # FIXME: weird opcode encodings may bypass this check!
            #
            # bFakeSingleStep: Ice Breakpoint undocumented instruction.
            # bHideTrapFlag: Don't let pushf instructions get the real value of
            #                the trap flag.
            # bNextIsPopFlags: Don't let popf instructions clear the trap flag.
            #
            bFakeSingleStep = False
            bLastIsPushFlags = False
            bNextIsPopFlags = False
            if self.in_hostile_mode():
                pc = aThread.get_pc()
                c = aProcess.read_char(pc - 1)
                if c == 0xF1:                    # int1
                    bFakeSingleStep = True
                elif c == 0x9C:                  # pushf
                    bLastIsPushFlags = True
                c = aProcess.peek_char(pc)
                if c == 0x66:                    # the only valid prefix for popf
                    c = aProcess.peek_char(pc + 1)
                if c == 0x9D:                    # popf
                    if bLastIsPushFlags:
                        bLastIsPushFlags = False  # they cancel each other out
                    else:
                        bNextIsPopFlags = True
            # When the thread is in tracing mode,
            # don't pass the exception to the debugee
            # and set the trap flag again.
            if self.is_tracing(tid):
                bIsOurs = True
                if not bFakeSingleStep:
                    event.continueStatus = win32.DBG_CONTINUE
                aThread.set_tf()
                # Don't let the debugee read or write the trap flag.
                # This code works in 32 and 64 bits thanks to the endianness.
                if bLastIsPushFlags or bNextIsPopFlags:
                    sp = aThread.get_sp()
                    flags = aProcess.read_dword(sp)
                    if bLastIsPushFlags:
                        flags &= ~Thread.Flags.Trap
                    else:    # if bNextIsPopFlags:
                        flags |= Thread.Flags.Trap
                    aProcess.write_dword(sp, flags)
            # Handle breakpoints in RUNNING state.
            running = self.__get_running_bp_set(tid)
            if running:
                bIsOurs = True
                if not bFakeSingleStep:
                    event.continueStatus = win32.DBG_CONTINUE
                bCallHandler = False
                while running:
                    try:
                        running.pop().hit(event)
                    except Exception, e:
                        warnings.warn(str(e), BreakpointWarning)
            # Handle hardware breakpoints.
            if tid in self.__hardwareBP:
                # Read Dr6 to know which debug registers fired,
                # then clear the hit bits for the next event.
                ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS)
                Dr6 = ctx['Dr6']
                ctx['Dr6'] = Dr6 & DebugRegister.clearHitMask
                aThread.set_context(ctx)
                bFoundBreakpoint = False
                bCondition = False
                # Iterate over a copy: callbacks may remove breakpoints.
                hwbpList = [ bp for bp in self.__hardwareBP[tid] ]
                for bp in hwbpList:
                    if not bp in self.__hardwareBP[tid]:
                        continue    # it was removed by a user-defined callback
                    slot = bp.get_slot()
                    if (slot is not None) and \
                                            (Dr6 & DebugRegister.hitMask[slot]):
                        if not bFoundBreakpoint:  # set before actions are called
                            if not bFakeSingleStep:
                                event.continueStatus = win32.DBG_CONTINUE
                        bFoundBreakpoint = True
                        bIsOurs = True
                        bp.hit(event)
                        if bp.is_running():
                            self.__add_running_bp(tid, bp)
                        bThisCondition = bp.eval_condition(event)
                        if bThisCondition and bp.is_automatic():
                            bp.run_action(event)
                            bThisCondition = False
                        bCondition = bCondition or bThisCondition
                if bFoundBreakpoint:
                    bCallHandler = bCondition
            # Always call the user-defined handler
            # when the thread is in tracing mode.
            if self.is_tracing(tid):
                bCallHandler = True
            # If we're not in hostile mode, by default we assume all single
            # step exceptions are caused by the debugger.
            if not bIsOurs and not self.in_hostile_mode():
                aThread.clear_tf()
        # If the user hit Control-C while we were inside the try block,
        # set the default continueStatus back.
        except:
            event.continueStatus = old_continueStatus
            raise
        return bCallHandler
    def _notify_load_dll(self, event):
        """
        Notify the loading of a DLL.

        Sets any deferred breakpoints whose labels resolve now that the
        module is loaded.

        @type  event: L{LoadDLLEvent}
        @param event: Load DLL event.

        @rtype:  bool
        @return: C{True} to call the user-defined handler, C{False} otherwise.
        """
        self.__set_deferred_breakpoints(event)
        return True
    def _notify_unload_dll(self, event):
        """
        Notify the unloading of a DLL.

        Cleans up breakpoint state associated with the unloaded module.

        @type  event: L{UnloadDLLEvent}
        @param event: Unload DLL event.

        @rtype:  bool
        @return: C{True} to call the user-defined handler, C{False} otherwise.
        """
        self.__cleanup_module(event)
        return True
    def _notify_exit_thread(self, event):
        """
        Notify the termination of a thread.

        Cleans up breakpoint state associated with the dead thread.

        @type  event: L{ExitThreadEvent}
        @param event: Exit thread event.

        @rtype:  bool
        @return: C{True} to call the user-defined handler, C{False} otherwise.
        """
        self.__cleanup_thread(event)
        return True
    def _notify_exit_process(self, event):
        """
        Notify the termination of a process.

        Cleans up breakpoint state associated with the dead process and
        its threads.

        @type  event: L{ExitProcessEvent}
        @param event: Exit process event.

        @rtype:  bool
        @return: C{True} to call the user-defined handler, C{False} otherwise.
        """
        self.__cleanup_process(event)
        self.__cleanup_thread(event)
        return True
#------------------------------------------------------------------------------
# This is the high level breakpoint interface. Here we don't have to care
# about defining or enabling breakpoints, and many errors are ignored
# (like for example setting the same breakpoint twice, here the second
# breakpoint replaces the first, much like in WinDBG). It should be easier
# and more intuitive, if less detailed. It also allows the use of deferred
# breakpoints.
#------------------------------------------------------------------------------
# Code breakpoints
def __set_break(self, pid, address, action, oneshot):
"""
Used by L{break_at} and L{stalk_at}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
@type action: function
@param action: (Optional) Action callback function.
See L{define_code_breakpoint} for more details.
@type oneshot: bool
@param oneshot: C{True} for one-shot breakpoints, C{False} otherwise.
@rtype: L{Breakpoint}
@return: Returns the new L{Breakpoint} object, or C{None} if the label
couldn't be resolved and the breakpoint was deferred. Deferred
breakpoints are set when the DLL they point to is loaded.
"""
if type(address) not in (int, long):
label = address
try:
address = self.system.get_process(pid).resolve_label(address)
if not address:
raise Exception()
except Exception:
try:
deferred = self.__deferredBP[pid]
except KeyError:
deferred = dict()
self.__deferredBP[pid] = deferred
if label in deferred:
msg = "Redefined deferred code breakpoint at %s in process ID %d"
msg = msg % (label, pid)
warnings.warn(msg, BreakpointWarning)
deferred[label] = (action, oneshot)
return None
if self.has_code_breakpoint(pid, address):
bp = self.get_code_breakpoint(pid, address)
if bp.get_action() != action: # can't use "is not", fails for bound methods
bp.set_action(action)
msg = "Redefined code breakpoint at %s in process ID %d"
msg = msg % (label, pid)
warnings.warn(msg, BreakpointWarning)
else:
self.define_code_breakpoint(pid, address, True, action)
bp = self.get_code_breakpoint(pid, address)
if oneshot:
if not bp.is_one_shot():
self.enable_one_shot_code_breakpoint(pid, address)
else:
if not bp.is_enabled():
self.enable_code_breakpoint(pid, address)
return bp
    def __clear_break(self, pid, address):
        """
        Used by L{dont_break_at} and L{dont_stalk_at}.
        Removes a code breakpoint and/or its matching deferred entry.
        @type  pid: int
        @param pid: Process global ID.
        @type  address: int or str
        @param address:
            Memory address of code instruction to break at. It can be an
            integer value for the actual address or a string with a label
            to be resolved.
        """
        if type(address) not in (int, long):
            # A label was given: first drop any deferred breakpoint that was
            # registered under it, then try to resolve it to an address.
            unknown = True
            label = address
            try:
                deferred = self.__deferredBP[pid]
                del deferred[label]
                unknown = False  # a deferred entry existed, so it was known
            except KeyError:
##                traceback.print_last()        # XXX DEBUG
                pass
            aProcess = self.system.get_process(pid)
            try:
                address = aProcess.resolve_label(label)
                if not address:
                    raise Exception()
            except Exception:
##                traceback.print_last()        # XXX DEBUG
                # Label can't be resolved AND no deferred entry existed:
                # the caller asked to clear something we never knew about.
                if unknown:
                    msg = ("Can't clear unknown code breakpoint"
                           " at %s in process ID %d")
                    msg = msg % (label, pid)
                    warnings.warn(msg, BreakpointWarning)
                return
        # Erase the defined breakpoint at the (resolved) address, if any.
        if self.has_code_breakpoint(pid, address):
            self.erase_code_breakpoint(pid, address)
def __set_deferred_breakpoints(self, event):
"""
Used internally. Sets all deferred breakpoints for a DLL when it's
loaded.
@type event: L{LoadDLLEvent}
@param event: Load DLL event.
"""
pid = event.get_pid()
try:
deferred = self.__deferredBP[pid]
except KeyError:
return
aProcess = event.get_process()
for (label, (action, oneshot)) in deferred.items():
try:
address = aProcess.resolve_label(label)
except Exception:
continue
del deferred[label]
try:
self.__set_break(pid, address, action, oneshot)
except Exception:
msg = "Can't set deferred breakpoint %s at process ID %d"
msg = msg % (label, pid)
warnings.warn(msg, BreakpointWarning)
def get_all_deferred_code_breakpoints(self):
"""
Returns a list of deferred code breakpoints.
@rtype: tuple of (int, str, callable, bool)
@return: Tuple containing the following elements:
- Process ID where to set the breakpoint.
- Label pointing to the address where to set the breakpoint.
- Action callback for the breakpoint.
- C{True} of the breakpoint is one-shot, C{False} otherwise.
"""
result = []
for pid, deferred in self.__deferredBP.iteritems():
for (label, (action, oneshot)) in deferred.iteritems():
result.add( (pid, label, action, oneshot) )
return result
def get_process_deferred_code_breakpoints(self, dwProcessId):
"""
Returns a list of deferred code breakpoints.
@type dwProcessId: int
@param dwProcessId: Process ID.
@rtype: tuple of (int, str, callable, bool)
@return: Tuple containing the following elements:
- Label pointing to the address where to set the breakpoint.
- Action callback for the breakpoint.
- C{True} of the breakpoint is one-shot, C{False} otherwise.
"""
return [ (label, action, oneshot)
for (label, (action, oneshot))
in self.__deferredBP.get(dwProcessId, {}).iteritems() ]
def stalk_at(self, pid, address, action = None):
"""
Sets a one shot code breakpoint at the given process and address.
If instead of an address you pass a label, the breakpoint may be
deferred until the DLL it points to is loaded.
@see: L{break_at}, L{dont_stalk_at}
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
@type action: function
@param action: (Optional) Action callback function.
See L{define_code_breakpoint} for more details.
@rtype: bool
@return: C{True} if the breakpoint was set immediately, or C{False} if
it was deferred.
"""
bp = self.__set_break(pid, address, action, oneshot = True)
return bp is not None
def break_at(self, pid, address, action = None):
"""
Sets a code breakpoint at the given process and address.
If instead of an address you pass a label, the breakpoint may be
deferred until the DLL it points to is loaded.
@see: L{stalk_at}, L{dont_break_at}
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
@type action: function
@param action: (Optional) Action callback function.
See L{define_code_breakpoint} for more details.
@rtype: bool
@return: C{True} if the breakpoint was set immediately, or C{False} if
it was deferred.
"""
bp = self.__set_break(pid, address, action, oneshot = False)
return bp is not None
def dont_break_at(self, pid, address):
"""
Clears a code breakpoint set by L{break_at}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
"""
self.__clear_break(pid, address)
def dont_stalk_at(self, pid, address):
"""
Clears a code breakpoint set by L{stalk_at}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
"""
self.__clear_break(pid, address)
#------------------------------------------------------------------------------
# Function hooks
def hook_function(self, pid, address,
preCB = None, postCB = None,
paramCount = None, signature = None):
"""
Sets a function hook at the given address.
If instead of an address you pass a label, the hook may be
deferred until the DLL it points to is loaded.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
@type preCB: function
@param preCB: (Optional) Callback triggered on function entry.
The signature for the callback should be something like this::
def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags):
# return address
ra = params[0]
# function arguments start from here...
szFilename = event.get_process().peek_string(lpFilename)
# (...)
Note that all pointer types are treated like void pointers, so your
callback won't get the string or structure pointed to by it, but
the remote memory address instead. This is so to prevent the ctypes
library from being "too helpful" and trying to dereference the
pointer. To get the actual data being pointed to, use one of the
L{Process.read} methods.
@type postCB: function
@param postCB: (Optional) Callback triggered on function exit.
The signature for the callback should be something like this::
def post_LoadLibraryEx(event, return_value):
# (...)
@type paramCount: int
@param paramCount:
(Optional) Number of parameters for the C{preCB} callback,
not counting the return address. Parameters are read from
the stack and assumed to be DWORDs in 32 bits and QWORDs in 64.
This is a faster way to pull stack parameters in 32 bits, but in 64
bits (or with some odd APIs in 32 bits) it won't be useful, since
not all arguments to the hooked function will be of the same size.
For a more reliable and cross-platform way of hooking use the
C{signature} argument instead.
@type signature: tuple
@param signature:
(Optional) Tuple of C{ctypes} data types that constitute the
hooked function signature. When the function is called, this will
be used to parse the arguments from the stack. Overrides the
C{paramCount} argument.
@rtype: bool
@return: C{True} if the hook was set immediately, or C{False} if
it was deferred.
"""
try:
aProcess = self.system.get_process(pid)
except KeyError:
aProcess = Process(pid)
arch = aProcess.get_arch()
hookObj = Hook(preCB, postCB, paramCount, signature, arch)
bp = self.break_at(pid, address, hookObj)
return bp is not None
def stalk_function(self, pid, address,
preCB = None, postCB = None,
paramCount = None, signature = None):
"""
Sets a one-shot function hook at the given address.
If instead of an address you pass a label, the hook may be
deferred until the DLL it points to is loaded.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
@type preCB: function
@param preCB: (Optional) Callback triggered on function entry.
The signature for the callback should be something like this::
def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags):
# return address
ra = params[0]
# function arguments start from here...
szFilename = event.get_process().peek_string(lpFilename)
# (...)
Note that all pointer types are treated like void pointers, so your
callback won't get the string or structure pointed to by it, but
the remote memory address instead. This is so to prevent the ctypes
library from being "too helpful" and trying to dereference the
pointer. To get the actual data being pointed to, use one of the
L{Process.read} methods.
@type postCB: function
@param postCB: (Optional) Callback triggered on function exit.
The signature for the callback should be something like this::
def post_LoadLibraryEx(event, return_value):
# (...)
@type paramCount: int
@param paramCount:
(Optional) Number of parameters for the C{preCB} callback,
not counting the return address. Parameters are read from
the stack and assumed to be DWORDs in 32 bits and QWORDs in 64.
This is a faster way to pull stack parameters in 32 bits, but in 64
bits (or with some odd APIs in 32 bits) it won't be useful, since
not all arguments to the hooked function will be of the same size.
For a more reliable and cross-platform way of hooking use the
C{signature} argument instead.
@type signature: tuple
@param signature:
(Optional) Tuple of C{ctypes} data types that constitute the
hooked function signature. When the function is called, this will
be used to parse the arguments from the stack. Overrides the
C{paramCount} argument.
@rtype: bool
@return: C{True} if the breakpoint was set immediately, or C{False} if
it was deferred.
"""
try:
aProcess = self.system.get_process(pid)
except KeyError:
aProcess = Process(pid)
arch = aProcess.get_arch()
hookObj = Hook(preCB, postCB, paramCount, signature, arch)
bp = self.stalk_at(pid, address, hookObj)
return bp is not None
def dont_hook_function(self, pid, address):
"""
Removes a function hook set by L{hook_function}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
"""
self.dont_break_at(pid, address)
# alias
unhook_function = dont_hook_function
def dont_stalk_function(self, pid, address):
"""
Removes a function hook set by L{stalk_function}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
to be resolved.
"""
self.dont_stalk_at(pid, address)
#------------------------------------------------------------------------------
# Variable watches
def __set_variable_watch(self, tid, address, size, action):
"""
Used by L{watch_variable} and L{stalk_variable}.
@type tid: int
@param tid: Thread global ID.
@type address: int
@param address: Memory address of variable to watch.
@type size: int
@param size: Size of variable to watch. The only supported sizes are:
byte (1), word (2), dword (4) and qword (8).
@type action: function
@param action: (Optional) Action callback function.
See L{define_hardware_breakpoint} for more details.
@rtype: L{HardwareBreakpoint}
@return: Hardware breakpoint at the requested address.
"""
# TODO
# We should merge the breakpoints instead of overwriting them.
# We'll have the same problem as watch_buffer and we'll need to change
# the API again.
if size == 1:
sizeFlag = self.BP_WATCH_BYTE
elif size == 2:
sizeFlag = self.BP_WATCH_WORD
elif size == 4:
sizeFlag = self.BP_WATCH_DWORD
elif size == 8:
sizeFlag = self.BP_WATCH_QWORD
else:
raise ValueError("Bad size for variable watch: %r" % size)
if self.has_hardware_breakpoint(tid, address):
warnings.warn(
"Hardware breakpoint in thread %d at address %s was overwritten!" \
% (tid, HexDump.address(address,
self.system.get_thread(tid).get_bits())),
BreakpointWarning)
bp = self.get_hardware_breakpoint(tid, address)
if bp.get_trigger() != self.BP_BREAK_ON_ACCESS or \
bp.get_watch() != sizeFlag:
self.erase_hardware_breakpoint(tid, address)
self.define_hardware_breakpoint(tid, address,
self.BP_BREAK_ON_ACCESS, sizeFlag, True, action)
bp = self.get_hardware_breakpoint(tid, address)
else:
self.define_hardware_breakpoint(tid, address,
self.BP_BREAK_ON_ACCESS, sizeFlag, True, action)
bp = self.get_hardware_breakpoint(tid, address)
return bp
def __clear_variable_watch(self, tid, address):
"""
Used by L{dont_watch_variable} and L{dont_stalk_variable}.
@type tid: int
@param tid: Thread global ID.
@type address: int
@param address: Memory address of variable to stop watching.
"""
if self.has_hardware_breakpoint(tid, address):
self.erase_hardware_breakpoint(tid, address)
def watch_variable(self, tid, address, size, action = None):
"""
Sets a hardware breakpoint at the given thread, address and size.
@see: L{dont_watch_variable}
@type tid: int
@param tid: Thread global ID.
@type address: int
@param address: Memory address of variable to watch.
@type size: int
@param size: Size of variable to watch. The only supported sizes are:
byte (1), word (2), dword (4) and qword (8).
@type action: function
@param action: (Optional) Action callback function.
See L{define_hardware_breakpoint} for more details.
"""
bp = self.__set_variable_watch(tid, address, size, action)
if not bp.is_enabled():
self.enable_hardware_breakpoint(tid, address)
def stalk_variable(self, tid, address, size, action = None):
"""
Sets a one-shot hardware breakpoint at the given thread,
address and size.
@see: L{dont_watch_variable}
@type tid: int
@param tid: Thread global ID.
@type address: int
@param address: Memory address of variable to watch.
@type size: int
@param size: Size of variable to watch. The only supported sizes are:
byte (1), word (2), dword (4) and qword (8).
@type action: function
@param action: (Optional) Action callback function.
See L{define_hardware_breakpoint} for more details.
"""
bp = self.__set_variable_watch(tid, address, size, action)
if not bp.is_one_shot():
self.enable_one_shot_hardware_breakpoint(tid, address)
def dont_watch_variable(self, tid, address):
"""
Clears a hardware breakpoint set by L{watch_variable}.
@type tid: int
@param tid: Thread global ID.
@type address: int
@param address: Memory address of variable to stop watching.
"""
self.__clear_variable_watch(tid, address)
def dont_stalk_variable(self, tid, address):
"""
Clears a hardware breakpoint set by L{stalk_variable}.
@type tid: int
@param tid: Thread global ID.
@type address: int
@param address: Memory address of variable to stop watching.
"""
self.__clear_variable_watch(tid, address)
#------------------------------------------------------------------------------
# Buffer watches
def __set_buffer_watch(self, pid, address, size, action, bOneShot):
"""
Used by L{watch_buffer} and L{stalk_buffer}.
@type pid: int
@param pid: Process global ID.
@type address: int
@param address: Memory address of buffer to watch.
@type size: int
@param size: Size in bytes of buffer to watch.
@type action: function
@param action: (Optional) Action callback function.
See L{define_page_breakpoint} for more details.
@type bOneShot: bool
@param bOneShot:
C{True} to set a one-shot breakpoint,
C{False} to set a normal breakpoint.
"""
# Check the size isn't zero or negative.
if size < 1:
raise ValueError("Bad size for buffer watch: %r" % size)
# Create the buffer watch identifier.
bw = BufferWatch(pid, address, address + size, action, bOneShot)
# Get the base address and size in pages required for this buffer.
base = MemoryAddresses.align_address_to_page_start(address)
limit = MemoryAddresses.align_address_to_page_end(address + size)
pages = MemoryAddresses.get_buffer_size_in_pages(address, size)
try:
# For each page:
# + if a page breakpoint exists reuse it
# + if it doesn't exist define it
bset = set() # all breakpoints used
nset = set() # newly defined breakpoints
cset = set() # condition objects
page_addr = base
pageSize = MemoryAddresses.pageSize
while page_addr < limit:
# If a breakpoints exists, reuse it.
if self.has_page_breakpoint(pid, page_addr):
bp = self.get_page_breakpoint(pid, page_addr)
if bp not in bset:
condition = bp.get_condition()
if not condition in cset:
if not isinstance(condition,_BufferWatchCondition):
# this shouldn't happen unless you tinkered
# with it or defined your own page breakpoints
# manually.
msg = "Can't watch buffer at page %s"
msg = msg % HexDump.address(page_addr)
raise RuntimeError(msg)
cset.add(condition)
bset.add(bp)
# If it doesn't, define it.
else:
condition = _BufferWatchCondition()
bp = self.define_page_breakpoint(pid, page_addr, 1,
condition = condition)
bset.add(bp)
nset.add(bp)
cset.add(condition)
# Next page.
page_addr = page_addr + pageSize
# For each breakpoint, enable it if needed.
aProcess = self.system.get_process(pid)
for bp in bset:
if bp.is_disabled() or bp.is_one_shot():
bp.enable(aProcess, None)
# On error...
except:
# Erase the newly defined breakpoints.
for bp in nset:
try:
self.erase_page_breakpoint(pid, bp.get_address())
except:
pass
# Pass the exception to the caller
raise
# For each condition object, add the new buffer.
for condition in cset:
condition.add(bw)
def __clear_buffer_watch_old_method(self, pid, address, size):
"""
Used by L{dont_watch_buffer} and L{dont_stalk_buffer}.
@warn: Deprecated since WinAppDbg 1.5.
@type pid: int
@param pid: Process global ID.
@type address: int
@param address: Memory address of buffer to stop watching.
@type size: int
@param size: Size in bytes of buffer to stop watching.
"""
warnings.warn("Deprecated since WinAppDbg 1.5", DeprecationWarning)
# Check the size isn't zero or negative.
if size < 1:
raise ValueError("Bad size for buffer watch: %r" % size)
# Get the base address and size in pages required for this buffer.
base = MemoryAddresses.align_address_to_page_start(address)
limit = MemoryAddresses.align_address_to_page_end(address + size)
pages = MemoryAddresses.get_buffer_size_in_pages(address, size)
# For each page, get the breakpoint and it's condition object.
# For each condition, remove the buffer.
# For each breakpoint, if no buffers are on watch, erase it.
cset = set() # condition objects
page_addr = base
pageSize = MemoryAddresses.pageSize
while page_addr < limit:
if self.has_page_breakpoint(pid, page_addr):
bp = self.get_page_breakpoint(pid, page_addr)
condition = bp.get_condition()
if condition not in cset:
if not isinstance(condition, _BufferWatchCondition):
# this shouldn't happen unless you tinkered with it
# or defined your own page breakpoints manually.
continue
cset.add(condition)
condition.remove_last_match(address, size)
if condition.count() == 0:
try:
self.erase_page_breakpoint(pid, bp.get_address())
except WindowsError:
pass
page_addr = page_addr + pageSize
def __clear_buffer_watch(self, bw):
"""
Used by L{dont_watch_buffer} and L{dont_stalk_buffer}.
@type bw: L{BufferWatch}
@param bw: Buffer watch identifier.
"""
# Get the PID and the start and end addresses of the buffer.
pid = bw.pid
start = bw.start
end = bw.end
# Get the base address and size in pages required for the buffer.
base = MemoryAddresses.align_address_to_page_start(start)
limit = MemoryAddresses.align_address_to_page_end(end)
pages = MemoryAddresses.get_buffer_size_in_pages(start, end - start)
# For each page, get the breakpoint and it's condition object.
# For each condition, remove the buffer.
# For each breakpoint, if no buffers are on watch, erase it.
cset = set() # condition objects
page_addr = base
pageSize = MemoryAddresses.pageSize
while page_addr < limit:
if self.has_page_breakpoint(pid, page_addr):
bp = self.get_page_breakpoint(pid, page_addr)
condition = bp.get_condition()
if condition not in cset:
if not isinstance(condition, _BufferWatchCondition):
# this shouldn't happen unless you tinkered with it
# or defined your own page breakpoints manually.
continue
cset.add(condition)
condition.remove(bw)
if condition.count() == 0:
try:
self.erase_page_breakpoint(pid, bp.get_address())
except WindowsError:
msg = "Cannot remove page breakpoint at address %s"
msg = msg % HexDump.address( bp.get_address() )
warnings.warn(msg, BreakpointWarning)
page_addr = page_addr + pageSize
def watch_buffer(self, pid, address, size, action = None):
"""
Sets a page breakpoint and notifies when the given buffer is accessed.
@see: L{dont_watch_variable}
@type pid: int
@param pid: Process global ID.
@type address: int
@param address: Memory address of buffer to watch.
@type size: int
@param size: Size in bytes of buffer to watch.
@type action: function
@param action: (Optional) Action callback function.
See L{define_page_breakpoint} for more details.
@rtype: L{BufferWatch}
@return: Buffer watch identifier.
"""
self.__set_buffer_watch(pid, address, size, action, False)
def stalk_buffer(self, pid, address, size, action = None):
"""
Sets a one-shot page breakpoint and notifies
when the given buffer is accessed.
@see: L{dont_watch_variable}
@type pid: int
@param pid: Process global ID.
@type address: int
@param address: Memory address of buffer to watch.
@type size: int
@param size: Size in bytes of buffer to watch.
@type action: function
@param action: (Optional) Action callback function.
See L{define_page_breakpoint} for more details.
@rtype: L{BufferWatch}
@return: Buffer watch identifier.
"""
self.__set_buffer_watch(pid, address, size, action, True)
    def dont_watch_buffer(self, bw, *argv, **argd):
        """
        Clears a page breakpoint set by L{watch_buffer}.
        @type  bw: L{BufferWatch}
        @param bw:
            Buffer watch identifier returned by L{watch_buffer}.
        """
        # The sane way to do it.
        if not (argv or argd):
            self.__clear_buffer_watch(bw)
        # Backwards compatibility with WinAppDbg 1.4:
        # accept (pid, address, size) positionally and/or as keywords,
        # in which case "bw" actually holds the first positional value.
        else:
            argv = list(argv)
            argv.insert(0, bw)
            if 'pid' in argd:
                argv.insert(0, argd.pop('pid'))
            if 'address' in argd:
                argv.insert(1, argd.pop('address'))
            if 'size' in argd:
                argv.insert(2, argd.pop('size'))
            # Any leftover keyword means the caller got the API wrong.
            if argd:
                raise TypeError("Wrong arguments for dont_watch_buffer()")
            try:
                pid, address, size = argv
            except ValueError:
                raise TypeError("Wrong arguments for dont_watch_buffer()")
            self.__clear_buffer_watch_old_method(pid, address, size)
def dont_stalk_buffer(self, bw, *argv, **argd):
"""
Clears a page breakpoint set by L{stalk_buffer}.
@type bw: L{BufferWatch}
@param bw:
Buffer watch identifier returned by L{stalk_buffer}.
"""
self.dont_watch_buffer(bw, *argv, **argd)
#------------------------------------------------------------------------------
# Tracing
# XXX TODO
# Add "action" parameter to tracing mode
def __start_tracing(self, thread):
"""
@type thread: L{Thread}
@param thread: Thread to start tracing.
"""
tid = thread.get_tid()
if not tid in self.__tracing:
thread.set_tf()
self.__tracing.add(tid)
def __stop_tracing(self, thread):
"""
@type thread: L{Thread}
@param thread: Thread to stop tracing.
"""
tid = thread.get_tid()
if tid in self.__tracing:
self.__tracing.remove(tid)
if thread.is_alive():
thread.clear_tf()
    def is_tracing(self, tid):
        """
        @type  tid: int
        @param tid: Thread global ID.
        @rtype:  bool
        @return: C{True} if the thread is being traced, C{False} otherwise.
        """
        # Membership in the internal set of traced thread IDs.
        return tid in self.__tracing
def get_traced_tids(self):
"""
Retrieves the list of global IDs of all threads being traced.
@rtype: list( int... )
@return: List of thread global IDs.
"""
tids = list(self.__tracing)
tids.sort()
return tids
def start_tracing(self, tid):
"""
Start tracing mode in the given thread.
@type tid: int
@param tid: Global ID of thread to start tracing.
"""
if not self.is_tracing(tid):
thread = self.system.get_thread(tid)
self.__start_tracing(thread)
def stop_tracing(self, tid):
"""
Stop tracing mode in the given thread.
@type tid: int
@param tid: Global ID of thread to stop tracing.
"""
if self.is_tracing(tid):
thread = self.system.get_thread(tid)
self.__stop_tracing(thread)
def start_tracing_process(self, pid):
"""
Start tracing mode for all threads in the given process.
@type pid: int
@param pid: Global ID of process to start tracing.
"""
for thread in self.system.get_process(pid).iter_threads():
self.__start_tracing(thread)
def stop_tracing_process(self, pid):
"""
Stop tracing mode for all threads in the given process.
@type pid: int
@param pid: Global ID of process to stop tracing.
"""
for thread in self.system.get_process(pid).iter_threads():
self.__stop_tracing(thread)
def start_tracing_all(self):
"""
Start tracing mode for all threads in all debugees.
"""
for pid in self.get_debugee_pids():
self.start_tracing_process(pid)
def stop_tracing_all(self):
"""
Stop tracing mode for all threads in all debugees.
"""
for pid in self.get_debugee_pids():
self.stop_tracing_process(pid)
#------------------------------------------------------------------------------
# Break on LastError values (only available since Windows Server 2003)
def break_on_error(self, pid, errorCode):
"""
Sets or clears the system breakpoint for a given Win32 error code.
Use L{Process.is_system_defined_breakpoint} to tell if a breakpoint
exception was caused by a system breakpoint or by the application
itself (for example because of a failed assertion in the code).
@note: This functionality is only available since Windows Server 2003.
In 2003 it only breaks on error values set externally to the
kernel32.dll library, but this was fixed in Windows Vista.
@warn: This method will fail if the debug symbols for ntdll (kernel32
in Windows 2003) are not present. For more information see:
L{System.fix_symbol_store_path}.
@see: U{http://www.nynaeve.net/?p=147}
@type pid: int
@param pid: Process ID.
@type errorCode: int
@param errorCode: Win32 error code to stop on. Set to C{0} or
C{ERROR_SUCCESS} to clear the breakpoint instead.
@raise NotImplementedError:
The functionality is not supported in this system.
@raise WindowsError:
An error occurred while processing this request.
"""
aProcess = self.system.get_process(pid)
address = aProcess.get_break_on_error_ptr()
if not address:
raise NotImplementedError(
"The functionality is not supported in this system.")
aProcess.write_dword(address, errorCode)
def dont_break_on_error(self, pid):
"""
Alias to L{break_on_error}C{(pid, ERROR_SUCCESS)}.
@type pid: int
@param pid: Process ID.
@raise NotImplementedError:
The functionality is not supported in this system.
@raise WindowsError:
An error occurred while processing this request.
"""
self.break_on_error(pid, 0)
#------------------------------------------------------------------------------
# Simplified symbol resolving, useful for hooking functions
def resolve_exported_function(self, pid, modName, procName):
"""
Resolves the exported DLL function for the given process.
@type pid: int
@param pid: Process global ID.
@type modName: str
@param modName: Name of the module that exports the function.
@type procName: str
@param procName: Name of the exported function to resolve.
@rtype: int, None
@return: On success, the address of the exported function.
On failure, returns C{None}.
"""
aProcess = self.system.get_process(pid)
aModule = aProcess.get_module_by_name(modName)
if not aModule:
aProcess.scan_modules()
aModule = aProcess.get_module_by_name(modName)
if aModule:
address = aModule.resolve(procName)
return address
return None
def resolve_label(self, pid, label):
"""
Resolves a label for the given process.
@type pid: int
@param pid: Process global ID.
@type label: str
@param label: Label to resolve.
@rtype: int
@return: Memory address pointed to by the label.
@raise ValueError: The label is malformed or impossible to resolve.
@raise RuntimeError: Cannot resolve the module or function.
"""
return self.get_process(pid).resolve_label(label)
|
repotvsupertuga/repo | refs/heads/master | script.module.httplib2/lib/httplib2/__init__.py | 246 | from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.8"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
try:
import socks
except (ImportError, AttributeError):
socks = None
# Build the appropriate socket wrapper for ssl.
# If the "ssl" module (Python 2.6+) is available, use it and support
# optional certificate validation; otherwise fall back to the legacy
# socket.ssl() API, which cannot validate certificates at all.
try:
    import ssl # python 2.6
    ssl_SSLError = ssl.SSLError
    def _ssl_wrap_socket(sock, key_file, cert_file,
                         disable_validation, ca_certs):
        # Validation is on by default; CERT_NONE disables it explicitly.
        if disable_validation:
            cert_reqs = ssl.CERT_NONE
        else:
            cert_reqs = ssl.CERT_REQUIRED
        # We should be specifying SSL version 3 or TLS v1, but the ssl module
        # doesn't expose the necessary knobs. So we need to go with the default
        # of SSLv23.
        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
                cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
    # No usable ssl module: validation is impossible, so refuse to proceed
    # unless the caller explicitly disabled it.
    ssl_SSLError = None
    def _ssl_wrap_socket(sock, key_file, cert_file,
                         disable_validation, ca_certs):
        if not disable_validation:
            raise CertificateValidationUnsupported(
                    "SSL certificate validation is not supported without "
                    "the ssl module installed. To avoid this error, install "
                    "the ssl module, or explicity disable validation.")
        ssl_sock = socket.ssl(sock, key_file, cert_file)
        return httplib.FakeSocket(sock, ssl_sock)
# iri2uri converts Internationalized Resource Identifiers to ASCII URIs;
# the helper module is only importable (and only needed) on Python 2.3+.
if sys.version_info >= (2,3):
    from iri2uri import iri2uri
else:
    def iri2uri(uri):
        # Identity fallback: pass the URI through untouched.
        return uri
def has_timeout(timeout):
    """Return True when *timeout* is a real, caller-supplied timeout.

    ``None`` never counts as a timeout.  On Python 2.6+ the sentinel
    ``socket._GLOBAL_DEFAULT_TIMEOUT`` (meaning "use the global default")
    is likewise not treated as an explicit timeout.
    """
    if timeout is None:
        return False
    sentinel = getattr(socket, '_GLOBAL_DEFAULT_TIMEOUT', None)
    if sentinel is None:
        # Pre-2.6: any non-None value is an explicit timeout.
        return True
    return timeout is not sentinel
__all__ = [
'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError',
'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
# Python 2.3 support
if sys.version_info < (2,4):
    def sorted(seq):
        # Minimal stand-in for the 2.4+ builtin: sorts in place and
        # returns the (mutated) sequence.
        seq.sort()
        return seq
# Python 2.3 support
def HTTPResponse__getheaders(self):
    """Return list of (header, value) tuples."""
    if self.msg is None:
        raise httplib.ResponseNotReady()
    return self.msg.items()
if not hasattr(httplib.HTTPResponse, 'getheaders'):
    # Backport: graft getheaders() onto 2.3's HTTPResponse class.
    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
    # Base for errors that carry the offending response and body so
    # callers can recover them instead of just getting a message.
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)
# A 3xx response arrived without a Location header to follow.
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
# More redirects were followed than the configured maximum allows.
class RedirectLimit(HttpLib2ErrorWithResponse): pass
# Body claimed gzip/deflate encoding but could not be decompressed.
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
# Digest challenge used a qop/algorithm this client does not implement.
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
# HMACDigest challenge used an option this client does not implement.
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
# A response header (e.g. WWW-Authenticate) could not be parsed.
class MalformedHeader(HttpLib2Error): pass
# A relative URI was passed where an absolute one is required.
class RelativeURIError(HttpLib2Error): pass
# DNS resolution of the target host failed.
class ServerNotFoundError(HttpLib2Error): pass
# Proxy use was requested but the socks module is unavailable.
class ProxiesUnavailableError(HttpLib2Error): pass
# Certificate validation requested without the ssl module (pre-2.6).
class CertificateValidationUnsupported(HttpLib2Error): pass
# The TLS/SSL handshake itself failed.
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
    # Peer certificate did not match the requested hostname; keeps both
    # the host and the offending certificate for inspection.
    def __init__(self, desc, host, cert):
        HttpLib2Error.__init__(self, desc)
        self.host = host
        self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
# Hard cap on automatic redirect following (see the notes above).
DEFAULT_MAX_REDIRECTS = 5
try:
    # Users can optionally provide a module that tells us where the CA_CERTS
    # are located.
    import ca_certs_locater
    CA_CERTS = ca_certs_locater.get()
except ImportError:
    # Default CA certificates file bundled with httplib2.
    CA_CERTS = os.path.join(
        os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
    """Split *uri* into (scheme, authority, path, query, fragment).

    Uses the regex given in Appendix B of RFC 3986.  Components that are
    absent come back as None; the path is always a string (possibly "").
    """
    scheme, authority, path, query, fragment = URI.match(uri).group(2, 4, 5, 7, 9)
    return (scheme, authority, path, query, fragment)
def urlnorm(uri):
    """Normalize *uri* for cache-key purposes.

    Lower-cases the scheme and authority (they are case-insensitive per
    RFC 3986), defaults an empty path to "/", and rebuilds the request
    URI (path plus optional query).

    Returns:
        (scheme, authority, request_uri, defrag_uri) where defrag_uri is
        the normalized absolute URI without any fragment.

    Raises:
        RelativeURIError: if *uri* has no scheme or no authority.
    """
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
    authority = authority.lower()
    # Fix: the original lower-cased the scheme twice; once is enough.
    scheme = scheme.lower()
    if not path:
        path = "/"
    # Could do syntax based normalization of the URI before
    # computing the digest. See Section 6.2.2 of Std 66.
    request_uri = query and "?".join([path, query]) or path
    defrag_uri = scheme + "://" + authority + request_uri
    return scheme, authority, request_uri, defrag_uri
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
    """Return a filename suitable for the cache.
    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    try:
        # IDNA-encode URLs so non-ASCII hostnames become a stable ASCII
        # form before hashing and stripping (Python 2 str/unicode dance).
        if re_url_scheme.match(filename):
            if isinstance(filename,str):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename,unicode):
        # Python 2 only: hash over the UTF-8 byte form.
        filename=filename.encode('utf-8')
    filemd5 = _md5(filename).hexdigest()
    filename = re_url_scheme.sub("", filename)
    filename = re_slash.sub(",", filename)
    # limit length of filename
    if len(filename)>200:
        filename=filename[:200]
    # Append the MD5 of the full name so truncation/stripping above cannot
    # make two different URLs collide on the same cache file.
    return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()])
def _parse_cache_control(headers):
retval = {}
if headers.has_key('cache-control'):
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, usefull for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict
    per auth_scheme."""
    retval = {}
    if headers.has_key(headername):
        try:
            authenticate = headers[headername].strip()
            # Pick the parsing regex once; strict mode is opt-in above.
            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
            while authenticate:
                # Break off the scheme at the beginning of the line
                if headername == 'authentication-info':
                    # Authentication-Info carries no scheme name; it is
                    # always Digest parameters.
                    (auth_scheme, the_rest) = ('digest', authenticate)
                else:
                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
                # Now loop over all the key value pairs that come after the scheme,
                # being careful not to roll into the next scheme
                match = www_auth.search(the_rest)
                auth_params = {}
                while match:
                    if match and len(match.groups()) == 3:
                        (key, value, the_rest) = match.groups()
                        # Unescape backslash pairs inside quoted-strings.
                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
                    match = www_auth.search(the_rest)
                retval[auth_scheme.lower()] = auth_params
                authenticate = the_rest.strip()
        except ValueError:
            # e.g. a scheme with no parameters makes split(" ", 1) fail.
            raise MalformedHeader("WWW-Authenticate")
    return retval
def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    Returns one of:
        "FRESH"       -- the cached entry may be served as-is
        "STALE"       -- the cached entry must be revalidated
        "TRANSPARENT" -- bypass the cache entirely for this request

    We don't handle the following:
    1. Cache-Control: max-stale
    2. Age: headers are not used in the calculations.
    Not that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    We will never return a stale document as
    fresh as a design decision, and thus the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
    since we operate as if every server has sent 'must-revalidate'.
    Since we are private we get to ignore both 'public' and
    'private' parameters. We also ignore 'no-transform' since
    we don't do any transformations.
    The 'no-store' parameter is handled at a higher level.
    So the only Cache-Control parameters we look at are:
    no-cache
    only-if-cached
    max-age
    min-fresh
    """
    retval = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)
    # HTTP/1.0 "Pragma: no-cache" forces a transparent fetch and is
    # upgraded to an HTTP/1.1 Cache-Control request header.
    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
        retval = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif cc.has_key('no-cache'):
        retval = "TRANSPARENT"
    elif cc_response.has_key('no-cache'):
        retval = "STALE"
    elif cc.has_key('only-if-cached'):
        retval = "FRESH"
    elif response_headers.has_key('date'):
        # Freshness lifetime: response max-age beats Expires, but a
        # max-age on the request overrides both.
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if cc_response.has_key('max-age'):
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif response_headers.has_key('expires'):
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                # Unparseable Expires means "already expired".
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        if cc.has_key('max-age'):
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if cc.has_key('min-fresh'):
            # min-fresh: treat the entry as this many seconds older than
            # it really is, per RFC 2616 section 14.9.3.
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = "FRESH"
    return retval
def _decompressContent(response, new_content):
    """Decompress *new_content* per the response's Content-Encoding.

    Handles 'gzip' and 'deflate'; any other encoding passes through
    untouched.  On success content-length is fixed up and the encoding
    header is renamed to '-content-encoding' so it is recorded but inert.

    Raises:
        FailedToDecompressContent: the body claimed an encoding but the
        decompressor choked on it.
    """
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ['gzip', 'deflate']:
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
            if encoding == 'deflate':
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way the won't interfere.
            response['-content-encoding'] = response['content-encoding']
            del response['content-encoding']
    except IOError:
        content = ""
        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
    return content
def _updateCache(request_headers, response_headers, content, cache, cachekey):
    """Store (or purge) a response in *cache* under *cachekey*.

    A 'no-store' directive on either the request or response deletes any
    cached entry; otherwise the response headers and body are serialized
    into a single RFC-822-style text blob and written to the cache.
    """
    if cachekey:
        cc = _parse_cache_control(request_headers)
        cc_response = _parse_cache_control(response_headers)
        if cc.has_key('no-store') or cc_response.has_key('no-store'):
            cache.delete(cachekey)
        else:
            info = email.Message.Message()
            for key, value in response_headers.iteritems():
                # Transport-specific headers are meaningless when replayed
                # from the cache, so they are dropped.
                if key not in ['status','content-encoding','transfer-encoding']:
                    info[key] = value
            # Add annotations to the cache to indicate what headers
            # are variant for this request.
            vary = response_headers.get('vary', None)
            if vary:
                vary_headers = vary.lower().replace(' ', '').split(',')
                for header in vary_headers:
                    key = '-varied-%s' % header
                    try:
                        info[key] = request_headers[header]
                    except KeyError:
                        pass
            # NOTE(review): response_headers is presumably httplib2's
            # Response object (a dict subclass with a .status attribute)
            # rather than a plain dict -- confirm at the call sites.
            status = response_headers.status
            if status == 304:
                # A 304 revalidation refreshes the entry; store it as the
                # original 200 so later reads see a complete response.
                status = 200
            status_header = 'status: %d\r\n' % status
            header_str = info.as_string()
            # Normalize bare CR or LF to CRLF so the blob re-parses cleanly.
            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
            text = "".join([status_header, header_str, content])
            cache.set(cachekey, text)
def _cnonce():
    # Client nonce: MD5 of the current time plus 20 pseudo-random digits,
    # truncated to 16 hex characters.  (randrange(0, 9) yields 0-8, so the
    # digit '9' never appears in the random part -- harmless for a nonce.)
    dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
    return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
    # WSSE PasswordDigest: Base64(SHA1(nonce + created-timestamp + password)).
    return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
# For credentials we need two things, first
# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
    """Base class for one authenticated (host, path) scope.

    Subclasses add the scheme-specific Authorization header in request()
    and may react to server responses in response().
    """
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        self.path = path
        self.host = host
        self.credentials = credentials
        self.http = http
    def depth(self, request_uri):
        # How many path levels below this auth scope the request falls;
        # used to pick the closest matching scope.
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return request_uri[len(self.path):].count("/")
    def inscope(self, host, request_uri):
        # XXX Should we normalize the request_uri?
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return (host == self.host) and path.startswith(self.path)
    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header. Over-ride this in sub-classes."""
        pass
    def response(self, response, content):
        """Gives us a chance to update with new nonces
        or such returned from the last authorized response.
        Over-rise this in sub-classes if necessary.
        Return TRUE is the request is to be retried, for
        example Digest may return stale=true.
        """
        return False
class BasicAuthentication(Authentication):
    """RFC 2617 Basic auth: base64-encoded credentials on every request."""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        # credentials is a (username, password) pair.
        headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
class DigestAuthentication(Authentication):
    """Only do qop='auth' and MD5, since that
    is all Apache currently implements"""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['digest']
        qop = self.challenge.get('qop', 'auth')
        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
        if self.challenge['qop'] is None:
            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
        if self.challenge['algorithm'] != 'MD5':
            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        # A1 = username:realm:password, per RFC 2617 section 3.2.2.2.
        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
        # Nonce-use count; incremented on every request with this nonce.
        self.challenge['nc'] = 1
    def request(self, method, request_uri, headers, content, cnonce = None):
        """Modify the request headers"""
        # H/KD notation from RFC 2617: H is the hash, KD the keyed digest.
        H = lambda x: _md5(x).hexdigest()
        KD = lambda s, d: H("%s:%s" % (s, d))
        A2 = "".join([method, ":", request_uri])
        # cnonce parameter exists so tests can pin a deterministic value.
        self.challenge['cnonce'] = cnonce or _cnonce()
        request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
                self.challenge['nonce'],
                '%08x' % self.challenge['nc'],
                self.challenge['cnonce'],
                self.challenge['qop'], H(A2)))
        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['nonce'],
                request_uri,
                self.challenge['algorithm'],
                request_digest,
                self.challenge['qop'],
                self.challenge['nc'],
                self.challenge['cnonce'])
        if self.challenge.get('opaque'):
            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
        self.challenge['nc'] += 1
    def response(self, response, content):
        if not response.has_key('authentication-info'):
            # No Authentication-Info: check for a stale nonce, in which
            # case the request may be retried with the fresh one.
            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
            if 'true' == challenge.get('stale'):
                self.challenge['nonce'] = challenge['nonce']
                self.challenge['nc'] = 1
                return True
        else:
            # The server may proactively hand us the next nonce to use.
            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
            if updated_challenge.has_key('nextnonce'):
                self.challenge['nonce'] = updated_challenge['nextnonce']
                self.challenge['nc'] = 1
        return False
class HmacDigestAuthentication(Authentication):
    """Adapted from Robert Sayre's code and DigestAuthentication above."""
    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['hmacdigest']
        # TODO: self.challenge['domain']
        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
            self.challenge['reason'] = 'unauthorized'
        self.challenge['salt'] = self.challenge.get('salt', '')
        if not self.challenge.get('snonce'):
            raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
        if self.challenge['algorithm'] == 'HMAC-MD5':
            self.hashmod = _md5
        else:
            self.hashmod = _sha
        if self.challenge['pw-algorithm'] == 'MD5':
            self.pwhashmod = _md5
        else:
            self.pwhashmod = _sha
        # Bug fix: _md5/_sha are already hash *constructors* (hashlib.md5/
        # hashlib.sha1, or md5.new/sha.new pre-2.5).  Neither has a `.new`
        # attribute, so the original `self.pwhashmod.new(...)` raised
        # AttributeError in every environment; call them directly instead.
        self.key = "".join([self.credentials[0], ":",
                    self.pwhashmod("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
                    ":", self.challenge['realm']])
        self.key = self.pwhashmod(self.key).hexdigest().lower()
    def request(self, method, request_uri, headers, content):
        """Modify the request headers"""
        # The signature covers the end-to-end headers in key order.
        keys = _get_end2end_headers(headers)
        keylist = "".join(["%s " % k for k in keys])
        headers_val = "".join([headers[k] for k in keys])
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
        cnonce = _cnonce()
        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
        request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['snonce'],
                cnonce,
                request_uri,
                created,
                request_digest,
                keylist)
    def response(self, response, content):
        # Retry when the server reports an integrity failure or stale nonce.
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
        if challenge.get('reason') in ['integrity', 'stale']:
            return True
        return False
class WsseAuthentication(Authentication):
    """This is thinly tested and should not be relied upon.
    At this time there isn't any third party server to test against.
    Blogger and TypePad implemented this algorithm at one point
    but Blogger has since switched to Basic over HTTPS and
    TypePad has implemented it wrong, by never issuing a 401
    challenge but instead requiring your client to telepathically know that
    their endpoint is expecting WSSE profile="UsernameToken"."""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'WSSE profile="UsernameToken"'
        # The actual proof lives in X-WSSE: a SHA1 digest of
        # nonce + creation-timestamp + password, base64 encoded.
        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        cnonce = _cnonce()
        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
                self.credentials[0],
                password_digest,
                cnonce,
                iso_now)
class GoogleLoginAuthentication(Authentication):
    """Google ClientLogin: the constructor trades username/password for an
    Auth token over HTTPS; request() replays that token on every call.
    """
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        from urllib import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
        # For the rest we guess based on the URI
        if service == 'xapi' and request_uri.find("calendar") > 0:
            service = "cl"
        # No point in guessing Base or Spreadsheet
        #elif request_uri.find("spreadsheets") > 0:
        #   service = "wise"
        # Performs a network round-trip at construction time.
        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
        lines = content.split('\n')
        d = dict([tuple(line.split("=", 1)) for line in lines if line])
        if resp.status == 403:
            # Login refused: fall back to an empty token rather than raising.
            self.Auth = ""
        else:
            self.Auth = d['Auth']
    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
# Registry mapping a scheme name (lower-cased, as it appears in a
# WWW-Authenticate challenge) to the handler class defined above.
AUTH_SCHEME_CLASSES = {
    "basic": BasicAuthentication,
    "wsse": WsseAuthentication,
    "digest": DigestAuthentication,
    "hmacdigest": HmacDigestAuthentication,
    "googlelogin": GoogleLoginAuthentication
}
# Preference order when a server offers several schemes: strongest first.
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
    """Uses a local directory as a store for cached files.
    Not really safe to use if multiple threads or processes are going to
    be running on the same cache.
    """
    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
        # cache: directory path (created on demand);
        # safe: maps a cache key to a filesystem-safe filename.
        self.cache = cache
        self.safe = safe
        if not os.path.exists(cache):
            os.makedirs(self.cache)
    def get(self, key):
        """Return the cached bytes for *key*, or None on a miss."""
        retval = None
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        try:
            f = file(cacheFullPath, "rb")
            retval = f.read()
            f.close()
        except IOError:
            # Missing/unreadable entries are simply cache misses.
            pass
        return retval
    def set(self, key, value):
        """Store *value* (a byte string) under *key*, replacing any entry."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        f = file(cacheFullPath, "wb")
        f.write(value)
        f.close()
    def delete(self, key):
        """Remove *key*'s entry if present; silently ignore a miss."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        if os.path.exists(cacheFullPath):
            os.remove(cacheFullPath)
class Credentials(object):
    """A pool of (domain, name, password) triples.

    Entries added with an empty domain apply to every host; otherwise
    they are yielded only for an exactly matching domain.
    """
    def __init__(self):
        self.credentials = []
    def add(self, name, password, domain=""):
        entry = (domain.lower(), name, password)
        self.credentials.append(entry)
    def clear(self):
        self.credentials = []
    def iter(self, domain):
        """Yield (name, password) pairs applicable to *domain*."""
        for entry in self.credentials:
            (cdomain, name, password) = entry
            wildcard = (cdomain == "")
            if wildcard or domain == cdomain:
                yield (name, password)
class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert."""
    pass
class AllHosts(object):
    # Sentinel: assign to ProxyInfo.bypass_hosts to bypass the proxy for
    # every host (used for no_proxy='*').
    pass
class ProxyInfo(object):
    """Collect information required to use a proxy."""
    # Hosts for which the proxy should be skipped; the AllHosts sentinel
    # means "bypass for everything".
    bypass_hosts = ()
    def __init__(self, proxy_type, proxy_host, proxy_port,
                 proxy_rdns=None, proxy_user=None, proxy_pass=None):
        """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
        constants. For example:

        p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
            proxy_host='localhost', proxy_port=8000)
        """
        self.proxy_type = proxy_type
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_rdns = proxy_rdns
        self.proxy_user = proxy_user
        self.proxy_pass = proxy_pass
    def astuple(self):
        """Return the settings in the order the socks wrapper expects."""
        return (self.proxy_type, self.proxy_host, self.proxy_port,
                self.proxy_rdns, self.proxy_user, self.proxy_pass)
    def isgood(self):
        """True when both a host and a port are configured."""
        return (self.proxy_host != None) and (self.proxy_port != None)
    def applies_to(self, hostname):
        """True when requests to *hostname* should go through the proxy."""
        return not self.bypass_host(hostname)
    def bypass_host(self, hostname):
        """Has this host been excluded from the proxy config"""
        if self.bypass_hosts is AllHosts:
            return True
        # Suffix match, mirroring no_proxy semantics.
        matches = [d for d in self.bypass_hosts if hostname.endswith(d)]
        return len(matches) > 0
def proxy_info_from_environment(method='http'):
    """
    Read proxy info from the environment variables.

    method: 'http' or 'https' -- selects which <method>_proxy variable
    to consult.  Returns a ProxyInfo (with bypass_hosts populated from
    no_proxy/NO_PROXY), or None when no proxy is configured.
    """
    if method not in ['http', 'https']:
        return
    env_var = method + '_proxy'
    # Lower-case variable wins; fall back to the upper-case spelling.
    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
    if not url:
        return
    pi = proxy_info_from_url(url, method)
    no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
    bypass_hosts = []
    if no_proxy:
        bypass_hosts = no_proxy.split(',')
    # special case, no_proxy=* means all hosts bypassed
    if no_proxy == '*':
        bypass_hosts = AllHosts
    pi.bypass_hosts = bypass_hosts
    return pi
def proxy_info_from_url(url, method='http'):
    """
    Construct a ProxyInfo from a URL (such as http_proxy env var)

    Accepts optional "user:password@" credentials and an optional port;
    the port defaults to 80 for http and 443 for https.
    """
    url = urlparse.urlparse(url)
    username = None
    password = None
    port = None
    if '@' in url[1]:
        # Split credentials from host:port (netloc is url[1]).
        ident, host_port = url[1].split('@', 1)
        if ':' in ident:
            username, password = ident.split(':', 1)
        else:
            # A bare "secret@host" is treated as a password, no username.
            password = ident
    else:
        host_port = url[1]
    if ':' in host_port:
        host, port = host_port.split(':', 1)
    else:
        host = host_port
    if port:
        port = int(port)
    else:
        port = dict(https=443, http=80)[method]
    proxy_type = 3 # socks.PROXY_TYPE_HTTP
    return ProxyInfo(
        proxy_type = proxy_type,
        proxy_host = host,
        proxy_port = port,
        proxy_user = username or None,
        proxy_pass = password or None,
    )
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
    """
    HTTPConnection subclass that supports timeouts

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """
    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.proxy_info = proxy_info
    def connect(self):
        """Connect to the host and port specified in __init__."""
        # Mostly verbatim from httplib.py.
        if self.proxy_info and socks is None:
            raise ProxiesUnavailableError(
                'Proxy support missing but proxy use was requested!')
        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
        else:
            use_proxy = False
        if use_proxy and proxy_rdns:
            # Remote DNS: resolve only the proxy's address locally and let
            # the proxy resolve the target host.
            host = proxy_host
            port = proxy_port
        else:
            host = self.host
            port = self.port
        # Try each resolved address until one of them connects.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                if use_proxy:
                    self.sock = socks.socksocket(af, socktype, proto)
                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    self.sock = socket.socket(af, socktype, proto)
                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                # Different from httplib: support timeouts.
                if has_timeout(self.timeout):
                    self.sock.settimeout(self.timeout)
                # End of difference from httplib.
                if self.debuglevel > 0:
                    print "connect: (%s, %s) ************" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                self.sock.connect((self.host, self.port) + sa[2:])
            except socket.error, msg:
                if self.debuglevel > 0:
                    print "connect fail: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                # This address failed; close and try the next candidate.
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            # Re-raise the last connection error (or the getaddrinfo note).
            raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        # key_file/cert_file: optional client-side TLS credentials.
        # ca_certs: CA bundle path; defaults to the module-level CA_CERTS.
        # disable_ssl_certificate_validation: skip peer cert checks.
        httplib.HTTPSConnection.__init__(self, host, port=port,
                                         key_file=key_file,
                                         cert_file=cert_file, strict=strict)
        self.timeout = timeout
        self.proxy_info = proxy_info
        if ca_certs is None:
            ca_certs = CA_CERTS
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation
# The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at
# http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
# under the following license:
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def _GetValidHostsForCert(self, cert):
"""Returns a list of valid host globs for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
Returns:
list: A list of valid host globs.
"""
if 'subjectAltName' in cert:
return [x[1] for x in cert['subjectAltName']
if x[0].lower() == 'dns']
else:
return [x[0][1] for x in cert['subject']
if x[0][0].lower() == 'commonname']
def _ValidateCertificateHostname(self, cert, hostname):
"""Validates that a given hostname is valid for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
hostname: The hostname to test.
Returns:
bool: Whether or not the hostname is valid for this certificate.
"""
hosts = self._GetValidHostsForCert(cert)
for host in hosts:
host_re = host.replace('.', '\.').replace('*', '[^.]*')
if re.search('^%s$' % (host_re,), hostname, re.I):
return True
return False
def connect(self):
    "Connect to a host on a given (SSL) port, optionally through a proxy."
    # Default error used if getaddrinfo yields no usable address at all.
    msg = "getaddrinfo returns an empty list"
    if self.proxy_info and self.proxy_info.isgood():
        use_proxy = True
        proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
    else:
        use_proxy = False
    # When the proxy does remote DNS (rdns), resolve/connect to the proxy
    # itself rather than the target host.
    if use_proxy and proxy_rdns:
        host = proxy_host
        port = proxy_port
    else:
        host = self.host
        port = self.port

    address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
    # Try each resolved address in turn until one connects.
    for family, socktype, proto, canonname, sockaddr in address_info:
        try:
            if use_proxy:
                sock = socks.socksocket(family, socktype, proto)
                sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
            else:
                sock = socket.socket(family, socktype, proto)
                sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

            if has_timeout(self.timeout):
                sock.settimeout(self.timeout)
            sock.connect((self.host, self.port))
            self.sock =_ssl_wrap_socket(
                sock, self.key_file, self.cert_file,
                self.disable_ssl_certificate_validation, self.ca_certs)
            if self.debuglevel > 0:
                print "connect: (%s, %s)" % (self.host, self.port)
                if use_proxy:
                    print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
            if not self.disable_ssl_certificate_validation:
                cert = self.sock.getpeercert()
                # NOTE(review): split(':', 0) performs *zero* splits, so
                # any ':port' suffix is never actually stripped here — it
                # looks like split(':')[0] was intended, but changing it
                # would also truncate IPv6 literal hosts; confirm upstream
                # before touching.
                hostname = self.host.split(':', 0)[0]
                if not self._ValidateCertificateHostname(cert, hostname):
                    raise CertificateHostnameMismatch(
                        'Server presented certificate that does not match '
                        'host %s: %s' % (hostname, cert), hostname, cert)
        except ssl_SSLError, e:
            if sock:
                sock.close()
            if self.sock:
                self.sock.close()
            self.sock = None
            # Unfortunately the ssl module doesn't seem to provide any way
            # to get at more detailed error information, in particular
            # whether the error is due to certificate validation or
            # something else (such as SSL protocol mismatch).
            if e.errno == ssl.SSL_ERROR_SSL:
                raise SSLHandshakeError(e)
            else:
                raise
        except (socket.timeout, socket.gaierror):
            raise
        except socket.error, msg:
            # Connection failed for this address; fall through to the
            # next candidate from getaddrinfo.
            if self.debuglevel > 0:
                print "connect fail: (%s, %s)" % (self.host, self.port)
                if use_proxy:
                    print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
            if self.sock:
                self.sock.close()
            self.sock = None
            continue
        break
    if not self.sock:
        raise socket.error, msg
# Default mapping from URI scheme to the connection class used for it.
# Replaced below with App Engine specific classes when running there.
SCHEME_TO_CONNECTION = {
    'http': HTTPConnectionWithTimeout,
    'https': HTTPSConnectionWithTimeout
}
# Use a different connection object for Google App Engine
try:
    try:
        from google.appengine.api import apiproxy_stub_map
        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
            raise ImportError  # Bail out; we're not actually running on App Engine.
        from google.appengine.api.urlfetch import fetch
        from google.appengine.api.urlfetch import InvalidURLError
    except (ImportError, AttributeError):
        # Fall back to the internal (google3) package layout.
        from google3.apphosting.api import apiproxy_stub_map
        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
            raise ImportError  # Bail out; we're not actually running on App Engine.
        from google3.apphosting.api.urlfetch import fetch
        from google3.apphosting.api.urlfetch import InvalidURLError

    def _new_fixed_fetch(validate_certificate):
        # Build a fetch() wrapper with certificate validation baked in.
        def fixed_fetch(url, payload=None, method="GET", headers={},
                        allow_truncated=False, follow_redirects=True,
                        deadline=5):
            return fetch(url, payload=payload, method=method, headers=headers,
                         allow_truncated=allow_truncated,
                         follow_redirects=follow_redirects, deadline=deadline,
                         validate_certificate=validate_certificate)
        return fixed_fetch

    class AppEngineHttpConnection(httplib.HTTPConnection):
        """Use httplib on App Engine, but compensate for its weirdness.

        The parameters key_file, cert_file, proxy_info, ca_certs, and
        disable_ssl_certificate_validation are all dropped on the ground.
        """
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPConnection.__init__(self, host, port=port,
                                            strict=strict, timeout=timeout)

    class AppEngineHttpsConnection(httplib.HTTPSConnection):
        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPSConnection.__init__(self, host, port=port,
                                             key_file=key_file,
                                             cert_file=cert_file, strict=strict,
                                             timeout=timeout)
            self._fetch = _new_fixed_fetch(
                    not disable_ssl_certificate_validation)

    # Update the connection classes to use the Google App Engine specific ones.
    SCHEME_TO_CONNECTION = {
        'http': AppEngineHttpConnection,
        'https': AppEngineHttpsConnection
    }
except (ImportError, AttributeError):
    # Not running on App Engine; keep the default connection classes.
    pass
class Http(object):
    """An HTTP client that handles:

    - all methods
    - caching
    - ETags
    - compression,
    - HTTPS
    - Basic
    - Digest
    - WSSE

    and more.
    """
    def __init__(self, cache=None, timeout=None,
                 proxy_info=proxy_info_from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        """If 'cache' is a string then it is used as a directory name for
        a disk cache. Otherwise it must be an object that supports the
        same interface as FileCache.

        All timeouts are in seconds. If None is passed for timeout
        then Python's default timeout for sockets will be used. See
        for example the docs of socket.setdefaulttimeout():
        http://docs.python.org/library/socket.html#socket.setdefaulttimeout

        `proxy_info` may be:
        - a callable that takes the http scheme ('http' or 'https') and
          returns a ProxyInfo instance per request. By default, uses
          proxy_info_from_environment.
        - a ProxyInfo instance (static proxy config).
        - None (proxy disabled).

        ca_certs is the path of a file containing root CA certificates for SSL
        server certificate validation. By default, a CA cert file bundled with
        httplib2 is used.

        If disable_ssl_certificate_validation is true, SSL cert validation will
        not be performed.
        """
        self.proxy_info = proxy_info
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

        # Map domain name to an httplib connection
        self.connections = {}
        # The location of the cache, for now a directory
        # where cached responses are held.
        if cache and isinstance(cache, basestring):
            self.cache = FileCache(cache)
        else:
            self.cache = cache

        # Name/password
        self.credentials = Credentials()

        # Key/cert
        self.certificates = KeyCerts()

        # authorization objects
        self.authorizations = []

        # If set to False then no redirects are followed, even safe ones.
        self.follow_redirects = True

        # Which HTTP methods do we apply optimistic concurrency to, i.e.
        # which methods get an "if-match:" etag header added to them.
        self.optimistic_concurrency_methods = ["PUT", "PATCH"]

        # If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
        self.follow_all_redirects = False

        self.ignore_etag = False

        self.force_exception_to_status_code = False

        self.timeout = timeout

        # Keep Authorization: headers on a redirect.
        self.forward_authorization_headers = False
def __getstate__(self):
    """Return a picklable copy of this client's state.

    Open connections and any foreign 'request' attribute (e.g. one
    grafted on by credential objects that handle auth) are not
    picklable, so they are dropped here and rebuilt by __setstate__().
    """
    state = copy.copy(self.__dict__)
    for transient in ('request', 'connections'):
        if transient in state:
            del state[transient]
    return state
def __setstate__(self, state):
    """Restore pickled state; connections are always rebuilt lazily."""
    self.__dict__.update(state)
    self.connections = {}
def _auth_from_challenge(self, host, request_uri, headers, response, content):
    """Yield Authorization objects able to answer the server's
    WWW-Authenticate challenges, walking credentials for this host in
    order of scheme preference.
    """
    challenges = _parse_www_authenticate(response, 'www-authenticate')
    for cred in self.credentials.iter(host):
        for scheme in AUTH_SCHEME_ORDER:
            if scheme in challenges:
                yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri,
                                                  headers, response,
                                                  content, self)
def add_credentials(self, name, password, domain=""):
    """Register a username/password pair, optionally restricted to one
    domain, for use whenever a request requires authentication."""
    self.credentials.add(name, password, domain)
def add_certificate(self, key, cert, domain):
    """Register an SSL client key file and certificate file for *domain*.

    The pair is passed as key_file/cert_file when an HTTPS connection is
    opened to a matching host. (The previous docstring was copy-pasted
    from add_credentials and described HTTP authentication instead.)
    """
    self.certificates.add(key, cert, domain)
def clear_credentials(self):
    """Forget every registered name/password pair and drop any
    authorization state that was derived from them."""
    self.credentials.clear()
    self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
    # Send one request on an already-chosen connection, retrying up to
    # RETRIES times on transient failures, and return (Response, content).
    for i in range(RETRIES):
        try:
            if hasattr(conn, 'sock') and conn.sock is None:
                conn.connect()
            conn.request(method, request_uri, body, headers)
        except socket.timeout:
            raise
        except socket.gaierror:
            conn.close()
            raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
        except ssl_SSLError:
            conn.close()
            raise
        except socket.error, e:
            err = 0
            if hasattr(e, 'args'):
                err = getattr(e, 'args')[0]
            else:
                err = e.errno
            if err == errno.ECONNREFUSED: # Connection refused
                raise
            # Other socket errors fall through to getresponse() below,
            # which will retry or re-raise as appropriate.
        except httplib.HTTPException:
            # Just because the server closed the connection doesn't apparently mean
            # that the server didn't send a response.
            if hasattr(conn, 'sock') and conn.sock is None:
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            if i < RETRIES-1:
                conn.close()
                conn.connect()
                continue
        try:
            response = conn.getresponse()
        except (socket.error, httplib.HTTPException):
            if i < RETRIES-1:
                conn.close()
                conn.connect()
                continue
            else:
                conn.close()
                raise
        else:
            content = ""
            if method == "HEAD":
                # HEAD responses carry no body; close so the connection
                # isn't left half-read.
                conn.close()
            else:
                content = response.read()
            response = Response(response)
            if method != "HEAD":
                content = _decompressContent(response, content)
        break
    return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
    """Do the actual request using the connection object
    and also follow one level of redirects if necessary.

    Applies any in-scope Authorization object before sending, retries
    once on an auth challenge, and updates the cache for cacheable
    responses."""

    # Pick the most specific (deepest-scoped) authorization for this URI.
    auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
    auth = auths and sorted(auths)[0][1] or None
    if auth:
        auth.request(method, request_uri, headers, body)

    (response, content) = self._conn_request(conn, request_uri, method, body, headers)

    if auth:
        # E.g. a stale Digest nonce: re-sign and resend once.
        if auth.response(response, body):
            auth.request(method, request_uri, headers, body)
            (response, content) = self._conn_request(conn, request_uri, method, body, headers )
            response._stale_digest = 1

    if response.status == 401:
        # Try each credential/scheme combination until one is accepted.
        for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
            authorization.request(method, request_uri, headers, body)
            (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
            if response.status != 401:
                self.authorizations.append(authorization)
                authorization.response(response, body)
                break

    if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
        if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
            # Pick out the location header and basically start from the beginning
            # remembering first to strip the ETag header and decrement our 'depth'
            if redirections:
                if not response.has_key('location') and response.status != 300:
                    raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
                # Fix-up relative redirects (which violate an RFC 2616 MUST)
                if response.has_key('location'):
                    location = response['location']
                    (scheme, authority, path, query, fragment) = parse_uri(location)
                    if authority == None:
                        response['location'] = urlparse.urljoin(absolute_uri, location)
                if response.status == 301 and method in ["GET", "HEAD"]:
                    # Remember permanent redirects in the cache so future
                    # requests can skip straight to the new URI.
                    response['-x-permanent-redirect-url'] = response['location']
                    if not response.has_key('content-location'):
                        response['content-location'] = absolute_uri
                    _updateCache(headers, response, content, self.cache, cachekey)
                if headers.has_key('if-none-match'):
                    del headers['if-none-match']
                if headers.has_key('if-modified-since'):
                    del headers['if-modified-since']
                if 'authorization' in headers and not self.forward_authorization_headers:
                    del headers['authorization']
                if response.has_key('location'):
                    location = response['location']
                    old_response = copy.deepcopy(response)
                    if not old_response.has_key('content-location'):
                        old_response['content-location'] = absolute_uri
                    redirect_method = method
                    if response.status in [302, 303]:
                        # Per common practice, 302/303 redirects are
                        # re-issued as a bodyless GET.
                        redirect_method = "GET"
                        body = None
                    (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
                    response.previous = old_response
            else:
                raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
    elif response.status in [200, 203] and method in ["GET", "HEAD"]:
        # Don't cache 206's since we aren't going to handle byte range requests
        if not response.has_key('content-location'):
            response['content-location'] = absolute_uri
        _updateCache(headers, response, content, self.cache, cachekey)

    return (response, content)
def _normalize_headers(self, headers):
    """Delegate to the module-level _normalize_headers() helper."""
    return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
    """ Performs a single HTTP request.

    The 'uri' is the URI of the HTTP resource and can begin with either
    'http' or 'https'. The value of 'uri' must be an absolute URI.

    The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
    etc. There is no restriction on the methods allowed.

    The 'body' is the entity body to be sent with the request. It is a
    string object.

    Any extra headers that are to be sent with the request should be
    provided in the 'headers' dictionary.

    The maximum number of redirect to follow before raising an
    exception is 'redirections'. The default is 5.

    The return value is a tuple of (response, content), the first
    being and instance of the 'Response' class, the second being
    a string that contains the response entity body.
    """
    try:
        if headers is None:
            headers = {}
        else:
            headers = self._normalize_headers(headers)

        if not headers.has_key('user-agent'):
            headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__

        uri = iri2uri(uri)

        (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
        # Treat http://host:443/ as https.
        domain_port = authority.split(":")[0:2]
        if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
            scheme = 'https'
            authority = domain_port[0]

        proxy_info = self._get_proxy_info(scheme, authority)

        # Reuse one connection per scheme+authority pair.
        conn_key = scheme+":"+authority
        if conn_key in self.connections:
            conn = self.connections[conn_key]
        else:
            if not connection_type:
                connection_type = SCHEME_TO_CONNECTION[scheme]
            certs = list(self.certificates.iter(authority))
            if scheme == 'https':
                if certs:
                    conn = self.connections[conn_key] = connection_type(
                            authority, key_file=certs[0][0],
                            cert_file=certs[0][1], timeout=self.timeout,
                            proxy_info=proxy_info,
                            ca_certs=self.ca_certs,
                            disable_ssl_certificate_validation=
                                    self.disable_ssl_certificate_validation)
                else:
                    conn = self.connections[conn_key] = connection_type(
                            authority, timeout=self.timeout,
                            proxy_info=proxy_info,
                            ca_certs=self.ca_certs,
                            disable_ssl_certificate_validation=
                                    self.disable_ssl_certificate_validation)
            else:
                conn = self.connections[conn_key] = connection_type(
                        authority, timeout=self.timeout,
                        proxy_info=proxy_info)
            conn.set_debuglevel(debuglevel)

        if 'range' not in headers and 'accept-encoding' not in headers:
            headers['accept-encoding'] = 'gzip, deflate'

        info = email.Message.Message()
        cached_value = None
        if self.cache:
            cachekey = defrag_uri
            cached_value = self.cache.get(cachekey)
            if cached_value:
                # info = email.message_from_string(cached_value)
                #
                # Need to replace the line above with the kludge below
                # to fix the non-existent bug not fixed in this
                # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
                try:
                    info, content = cached_value.split('\r\n\r\n', 1)
                    feedparser = email.FeedParser.FeedParser()
                    feedparser.feed(info)
                    info = feedparser.close()
                    feedparser._parse = None
                except (IndexError, ValueError):
                    # Corrupt cache entry; discard it and fetch fresh.
                    self.cache.delete(cachekey)
                    cachekey = None
                    cached_value = None
        else:
            cachekey = None

        if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
            # http://www.w3.org/1999/04/Editing/
            headers['if-match'] = info['etag']

        if method not in ["GET", "HEAD"] and self.cache and cachekey:
            # RFC 2616 Section 13.10
            self.cache.delete(cachekey)

        # Check the vary header in the cache to see if this request
        # matches what varies in the cache.
        if method in ['GET', 'HEAD'] and 'vary' in info:
            vary = info['vary']
            vary_headers = vary.lower().replace(' ', '').split(',')
            for header in vary_headers:
                key = '-varied-%s' % header
                value = info[key]
                if headers.get(header, None) != value:
                    cached_value = None
                    break

        if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
            if info.has_key('-x-permanent-redirect-url'):
                # Should cached permanent redirects be counted in our redirection count? For now, yes.
                if redirections <= 0:
                    raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
                (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
                response.previous = Response(info)
                response.previous.fromcache = True
            else:
                # Determine our course of action:
                #   Is the cached entry fresh or stale?
                #   Has the client requested a non-cached response?
                #
                # There seems to be three possible answers:
                # 1. [FRESH] Return the cache entry w/o doing a GET
                # 2. [STALE] Do the GET (but add in cache validators if available)
                # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
                entry_disposition = _entry_disposition(info, headers)

                if entry_disposition == "FRESH":
                    if not cached_value:
                        info['status'] = '504'
                        content = ""
                    response = Response(info)
                    if cached_value:
                        response.fromcache = True
                    return (response, content)

                if entry_disposition == "STALE":
                    if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
                        headers['if-none-match'] = info['etag']
                    if info.has_key('last-modified') and not 'last-modified' in headers:
                        headers['if-modified-since'] = info['last-modified']
                elif entry_disposition == "TRANSPARENT":
                    pass

                (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)

            if response.status == 304 and method == "GET":
                # Rewrite the cache entry with the new end-to-end headers
                # Take all headers that are in response
                # and overwrite their values in info.
                # unless they are hop-by-hop, or are listed in the connection header.
                for key in _get_end2end_headers(response):
                    info[key] = response[key]
                merged_response = Response(info)
                if hasattr(response, "_stale_digest"):
                    merged_response._stale_digest = response._stale_digest
                _updateCache(headers, merged_response, content, self.cache, cachekey)
                response = merged_response
                response.status = 200
                response.fromcache = True
            elif response.status == 200:
                content = new_content
            else:
                self.cache.delete(cachekey)
                content = new_content
        else:
            # No usable cache entry; honor only-if-cached, else go to
            # the network.
            cc = _parse_cache_control(headers)
            if cc.has_key('only-if-cached'):
                info['status'] = '504'
                response = Response(info)
                content = ""
            else:
                (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
    except Exception, e:
        if self.force_exception_to_status_code:
            # Convert failures into synthetic Response objects instead of
            # propagating the exception.
            if isinstance(e, HttpLib2ErrorWithResponse):
                response = e.response
                content = e.content
                response.status = 500
                response.reason = str(e)
            elif isinstance(e, socket.timeout):
                content = "Request Timeout"
                response = Response({
                        "content-type": "text/plain",
                        "status": "408",
                        "content-length": len(content)
                        })
                response.reason = "Request Timeout"
            else:
                content = str(e)
                response = Response({
                        "content-type": "text/plain",
                        "status": "400",
                        "content-length": len(content)
                        })
                response.reason = "Bad Request"
        else:
            raise

    return (response, content)
def _get_proxy_info(self, scheme, authority):
    """Resolve self.proxy_info into a ProxyInfo instance (or None) for
    one request.

    self.proxy_info may be None, a static ProxyInfo, or a callable that
    takes the scheme. A proxy whose bypass list covers the target host
    is discarded.
    """
    hostname, port = urllib.splitport(authority)
    proxy = self.proxy_info
    if callable(proxy):
        proxy = proxy(scheme)
    if (hasattr(proxy, 'applies_to')
        and not proxy.applies_to(hostname)):
        proxy = None
    return proxy
class Response(dict):
    """An object more like email.Message than httplib.HTTPResponse.

    Behaves as a dict of lower-cased header names to values, with the
    attributes below layered on top.
    """

    """Is this response from our local cache"""
    fromcache = False

    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
    version = 11

    "Status code returned by server. "
    status = 200

    """Reason phrase returned by server."""
    reason = "Ok"

    # The Response from the previous hop when redirects were followed.
    previous = None

    def __init__(self, info):
        # info is either an email.Message or
        # an httplib.HTTPResponse object.
        if isinstance(info, httplib.HTTPResponse):
            for key, value in info.getheaders():
                self[key.lower()] = value
            self.status = info.status
            self['status'] = str(self.status)
            self.reason = info.reason
            self.version = info.version
        elif isinstance(info, email.Message.Message):
            for key, value in info.items():
                self[key.lower()] = value
            self.status = int(self['status'])
        else:
            # Assume a plain mapping of header names to values.
            for key, value in info.iteritems():
                self[key.lower()] = value
            self.status = int(self.get('status', self.status))
            self.reason = self.get('reason', self.reason)

    def __getattr__(self, name):
        # Allow 'response.dict' as an alias for the mapping itself.
        if name == 'dict':
            return self
        else:
            raise AttributeError, name
|
iuliat/nova | refs/heads/master | nova/api/openstack/compute/contrib/extended_quotas.py | 100 | # Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
class Extended_quotas(extensions.ExtensionDescriptor):
    """Adds ability for admins to delete quota
    and optionally force the update Quota command.
    """

    # Extension metadata consumed by the Nova API extension loader.
    name = "ExtendedQuotas"
    alias = "os-extended-quotas"
    namespace = ("http://docs.openstack.org/compute/ext/extended_quotas"
                 "/api/v1.1")
    # Timestamp of the last change to this extension's definition.
    updated = "2013-06-09T00:00:00Z"
|
kxgames/kxg | refs/heads/master | kxg/multiplayer.py | 1 | from .errors import *
from .forums import Forum, IdFactory
from .actors import Actor
class ClientForum(Forum):

    def __init__(self, pipe):
        """Wrap a network pipe so local messages flow through the server."""
        from collections import OrderedDict

        super().__init__()
        self.pipe = pipe
        self.pipe.lock()

        # No id factory until the server sends one; see
        # receive_id_from_server().
        self.actor_id_factory = None
        self.response_id_factory = IdFactory(0, 1)
        self.sent_message_cache = OrderedDict()
def receive_id_from_server(self):
    """
    Listen for an id from the server. Return true if an id has been
    received and false otherwise.

    At the beginning of a game, each client receives an IdFactory from
    the server. This factory is used to assign id numbers that are
    guaranteed to be unique to tokens that are created locally. This
    method checks to see if such a factory has been received. If it
    hasn't, this method does not block and immediately returns False.
    If it has, this method returns True after saving the factory
    internally. At this point it is safe to call Game.start_game(). It
    is also safe to call this method as many times as you'd like after
    an id has been received.
    """
    # Already received on a previous call.
    if self.actor_id_factory is not None:
        return True
    for message in self.pipe.receive():
        if isinstance(message, IdFactory):
            self.actor_id_factory = message
            return True
    return False
def connect_everyone(self, world, actors):
    """Wire the forum to the world and the single client-side actor."""

    # Make sure this forum has been assigned an id from the server.
    if self.actor_id_factory is None:
        raise ApiUsageError("""\
                ClientForum wasn't assigned an id number by the server
                before the game started.

                In a multiplayer game, each client must receive an id
                number from the server before the game starts. ServerActor
                automatically sends this id as soon as Game.start_game() is
                called on the server, but ClientForum must explicitly
                receive it by calling receive_id_from_server(). This
                method is non-blocking, so it has to be called repeatedly
                until it returns true, indicating that an id was received. """)

    # Make sure that this forum is only connected to one actor.
    assert len(actors) == 1
    self.actor = actors[0]

    # Connect the forum, world, and actors as usual.
    super().connect_everyone(world, actors)
def execute_message(self, message):
    """Execute *message* on the local world and forward it to the server."""

    # Cache the message and give it an id number the server can reference
    # in its response. Messages are cached so they can be undone if they
    # are rejected by the server. The id is necessary so the client forum
    # (i.e. this object) can associate each response with a cached message.
    message._set_server_response_id(self.response_id_factory.next())
    self.sent_message_cache[message._get_server_response_id()] = message

    # Relay the message to a ServerActor running on the server to update
    # the world on all of the other machines playing the game as well.
    self.pipe.send(message)
    self.pipe.deliver()

    # Have the message update the local world like usual.
    super().execute_message(message)
def execute_sync(self, message):
    """
    Respond when the server indicates that the client is out of sync.

    The server can request a sync when this client sends a message that
    fails the check() on the server. If the reason for the failure isn't
    very serious, then the server can decide to send it as usual in the
    interest of a smooth gameplay experience. When this happens, the
    server sends out an extra response providing the clients with the
    information they need to resync themselves.
    """
    # NOTE(review): the bare '{message}' placeholder suggests info()
    # interpolates using the caller's locals — confirm against the
    # logging helper this package uses.
    info("synchronizing message: {message}")

    # Synchronize the world.
    with self.world._unlock_temporarily():
        message._sync(self.world)
        self.world._react_to_sync_response(message)

    # Synchronize the tokens.
    for actor in self.actors:
        actor._react_to_sync_response(message)
def execute_undo(self, message):
    """
    Manage the response when the server rejects a message.

    An undo is required when this client sends a message that the server
    refuses to pass on to the other clients playing the game. When this
    happens, the client must undo the changes that the message made to the
    world before being sent or crash. Note that unlike sync requests, undo
    requests are only reported to the client that sent the offending
    message.
    """
    info("undoing message: {message}")

    # Roll back changes that the original message made to the world.
    with self.world._unlock_temporarily():
        message._undo(self.world)
        self.world._react_to_undo_response(message)

    # Give the actors a chance to react to the error. For example, a
    # GUI actor might inform the user that there are connectivity
    # issues and that their last action was countermanded.
    for actor in self.actors:
        actor._react_to_undo_response(message)
def on_start_game(self):
    """Install a world-aware message serializer on the pipe."""
    self.pipe.push_serializer(MessageSerializer(self.world))
def on_update_game(self):
    """Pump the pipe once: flush outgoing messages, apply incoming ones,
    and reconcile server responses against the sent-message cache."""
    from .messages import Message

    # An attempt is made to immediately deliver any messages passed into
    # execute_message(), but sometimes it takes more than one try to send a
    # message. So in case there are any messages waiting to be sent, the
    # code below attempts to clear the queue every frame.
    self.pipe.deliver()

    # For each message received from the server:
    for packet in self.pipe.receive():

        # If the incoming packet is a message, execute it on this client
        # and, if necessary, synchronize this client's world with the
        # server's. Messages that were sent from this client will not
        # reappear here, so we don't need to worry about double-dipping.
        if isinstance(packet, Message):
            info("receiving message: {packet}")
            super().execute_message(packet)
            response = packet._get_server_response()
            if response and response.sync_needed:
                self.execute_sync(packet)

        # If the incoming packet is a response to a message sent from this
        # client, find that message in the "sent message cache" and attach
        # the response to it. The response is handled in the while loop
        # below (and not right here) to better handle weird cases that can
        # occur when several messages are sent between server responses.
        elif isinstance(packet, ServerResponse):
            message = self.sent_message_cache[packet.id]
            message._set_server_response(packet)

    # Try to clear the sent message cache:
    while self.sent_message_cache:
        # Look at the most recently sent message first.
        message = self.sent_message_cache[next(reversed(self.sent_message_cache))]
        response = message._get_server_response()

        # Don't handle any response until responses for any messages that
        # were sent after it have been handled. This keeps the world in a
        # sane state for every response.
        if response is None:
            break

        # If the server requested that a message sync or undo itself, then
        # do that. Messages coming from any client may need to be synced,
        # but messages that need to be undone were sent by this client and
        # rejected by the server.
        if response.sync_needed:
            self.execute_sync(message)
        if response.undo_needed:
            self.execute_undo(message)

        # Now that the message has been fully handled, pop it off the
        # cache.
        self.sent_message_cache.popitem()
def on_finish_game(self):
    """Tear down the serializer that on_start_game() installed."""
    self.pipe.pop_serializer()
def _assign_id_factories(self):
    """Map the lone client-side actor to the id factory sent by the server."""
    assert self.actor_id_factory is not None, msg("""\
            Can't assign id factories without an id number from the server.
            This should've been caught by ClientActor.connect_everyone().""")
    return {self.actor: self.actor_id_factory}
class ServerActor(Actor):

    def __init__(self, pipe):
        """Create a server-side proxy for one remote client on *pipe*."""
        super().__init__()
        # This actor only shuttles messages; it never reacts to them.
        self._disable_forum_observation()
        self.pipe = pipe
        self.pipe.lock()
def send_message(self, message):
    """Not supported: a ServerActor only relays its client's messages,
    it never originates any of its own."""
    raise NotImplementedError
def on_start_game(self, num_players):
    """Install a world-aware message serializer on the pipe."""
    self.pipe.push_serializer(MessageSerializer(self.world))
def on_update_game(self, dt):
    """Vet and relay one frame's worth of messages from the remote client.

    Args:
        dt: Time step for this frame (not referenced in this method).
    """
    from .messages import MessageCheck

    # For each message received from the connected client:
    for message in self.pipe.receive():
        info("received message: {message}")

        # Make sure the message wasn't sent by an actor with a different id
        # than this one. This should absolutely never happen because this
        # actor gives its id to its client, so if a mismatch is detected
        # there's probably a bug in the game engine.
        if not message.was_sent_by(self._id_factory):
            critical("ignoring message from player {self.id} claiming to be from player {message.sender_id}.")
            continue

        # Check the message to make sure it matches the state of the game
        # world on the server. If the message doesn't pass the check, the
        # client and server must be out of sync, because the same check was
        # just passed on the client.
        response = ServerResponse(message)
        try:
            message._check(self.world)
        except MessageCheck:
            response.sync_needed = True
        else:
            response.sync_needed = False

        # Decide if it will be enough for the clients to sync themselves,
        # or if this message shouldn't be relayed at all (and should be
        # undone on the client that sent it). The message is also given a
        # chance to store information it can use later to sync the game.
        if response.sync_needed:
            response.undo_needed = not message._prepare_sync(
                    self.world, response)

        # Tell the clients how to treat this message. For the client that
        # sent the message in the first place, the response is sent on its
        # own. If a sync or an undo is needed, the client will retrieve
        # the original message from its cache and use it to reconcile its
        # world with the server's. Otherwise, the client will just clear
        # the original message from its cache. For all the other clients,
        # the response is attached to the message, but only if a sync is
        # needed (otherwise nothing special needs to be done).
        self.pipe.send(response)

        # If the message doesn't have an irreparable sync error, execute it
        # on the server and relay it to all the other clients.
        if not response.undo_needed:
            self._forum.execute_message(message)

    # Deliver any messages waiting to be sent. This has to be done every
    # frame because it sometimes takes more than one try to send a message.
    self.pipe.deliver()
def on_finish_game(self):
    """Remove the serializer that was installed when the game started."""
    self.pipe.pop_serializer()
def _set_forum(self, forum, id):
    # Register with the forum as usual, then tell the remote client which
    # actor id it was assigned, so both sides agree on message ownership.
    super()._set_forum(forum, id)
    self.pipe.send(id)
    self.pipe.deliver()
def _relay_message(self, message):
    """
    Relay messages from the forum on the server to the client represented
    by this actor.
    """
    # NOTE(review): the braces below are not interpolated by the stdlib;
    # presumably info() performs its own frame-based formatting — confirm.
    info("relaying message: {message}")
    # Never echo a message back to the client that originally sent it;
    # that client already executed it locally.
    if not message.was_sent_by(self._id_factory):
        self.pipe.send(message)
        self.pipe.deliver()
def _react_to_message(self, message):
    """
    Don't ever change the world in response to a message.

    This method is called by the game engine to trigger callbacks tied by
    this actor to particular messages.  That is useful for ordinary actors,
    but remote actors are only meant to shuttle messages between clients
    and should never react to individual messages.
    """
    pass
class ServerResponse:
    """Record how the server judged a single message sent by a client."""

    def __init__(self, message):
        # Mirror the id of the message being responded to, so the client
        # can match this response against its cached copy of the message.
        self.id = message._get_server_response_id()
        # Until the server's checks run, assume the client is in sync.
        self.sync_needed = False
        self.undo_needed = False

    def __repr__(self):
        name = self.__class__.__name__
        flags = "sync_needed={}, undo_needed={}".format(
                self.sync_needed, self.undo_needed)
        return "{}({})".format(name, flags)
class MessageSerializer:
    """
    Pickle messages before they are sent over the network, and unpickle them
    when they are received.  Tokens that have been added to the world are
    serialized using their ID, then replaced with the corresponding token from
    the remote world when the message is deserialized.
    """

    def __init__(self, world):
        # The world is needed on the receiving side to resolve token ids
        # back into live token objects (see unpack()).
        self.world = world

    def pack(self, message):
        # Serialize a Message (or ServerResponse) into a bytes packet.
        from pickle import Pickler
        from io import BytesIO
        from .tokens import Token
        from .messages import Message, require_message
        buffer = BytesIO()
        delegate = Pickler(buffer)

        def persistent_id(token):
            # Tokens already in the world travel by id; tokens that are
            # being added to the world travel by value (return None tells
            # pickle to serialize the object itself).
            if isinstance(token, Token):
                assert isinstance(message, Message), msg("""\
                        Both Message and ServerResponse objects can be
                        serialized, but only Messages can contain tokens.""")
                assert token.id, msg("""\
                        Every token should have an id by now.  Tokens that are
                        in the world should always have an id, and tokens that
                        are being added to the world should've been assigned an
                        id by Actor.send_message().""")
                if token in self.world:
                    assert token not in message.tokens_to_add(), msg("""\
                            Actor.send_message() should've refused to send a
                            message that would add a token that's already in
                            the world.""")
                    return token.id
                else:
                    assert token in message.tokens_to_add(), msg("""\
                            Actor.send_message() should've refused to send a
                            message referencing tokens that aren't in the world
                            and aren't being added to the world.""")
                    return None
        delegate.persistent_id = persistent_id
        delegate.dump(message)
        return buffer.getvalue()

    def unpack(self, packet):
        # Deserialize a packet, resolving token ids via the local world.
        from pickle import Unpickler
        from io import BytesIO
        buffer = BytesIO(packet)
        delegate = Unpickler(buffer)
        # persistent_load is the inverse of persistent_id in pack().
        delegate.persistent_load = lambda id: self.world.get_token(int(id))
        return delegate.load()
|
rue89-tech/edx-platform | refs/heads/master | common/djangoapps/datadog/startup.py | 229 | from django.conf import settings
from dogapi import dog_stats_api, dog_http_api
def run():
    """
    Initialize connection to datadog during django startup.

    Behaviour can be configured through a dictionary named DATADOG in the
    django project settings; by default only the statsd agent is enabled.
    """
    # Start from the defaults, then layer any project overrides on top.
    options = dict(statsd=True)
    if hasattr(settings, 'DATADOG'):
        options.update(settings.DATADOG)
    # Not all start() arguments are documented; see the dogapi source for
    # the full list of accepted keys.
    dog_stats_api.start(**options)
    dog_http_api.api_key = options.get('api_key')
|
humberos/android_kernel_samsung_smdk4412 | refs/heads/cm-10.2 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
    """Top-level window: a scrollable, zoomable timeline of scheduler events."""

    Y_OFFSET = 100           # pixels above the first row of rectangles
    RECT_HEIGHT = 100        # height of one event rectangle
    RECT_SPACE = 50          # vertical gap between rows
    EVENT_MARKING_WIDTH = 5  # height of the coloured strip marking events

    def __init__(self, sched_tracer, title, parent = None, id = -1):
        wx.Frame.__init__(self, parent, id, title)
        (self.screen_width, self.screen_height) = wx.GetDisplaySize()
        self.screen_width -= 10
        self.screen_height -= 10
        self.zoom = 0.5
        self.scroll_scale = 20
        self.sched_tracer = sched_tracer
        self.sched_tracer.set_root_win(self)
        (self.ts_start, self.ts_end) = sched_tracer.interval()
        self.update_width_virtual()
        self.nr_rects = sched_tracer.nr_rectangles() + 1
        self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        # whole window panel
        self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
        # scrollable container
        self.scroll = wx.ScrolledWindow(self.panel)
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
        self.scroll.EnableScrolling(True, True)
        self.scroll.SetFocus()
        # scrollable drawing area
        self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
        self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
        self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
        self.scroll.Fit()
        self.Fit()
        self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
        self.txt = None
        self.Show(True)

    def us_to_px(self, val):
        # Microseconds -> pixels at the current zoom level.
        return val / (10 ** 3) * self.zoom

    def px_to_us(self, val):
        # Pixels -> microseconds at the current zoom level.
        return (val / self.zoom) * (10 ** 3)

    def scroll_start(self):
        # Current scroll offset, converted from scroll units to pixels.
        (x, y) = self.scroll.GetViewStart()
        return (x * self.scroll_scale, y * self.scroll_scale)

    def scroll_start_us(self):
        (x, y) = self.scroll_start()
        return self.px_to_us(x)

    def paint_rectangle_zone(self, nr, color, top_color, start, end):
        # Draw one event rectangle in row `nr`, with an optional coloured
        # strip along its top edge (used to mark events).
        offset_px = self.us_to_px(start - self.ts_start)
        width_px = self.us_to_px(end - self.ts_start)
        offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        width_py = RootFrame.RECT_HEIGHT
        dc = self.dc
        if top_color is not None:
            (r, g, b) = top_color
            top_color = wx.Colour(r, g, b)
            brush = wx.Brush(top_color, wx.SOLID)
            dc.SetBrush(brush)
            dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
            width_py -= RootFrame.EVENT_MARKING_WIDTH
            offset_py += RootFrame.EVENT_MARKING_WIDTH
        (r ,g, b) = color
        color = wx.Colour(r, g, b)
        brush = wx.Brush(color, wx.SOLID)
        dc.SetBrush(brush)
        dc.DrawRectangle(offset_px, offset_py, width_px, width_py)

    def update_rectangles(self, dc, start, end):
        # Ask the tracer to repaint everything in the visible time window.
        start += self.ts_start
        end += self.ts_start
        self.sched_tracer.fill_zone(start, end)

    def on_paint(self, event):
        dc = wx.PaintDC(self.scroll_panel)
        self.dc = dc
        width = min(self.width_virtual, self.screen_width)
        (x, y) = self.scroll_start()
        start = self.px_to_us(x)
        end = self.px_to_us(x + width)
        self.update_rectangles(dc, start, end)

    def rect_from_ypixel(self, y):
        # Map a y pixel coordinate to a row index, or -1 when the click
        # falls outside every rectangle (e.g. in the gap between rows).
        y -= RootFrame.Y_OFFSET
        rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
            return -1
        return rect

    def update_summary(self, txt):
        # Replace the summary text shown below the timeline.
        if self.txt:
            self.txt.Destroy()
        self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))

    def on_mouse_down(self, event):
        (x, y) = event.GetPositionTuple()
        rect = self.rect_from_ypixel(y)
        if rect == -1:
            return
        t = self.px_to_us(x) + self.ts_start
        self.sched_tracer.mouse_down(rect, t)

    def update_width_virtual(self):
        self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)

    def __zoom(self, x):
        # Re-derive the virtual width and keep the left edge anchored on
        # the same timestamp `x` while zooming.
        self.update_width_virtual()
        (xpos, ypos) = self.scroll.GetViewStart()
        xpos = self.us_to_px(x) / self.scroll_scale
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
        self.Refresh()

    def zoom_in(self):
        x = self.scroll_start_us()
        self.zoom *= 2
        self.__zoom(x)

    def zoom_out(self):
        x = self.scroll_start_us()
        self.zoom /= 2
        self.__zoom(x)

    def on_key_press(self, event):
        # '+'/'-' zoom; arrow keys scroll one scroll unit at a time.
        key = event.GetRawKeyCode()
        if key == ord("+"):
            self.zoom_in()
            return
        if key == ord("-"):
            self.zoom_out()
            return
        key = event.GetKeyCode()
        (x, y) = self.scroll.GetViewStart()
        if key == wx.WXK_RIGHT:
            self.scroll.Scroll(x + 1, y)
        elif key == wx.WXK_LEFT:
            self.scroll.Scroll(x - 1, y)
        elif key == wx.WXK_DOWN:
            self.scroll.Scroll(x, y + 1)
        elif key == wx.WXK_UP:
            self.scroll.Scroll(x, y - 1)
|
defrank/roshi | refs/heads/master | app/roshi/views/home.py | 1 | """
Roshi
~~~~~
Home views.
:copyright: (c) 2017 Derek M. Frank
:license: MPL-2.0
"""
from flask import Blueprint # type: ignore
from flask_mako import render_template # type: ignore
home: Blueprint = Blueprint('home', __name__)
@home.route('/')
def index() -> str:
    """Render the landing page. Currently doesn't do anything :("""
    greeting = 'Welcome to the dog training app!'
    return render_template('home/index.mako', message=greeting)
|
Arksine/atom-serialmonitor | refs/heads/master | lib/python/pyserialserver.py | 1 | import os
import socketio
import eventlet
from eventlet import wsgi
import serial
from serial.tools import list_ports
eventlet.monkey_patch()
class SerialReader(object):
    """Poll an open serial port and forward incoming bytes to a socket.io
    client on an eventlet green thread."""

    def __init__(self, serial_instance, sio_in, sid_in):
        self.serial = serial_instance
        self.sio = sio_in
        self.sid = sid_in
        # Toggled by start()/stop(); the read loop exits when this is False.
        self.started = False
        self.serThread = None

    def start(self):
        # Run the read loop on a green thread so the socket.io server keeps
        # serving while we poll the port.
        self.started = True
        self.serThread = eventlet.spawn_n(self.read_from_port)

    def stop(self):
        self.started = False

    def read_from_port(self):
        while self.serial.is_open and self.started:
            if self.serial.in_waiting > 0:
                try:
                    data = self.serial.read(self.serial.in_waiting)
                except serial.SerialException as e:
                    # probably some I/O problem such as disconnected USB serial
                    # adapters -> exit
                    print(e)
                    self.sio.emit('port_disconnected')
                    break
                else:
                    # latin-1 keeps a 1:1 byte-to-char mapping, so raw byte
                    # values survive the trip to the javascript client.
                    self.sio.emit('serial_received', data.decode('latin_1'),
                                  self.sid)
            # Yield to the event loop between polls.
            eventlet.sleep(.005)
# Module-level singletons: one socket.io server, one serial port, and the
# reader green thread (created on connect_serial, torn down on disconnect).
sio = socketio.Server(logger=True)
ser = serial.Serial()
serialReader = None
def set_data_bits(dbs):
    """Apply the requested byte size (5-8 data bits) to the global port.

    Unknown values leave the port's current byte size untouched.
    """
    sizes = {
        5: serial.FIVEBITS,
        6: serial.SIXBITS,
        7: serial.SEVENBITS,
        8: serial.EIGHTBITS,
    }
    if dbs in sizes:
        ser.bytesize = sizes[dbs]
    return
def set_stop_bits(sbs):
    """Apply the requested stop-bit count ('1', '1.5' or '2') to the port.

    Unknown values leave the port's current setting untouched.
    """
    stops = {
        '1': serial.STOPBITS_ONE,
        '1.5': serial.STOPBITS_ONE_POINT_FIVE,
        '2': serial.STOPBITS_TWO,
    }
    if sbs in stops:
        ser.stopbits = stops[sbs]
    return
def set_parity(par):
    """Apply the requested parity mode to the global port.

    Unknown values leave the port's current setting untouched.
    """
    parities = {
        'none': serial.PARITY_NONE,
        'even': serial.PARITY_EVEN,
        'odd': serial.PARITY_ODD,
        'mark': serial.PARITY_MARK,
        'space': serial.PARITY_SPACE,
    }
    if par in parities:
        ser.parity = parities[par]
    return
@sio.on('connect')
def connect(sid, environ):
    # Socket.io lifecycle hook: a client attached to the server.
    print('connect ', sid)
@sio.on('disconnect')
def disconnect(sid):
    # The lone client went away: tear down the reader, close the port and
    # exit the whole server process.
    print('disconnect ', sid)
    if ser.is_open:
        global serialReader
        serialReader.stop()
        del(serialReader)
        serialReader = None
        ser.close()
    quit()
@sio.on('list_serial_ports')
def list_serial_ports(sid):
    # Emit a 'port_list' event describing every serial port on the host,
    # or the string 'none' when no ports are present.
    print('port list requested', sid)
    portList = sorted(list_ports.comports())
    if len(portList) > 0:
        outList = []
        desc = ''
        for port in portList:
            # Build a human-readable description; windows descriptions
            # already include the device name, posix ones do not.
            if os.name == 'nt':  # win32
                desc = port.description
            elif os.name == 'posix':
                desc = '(' + port.name + ') ' + port.description
            else:
                desc = '(' + port.name + ')'
            outList.append(
                {'port': port.device, 'description': desc})
        sio.emit('port_list', outList, sid)
    else:
        sio.emit('port_list', 'none', sid)
    return
@sio.on('connect_serial')
def connect_serial(sid, data):
    # Configure the global port from the client's settings dict, open it,
    # and start the background reader.  Emits 'port_connected' with
    # 'true'/'false' so the client knows the outcome.
    ser.port = data['port']
    ser.baudrate = data['baud']
    ser.rts = data['rts']
    ser.dtr = data['dtr']
    set_data_bits(data['databits'])
    set_parity(data['parity'])
    set_stop_bits(data['stopbits'])
    ser.xonxoff = data['xonxoff']
    ser.rtscts = data['rtscts']
    ser.dsrdtr = data['dsrdtr']
    ser.write_timeout = 2.0
    try:
        ser.open()
    except serial.SerialException:
        sio.emit('port_connected', 'false', sid)
        return
    if ser.is_open:
        global serialReader
        # Replace any reader left over from a previous connection.
        if serialReader is not None:
            del(serialReader)
        serialReader = SerialReader(ser, sio, sid)
        serialReader.start()
        sio.emit('port_connected', 'true', sid)
    else:
        sio.emit('port_connected', 'false', sid)
    return
@sio.on('disconnect_serial')
def disconnect_serial(sid):
    # Stop the reader and close the port, but keep the server running so
    # the client can reconnect later.
    if ser.is_open:
        global serialReader
        serialReader.stop()
        del(serialReader)
        serialReader = None
        ser.close()
        sio.emit('port_disconnected', room=sid)
    return
@sio.on('write_to_serial')
def write_to_serial(sid, data):
    # NOTE: python 2 only -- relies on the `unicode` type and print
    # statements; the rest of this module matches.
    if type(data) is unicode:
        # Unicode Text received, send as latin-1(retains byte data)
        try:
            ser.write(data.encode('latin_1'))
        except serial.SerialTimeoutException:
            print "Write timed out"
    else:
        # Node.js buffer received
        try:
            ser.write(data)
        except serial.SerialTimeoutException:
            print "Write timed out"
    return
@sio.on('update_serial_setting')
def update_serial_setting(sid, data):
    """
    Update a single live setting on the open serial port.

    ``data`` is a dict with a 'setting' key ('baud', 'dtr' or 'rts') and
    the new 'value' to apply.
    """
    if data['setting'] == 'baud':
        ser.baudrate = data['value']
    elif data['setting'] == 'dtr':
        ser.dtr = data['value']
    elif data['setting'] == 'rts':
        # BUG FIX: this branch previously assigned ser.baudrate, so RTS
        # updates silently changed the baud rate instead of the RTS line.
        ser.rts = data['value']
    return
if __name__ == '__main__':
    # Serve the socket.io app on localhost only; max_size=1 because the
    # editor front end is the single expected client.
    app = socketio.Middleware(sio)
    wsgi.server(eventlet.listen(('127.0.0.1', 8000)), app, max_size=1)
|
ahmadRagheb/goldenHR | refs/heads/master | erpnext/docs/user/manual/de/human-resources/__init__.py | 12133432 | |
simonneuville/runamic_server | refs/heads/master | djangoserver/server/logic/database/__init__.py | 12133432 | |
indictranstech/focal-erpnext | refs/heads/develop | selling/doctype/job_order/__init__.py | 12133432 | |
dariemp/odoo | refs/heads/8.0 | addons/point_of_sale/wizard/pos_open_statement.py | 387 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class pos_open_statement(osv.osv_memory):
    _name = 'pos.open.statement'
    _description = 'Open Statements'

    def open_statement(self, cr, uid, ids, context=None):
        """
        Open the statements
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return : Blank Directory
        """
        data = {}
        mod_obj = self.pool.get('ir.model.data')
        statement_obj = self.pool.get('account.bank.statement')
        sequence_obj = self.pool.get('ir.sequence')
        journal_obj = self.pool.get('account.journal')
        if context is None:
            context = {}
        st_ids = []
        # Every journal flagged as a PoS payment method gets a fresh draft
        # statement owned by the current user.
        j_ids = journal_obj.search(cr, uid, [('journal_user','=',1)], context=context)
        if not j_ids:
            raise osv.except_osv(_('No Cash Register Defined!'), _('You have to define which payment method must be available in the point of sale by reusing existing bank and cash through "Accounting / Configuration / Journals / Journals". Select a journal and check the field "PoS Payment Method" from the "Point of Sale" tab. You can also create new payment methods directly from menu "PoS Backend / Configuration / Payment Methods".'))
        for journal in journal_obj.browse(cr, uid, j_ids, context=context):
            ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id', '=', uid), ('journal_id', '=', journal.id)], context=context)
            # Number the statement from the journal's own sequence when it
            # has one, otherwise from the generic cash statement sequence.
            if journal.sequence_id:
                number = sequence_obj.next_by_id(cr, uid, journal.sequence_id.id, context=context)
            else:
                number = sequence_obj.next_by_code(cr, uid, 'account.cash.statement', context=context)
            data.update({
                'journal_id': journal.id,
                'user_id': uid,
                'state': 'draft',
                'name': number
            })
            statement_id = statement_obj.create(cr, uid, data, context=context)
            st_ids.append(int(statement_id))
            if journal.cash_control:
                statement_obj.button_open(cr, uid, [statement_id], context)
        # Return an act_window showing only the statements created above.
        tree_res = mod_obj.get_object_reference(cr, uid, 'point_of_sale', 'view_cash_statement_pos_tree')
        tree_id = tree_res and tree_res[1] or False
        form_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_bank_statement_form2')
        form_id = form_res and form_res[1] or False
        search_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_account_bank_statement_filter')
        search_id = search_res and search_res[1] or False
        return {
            'type': 'ir.actions.act_window',
            'name': _('List of Cash Registers'),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'account.bank.statement',
            'domain': str([('id', 'in', st_ids)]),
            'views': [(tree_id, 'tree'), (form_id, 'form')],
            'search_view_id': search_id,
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
MatthewWilkes/django-oscar | refs/heads/master | src/oscar/test/factories/__init__.py | 40 | # coding=utf-8
from decimal import Decimal as D
import random
import datetime
from django.conf import settings
from django.utils import timezone
from oscar.apps.offer import models
from oscar.apps.partner import strategy, availability, prices
from oscar.core.loading import get_class, get_model
from oscar.test.factories.address import * # noqa
from oscar.test.factories.basket import * # noqa
from oscar.test.factories.catalogue import * # noqa
from oscar.test.factories.contrib import * # noqa
from oscar.test.factories.customer import * # noqa
from oscar.test.factories.offer import * # noqa
from oscar.test.factories.order import * # noqa
from oscar.test.factories.partner import * # noqa
from oscar.test.factories.payment import * # noqa
from oscar.test.factories.voucher import * # noqa
Basket = get_model('basket', 'Basket')
Free = get_class('shipping.methods', 'Free')
Voucher = get_model('voucher', 'Voucher')
OrderCreator = get_class('order.utils', 'OrderCreator')
OrderTotalCalculator = get_class('checkout.calculators',
'OrderTotalCalculator')
Partner = get_model('partner', 'Partner')
StockRecord = get_model('partner', 'StockRecord')
Product = get_model('catalogue', 'Product')
ProductClass = get_model('catalogue', 'ProductClass')
ProductAttribute = get_model('catalogue', 'ProductAttribute')
ProductAttributeValue = get_model('catalogue', 'ProductAttributeValue')
ProductImage = get_model('catalogue', 'ProductImage')
ConditionalOffer = get_model('offer', 'ConditionalOffer')
WeightBand = get_model('shipping', 'WeightBand')
WeightBased = get_model('shipping', 'WeightBased')
def create_stockrecord(product=None, price_excl_tax=None, partner_sku=None,
                       num_in_stock=None, partner_name=None,
                       currency=settings.OSCAR_DEFAULT_CURRENCY,
                       partner_users=None):
    # Create a stockrecord for testing, creating a product and partner on
    # demand.  get_or_create is keyed on the partner name, so repeated
    # calls with the same name share one partner.
    if product is None:
        product = create_product()
    partner, __ = Partner.objects.get_or_create(name=partner_name or '')
    if partner_users:
        for user in partner_users:
            partner.users.add(user)
    if price_excl_tax is None:
        price_excl_tax = D('9.99')
    if partner_sku is None:
        # Randomised suffix keeps SKUs unique across repeated test calls.
        partner_sku = 'sku_%d_%d' % (product.id, random.randint(0, 10000))
    return product.stockrecords.create(
        partner=partner, partner_sku=partner_sku,
        price_currency=currency,
        price_excl_tax=price_excl_tax, num_in_stock=num_in_stock)
def create_purchase_info(record):
    # Wrap a stockrecord in a PurchaseInfo whose price is the record's
    # tax-exclusive price with no tax applied.
    return strategy.PurchaseInfo(
        price=prices.FixedPrice(
            record.price_currency,
            record.price_excl_tax,
            D('0.00')  # Default to no tax
        ),
        availability=availability.StockRequired(
            record.net_stock_level),
        stockrecord=record
    )
def create_product(upc=None, title=u"Dùmϻϒ title",
                   product_class=u"Dùmϻϒ item class",
                   partner_name=None, partner_sku=None, price=None,
                   num_in_stock=None, attributes=None,
                   partner_users=None, **kwargs):
    """
    Helper method for creating products that are used in tests.
    """
    product_class, __ = ProductClass._default_manager.get_or_create(
        name=product_class)
    product = product_class.products.model(
        product_class=product_class,
        title=title, upc=upc, **kwargs)
    # A product created with a parent defaults to the 'child' structure
    # unless the caller chose a structure explicitly.
    if kwargs.get('parent') and 'structure' not in kwargs:
        product.structure = 'child'
    if attributes:
        for code, value in attributes.items():
            # Ensure product attribute exists
            product_class.attributes.get_or_create(name=code, code=code)
            setattr(product.attr, code, value)
    product.save()
    # Shortcut for creating stockrecord
    stockrecord_fields = [
        price, partner_sku, partner_name, num_in_stock, partner_users]
    if any([field is not None for field in stockrecord_fields]):
        create_stockrecord(
            product, price_excl_tax=price, num_in_stock=num_in_stock,
            partner_users=partner_users, partner_sku=partner_sku,
            partner_name=partner_name)
    return product
def create_product_image(product=None,
                         original=None,
                         caption='Dummy Caption',
                         display_order=None,
                         ):
    """
    Create a ProductImage for testing.

    If no display_order is given, the image is appended after the
    product's existing images.  An explicit display_order of 0 is
    honoured (previously a falsy check treated 0 as "not given" and
    could silently replace it with max+1 when images already existed).
    """
    if not product:
        product = create_product()
    # BUG FIX: test "is None" instead of falsiness so callers can request
    # display_order=0 explicitly.
    if display_order is None:
        if not product.images.all():
            display_order = 0
        else:
            display_order = max(
                [i.display_order for i in product.images.all()]) + 1
    kwargs = {'product_id': product.id,
              'original': original,
              'display_order': display_order,
              'caption': caption, }
    return ProductImage.objects.create(**kwargs)
def create_basket(empty=False):
    # Create a basket; unless ``empty``, add one in-stock product to it.
    basket = Basket.objects.create()
    basket.strategy = strategy.Default()
    if not empty:
        product = create_product()
        create_stockrecord(product, num_in_stock=2)
        basket.add_product(product)
    return basket
def create_order(number=None, basket=None, user=None, shipping_address=None,
                 shipping_method=None, billing_address=None,
                 total=None, **kwargs):
    """
    Helper method for creating an order for testing
    """
    if not basket:
        # No basket given: build one containing a single in-stock product.
        basket = Basket.objects.create()
        basket.strategy = strategy.Default()
        product = create_product()
        create_stockrecord(
            product, num_in_stock=10, price_excl_tax=D('10.00'))
        basket.add_product(product)
    if not basket.id:
        basket.save()
    if shipping_method is None:
        shipping_method = Free()
    shipping_charge = shipping_method.calculate(basket)
    if total is None:
        total = OrderTotalCalculator().calculate(basket, shipping_charge)
    order = OrderCreator().place_order(
        order_number=number,
        user=user,
        basket=basket,
        shipping_address=shipping_address,
        shipping_method=shipping_method,
        shipping_charge=shipping_charge,
        billing_address=billing_address,
        total=total,
        **kwargs)
    # Mark the basket as submitted so it can't be reused by later tests.
    basket.set_as_submitted()
    return order
def create_offer(name=u"Dùmϻϒ offer", offer_type="Site",
                 max_basket_applications=None, range=None, condition=None,
                 benefit=None, priority=0, status=None, start=None, end=None):
    """
    Helper method for creating an offer
    """
    if range is None:
        range, __ = models.Range.objects.get_or_create(
            name=u"All products räñgë", includes_all_products=True)
    if condition is None:
        # Default condition: buy at least one product from the range.
        condition, __ = models.Condition.objects.get_or_create(
            range=range, type=models.Condition.COUNT, value=1)
    if benefit is None:
        # Default benefit: 20% off products in the range.
        benefit, __ = models.Benefit.objects.get_or_create(
            range=range, type=models.Benefit.PERCENTAGE, value=20)
    if status is None:
        status = ConditionalOffer.OPEN
    # Create start and end date so offer is active
    now = timezone.now()
    if start is None:
        start = now - datetime.timedelta(days=1)
    if end is None:
        end = now + datetime.timedelta(days=30)
    return ConditionalOffer.objects.create(
        name=name,
        start_datetime=start,
        end_datetime=end,
        status=status,
        offer_type=offer_type,
        condition=condition,
        benefit=benefit,
        max_basket_applications=max_basket_applications,
        priority=priority)
def create_voucher():
    """
    Helper method for creating a voucher
    """
    voucher = Voucher.objects.create(
        name=u"Dùmϻϒ voucher",
        code="test",
        start_datetime=timezone.now(),
        end_datetime=timezone.now() + datetime.timedelta(days=12))
    # A voucher needs at least one attached offer to have any effect.
    voucher.offers.add(create_offer(offer_type='Voucher'))
    return voucher
def create_shipping_weight_based(default_weight=D(1)):
    """Create a weight-based shipping method with the given default weight."""
    return WeightBased.objects.create(default_weight=default_weight)
def create_shipping_weight_band(upper_limit, charge, weight_based=None):
    """Create a weight band, attached to a new method unless one is given."""
    method = weight_based or create_shipping_weight_based()
    return WeightBand.objects.create(
        method=method,
        upper_limit=upper_limit,
        charge=charge,
    )
|
MaTriXy/thumbor | refs/heads/master | thumbor/detectors/feature_detector/__init__.py | 14 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
try:
import cv
except ImportError:
import cv2.cv as cv
from thumbor.detectors import BaseDetector
from thumbor.point import FocalPoint
class Detector(BaseDetector):
    """Focal point detector based on OpenCV corner (good feature) detection."""

    def detect(self, callback):
        engine = self.context.modules.engine
        sz = engine.size
        # Wrap the engine's RGB buffer in an OpenCV image header.
        image = cv.CreateImageHeader(sz, cv.IPL_DEPTH_8U, 3)
        image_mode, image_data = engine.image_data_as_rgb(False)
        cv.SetData(image, image_data)
        # Corner detection works on a single grayscale channel.
        gray_image = cv.CreateImage(engine.size, 8, 1)
        convert_mode = getattr(cv, 'CV_%s2GRAY' % image_mode)
        cv.CvtColor(image, gray_image, convert_mode)
        image = gray_image
        rows = sz[0]
        cols = sz[1]
        eig_image = cv.CreateMat(rows, cols, cv.CV_32FC1)
        temp_image = cv.CreateMat(rows, cols, cv.CV_32FC1)
        # Up to 20 corners, quality level 0.04, minimum distance 1px.
        points = cv.GoodFeaturesToTrack(image, eig_image, temp_image, 20, 0.04, 1.0, useHarris=False)
        if points:
            # Each detected corner becomes a focal point of weight 1.
            for x, y in points:
                self.context.request.focal_points.append(FocalPoint(x, y, 1))
            callback()
        else:
            # No features found: fall through to the next detector in line.
            self.next(callback)
|
CydarLtd/ansible | refs/heads/devel | test/runner/lib/sanity.py | 27 | """Execute Ansible sanity tests."""
from __future__ import absolute_import, print_function
import glob
import json
import os
import re
from xml.etree.ElementTree import (
fromstring,
Element,
)
from lib.util import (
ApplicationError,
SubprocessError,
display,
run_command,
deepest_path,
parse_to_dict,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.target import (
walk_external_targets,
walk_internal_targets,
walk_sanity_targets,
)
from lib.executor import (
get_changes_filter,
AllTargetsSkipped,
Delegate,
install_command_requirements,
SUPPORTED_PYTHON_VERSIONS,
intercept_command,
SanityConfig,
)
from lib.test import (
TestSuccess,
TestFailure,
TestSkipped,
TestMessage,
calculate_best_confidence,
)
COMMAND = 'sanity'
PEP8_SKIP_PATH = 'test/sanity/pep8/skip.txt'
PEP8_LEGACY_PATH = 'test/sanity/pep8/legacy-files.txt'
PYLINT_SKIP_PATH = 'test/sanity/pylint/skip.txt'
def command_sanity(args):
    """
    Run the sanity tests selected by the command line arguments.
    :type args: SanityConfig
    """
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    targets = SanityTargets(args.include, args.exclude, require)
    if not targets.include:
        raise AllTargetsSkipped()
    if args.delegate:
        raise Delegate(require=changes)
    install_command_requirements(args)
    tests = sanity_get_tests()
    if args.test:
        tests = [t for t in tests if t.name in args.test]
    if args.skip_test:
        tests = [t for t in tests if t.name not in args.skip_test]
    total = 0
    failed = []
    for test in tests:
        if args.list_tests:
            display.info(test.name)
            continue
        # Tests that intercept python run once per supported version;
        # others run a single time (the one-element tuple of None).
        if test.intercept:
            versions = SUPPORTED_PYTHON_VERSIONS
        else:
            versions = None,
        for version in versions:
            if args.python and version and version != args.python:
                continue
            display.info('Sanity check using %s%s' % (test.name, ' with Python %s' % version if version else ''))
            options = ''
            if test.script:
                result = test.func(args, targets, test.script)
            elif test.intercept:
                result = test.func(args, targets, python_version=version)
                options = ' --python %s' % version
            else:
                result = test.func(args, targets)
            result.write(args)
            total += 1
            if isinstance(result, SanityFailure):
                failed.append(result.test + options)
    if failed:
        message = 'The %d sanity test(s) listed below (out of %d) failed. See error output above for details.\n%s' % (
            len(failed), total, '\n'.join(failed))
        if args.failure_ok:
            display.error(message)
        else:
            raise ApplicationError(message)
def command_sanity_code_smell(args, _, script):
    """
    Run a single code-smell script and report pass/fail.
    :type args: SanityConfig
    :type _: SanityTargets
    :type script: str
    :rtype: SanityResult
    """
    test = os.path.splitext(os.path.basename(script))[0]
    cmd = [script]
    env = ansible_environment(args, color=False)
    try:
        stdout, stderr = run_command(args, cmd, env=env, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    # Any stderr output or non-zero exit is treated as a failure.
    if stderr or status:
        summary = str(SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout))
        return SanityFailure(test, summary=summary)
    return SanitySuccess(test)
def command_sanity_validate_modules(args, targets):
    """
    Run the validate-modules tool against the changed module files.
    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: SanityResult
    """
    test = 'validate-modules'
    env = ansible_environment(args, color=False)
    # Only files under lib/ansible/modules/ are interesting to this test.
    paths = [deepest_path(i.path, 'lib/ansible/modules/') for i in targets.include_external]
    paths = sorted(set(p for p in paths if p))
    if not paths:
        return SanitySkipped(test)
    cmd = [
        'test/sanity/validate-modules/validate-modules',
        '--format', 'json',
    ] + paths
    with open('test/sanity/validate-modules/skip.txt', 'r') as skip_fd:
        skip_paths = skip_fd.read().splitlines()
    skip_paths += [e.path for e in targets.exclude_external]
    if skip_paths:
        cmd += ['--exclude', '^(%s)' % '|'.join(skip_paths)]
    if args.base_branch:
        cmd.extend([
            '--base-branch', args.base_branch,
        ])
    else:
        display.warning('Cannot perform module comparison against the base branch. Base branch not detected when running locally.')
    try:
        stdout, stderr = run_command(args, cmd, env=env, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    # Exit status 3 means findings were reported; anything else is an error.
    if stderr or status not in (0, 3):
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    if args.explain:
        return SanitySkipped(test)
    messages = json.loads(stdout)
    results = []
    for filename in messages:
        output = messages[filename]
        for item in output['errors']:
            results.append(SanityMessage(
                path=filename,
                line=int(item['line']) if 'line' in item else 0,
                column=int(item['column']) if 'column' in item else 0,
                level='error',
                code='E%s' % item['code'],
                message=item['msg'],
            ))
    if results:
        return SanityFailure(test, messages=results)
    return SanitySuccess(test)
def command_sanity_shellcheck(args, targets):
    """
    Run shellcheck against the included shell scripts that are not skipped.
    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: SanityResult
    """
    test = 'shellcheck'
    with open('test/sanity/shellcheck/skip.txt', 'r') as skip_fd:
        skip_paths = set(skip_fd.read().splitlines())
    with open('test/sanity/shellcheck/exclude.txt', 'r') as exclude_fd:
        exclude = set(exclude_fd.read().splitlines())
    paths = sorted(i.path for i in targets.include if os.path.splitext(i.path)[1] == '.sh' and i.path not in skip_paths)
    if not paths:
        return SanitySkipped(test)
    cmd = [
        'shellcheck',
        '-e', ','.join(sorted(exclude)),
        '--format', 'checkstyle',
    ] + paths
    try:
        stdout, stderr = run_command(args, cmd, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    # Exit status 1 just means findings were reported; higher is an error.
    if stderr or status > 1:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    if args.explain:
        return SanitySkipped(test)
    # json output is missing file paths in older versions of shellcheck, so we'll use xml instead
    root = fromstring(stdout)  # type: Element
    results = []
    for item in root:  # type: Element
        for entry in item:  # type: Element
            results.append(SanityMessage(
                message=entry.attrib['message'],
                path=item.attrib['name'],
                line=int(entry.attrib['line']),
                column=int(entry.attrib['column']),
                level=entry.attrib['severity'],
                code=entry.attrib['source'].replace('ShellCheck.', ''),
            ))
    if results:
        return SanityFailure(test, messages=results)
    return SanitySuccess(test)
def command_sanity_pep8(args, targets):
    """Run the ``pep8`` style checker over all tracked ``.py`` files.

    Files on the legacy list are allowed to violate rules on the legacy
    ignore list; such violations are summarized instead of failing the test.

    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: SanityResult
    """
    test = 'pep8'
    # skip.txt: files exempt entirely; legacy list: files held to the relaxed
    # legacy rule set; the two ignore files list rule codes to suppress.
    with open(PEP8_SKIP_PATH, 'r') as skip_fd:
        skip_paths = skip_fd.read().splitlines()
    with open(PEP8_LEGACY_PATH, 'r') as legacy_fd:
        legacy_paths = legacy_fd.read().splitlines()
    with open('test/sanity/pep8/legacy-ignore.txt', 'r') as ignore_fd:
        legacy_ignore = set(ignore_fd.read().splitlines())
    with open('test/sanity/pep8/current-ignore.txt', 'r') as ignore_fd:
        current_ignore = sorted(ignore_fd.read().splitlines())
    skip_paths_set = set(skip_paths)
    legacy_paths_set = set(legacy_paths)
    paths = sorted(i.path for i in targets.include if os.path.splitext(i.path)[1] == '.py' and i.path not in skip_paths_set)
    if not paths:
        return SanitySkipped(test)
    cmd = [
        'pep8',
        '--max-line-length', '160',
        '--config', '/dev/null',
        '--ignore', ','.join(sorted(current_ignore)),
    ] + paths
    try:
        stdout, stderr = run_command(args, cmd, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    if stderr:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    if args.explain:
        return SanitySkipped(test)
    # Parse the one-issue-per-line "path:line:col: CODE message" output.
    pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[WE][0-9]{3}) (?P<message>.*)$'
    results = [re.search(pattern, line).groupdict() for line in stdout.splitlines()]
    results = [SanityMessage(
        message=r['message'],
        path=r['path'],
        line=int(r['line']),
        column=int(r['column']),
        level='warning' if r['code'].startswith('W') else 'error',
        code=r['code'],
    ) for r in results]
    failed_result_paths = set([result.path for result in results])
    used_paths = set(paths)
    errors = []
    summary = {}
    # Validate the legacy list itself; `line` tracks position within the list
    # so messages can point at the exact entry to remove.
    line = 0
    for path in legacy_paths:
        line += 1
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append(SanityMessage(
                code='A101',
                message='Remove "%s" since it does not exist' % path,
                path=PEP8_LEGACY_PATH,
                line=line,
                column=1,
                confidence=calculate_best_confidence(((PEP8_LEGACY_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
            ))
        if path in used_paths and path not in failed_result_paths:
            # Keep files out of the list which no longer require the relaxed rule set.
            errors.append(SanityMessage(
                code='A201',
                message='Remove "%s" since it passes the current rule set' % path,
                path=PEP8_LEGACY_PATH,
                line=line,
                column=1,
                confidence=calculate_best_confidence(((PEP8_LEGACY_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
            ))
    # Validate the skip list the same way.
    line = 0
    for path in skip_paths:
        line += 1
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append(SanityMessage(
                code='A101',
                message='Remove "%s" since it does not exist' % path,
                path=PEP8_SKIP_PATH,
                line=line,
                column=1,
                confidence=calculate_best_confidence(((PEP8_SKIP_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
            ))
    for result in results:
        if result.path in legacy_paths_set and result.code in legacy_ignore:
            # Files on the legacy list are permitted to have errors on the legacy ignore list.
            # However, we want to report on their existence to track progress towards eliminating these exceptions.
            display.info('PEP 8: %s (legacy)' % result, verbosity=3)
            key = '%s %s' % (result.code, re.sub('[0-9]+', 'NNN', result.message))
            if key not in summary:
                summary[key] = 0
            summary[key] += 1
        else:
            # Files not on the legacy list and errors not on the legacy ignore list are PEP 8 policy errors.
            errors.append(result)
    if summary:
        lines = []
        count = 0
        for key in sorted(summary):
            count += summary[key]
            lines.append('PEP 8: %5d %s' % (summary[key], key))
        display.info('PEP 8: There were %d different legacy issues found (%d total):' % (len(summary), count), verbosity=1)
        display.info('PEP 8: Count Code Message', verbosity=1)
        for line in lines:
            display.info(line, verbosity=1)
    if errors:
        return SanityFailure(test, messages=errors)
    return SanitySuccess(test)
def command_sanity_pylint(args, targets):
    """Run the ``pylint`` linter over all tracked ``.py`` files.

    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: SanityResult
    """
    test = 'pylint'
    # Files listed in the skip file are exempt; message symbols listed in
    # disable.txt are suppressed for every file.
    with open(PYLINT_SKIP_PATH, 'r') as skip_fd:
        skip_paths = skip_fd.read().splitlines()
    with open('test/sanity/pylint/disable.txt', 'r') as disable_fd:
        disable = set(disable_fd.read().splitlines())
    skip_paths_set = set(skip_paths)
    paths = sorted(i.path for i in targets.include if os.path.splitext(i.path)[1] == '.py' and i.path not in skip_paths_set)
    if not paths:
        return SanitySkipped(test)
    cmd = [
        'pylint',
        '--jobs', '0',
        '--reports', 'n',
        '--max-line-length', '160',
        '--rcfile', '/dev/null',
        '--output-format', 'json',
        '--disable', ','.join(sorted(disable)),
    ] + paths
    env = ansible_environment(args)
    try:
        stdout, stderr = run_command(args, cmd, env=env, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    # Exit codes below 32 indicate lint findings; >= 32 (or any stderr
    # output) indicates pylint itself failed.
    if stderr or status >= 32:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    if args.explain:
        return SanitySkipped(test)
    # Empty stdout (no findings) is not valid JSON, so guard the parse.
    if stdout:
        messages = json.loads(stdout)
    else:
        messages = []
    errors = [SanityMessage(
        message=m['message'],
        path=m['path'],
        line=int(m['line']),
        column=int(m['column']),
        level=m['type'],
        code=m['symbol'],
    ) for m in messages]
    # Flag skip-list entries for files that no longer exist in the repo.
    line = 0
    for path in skip_paths:
        line += 1
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append(SanityMessage(
                code='A101',
                message='Remove "%s" since it does not exist' % path,
                path=PYLINT_SKIP_PATH,
                line=line,
                column=1,
                confidence=calculate_best_confidence(((PYLINT_SKIP_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
            ))
    if errors:
        return SanityFailure(test, messages=errors)
    return SanitySuccess(test)
def command_sanity_yamllint(args, targets):
    """Run the ``yamllint`` linter over all tracked YAML files.

    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: SanityResult
    """
    test = 'yamllint'
    yaml_extensions = ('.yml', '.yaml')
    paths = sorted(target.path for target in targets.include
                   if os.path.splitext(target.path)[1] in yaml_extensions)
    if not paths:
        return SanitySkipped(test)
    cmd = ['yamllint', '--format', 'parsable'] + paths
    try:
        stdout, stderr = run_command(args, cmd, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout, stderr, status = ex.stdout, ex.stderr, ex.status
    if stderr:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    if args.explain:
        return SanitySkipped(test)
    # Parse the "path:line:col: [level] message" parsable output format.
    pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): \[(?P<level>warning|error)\] (?P<message>.*)$'
    messages = []
    for output_line in stdout.splitlines():
        parsed = re.search(pattern, output_line).groupdict()
        messages.append(SanityMessage(
            message=parsed['message'],
            path=parsed['path'],
            line=int(parsed['line']),
            column=int(parsed['column']),
            level=parsed['level'],
        ))
    if messages:
        return SanityFailure(test, messages=messages)
    return SanitySuccess(test)
def command_sanity_rstcheck(args, targets):
    """Run ``rstcheck`` over all tracked ``.rst`` files.

    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: SanityResult
    """
    test = 'rstcheck'
    with open('test/sanity/rstcheck/ignore-substitutions.txt', 'r') as ignore_fd:
        ignore_substitutions = sorted(set(ignore_fd.read().splitlines()))
    paths = sorted(i.path for i in targets.include if os.path.splitext(i.path)[1] in ('.rst',))
    if not paths:
        return SanitySkipped(test)
    cmd = [
        'rstcheck',
        '--report', 'warning',
        '--ignore-substitutions', ','.join(ignore_substitutions),
    ] + paths
    try:
        stdout, stderr = run_command(args, cmd, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    # Unlike the other checkers, findings are parsed from stderr below;
    # any stdout output is unexpected and treated as a tool failure.
    if stdout:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
    if args.explain:
        return SanitySkipped(test)
    pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+): \((?P<level>INFO|WARNING|ERROR|SEVERE)/[0-4]\) (?P<message>.*)$'
    results = [parse_to_dict(pattern, line) for line in stderr.splitlines()]
    results = [SanityMessage(
        message=r['message'],
        path=r['path'],
        line=int(r['line']),
        column=0,  # the output format carries no column information
        level=r['level'],
    ) for r in results]
    if results:
        return SanityFailure(test, messages=results)
    return SanitySuccess(test)
def command_sanity_ansible_doc(args, targets, python_version):
    """Run ``ansible-doc`` against changed modules to verify their docs render.

    :type args: SanityConfig
    :type targets: SanityTargets
    :type python_version: str
    :rtype: SanityResult
    """
    test = 'ansible-doc'
    with open('test/sanity/ansible-doc/skip.txt', 'r') as skip_fd:
        skip_modules = set(skip_fd.read().splitlines())
    # Modules to check: included minus excluded minus the skip list.
    modules = sorted(set(m for i in targets.include_external for m in i.modules) -
                     set(m for i in targets.exclude_external for m in i.modules) -
                     skip_modules)
    if not modules:
        return SanitySkipped(test, python_version=python_version)
    env = ansible_environment(args, color=False)
    cmd = ['ansible-doc'] + modules
    try:
        stdout, stderr = intercept_command(args, cmd, target_name='ansible-doc', env=env, capture=True, python_version=python_version)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status
    if status:
        summary = str(SubprocessError(cmd=cmd, status=status, stderr=stderr))
        return SanityFailure(test, summary=summary, python_version=python_version)
    if stdout:
        display.info(stdout.strip(), verbosity=3)
    # Even with a zero exit status, any stderr output is treated as a failure.
    if stderr:
        summary = 'Output on stderr from ansible-doc is considered an error.\n\n%s' % SubprocessError(cmd, stderr=stderr)
        return SanityFailure(test, summary=summary, python_version=python_version)
    return SanitySuccess(test, python_version=python_version)
def collect_code_smell_tests():
    """Discover executable code-smell scripts and wrap each as a sanity test.

    :rtype: tuple(SanityFunc)
    """
    with open('test/sanity/code-smell/skip.txt', 'r') as skip_fd:
        skip_tests = skip_fd.read().splitlines()
    candidates = glob.glob('test/sanity/code-smell/*')
    scripts = sorted(
        candidate for candidate in candidates
        if os.access(candidate, os.X_OK)
        and os.path.isfile(candidate)
        and os.path.basename(candidate) not in skip_tests
    )
    return tuple(
        SanityFunc(os.path.splitext(os.path.basename(script))[0],
                   command_sanity_code_smell, script=script, intercept=False)
        for script in scripts
    )
def sanity_get_tests():
    """Return the tuple of available sanity tests (extended by sanity_init()).

    :rtype: tuple(SanityFunc)
    """
    return SANITY_TESTS
class SanitySuccess(TestSuccess):
    """Sanity test success."""
    def __init__(self, test, python_version=None):
        """
        :type test: str
        :type python_version: str
        """
        # COMMAND is the module-level command name shared by all sanity results.
        super(SanitySuccess, self).__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
    """Sanity test skipped."""
    def __init__(self, test, python_version=None):
        """
        :type test: str
        :type python_version: str
        """
        # COMMAND is the module-level command name shared by all sanity results.
        super(SanitySkipped, self).__init__(COMMAND, test, python_version)
class SanityFailure(TestFailure):
    """Sanity test failure."""
    def __init__(self, test, python_version=None, messages=None, summary=None):
        """
        :type test: str
        :type python_version: str
        :type messages: list[SanityMessage]
        :type summary: str
        """
        super(SanityFailure, self).__init__(COMMAND, test, python_version, messages, summary)
class SanityMessage(TestMessage):
    """Single sanity test message for one file."""
    # No additional behavior; exists to distinguish sanity messages by type.
    pass
class SanityTargets(object):
    """Sanity test target information."""
    def __init__(self, include, exclude, require):
        """
        :type include: list[str]
        :type exclude: list[str]
        :type require: list[str]
        """
        # True when no explicit includes were given, i.e. all targets apply.
        self.all = not include
        self.targets = tuple(sorted(walk_sanity_targets()))
        # Targets matching the include/exclude/require filters; "external"
        # presumably refers to targets outside the core tree — confirm against
        # walk_external_targets.
        self.include = walk_internal_targets(self.targets, include, exclude, require)
        self.include_external, self.exclude_external = walk_external_targets(self.targets, include, exclude, require)
class SanityTest(object):
    """Sanity test base class."""
    def __init__(self, name):
        """
        :type name: str
        """
        self.name = name
class SanityFunc(SanityTest):
    """Sanity test function information."""
    def __init__(self, name, func, intercept=True, script=None):
        """
        :type name: str
        :type func: (SanityConfig, SanityTargets) -> SanityResult
        :type intercept: bool
        :type script: str | None
        """
        super(SanityFunc, self).__init__(name)
        self.func = func
        self.intercept = intercept
        # Path to an external script, for code-smell tests; None otherwise.
        self.script = script
# Static set of built-in sanity tests; sanity_init() extends this tuple with
# code-smell scripts discovered at runtime.
SANITY_TESTS = (
    SanityFunc('shellcheck', command_sanity_shellcheck, intercept=False),
    SanityFunc('pep8', command_sanity_pep8, intercept=False),
    SanityFunc('pylint', command_sanity_pylint, intercept=False),
    SanityFunc('yamllint', command_sanity_yamllint, intercept=False),
    SanityFunc('rstcheck', command_sanity_rstcheck, intercept=False),
    SanityFunc('validate-modules', command_sanity_validate_modules, intercept=False),
    SanityFunc('ansible-doc', command_sanity_ansible_doc),
)
def sanity_init():
    """Initialize full sanity test list (includes code-smell scripts determined at runtime)."""
    global SANITY_TESTS  # pylint: disable=locally-disabled, global-statement
    # Merge the static tests with discovered code-smell tests, sorted by name.
    SANITY_TESTS = tuple(sorted(SANITY_TESTS + collect_code_smell_tests(), key=lambda k: k.name))
|
fin/fragdenstaat.at | refs/heads/master | fragdenstaat_at/fds_cms/management/commands/__init__.py | 12133432 | |
mvaled/sentry | refs/heads/master | src/sentry/utils/math.py | 3 | from __future__ import absolute_import, division
import math
def mean(values):
    """Return the arithmetic mean of *values* (true division)."""
    total = 0
    for value in values:
        total += value
    return total / len(values)
def stddev(values, mean_=None):
    """Return the sample standard deviation of *values*.

    If *mean_* is not provided it is computed from *values*.
    """
    if mean_ is None:
        mean_ = mean(values)
    squared_error = sum((value - mean_) ** 2 for value in values)
    return math.sqrt(squared_error / float(len(values) - 1))
def median(values):
    """Return the median of *values* (mean of the two middle values for even n)."""
    ordered = sorted(values)
    n = len(ordered)
    mid = n // 2
    if n % 2:
        return ordered[mid]
    return (ordered[mid - 1] + ordered[mid]) / 2
def mad(values, K=1.4826):
    """Return the median absolute deviation of *values*, scaled by *K*.

    http://en.wikipedia.org/wiki/Median_absolute_deviation
    """
    center = median(values)
    deviations = [abs(value - center) for value in values]
    return K * median(deviations)
|
ticosax/django | refs/heads/master | tests/gis_tests/distapp/__init__.py | 12133432 | |
whn09/tensorflow | refs/heads/master | tensorflow/docs_src/__init__.py | 12133432 | |
appendjeff/pianobar-client | refs/heads/master | pianobarclient/__init__.py | 12133432 | |
redhat-openstack/neutron | refs/heads/f22-patches | neutron/plugins/oneconvergence/__init__.py | 12133432 | |
CenterForOpenScience/SHARE | refs/heads/develop | db/backends/__init__.py | 12133432 | |
reddymeghraj/showroom | refs/heads/master | erpnext/crm/doctype/lead/__init__.py | 12133432 | |
schwartzmx/ansible-modules-extras | refs/heads/devel | network/f5/bigip_pool.py | 63 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Matt Hite <mhite@hotmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_pool
short_description: "Manages F5 BIG-IP LTM pools"
description:
- "Manages F5 BIG-IP LTM pools via iControl SOAP API"
version_added: "1.2"
author: Matt Hite
notes:
- "Requires BIG-IP software version >= 11"
- "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)"
- "Best run as a local_action in your playbook"
requirements:
- bigsuds
options:
server:
description:
- BIG-IP host
required: true
default: null
choices: []
aliases: []
user:
description:
- BIG-IP username
required: true
default: null
choices: []
aliases: []
password:
description:
- BIG-IP password
required: true
default: null
choices: []
aliases: []
state:
description:
- Pool/pool member state
required: false
default: present
choices: ['present', 'absent']
aliases: []
name:
description:
- Pool name
required: true
default: null
choices: []
aliases: ['pool']
partition:
description:
- Partition of pool/pool member
required: false
default: 'Common'
choices: []
aliases: []
lb_method:
description:
- Load balancing method
version_added: "1.3"
required: False
default: 'round_robin'
choices: ['round_robin', 'ratio_member', 'least_connection_member',
'observed_member', 'predictive_member', 'ratio_node_address',
'least_connection_node_address', 'fastest_node_address',
'observed_node_address', 'predictive_node_address',
'dynamic_ratio', 'fastest_app_response', 'least_sessions',
'dynamic_ratio_member', 'l3_addr', 'unknown',
'weighted_least_connection_member',
'weighted_least_connection_node_address',
'ratio_session', 'ratio_least_connection_member',
'ratio_least_connection_node_address']
aliases: []
monitor_type:
description:
- Monitor rule type when monitors > 1
version_added: "1.3"
required: False
default: null
choices: ['and_list', 'm_of_n']
aliases: []
quorum:
description:
- Monitor quorum value when monitor_type is m_of_n
version_added: "1.3"
required: False
default: null
choices: []
aliases: []
monitors:
description:
- Monitor template name list. Always use the full path to the monitor.
version_added: "1.3"
required: False
default: null
choices: []
aliases: []
slow_ramp_time:
description:
- Sets the ramp-up time (in seconds) to gradually ramp up the load on newly added or freshly detected up pool members
version_added: "1.3"
required: False
default: null
choices: []
aliases: []
service_down_action:
description:
- Sets the action to take when node goes down in pool
version_added: "1.3"
required: False
default: null
choices: ['none', 'reset', 'drop', 'reselect']
aliases: []
host:
description:
- "Pool member IP"
required: False
default: null
choices: []
aliases: ['address']
port:
description:
- "Pool member port"
required: False
default: null
choices: []
aliases: []
'''
EXAMPLES = '''
## playbook task examples:
---
# file bigip-test.yml
# ...
- hosts: localhost
tasks:
- name: Create pool
local_action: >
bigip_pool
server=lb.mydomain.com
user=admin
password=mysecret
state=present
name=matthite-pool
partition=matthite
lb_method=least_connection_member
slow_ramp_time=120
- name: Modify load balancer method
local_action: >
bigip_pool
server=lb.mydomain.com
user=admin
password=mysecret
state=present
name=matthite-pool
partition=matthite
lb_method=round_robin
- hosts: bigip-test
tasks:
- name: Add pool member
local_action: >
bigip_pool
server=lb.mydomain.com
user=admin
password=mysecret
state=present
name=matthite-pool
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
port=80
- name: Remove pool member from pool
local_action: >
bigip_pool
server=lb.mydomain.com
user=admin
password=mysecret
state=absent
name=matthite-pool
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
port=80
- hosts: localhost
tasks:
- name: Delete pool
local_action: >
bigip_pool
server=lb.mydomain.com
user=admin
password=mysecret
state=absent
name=matthite-pool
partition=matthite
'''
# Import guard: bigsuds availability is recorded rather than failing at import
# time, so main() can report a friendly error via module.fail_json().
try:
    import bigsuds
except ImportError:
    bigsuds_found = False
else:
    bigsuds_found = True
# ===========================================
# bigip_pool module specific support methods.
#
def bigip_api(bigip, user, password):
    """Return a bigsuds BIGIP API connection for the given host and credentials."""
    api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
    return api
def pool_exists(api, pool):
    """Return True if the named pool exists on the BIG-IP device."""
    # hack to determine if pool exists
    result = False
    try:
        api.LocalLB.Pool.get_object_status(pool_names=[pool])
        result = True
    except bigsuds.OperationFailed, e:
        if "was not found" in str(e):
            result = False
        else:
            # genuine exception
            raise
    return result
def create_pool(api, pool, lb_method):
    """Create an empty pool with the given LB method (short name, e.g. 'round_robin')."""
    # create requires lb_method but we don't want to default
    # to a value on subsequent runs
    if not lb_method:
        lb_method = 'round_robin'
    lb_method = "LB_METHOD_%s" % lb_method.strip().upper()
    api.LocalLB.Pool.create_v2(pool_names=[pool], lb_methods=[lb_method],
                               members=[[]])
def remove_pool(api, pool):
    """Delete the named pool from the BIG-IP device."""
    api.LocalLB.Pool.delete_pool(pool_names=[pool])
def get_lb_method(api, pool):
    """Return the pool's load-balancing method as a lowercase short name
    (e.g. 'round_robin'), with the iControl 'LB_METHOD_' prefix removed."""
    raw_method = api.LocalLB.Pool.get_lb_method(pool_names=[pool])[0]
    return raw_method.strip().replace('LB_METHOD_', '').lower()
def set_lb_method(api, pool, lb_method):
    """Set the pool's LB method from a short name (e.g. 'round_robin')."""
    lb_method = "LB_METHOD_%s" % lb_method.strip().upper()
    api.LocalLB.Pool.set_lb_method(pool_names=[pool], lb_methods=[lb_method])
def get_monitors(api, pool):
    """Return the pool's monitor rule as (monitor_type, quorum, monitor_templates)."""
    result = api.LocalLB.Pool.get_monitor_association(pool_names=[pool])[0]['monitor_rule']
    # Normalize e.g. 'MONITOR_RULE_TYPE_AND_LIST' to 'and_list'.
    monitor_type = result['type'].split("MONITOR_RULE_TYPE_")[-1].lower()
    quorum = result['quorum']
    monitor_templates = result['monitor_templates']
    return (monitor_type, quorum, monitor_templates)
def set_monitors(api, pool, monitor_type, quorum, monitor_templates):
    """Associate a monitor rule (type short name, quorum, templates) with the pool."""
    monitor_type = "MONITOR_RULE_TYPE_%s" % monitor_type.strip().upper()
    monitor_rule = {'type': monitor_type, 'quorum': quorum, 'monitor_templates': monitor_templates}
    monitor_association = {'pool_name': pool, 'monitor_rule': monitor_rule}
    api.LocalLB.Pool.set_monitor_association(monitor_associations=[monitor_association])
def get_slow_ramp_time(api, pool):
    """Return the pool's slow ramp time in seconds."""
    result = api.LocalLB.Pool.get_slow_ramp_time(pool_names=[pool])[0]
    return result
def set_slow_ramp_time(api, pool, seconds):
    """Set the pool's slow ramp time in seconds."""
    api.LocalLB.Pool.set_slow_ramp_time(pool_names=[pool], values=[seconds])
def get_action_on_service_down(api, pool):
    """Return the pool's service-down action as a lowercase short name,
    with the iControl 'SERVICE_DOWN_ACTION_' prefix removed."""
    raw_action = api.LocalLB.Pool.get_action_on_service_down(pool_names=[pool])[0]
    return raw_action.split("SERVICE_DOWN_ACTION_")[-1].lower()
def set_action_on_service_down(api, pool, action):
    """Set the pool's service-down action from a short name (e.g. 'reset')."""
    action = "SERVICE_DOWN_ACTION_%s" % action.strip().upper()
    api.LocalLB.Pool.set_action_on_service_down(pool_names=[pool], actions=[action])
def member_exists(api, pool, address, port):
    """Return True if the given address:port is a member of the pool."""
    # hack to determine if member exists
    result = False
    try:
        members = [{'address': address, 'port': port}]
        api.LocalLB.Pool.get_member_object_status(pool_names=[pool],
                                                  members=[members])
        result = True
    except bigsuds.OperationFailed, e:
        if "was not found" in str(e):
            result = False
        else:
            # genuine exception
            raise
    return result
def delete_node_address(api, address):
    """Delete the node address; return False if it is still referenced by a pool."""
    result = False
    try:
        api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address])
        result = True
    except bigsuds.OperationFailed, e:
        if "is referenced by a member of pool" in str(e):
            result = False
        else:
            # genuine exception
            raise
    return result
def remove_pool_member(api, pool, address, port):
    """Remove the address:port member from the pool."""
    members = [{'address': address, 'port': port}]
    api.LocalLB.Pool.remove_member_v2(pool_names=[pool], members=[members])
def add_pool_member(api, pool, address, port):
    """Add the address:port member to the pool."""
    members = [{'address': address, 'port': port}]
    api.LocalLB.Pool.add_member_v2(pool_names=[pool], members=[members])
def main():
lb_method_choices = ['round_robin', 'ratio_member',
'least_connection_member', 'observed_member',
'predictive_member', 'ratio_node_address',
'least_connection_node_address',
'fastest_node_address', 'observed_node_address',
'predictive_node_address', 'dynamic_ratio',
'fastest_app_response', 'least_sessions',
'dynamic_ratio_member', 'l3_addr', 'unknown',
'weighted_least_connection_member',
'weighted_least_connection_node_address',
'ratio_session', 'ratio_least_connection_member',
'ratio_least_connection_node_address']
monitor_type_choices = ['and_list', 'm_of_n']
service_down_choices = ['none', 'reset', 'drop', 'reselect']
module = AnsibleModule(
argument_spec = dict(
server = dict(type='str', required=True),
user = dict(type='str', required=True),
password = dict(type='str', required=True),
state = dict(type='str', default='present', choices=['present', 'absent']),
name = dict(type='str', required=True, aliases=['pool']),
partition = dict(type='str', default='Common'),
lb_method = dict(type='str', choices=lb_method_choices),
monitor_type = dict(type='str', choices=monitor_type_choices),
quorum = dict(type='int'),
monitors = dict(type='list'),
slow_ramp_time = dict(type='int'),
service_down_action = dict(type='str', choices=service_down_choices),
host = dict(type='str', aliases=['address']),
port = dict(type='int')
),
supports_check_mode=True
)
if not bigsuds_found:
module.fail_json(msg="the python bigsuds module is required")
server = module.params['server']
user = module.params['user']
password = module.params['password']
state = module.params['state']
name = module.params['name']
partition = module.params['partition']
pool = "/%s/%s" % (partition, name)
lb_method = module.params['lb_method']
if lb_method:
lb_method = lb_method.lower()
monitor_type = module.params['monitor_type']
if monitor_type:
monitor_type = monitor_type.lower()
quorum = module.params['quorum']
monitors = module.params['monitors']
if monitors:
monitors = []
for monitor in module.params['monitors']:
if "/" not in monitor:
monitors.append("/%s/%s" % (partition, monitor))
else:
monitors.append(monitor)
slow_ramp_time = module.params['slow_ramp_time']
service_down_action = module.params['service_down_action']
if service_down_action:
service_down_action = service_down_action.lower()
host = module.params['host']
address = "/%s/%s" % (partition, host)
port = module.params['port']
# sanity check user supplied values
if (host and not port) or (port and not host):
module.fail_json(msg="both host and port must be supplied")
if 1 > port > 65535:
module.fail_json(msg="valid ports must be in range 1 - 65535")
if monitors:
if len(monitors) == 1:
# set default required values for single monitor
quorum = 0
monitor_type = 'single'
elif len(monitors) > 1:
if not monitor_type:
module.fail_json(msg="monitor_type required for monitors > 1")
if monitor_type == 'm_of_n' and not quorum:
module.fail_json(msg="quorum value required for monitor_type m_of_n")
if monitor_type != 'm_of_n':
quorum = 0
elif monitor_type:
# no monitors specified but monitor_type exists
module.fail_json(msg="monitor_type require monitors parameter")
elif quorum is not None:
# no monitors specified but quorum exists
module.fail_json(msg="quorum requires monitors parameter")
try:
api = bigip_api(server, user, password)
result = {'changed': False} # default
if state == 'absent':
if host and port and pool:
# member removal takes precedent
if pool_exists(api, pool) and member_exists(api, pool, address, port):
if not module.check_mode:
remove_pool_member(api, pool, address, port)
deleted = delete_node_address(api, address)
result = {'changed': True, 'deleted': deleted}
else:
result = {'changed': True}
elif pool_exists(api, pool):
# no host/port supplied, must be pool removal
if not module.check_mode:
# hack to handle concurrent runs of module
# pool might be gone before we actually remove it
try:
remove_pool(api, pool)
result = {'changed': True}
except bigsuds.OperationFailed, e:
if "was not found" in str(e):
result = {'changed': False}
else:
# genuine exception
raise
else:
# check-mode return value
result = {'changed': True}
elif state == 'present':
update = False
if not pool_exists(api, pool):
# pool does not exist -- need to create it
if not module.check_mode:
# a bit of a hack to handle concurrent runs of this module.
# even though we've checked the pool doesn't exist,
# it may exist by the time we run create_pool().
# this catches the exception and does something smart
# about it!
try:
create_pool(api, pool, lb_method)
result = {'changed': True}
except bigsuds.OperationFailed, e:
if "already exists" in str(e):
update = True
else:
# genuine exception
raise
else:
if monitors:
set_monitors(api, pool, monitor_type, quorum, monitors)
if slow_ramp_time:
set_slow_ramp_time(api, pool, slow_ramp_time)
if service_down_action:
set_action_on_service_down(api, pool, service_down_action)
if host and port:
add_pool_member(api, pool, address, port)
else:
# check-mode return value
result = {'changed': True}
else:
# pool exists -- potentially modify attributes
update = True
if update:
if lb_method and lb_method != get_lb_method(api, pool):
if not module.check_mode:
set_lb_method(api, pool, lb_method)
result = {'changed': True}
if monitors:
t_monitor_type, t_quorum, t_monitor_templates = get_monitors(api, pool)
if (t_monitor_type != monitor_type) or (t_quorum != quorum) or (set(t_monitor_templates) != set(monitors)):
if not module.check_mode:
set_monitors(api, pool, monitor_type, quorum, monitors)
result = {'changed': True}
if slow_ramp_time and slow_ramp_time != get_slow_ramp_time(api, pool):
if not module.check_mode:
set_slow_ramp_time(api, pool, slow_ramp_time)
result = {'changed': True}
if service_down_action and service_down_action != get_action_on_service_down(api, pool):
if not module.check_mode:
set_action_on_service_down(api, pool, service_down_action)
result = {'changed': True}
if (host and port) and not member_exists(api, pool, address, port):
if not module.check_mode:
add_pool_member(api, pool, address, port)
result = {'changed': True}
except Exception, e:
module.fail_json(msg="received exception: %s" % e)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
main()
|
justathoughtor2/atomicApe | refs/heads/encaged | cygwin/lib/python2.7/site-packages/pygments/lexers/rust.py | 22 | # -*- coding: utf-8 -*-
"""
pygments.lexers.rust
~~~~~~~~~~~~~~~~~~~~
Lexers for the Rust language.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['RustLexer']
class RustLexer(RegexLexer):
    """
    Lexer for the Rust programming language (version 1.0).
    .. versionadded:: 1.6
    """
    name = 'Rust'
    filenames = ['*.rs', '*.rs.in']
    aliases = ['rust']
    mimetypes = ['text/rust']
    # Lexer state machine; see pygments.lexer.RegexLexer for the semantics of
    # the (pattern, token[, next-state]) tuples below.
    tokens = {
        'root': [
            # rust allows a file to start with a shebang, but if the first line
            # starts with #![ then it’s not a shebang but a crate attribute.
            (r'#![^[\r\n].*$', Comment.Preproc),
            default('base'),
        ],
        'base': [
            # Whitespace and Comments
            (r'\n', Whitespace),
            (r'\s+', Whitespace),
            (r'//!.*?\n', String.Doc),
            (r'///(\n|[^/].*?\n)', String.Doc),
            (r'//(.*?)\n', Comment.Single),
            (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
            (r'/\*!', String.Doc, 'doccomment'),
            (r'/\*', Comment.Multiline, 'comment'),
            # Macro parameters
            (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
            # Keywords
            (words((
                'as', 'box', 'crate', 'do', 'else', 'enum', 'extern',  # break and continue are in labels
                'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv',
                'proc', 'pub', 'ref', 'return', 'static', 'struct',
                'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'),
             Keyword),
            (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof',
                    'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'),
             Keyword.Reserved),
            (r'(mod|use)\b', Keyword.Namespace),
            (r'(true|false)\b', Keyword.Constant),
            (r'let\b', Keyword.Declaration),
            (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'usize',
                    'isize', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
             Keyword.Type),
            (r'self\b', Name.Builtin.Pseudo),
            # Prelude (taken from Rust’s src/libstd/prelude.rs)
            (words((
                # Reexported core operators
                'Copy', 'Send', 'Sized', 'Sync',
                'Drop', 'Fn', 'FnMut', 'FnOnce',
                # Reexported functions
                'drop',
                # Reexported types and traits
                'Box',
                'ToOwned',
                'Clone',
                'PartialEq', 'PartialOrd', 'Eq', 'Ord',
                'AsRef', 'AsMut', 'Into', 'From',
                'Default',
                'Iterator', 'Extend', 'IntoIterator',
                'DoubleEndedIterator', 'ExactSizeIterator',
                'Option',
                'Some', 'None',
                'Result',
                'Ok', 'Err',
                'SliceConcatExt',
                'String', 'ToString',
                'Vec',
            ), suffix=r'\b'),
             Name.Builtin),
            # Labels
            (r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?', bygroups(Keyword, Text.Whitespace, Name.Label)),
            # Character Literal
            (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
             String.Char),
            (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
             String.Char),
            # Binary Literal
            (r'0b[01_]+', Number.Bin, 'number_lit'),
            # Octal Literal
            (r'0o[0-7_]+', Number.Oct, 'number_lit'),
            # Hexadecimal Literal
            (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
            # Decimal Literal
            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
             r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
            (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
            # String Literal
            (r'b"', String, 'bytestring'),
            (r'"', String, 'string'),
            (r'b?r(#*)".*?"\1', String),
            # Lifetime
            (r"""'static""", Name.Builtin),
            (r"""'[a-zA-Z_]\w*""", Name.Attribute),
            # Operators and Punctuation
            (r'[{}()\[\],.;]', Punctuation),
            (r'[+\-*/%&|<>^!~@=:?]', Operator),
            # Identifier
            (r'[a-zA-Z_]\w*', Name),
            # Attributes
            (r'#!?\[', Comment.Preproc, 'attribute['),
            # Macros
            (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)',
             bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
                      Whitespace, Punctuation), 'macro{'),
            (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()',
             bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
                      Punctuation), 'macro('),
        ],
        'comment': [
            (r'[^*/]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ],
        'doccomment': [
            (r'[^*/]+', String.Doc),
            (r'/\*', String.Doc, '#push'),
            (r'\*/', String.Doc, '#pop'),
            (r'[*/]', String.Doc),
        ],
        # Optional numeric type suffix directly after a literal (e.g. 1u8, 2f64).
        'number_lit': [
            (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
            (r'f(32|64)', Keyword, '#pop'),
            default('#pop'),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
             r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
            (r'[^\\"]+', String),
            (r'\\', String),
        ],
        'bytestring': [
            (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
            include('string'),
        ],
        'macro{': [
            (r'\{', Operator, '#push'),
            (r'\}', Operator, '#pop'),
        ],
        'macro(': [
            (r'\(', Operator, '#push'),
            (r'\)', Operator, '#pop'),
        ],
        'attribute_common': [
            (r'"', String, 'string'),
            (r'\[', Comment.Preproc, 'attribute['),
            (r'\(', Comment.Preproc, 'attribute('),
        ],
        'attribute[': [
            include('attribute_common'),
            (r'\];?', Comment.Preproc, '#pop'),
            (r'[^"\]]+', Comment.Preproc),
        ],
        'attribute(': [
            include('attribute_common'),
            (r'\);?', Comment.Preproc, '#pop'),
            (r'[^")]+', Comment.Preproc),
        ],
    }
|
ondoheer/GOT-english | refs/heads/master | NPCgenerator.py | 1 | # -*- coding: utf-8 -*-
from random import randint
# Shared event pool.  "personal", "social" and "war" are lists (mutable)
# because gender-specific events from eventsMale / eventsFemale are merged
# into them at generation time; the tuple-valued categories are never grown.
eventsGeneral = {"personal": [
    "exiled from his homeland",
    "roamed lost for a while",
    "was kidnapped and escaped",
    "was ransomed",
    "was kidnapped and later joined the kidnappers",
    "is loved by someone of status or importance",
    "is loved by someone in secret",
    "was loved by someone of status or importance",
    "has a secret that could cost his/her life",
    "has a secret that could bring him/her lots of trouble",
    "traveled for a while",
    "traveled the whole region",
    "travels constantly",
],
    "family": (
        "there has been an important death in the family",
        # BUG FIX: the comma used to sit *inside* the string below, so
        # implicit string concatenation merged it with the next entry and
        # the pool lost "inherited unexpectedly" as a separate event.
        "there is/has been an important marriage in the family",
        "inherited unexpectedly",
        "the family status has increased recently",
        "the family status has decreased recently",
        "is a bastard",
        "is an acknowledged bastard",
        "an important family schism exists",
        "a rival family exists"
    ),
    "social": [
        "took part in a revolt",
        "is infamous",
        "is famous",
        "is the leader of his/her group",
        "has a renowned title",
    ],
    "war": [
        "killed for the first time during the war and this marked him/her",
        "suffered a light wound during war",
        "was renowned for a specific act during war",
        "organized an important group during war",
        "did business during war",
        "helped to solve a conflict during war",
        "was a looter during war",
        "is ashamed of his/her actions during war",
        "was raped/abused during war"
    ],
    "money": (
        "lost some",
        "lost a lot",
        "gained some",
        "gained a lot",
        "is a trader",
        "has a profitable business",
        "inherited a fortune or business"
    ),
    "physical": (
        "suffered a severe wound that incapacitates him/her",
        "suffered a very notorious wound",
        "suffered a light wound he/she can hide",
        "was maimed",
        "has a notorious birthmark",
        "was born with a disability"
    )
}
# Events only available to male NPCs; merged into eventsGeneral's
# list-valued categories ("personal", "social", "war") by
# generateNPC.unirDicts().
eventsMale = {
    "personal": (
        "has received proper education (maester or similar)",
        "has learned a trait"
    ),
    "social": (
        "is a knight or equivalent",
    ),
    "war": (
        "fought during war",
        "deserted during war",
        "has a brotherhood that protects each other developed during war times"
    )
}
# Events only available to female NPCs; merged the same way as eventsMale.
eventsFemale = {
    "personal": (
        "has learned a skill that brings food to the table",
        "was or is a prostitute",
        "was or is a midwife"
    ),
    "social": (
        "is considered a witch",
    ),
    "war": (
        "got pregnant, doesn't know the father",
        "got pregnant from an enemy soldier",
        "got pregnant from a friendly soldier"
    )
}
# Faith options and devotion levels; generateNPC.religion() rolls one entry
# of each category for every NPC.  Typos in the user-visible strings fixed
# ("unknowkn", "fervient beleiver", ...) and the 'he/her' placeholder
# corrected to 'he/she' so checkGenderText() can resolve it.
religion = {
    "believe": (
        "The Seven: Mother",
        "The Seven: Father",
        "The Seven: Smith",
        "The Seven: Crone",
        "The Seven: Maiden",
        "The Seven: Warrior",
        "The Seven: Stranger",
        # BUG FIX: the old entry used a backslash line continuation inside
        # the string literal, embedding the next line's indentation in the
        # displayed text.
        "Regional God(s) (ancient gods, drowned god, dark spirits, harpy, etc)",
        "R'hllor",
        "many faces",
        "other",
        "unknown",
        "atheist"
    ),
    "level": (
        "fervent believer",
        "proselytizes",
        "was raised in his/her faith, doesn't question it",
        "his/her faith is really important",
        "it's not really something he/she cares about",
        "doesn't question the faith",
        "really doesn't believe in it",
        "more than a believer he/she is superstitious",
        "his/her faith is a burden",
        "his/her faith is more like a family tradition"
    )
}
# Family roll tables used by generateNPC.family().  Output-text typos fixed
# ("aknowledged", "existance", "learnes", "it's and open").  The literal
# 'died' in "spouse" is load-bearing: the remarriage logic compares against
# it, so it must not be reworded.
family = {
    "siblings": (
        "older",
        "younger",
        "twin",
        "identical twin",
        "bastard"
    ),
    "spouse": (
        "older than him/her",
        "younger than him/her",
        "quite older than him/her",
        "quite younger than him/her",
        "died",
        "escaped",
        "escaped with a lover",
        "vanished",
        "is unfaithful and he/she knows it",
        "is unfaithful and he/she doesn't know it"
    ),
    "lover": (
        "has a partner of the same gender",
        "has been his/her lover for years",
        "has multiple lovers",
        "is of high status and if possible gives him/her money",
        "leeches him/her for money",
        "is a known secret",
        "it's no secret",
        "it's an open relationship"
    ),
    "descendants": (
        "alive",
        "dead",
        "sick",
        "injured/crippled",
        "worthy",
        "unworthy",
        "unacknowledged bastard",
        "acknowledged bastard",
        "adopted",
        "doesn't know of his/her existence",
        "lives someplace else",
        "being trained in a skill",
        "learns from the character",
        "is part of a religious organization",
        "is part of a military organization",
        "is part of a specific organization"
    ),
    "relatives": (
        "bad relationship",
        "good relationship",
        "have just met recently",
        "long distance with no relationship at all",
        "grandiose or famous",
        "despicable",
        "feared",
        "really close and supportive"
    )
}
# Trait pools keyed by category; the per-category probabilities are applied
# in generateNPC.traits().  All values are tuples of display strings.
traits = {
    "special": (
        "likes a particular kind of game or hobby (cyvasse, cards, etc)",
        "is a collector of some kind",
        "practices a sport with passion (pit fighting, hunting, jousting, etc)",
        "is really skilled at something",
        "has a flaw he/she tries to hide",
        "has an important friend",
        "greensight",
        "warg",
        "animals respect him/her",
        "he/she always knows which path will lead to survival",
        "knows a way to read the future (birds fly, cards, etc)",
        "is a great singer",
        "is a great musician",
        "has photographic memory",
        "speaks many languages",
        "is clairvoyant",
    ),
    "psychological": (
        "obsessive",
        "nervous",
        "talks all the time",
        "quiet",
        "insecure",
        "phobic",
        "crazy",
        "aggressive",
        "paranoid",
        "amiable",
        "confident",
        "ignorant",
        "innocent",
        "honorable",
        "liar",
        "treacherous",
        "takes advantage of people",
        "distracted",
        "witty",
        "clever",
        "careful",
        "pious",
        "trustworthy",
        "incorruptible",
        "good at bargains",
        "terrible at bargains",
        "temperamental",
        "passionate",
        "addicted to (alcohol, readleaf, etc)",
        "coward",
        "forgetful",
        "superstitious",
        "made an oath to be asexual",
        "phobia to genitals"
    ),
    "physical": (
        "large",
        # BUG FIX: two missing commas in the original merged "obese"+"strong"
        # and "...etc)"+"albine" via implicit string concatenation, losing
        # entries from the pool.
        "obese",
        "strong",
        "hypnotizing voice",
        "fair",
        "midget",
        "clumsy",  # was "quimsy" -- assumed typo for clumsy; confirm intent
        "thin",
        "paralytic",
        "crippled",
        "agile",
        "quick",
        "one handed",
        "weird eyes",
        "notable scar",
        "notorious color of hair or eyes",
        "sick of something visible (greyscale, pox, etc)",
        "sick of something not visible (pneumonia, syphilis, etc)",
        "albino",
        "eunuch",
        "sterile",
        "deaf",
        "blind",
        "one eyed",
    ),
    "motivational": (
        "love",
        "money",
        "honor",
        "glory",
        "goods",
        "status",
        "religion",
        "cause",
        "family",
        "oath",
        "hedonism"
    )
}
# Shared result/template dict for one generated NPC.  generateNPC() resets
# and refills it in place on every call, and also adds dynamic keys such as
# "sibling1" / "descendant2" for generated family members.  ``False`` means
# "not generated"; ``None`` means "not set yet".
dataNPC = {
    "age": None,
    "gender": None,
    "events": {
        "personal": False,
        "family": False,
        "social": False,
        "war": False,
        "money": False,
        "physical": False
    },
    "religion": {
        "believe": False,
        "level": False
    },
    "traits": {
        "special": False,
        "psychological": False,
        "physical": False,
        "motivational": False
    },
    "family": {
        "siblings": False,
        "spouse": False,
        "lover": False,
        "descendants": False,
        "relatives": False,
        # Extra slots for remarriages; filled by generateNPC.family().
        "spouse2": False,
        "spouse3": False,
        "spouse4": False,
        "spouse5": False,
        "spouse6": False,
    }
}
class generateNPC(object):
    """Random NPC generator.

    Fills the module-level ``dataNPC`` dict in place: ``generateNPC()``
    resets it and then rolls religion, traits, events and family data.
    Family members are generated in random numbers, under dynamic keys
    such as ``sibling1`` / ``descendant2`` / ``spouse3``.
    """

    # Pristine copies of the mergeable (list-valued) event categories,
    # snapshotted lazily on the first generateNPC() call so one gender's
    # merged events do not leak into NPCs of the other gender.
    _baseEvents = None

    @staticmethod
    def resetearNPC(NPC):
        """Reset every generated field of *NPC* in place.

        BUG FIX: the original only reset the "spouse" key (never the
        dynamically added "spouse2".."spouse6") and never reset "lover",
        so those values leaked from one generated NPC into the next.
        """
        for evento in NPC["events"]:
            NPC["events"][evento] = False
        for dato in NPC["religion"]:
            NPC["religion"][dato] = False
        for rasgo in NPC["traits"]:
            NPC["traits"][rasgo] = False
        for miembro in NPC["family"]:
            NPC["family"][miembro] = False

    @staticmethod
    def unirDicts(dictToAdd, targetList):
        """(dict, dict) -> None

        Merge the gender-specific event tuples of *dictToAdd* into the
        matching list-valued categories of *targetList* (eventsGeneral).
        Entries already present are skipped, so repeated calls no longer
        grow the shared lists without bound (old bug).
        """
        for key in ("personal", "social", "war"):
            if key not in dictToAdd:
                continue
            for value in dictToAdd[key]:
                if value not in targetList[key]:
                    targetList[key].append(value)

    @classmethod
    def _restoreBaseEvents(_class):
        """Snapshot (first call) or restore the pristine eventsGeneral lists.

        Keeps previously merged gender-specific events from contaminating
        later NPCs of the other gender.
        """
        if _class._baseEvents is None:
            _class._baseEvents = {key: tuple(eventsGeneral[key])
                                  for key in ("personal", "social", "war")}
        else:
            for key, base in _class._baseEvents.items():
                eventsGeneral[key][:] = base

    @staticmethod
    def checkGenderText(strToMod, gender):
        """(str, str) -> str

        Resolve the bi-gender placeholders 'him/her', 'his/her' and
        'he/she' in *strToMod* for *gender* ('male' or 'female').  For
        females, 'eunuch' is rendered as 'is now barren'.

        BUG FIX: the original tested for 'him/her' but replaced 'his/her'
        (and vice versa), and its elif chain resolved at most one
        placeholder per string; every placeholder is now replaced.
        """
        if gender == 'female':
            result = strToMod.replace('eunuch', 'is now barren')
            result = result.replace('him/her', 'her')
            result = result.replace('his/her', 'her')
            return result.replace('he/she', 'she')
        if gender == 'male':
            result = strToMod.replace('him/her', 'him')
            result = result.replace('his/her', 'his')
            return result.replace('he/she', 'he')
        # Unknown gender: return the text untouched (the original fell off
        # the end and implicitly returned None here).
        return strToMod

    @classmethod
    def generateNPC(_class, gender, age):
        """(str, int) -> dict

        Build and return the filled ``dataNPC`` dict for an NPC of the
        given *gender* ('male' or 'female') and *age* (in years).
        """
        _class._restoreBaseEvents()
        if gender == 'female':
            _class.unirDicts(eventsFemale, eventsGeneral)
        elif gender == 'male':
            _class.unirDicts(eventsMale, eventsGeneral)
        _class.resetearNPC(dataNPC)
        _class.religion(religion, dataNPC, gender)
        _class.traits(traits, dataNPC, gender)
        _class.eventsGeneral(eventsGeneral, dataNPC, gender)
        _class.family(family, dataNPC, gender, age)
        return dataNPC

    @classmethod
    def singleTupleSelector(_class, tupleToSelect, gender):
        """(sequence, str) -> str

        Pick one random entry of *tupleToSelect* and return it with its
        gender placeholders resolved via :meth:`checkGenderText`.
        """
        elegido = tupleToSelect[randint(0, len(tupleToSelect) - 1)]
        return _class.checkGenderText(elegido, gender)

    @classmethod
    def religion(_class, religionDict, NPC, gender):
        """(dict, dict, str) -> None -- fill NPC["religion"] from the table."""
        for key in ("believe", "level"):
            if key in religionDict:
                NPC["religion"][key] = _class.singleTupleSelector(
                    religionDict[key], gender)

    # Percent chance that each trait category is rolled for an NPC.
    _traitChances = {"special": 30, "psychological": 70,
                     "physical": 30, "motivational": 80}

    @classmethod
    def traits(_class, traitsDict, NPC, gender):
        """(dict, dict, str) -> None -- roll each trait category."""
        for key, value in traitsDict.items():
            chance = _class._traitChances.get(key)
            if chance is not None and randint(1, 100) < chance:
                NPC["traits"][key] = _class.singleTupleSelector(value, gender)

    @classmethod
    def eventsGeneral(_class, eventsDict, NPC, gender):
        """(dict, dict, str) -> None -- roll each event category at 80%."""
        for key, value in eventsDict.items():
            if key in NPC["events"] and randint(1, 100) < 80:
                NPC["events"][key] = _class.singleTupleSelector(value, gender)

    @classmethod
    def family(_class, familyDict, NPC, gender, age=30):
        """(dict, dict, str, int) -> None

        Generate siblings, spouse(s), a possible lover, descendants and
        relatives.  Kept as inner helpers so each part can be tuned alone.
        """

        def siblings(familyDict, NPC, age):
            """Roll the number of siblings, then a quality for each one."""
            roll = randint(1, 100)
            if roll <= 5:
                NPC["family"]["siblings"] = randint(6, 10)
            elif roll <= 15:
                NPC["family"]["siblings"] = randint(4, 7)
            elif roll <= 40:
                NPC["family"]["siblings"] = randint(2, 5)
            elif roll <= 80:
                NPC["family"]["siblings"] = randint(1, 3)
            else:
                NPC["family"]["siblings"] = 0
            # (upper bound of a 1-100 roll, sibling description)
            calidades = ((3, 'twin'),
                         (7, 'identical twin'),
                         (20, 'bastard (older)'),
                         (35, 'bastard (younger)'),
                         (67, 'natural older'),
                         (100, 'natural younger'))
            for indice in range(NPC["family"]["siblings"]):
                tirada = randint(1, 100)
                for tope, descripcion in calidades:
                    if tirada <= tope:
                        NPC["family"]["sibling" + str(indice + 1)] = descripcion
                        break

        def spouse(familyDict, NPC, gender, age):
            """Roll marital status, possible remarriages and a lover.

            NOTE(review): a single roll (chanceSpouse) is reused for every
            remarriage check, mirroring the original behaviour -- one low
            roll chains several dead spouses at once; confirm intent.
            """
            chanceSpouse = randint(1, 100)
            chanceLover = randint(1, 100)
            if gender == 'female' and age > 12:
                remarriageCaps = (40, 10, 5)
                loverCap = 10
            elif gender == 'male' and age > 16:
                remarriageCaps = (60, 40, 20, 10, 5)
                loverCap = 25
            else:
                return
            if chanceSpouse <= 70:
                NPC["family"]["spouse"] = _class.singleTupleSelector(
                    familyDict['spouse'], gender)
                anterior = "spouse"
                for numero, cap in enumerate(remarriageCaps, start=2):
                    # BUG FIX: the original compared strings with ``is``,
                    # which only works by CPython interning accident;
                    # use ``==`` for value equality.
                    if NPC["family"][anterior] == 'died' and chanceSpouse <= cap:
                        clave = "spouse%d" % numero
                        NPC["family"][clave] = _class.singleTupleSelector(
                            familyDict['spouse'], gender)
                        anterior = clave
                    else:
                        break
                if chanceLover <= loverCap:
                    NPC["family"]["lover"] = _class.singleTupleSelector(
                        familyDict['lover'], gender)

        def descendants(familyDict, NPC, gender):
            """Roll the number of descendants, then a detail for each."""
            roll = randint(1, 100)
            if roll <= 5:
                NPC["family"]["descendants"] = randint(8, 10)
            elif roll <= 15:
                NPC["family"]["descendants"] = randint(5, 7)
            elif roll <= 55:
                NPC["family"]["descendants"] = randint(2, 5)
            elif roll <= 80:
                NPC["family"]["descendants"] = randint(1, 2)
            else:
                NPC["family"]["descendants"] = 0
            for indice in range(NPC["family"]["descendants"]):
                NPC["family"]["descendant" + str(indice + 1)] = (
                    _class.singleTupleSelector(familyDict['descendants'],
                                               gender))

        def relatives(familyDict, NPC, gender):
            """Add a note about the extended family.

            NOTE(review): ``randint(1, 4) < 10`` is always true, so a
            relatives entry is always generated; preserved as-is, but the
            intended probability should be confirmed.
            """
            if randint(1, 4) < 10:
                NPC["family"]["relatives"] = _class.singleTupleSelector(
                    familyDict['relatives'], gender)

        siblings(familyDict, NPC, age)
        spouse(familyDict, NPC, gender, age)
        descendants(familyDict, NPC, gender)
        relatives(familyDict, NPC, gender)
|
eino-makitalo/mezzanine | refs/heads/master | docs/conf.py | 6 | # -*- coding: utf-8 -*-
#
# Mezzanine documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 10 07:20:42 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import unicode_literals
from datetime import datetime
import sys
import os
import mezzanine
# Configure a minimal settings module so Sphinx can import Mezzanine when
# the docs are built standalone, outside of a real Django project.
if "DJANGO_SETTINGS_MODULE" not in os.environ:
    docs_path = os.getcwd()
    mezzanine_path_parts = (docs_path, "..")
    # Make the docs dir (docs_settings) and the repo root (the local
    # mezzanine package) importable before anything touches Django.
    sys.path.insert(0, docs_path)
    sys.path.insert(0, os.path.realpath(os.path.join(*mezzanine_path_parts)))
    os.environ["DJANGO_SETTINGS_MODULE"] = "docs_settings"
    # Django 1.7's setup is required before touching translated strings.
    import django
    try:
        django.setup()
    except AttributeError:  # < 1.7: setup() doesn't exist and isn't needed
        pass
# When a full build is run (eg from the root of the repo), we
# run all the Mezzanine utils for dynamically generated docs.
if sys.argv[-2:] == ["docs", "docs/build"]:
    from mezzanine.utils import docs
    docs.build_settings_docs(docs_path)
    docs.build_deploy_docs(docs_path)
    docs.build_changelog(docs_path)
    docs.build_modelgraph(docs_path)
    docs.build_requirements(docs_path)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mezzanine'
copyright = '2009 - %s, Stephen McDonald' % datetime.now().year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = mezzanine.__version__
# The full version, including alpha/beta/rc tags.
release = mezzanine.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# List of files to be excluded when looking for source files.
# Added settings.rst its still will be included in configuration.rst
# only that prevents error of duplicate reference labels.
exclude_patterns = ['settings.rst', 'fabfile.rst']
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'Mezzaninedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Mezzanine.tex', 'Mezzanine Documentation',
'Stephen McDonald', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
html_theme_path = ["."]
html_theme = "mezzanine_theme"
locale_dirs = ['./locale/']
|
jeffposnick/chromium-dashboard | refs/heads/master | bulkloader_helpers.py | 5 | import datetime
from google.appengine.ext import db
from google.appengine.api import users
def email_to_list():
    """Return a bulkloader converter turning 'a@x, b@y' into [db.Email, ...].

    Empty strings, None and empty lists all convert to None.
    """
    def _convert(value):
        if value == '' or value is None or value == []:
            return None
        addresses = value.split(',')
        return [db.Email(addr.strip()) for addr in addresses]
    return _convert
def finalize(input_dict, instance, bulkload_state_copy):
    """Bulkloader post-processing hook for imported entities.

    Drops a null ``owner``, backfills audit timestamps/users and ensures
    ``summary`` is never empty.  Returns the mutated ``instance`` dict.
    """
    if instance['owner'] is None:
        del instance['owner']
    # Backfill missing creation/update timestamps with "now" (UTC).
    for stamp_field in ('created', 'updated'):
        if instance[stamp_field] is None:
            instance[stamp_field] = datetime.datetime.utcnow()
    # Attribute missing audit users to a generic admin account.
    for user_field in ('created_by', 'updated_by'):
        if instance[user_field] is None:
            instance[user_field] = users.User(email='admin')
    if instance['summary'] == '' or instance['summary'] is None:
        instance['summary'] = ' '
    return instance
|
jfbelisle/triosante | refs/heads/master | node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py | 12133432 | |
joshblum/django-with-audit | refs/heads/master | django/contrib/localflavor/fi/__init__.py | 12133432 | |
pramodhkp/pes | refs/heads/master | pes/urls.py | 1 | from django.conf.urls import patterns, include, url
from college.views import login, logout, home, profile_student, dashboard_student, dashboard_teacher
from django.contrib import admin
admin.autodiscover()
from dajaxice.core import dajaxice_autodiscover, dajaxice_config
dajaxice_autodiscover()
urlpatterns = patterns('',
url(dajaxice_config.dajaxice_url, include('dajaxice.urls')),
# Examples:
url(r'^college/main/$', 'college.views.main', name='main'),
url(r'^college/home/$', 'college.views.home', name='home'),
url(r'^college/logout/$', 'college.views.logout', name='logout'),
url(r'^college/login/$', 'college.views.login', name='login'),
url(r'^college/student/$', 'college.views.dashboard_student', name='home'),
url(r'^college/student/(\d+)/$', 'college.views.profile_student', name='profile_student'),
url(r'^college/teacher/(\d+)/$', 'college.views.profile_teacher', name='profile_teacher'),
url(r'^college/teacher/evaluate/(\d+)/$', 'college.views.evaluate_project', name='evaluate_project'),
url(r'^college/projects/(\d+)/$', 'college.views.project_details', name='project_details'),
url(r'^college/teacher/$', 'college.views.dashboard_teacher', name='home'),
url(r'^college/editprofile/$', 'college.views.edit_profile', name='edit_profile'),
(r'^college/messages/', include('postman.urls')),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
ruzhytskyi/Koans | refs/heads/master | python3/runner/koan.py | 120 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import re
# Starting a classname or attribute with an underscore normally implies Private scope.
# However, we are making an exception for __ and ___.
# Public names re-exported to koan modules via ``from runner.koan import *``.
__all__ = [ "__", "___", "____", "_____", "Koan" ]
# Placeholder value the student replaces to make an assertion pass.
__ = "-=> FILL ME IN! <=-"
# Placeholder exception type the student replaces with the expected one.
class ___(Exception):
    pass
# Placeholder for true/false answers.
____ = "-=> TRUE OR FALSE? <=-"
# Placeholder for numeric answers.
_____ = 0
class Koan(unittest.TestCase):
    def assertNoRegexpMatches(self, text, expected_regex, msg=None):
        """
        Fail if ``expected_regex`` matches anywhere inside ``text``.

        Accepts either a pattern string/bytes or a precompiled pattern,
        mirroring unittest's assertRegexpMatches signature.
        """
        pattern = expected_regex
        if isinstance(pattern, (str, bytes)):
            pattern = re.compile(pattern)
        if pattern.search(text):
            base = msg or "Regexp matched"
            detail = '{0}: {1!r} found in {2!r}'.format(
                base, pattern.pattern, text)
            raise self.failureException(detail)
|
infowantstobeseen/pyglet-darwincore | refs/heads/master | tests/test.py | 3 | #!/usr/bin/env python
'''Test framework for pyglet. Reads details of components and capabilities
from a requirements document, runs the appropriate unit tests.
How to Run the Tests
--------------------
::
python tests/test.py top app graphics clock resource # these all run automatically
python tests/test.py font media text
python tests/test.py image
python tests/test.py window
Because the tests are interactive, they can take quite a while to complete. The
'window' section in particular takes a long time. It can be frustrating to get
almost through the tests and then something gets messed up, so we suggest you
run the tests in sections as listed above. If you are curious, the sections are
defined in tests/plan.txt.
Here are the different sections and how long they take.
=========== ===========
Section Time to Run
=========== ===========
top automatic
app automatic
graphics automatic
clock automatic
resource automatic
font 1 minute
media 1 minute
text 1 minute
image 5 minutes
window 10 minutes
=========== ===========
Overview
--------
First, some definitions:
Test case:
A single test, implemented by a Python module in the tests/ directory.
Tests can be interactive (requiring the user to pass or fail them) or
non-interactive (the test passes or fails itself).
Section:
A list of test cases to be run in a specified order. Sections can
also contain other sections to an arbitrary level.
Capability:
A capability is a tag that can be applied to a test-case, which specifies
a particular instance of the test. The tester can select which
capabilities are present on their system; and only test cases matching
those capabilities will be run.
There are platform capabilities "WIN", "OSX" and "X11", which are
automatically selected by default.
The "DEVELOPER" capability is used to mark test cases which test a feature
under active development.
The "GENERIC" capability signifies that the test case is equivalent under
all platforms, and is selected by default.
Other capabilities can be specified and selected as needed. For example,
we may wish to use an "NVIDIA" or "ATI" capability to specialise a
test-case for a particular video card make.
Some tests generate regression images if enabled, so you will only
need to run through the interactive procedure once. During
subsequent runs the image shown on screen will be compared with the
regression images and passed automatically if they match. There are
command line options for enabling this feature.
By default regression images are saved in tests/regression/images/
Running tests
-------------
The test procedure is interactive (this is necessary to facilitate the
many GUI-related tests, which cannot be completely automated). With no
command-line arguments, all test cases in all sections will be run::
python tests/test.py
Before each test, a description of the test will be printed, including
some information of what you should look for, and what interactivity
is provided (including how to stop the test). Press ENTER to begin
the test.
When the test is complete, assuming there were no detectable errors
(for example, failed assertions or an exception), you will be asked
to enter a [P]ass or [F]ail. You should Fail the test if the behaviour
was not as described, and enter a short reason.
Details of each test session are logged for future use.
Command-line options:
`--plan=`
Specify the test plan file (defaults to tests/plan.txt)
`--test-root=`
Specify the top-level directory to look for unit tests in (defaults
to test/)
`--capabilities=`
Specify the capabilities to select, comma separated. By default this
only includes your operating system capability (X11, WIN or OSX) and
GENERIC.
`--log-level=`
Specify the minimum log level to write (defaults to 20: info)
`--log-file=`
Specify log file to write to (defaults to "pyglet.%d.log")
`--regression-capture`
Save regression images to disk. Use this only if the tests have
already been shown to pass.
`--regression-check`
Look for a regression image on disk instead of prompting the user for
passage. If a regression image is found, it is compared with the test
case using the tolerance specified below. Recommended only for
developers.
`--regression-tolerance=`
Specify the tolerance when comparing a regression image. A value of
2, for example, means each sample component must be +/- 2 units
of the regression image. Tolerance of 0 means images must be identical,
tolerance of 256 means images will always match (if correct dimensions).
Defaults to 2.
`--regression-path=`
Specify the directory to store and look for regression images.
Defaults to tests/regression/images/
`--developer`
Selects the DEVELOPER capability.
`--no-interactive=`
Don't write descriptions or prompt for confirmation; just run each
test in succession.
After the command line options, you can specify a list of sections or test
cases to run.
Examples
--------
python tests/test.py --capabilities=GENERIC,NVIDIA,WIN window
Runs all tests in the window section with the given capabilities.
Test just the FULLSCREEN_TOGGLE test case without prompting for input (useful
for development).
python tests/image/PIL_RGBA_SAVE.py
Run a single test outside of the test harness. Handy for development; it
is equivalent to specifying --no-interactive.
Writing tests
-------------
Add the test case to the appropriate section in the test plan (plan.txt).
Create one unit test script per test case. For example, the test for
window.FULLSCREEN_TOGGLE is located at::
tests/window/FULLSCREEN_TOGGLE.py
The test file must contain:
- A module docstring describing what the test does and what the user should
look for.
- One or more subclasses of unittest.TestCase.
- No other module-level code, except perhaps an if __name__ == '__main__'
condition for running tests stand-alone.
- Optionally, the attribute "__noninteractive = True" to specify that
the test is not interactive; doesn't require user intervention.
During development, test cases should be marked with DEVELOPER. Once finished
add the WIN, OSX and X11 capabilities, or GENERIC if it's platform
independent.
Writing regression tests
------------------------
Your test case should subclass tests.regression.ImageRegressionTestCase
instead of unitttest.TestCase. At the point where the buffer (window
image) should be checked/saved, call self.capture_regression_image().
If this method returns True, you can exit straight away (regression
test passed), otherwise continue running interactively (regression image
was captured, wait for user confirmation). You can call
capture_regression_image() several times; only the final image will be
used.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import array
import logging
import os
import optparse
import re
import sys
import time
import unittest
# --- Python 2/3 compatibility helpers ---
PY3K = sys.version_info[0] == 3


def prompt(message):
    """Display *message* and return the user's typed reply (Py2/Py3 portable)."""
    if not PY3K:
        return raw_input(message)
    return input(message)
# So we can find tests.regression and ensure local pyglet copy is tested.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import tests.regression
import pyglet.image
# Directory where known-good regression images are stored and read back.
regressions_path = os.path.join(os.path.dirname(__file__),
                                'regression', 'images')
class TestCase(object):
    """A single named test case from the plan file.

    The dotted *name* identifies both the plan entry and the module that
    implements it (``tests/<name with dots as separators>.py``).
    """

    def __init__(self, name):
        self.name = name
        self.short_name = name.split('.')[-1]
        # Capability tags (GENERIC, WIN, X11, OSX, DEVELOPER, ...); the run
        # options must share at least one for the test to execute.
        self.capabilities = set()

    def get_module_filename(self, root=''):
        """Return the path of the implementing module below *root*."""
        path = os.path.join(*self.name.split('.'))
        return '%s.py' % os.path.join(root, path)

    def get_module(self, root=''):
        """Import and return the implementing module (``tests.<name>``)."""
        name = 'tests.%s' % self.name
        module = __import__(name)
        # __import__ returns the top-level package; walk down to the leaf.
        for c in name.split('.')[1:]:
            module = getattr(module, c)
        return module

    def get_regression_image_filename(self):
        """Filename of this test's stored regression image."""
        return os.path.join(regressions_path, '%s.png' % self.name)

    def test(self, options):
        """Run this test case, honouring the run *options*.

        Skips on capability mismatch, chooses the appropriate TestResult
        (regression check, regression capture, or standard), runs the
        unittest suite from the test module and, for interactive tests,
        asks the user for a pass/fail verdict.
        """
        options.tests_count += 1
        if not options.capabilities.intersection(self.capabilities):
            options.tests_skipped += 1
            options.log.debug('Capabilities mismatch. Skipping %s', self)
            return
        options.log.info('--- test (%d/%d) %s',
                         options.tests_count, options.num_tests, self)
        if options.pretend:
            return
        module = None
        try:
            module = self.get_module(options.test_root)
        except IOError:
            # NOTE(review): a missing module normally raises ImportError,
            # which lands in the generic handler below -- confirm intended.
            options.log.warning('No test exists for %s', self)
        except Exception:
            options.log.exception('Cannot load test for %s', self)
        if not module:
            return
        module_interactive = options.interactive
        if hasattr(module, '__noninteractive') and \
                getattr(module, '__noninteractive'):
            module_interactive = False
        if options.regression_check and \
                os.path.exists(self.get_regression_image_filename()):
            result = RegressionCheckTestResult(
                self, options.regression_tolerance)
            module_interactive = False
        elif options.regression_capture:
            result = RegressionCaptureTestResult(self)
        else:
            result = StandardTestResult(self)
        print('-' * 78)
        print("Running Test: %s (%d/%d)\n" % (self, options.tests_count, options.num_tests))
        if module.__doc__:
            print(' ' + module.__doc__.replace('\n', '\n '))
        if module_interactive:
            prompt('Press return to begin test...')
        suite = unittest.TestLoader().loadTestsFromModule(module)
        options.log.info('Begin unit tests for %s', self)
        suite(result)
        for failure in result.failures:
            options.log.error('Failure in %s', self)
            options.log.error(failure[1])
        for error in result.errors:
            options.log.error('Error in %s', self)
            options.log.error(error[1])
        options.log.info('%d tests run', result.testsRun)
        num_failures = len(result.failures)
        num_errors = len(result.errors)
        if num_failures or num_errors:
            print('%d Failures and %d Errors detected.' % (num_failures, num_errors))
        if (module_interactive and
                len(result.failures) == 0 and
                len(result.errors) == 0):
            # print module.__doc__
            user_result = prompt('Passed [Yn]: ')
            while user_result and user_result not in 'YyNn':
                print("Unrecognized response '%s'" % user_result)
                user_result = prompt('Passed [Yn]: ')
            if user_result and user_result in 'Nn':
                print('Enter failure description: ')
                description = prompt('> ')
                options.log.error('User marked fail for %s', self)
                options.log.error(description)
            else:
                options.log.info('User marked pass for %s', self)
                result.setUserPass()

    def __repr__(self):
        return 'TestCase(%s)' % self.name

    def __str__(self):
        return self.name

    def __cmp__(self, other):
        # Python 2 ordering hook; never invoked by Python 3 (where `cmp`
        # does not exist), hence the rich comparisons below.
        return cmp(str(self), str(other))

    # BUGFIX: on Python 3, sorting/comparing TestCase objects raised
    # NameError because __cmp__ uses the removed `cmp` builtin.  Provide
    # rich comparisons keyed on the full dotted name; __cmp__ is kept so
    # Python 2 behaviour is unchanged.
    def __eq__(self, other):
        return str(self) == str(other)

    def __lt__(self, other):
        return str(self) < str(other)

    def __hash__(self):
        return hash(str(self))

    def num_tests(self):
        """A test case always counts as exactly one test."""
        return 1
class TestSection(object):
    """A group of test cases (and nested sections) from the plan file."""

    def __init__(self, name):
        self.name = name
        self.children = []

    def add(self, child):
        """Append a child component (a TestSection or a TestCase)."""
        self.children.append(child)

    def test(self, options):
        """Run every child in order with the shared options object."""
        for component in self.children:
            component.test(options)

    def num_tests(self):
        """Total number of test cases contained, recursively."""
        return sum(child.num_tests() for child in self.children)

    def __repr__(self):
        return 'TestSection(%s)' % self.name
class TestPlan(object):
    """Hierarchy of TestSections and TestCases parsed from a plan file."""

    def __init__(self):
        self.root = None
        # Maps both fully-qualified and short names to their component.
        self.names = {}

    @classmethod
    def from_file(cls, file):
        """Parse *file* (a filename or a file-like object) into a TestPlan.

        Indentation defines section nesting.  A line containing a '.' is a
        test case (``dotted.name CAP1 CAP2 ...``); any other non-blank,
        non-comment line opens a section.  Raises Exception on an
        indentation mismatch.
        """
        plan = TestPlan()
        plan.root = TestSection('{root}')
        plan.root.indent = None
        # Section stack: currently open sections, innermost last.
        sections = [plan.root]
        if not hasattr(file, 'read'):
            file = open(file, 'r')
        line_number = 0
        for line in file:
            line_number += 1
            # Skip empty lines
            if not line.strip():
                continue
            # Skip comments
            if line[0] == '#':
                continue
            indent = len(line) - len(line.lstrip())
            # Pop sections this line has dedented out of.
            # NOTE(review): the truthiness test means an indent of 0 never
            # pops; top-level sections all live directly under the root.
            while (sections and sections[-1].indent and
                   sections[-1].indent > indent):
                sections.pop()
            if sections[-1].indent is None:
                # First child fixes the expected indent for this section.
                sections[-1].indent = indent
            if sections[-1].indent != indent:
                raise Exception('Indentation mismatch line %d' % line_number)
            if '.' in line:
                # Dotted token = test case; remaining tokens = capabilities.
                tokens = line.strip().split()
                test_case = TestCase(tokens[0])
                test_case.capabilities = set(tokens[1:])
                sections[-1].add(test_case)
                plan.names[test_case.name] = test_case
                plan.names[test_case.short_name] = test_case
            else:
                section = TestSection(line.strip())
                section.indent = None
                sections[-1].add(section)
                sections.append(section)
                plan.names[section.name] = section
        return plan

    def run(self, options, names=None):
        """Run the named sections/cases; run everything when *names* is empty.

        Returns False when a name is unknown, True otherwise.

        BUGFIX: the default for *names* was a mutable ``[]``; it is now None
        (the ``if not names`` test treats None and [] identically, so
        behaviour is unchanged for all callers).
        """
        if not names:
            components = [self.root]
        else:
            components = []
            for name in names:
                if name not in self.names:
                    options.log.error('Unknown test case or section "%s"', name)
                    return False
                else:
                    components.append(self.names[name])
        options.num_tests = sum([c.num_tests() for c in components])
        options.tests_count = 0
        options.tests_skipped = 0
        for component in components:
            component.test(options)
        print('-' * 78)
        return True
class StandardTestResult(unittest.TestResult):
    """Plain unittest result; user pass/fail tracking is a no-op here."""

    def __init__(self, component):
        # *component* is accepted for interface parity with the regression
        # result classes but is not needed for a standard run.
        super(StandardTestResult, self).__init__()

    def setUserPass(self):
        """Nothing to record for a standard (non-capture) run."""
class RegressionCaptureTestResult(unittest.TestResult):
    """Result that captures the buffer image of a passing regression test.

    The captured image is written to the regression directory only once the
    user confirms a pass via setUserPass().
    """

    def __init__(self, component):
        super(RegressionCaptureTestResult, self).__init__()
        self.component = component
        self.captured_image = None

    def startTest(self, test):
        super(RegressionCaptureTestResult, self).startTest(test)
        # Ask image-regression test cases to keep a copy of the buffer.
        if isinstance(test, tests.regression.ImageRegressionTestCase):
            test._enable_regression_image = True

    def addSuccess(self, test):
        super(RegressionCaptureTestResult, self).addSuccess(test)
        assert self.captured_image is None
        if isinstance(test, tests.regression.ImageRegressionTestCase):
            self.captured_image = test._captured_image

    def setUserPass(self):
        """Persist the captured image, if any, as the new reference."""
        if not self.captured_image:
            return
        filename = self.component.get_regression_image_filename()
        self.captured_image.save(filename)
        logging.getLogger().info('Wrote regression image %s' % filename)
class Regression(Exception):
    """Failure payload raised when a buffer differs from the stored image."""
def buffer_equal(a, b, tolerance=0):
    """Compare two byte buffers, allowing a per-byte difference.

    With tolerance 0 this is a plain equality test.  Otherwise the buffers
    are equal when each pair of corresponding bytes differs by at most
    *tolerance*; buffers of different lengths are never equal.
    """
    if tolerance == 0:
        return a == b
    if len(a) != len(b):
        return False
    left = array.array('B', a)
    right = array.array('B', b)
    return all(abs(x - y) <= tolerance for x, y in zip(left, right))
class RegressionCheckTestResult(unittest.TestResult):
    """Compares the captured buffer image against a stored regression image.

    A mismatch is reported as a failure carrying a Regression exception, so
    the run needs no user interaction.
    """
    def __init__(self, component, tolerance):
        super(RegressionCheckTestResult, self).__init__()
        self.filename = component.get_regression_image_filename()
        # Callers check os.path.exists(...) before constructing this result,
        # so the reference image is expected to be present here.
        self.regression_image = pyglet.image.load(self.filename)
        self.tolerance = tolerance

    def startTest(self, test):
        super(RegressionCheckTestResult, self).startTest(test)
        if isinstance(test, tests.regression.ImageRegressionTestCase):
            test._enable_regression_image = True
            # Run non-interactively; the image comparison replaces the user.
            test._enable_interactive = False
            logging.getLogger().info('Using regression %s' % self.filename)

    def addSuccess(self, test):
        # Check image
        ref_image = self.regression_image.image_data
        this_image = test._captured_image.image_data
        # Normalise format and pitch so the raw byte buffers are comparable.
        this_image.format = ref_image.format
        this_image.pitch = ref_image.pitch
        if this_image.width != ref_image.width:
            self.addFailure(test,
                'Buffer width does not match regression image')
        elif this_image.height != ref_image.height:
            self.addFailure(test,
                'Buffer height does not match regression image')
        elif not buffer_equal(this_image.data, ref_image.data,
                              self.tolerance):
            self.addFailure(test,
                'Buffer does not match regression image')
        else:
            # Only a matching buffer (within tolerance) counts as a success.
            super(RegressionCheckTestResult, self).addSuccess(test)

    def addFailure(self, test, err):
        # *err* arrives as a plain string from addSuccess above; wrap it in
        # a Regression exception.  The empty list stands in for a traceback
        # (there is no real one to attach).
        err = Regression(err)
        super(RegressionCheckTestResult, self).addFailure(test, (Regression,
            err, []))
def main():
    """Entry point: parse options, configure logging, run the test plan."""
    # Capability tags implied by the host platform, plus GENERIC.
    capabilities = ['GENERIC']
    platform_capabilities = {
        'linux': 'X11',
        'linux2': 'X11',
        'linux3': 'X11',
        'win32': 'WIN',
        'cygwin': 'WIN',
        'darwin': 'OSX'
    }
    if sys.platform in platform_capabilities:
        capabilities.append(platform_capabilities[sys.platform])
    script_root = os.path.dirname(__file__)
    plan_filename = os.path.normpath(os.path.join(script_root, 'plan.txt'))
    # NOTE(review): this local is unused -- optparse supplies
    # options.test_root via the --test-root option below.
    test_root = script_root
    op = optparse.OptionParser()
    op.usage = 'test.py [options] [components]'
    op.add_option('--plan', help='test plan file', default=plan_filename)
    op.add_option('--test-root', default=script_root,
        help='directory containing test cases')
    op.add_option('--capabilities', help='selected test capabilities',
        default=','.join(capabilities))
    op.add_option('--log-level', help='verbosity of logging',
        default=20, type='int')
    op.add_option('--log-file', help='log to FILE', metavar='FILE',
        default='pyglet.%d.log')
    # NOTE(review): --regression-path is parsed but the module-level
    # regressions_path is never updated from it -- confirm intended.
    op.add_option('--regression-path', metavar='DIR', default=regressions_path,
        help='locate regression images in DIR')
    op.add_option('--regression-tolerance', type='int', default=2,
        help='tolerance for comparing regression images')
    op.add_option('--regression-check', action='store_true',
        help='enable image regression checks')
    op.add_option('--regression-capture', action='store_true',
        help='enable image regression capture')
    op.add_option('--no-interactive', action='store_false', default=True,
        dest='interactive', help='disable interactive prompting')
    op.add_option('--developer', action='store_true',
        help='add DEVELOPER capability')
    op.add_option('--pretend', action='store_true',
        help='print selected test cases only')
    options, args = op.parse_args()
    options.capabilities = set(options.capabilities.split(','))
    if options.developer:
        options.capabilities.add('DEVELOPER')
    if options.regression_capture:
        try:
            os.makedirs(regressions_path)
        except OSError:
            pass  # directory already exists
    # A '%d' in the log file name is replaced with the first unused integer,
    # so successive runs never clobber older logs.
    if '%d' in options.log_file:
        i = 1
        while os.path.exists(options.log_file % i):
            i += 1
        options.log_file = options.log_file % i
    print('Test results are saved in log file:', options.log_file)
    logging.basicConfig(filename=options.log_file, level=options.log_level, format='%(levelname)s %(message)s')
    options.log = logging.getLogger()
    options.log.info('Beginning test at %s', time.ctime())
    options.log.info('Capabilities are: %s', ', '.join(options.capabilities))
    options.log.info('sys.platform = %s', sys.platform)
    options.log.info('pyglet.version = %s', pyglet.version)
    options.log.info('Reading test plan from %s', options.plan)
    plan = TestPlan.from_file(options.plan)
    # run() returns False for an unknown section/case name.
    if not plan.run(options, args):
        options.log.error('Test run failed.')
    print('Test results are saved in log file:', options.log_file)


if __name__ == '__main__':
    main()
|
jacques/connector | refs/heads/master | test/feedvalidator/feedvalidator/link.py | 2 | """$Id: link.py 573 2006-03-18 20:56:15Z rubys $"""
__author__ = "Sam Ruby <http://intertwingly.net/> and Mark Pilgrim <http://diveintomark.org/>"
__version__ = "$Revision: 573 $"
__date__ = "$Date: 2006-03-19 08:56:15 +1200 (Sun, 19 Mar 2006) $"
__copyright__ = "Copyright (c) 2002 Sam Ruby and Mark Pilgrim"
__license__ = "Python"
from base import validatorBase
from validators import *
#
# Atom link element
#
class link(nonblank, xmlbase, iso639, nonhtml, positiveInteger, nonNegativeInteger, rfc3339):
    """Validator for the Atom <link> element (RFC 4287 section 4.2.7).

    One validator mixin is inherited per attribute kind; each attribute
    handler in validate() loads self.value and delegates to the matching
    mixin's validate().

    Fixes relative to the previous revision:
      * the duplicate 'nonblank' entry in the base-class list is removed
        (CPython raises "TypeError: duplicate base class" at class
        creation, so the module could not even be imported);
      * validating the 'length' attribute no longer clobbers self.hreflang
        with the length value (copy/paste slip from the hreflang branch).
    """

    # Registered/deployed link relations; anything else must be a full IRI.
    validRelations = ['alternate', 'enclosure', 'related', 'self', 'via',
                      "previous", "next", "first", "last", "current", "payment",
                      # http://www.imc.org/atom-protocol/mail-archive/msg04095.html
                      "edit",
                      # 'edit' is part of the APP
                      "replies",
                      # 'replies' is defined by atompub-feed-thread
                      ]

    def getExpectedAttrNames(self):
        # Atom attributes, RDF type/resource, and the atompub thread
        # extension's count/when.
        return [(None, u'type'), (None, u'title'), (None, u'rel'),
                (None, u'href'), (None, u'length'), (None, u'hreflang'),
                (u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'type'),
                (u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'resource'),
                (u'http://purl.org/syndication/thread/1.0', u'count'),
                (u'http://purl.org/syndication/thread/1.0', u'when')]

    def validate(self):
        """Validate each attribute that is present on the link element."""
        self.type = ""
        self.rel = "alternate"
        self.hreflang = ""
        self.title = ""
        if self.attrs.has_key((None, "rel")):
            self.value = self.rel = self.attrs.getValue((None, "rel"))
            if self.rel in self.validRelations:
                self.log(ValidAtomLinkRel({"parent":self.parent.name, "element":self.name, "attr":"rel", "value":self.rel}))
            elif rfc2396_full.rfc2396_re.match(self.rel.encode('idna')):
                # Unregistered relations are acceptable when they are full URIs.
                self.log(ValidAtomLinkRel({"parent":self.parent.name, "element":self.name, "attr":"rel", "value":self.rel}))
            else:
                self.log(UnregisteredAtomLinkRel({"parent":self.parent.name, "element":self.name, "attr":"rel", "value":self.rel}))
            nonblank.validate(self, errorClass=AttrNotBlank, extraParams={"attr": "rel"})
        if self.attrs.has_key((None, "type")):
            self.value = self.type = self.attrs.getValue((None, "type"))
            if not mime_re.match(self.type):
                self.log(InvalidMIMEType({"parent":self.parent.name, "element":self.name, "attr":"type", "value":self.type}))
            elif self.rel == "self" and self.type not in ["application/atom+xml", "application/rss+xml", "application/rdf+xml"]:
                # A rel="self" link is expected to point at a feed document.
                self.log(SelfNotAtom({"parent":self.parent.name, "element":self.name, "attr":"type", "value":self.type}))
            else:
                self.log(ValidMIMEAttribute({"parent":self.parent.name, "element":self.name, "attr":"type", "value":self.type}))
        if self.attrs.has_key((None, "title")):
            self.log(ValidTitle({"parent":self.parent.name, "element":self.name, "attr":"title"}))
            self.value = self.title = self.attrs.getValue((None, "title"))
            nonblank.validate(self, errorClass=AttrNotBlank, extraParams={"attr": "title"})
            nonhtml.validate(self)
        if self.attrs.has_key((None, "length")):
            # BUGFIX: this used to read
            #   self.value = self.hreflang = self.attrs.getValue(...)
            # wrongly storing the length string into self.hreflang.
            self.value = self.attrs.getValue((None, "length"))
            positiveInteger.validate(self)
            nonblank.validate(self)
        if self.attrs.has_key((None, "hreflang")):
            self.value = self.hreflang = self.attrs.getValue((None, "hreflang"))
            iso639.validate(self)
        if self.attrs.has_key((None, "href")):
            self.value = self.attrs.getValue((None, "href"))
            xmlbase.validate(self, extraParams={"attr": "href"})
            if self.rel == "self" and self.parent.name == "feed":
                from urlparse import urljoin
                if urljoin(self.xmlBase,self.value) not in self.dispatcher.selfURIs:
                    # First compare ignoring fragments, then fall back to a
                    # normalised URI comparison against every known doc base.
                    if urljoin(self.xmlBase,self.value).split('#')[0] != self.xmlBase.split('#')[0]:
                        from uri import Uri
                        value = Uri(self.value)
                        for docbase in self.dispatcher.selfURIs:
                            if value == Uri(docbase): break
                        else:
                            self.log(SelfDoesntMatchLocation({"parent":self.parent.name, "element":self.name}))
        else:
            self.log(MissingHref({"parent":self.parent.name, "element":self.name, "attr":"href"}))
        if self.attrs.has_key((u'http://purl.org/syndication/thread/1.0', u'count')):
            if self.rel != "replies":
                self.log(UnexpectedAttribute({"parent":self.parent.name, "element":self.name, "attribute":"thr:count"}))
            self.value = self.attrs.getValue((u'http://purl.org/syndication/thread/1.0', u'count'))
            self.name="thr:count"
            nonNegativeInteger.validate(self)
        if self.attrs.has_key((u'http://purl.org/syndication/thread/1.0', u'when')):
            if self.rel != "replies":
                self.log(UnexpectedAttribute({"parent":self.parent.name, "element":self.name, "attribute":"thr:when"}))
            self.value = self.attrs.getValue((u'http://purl.org/syndication/thread/1.0', u'when'))
            self.name="thr:when"
            rfc3339.validate(self)

    def startElementNS(self, name, qname, attrs):
        # atom:link has no child elements; swallow anything that appears.
        self.push(eater(), name, attrs)

    def characters(self, text):
        # atom:link must be empty; non-whitespace text content is an error.
        if text.strip():
            self.log(AtomLinkNotEmpty({"parent":self.parent.name, "element":self.name}))
|
rhyolight/nupic.son | refs/heads/master | tests/app/soc/logic/__init__.py | 12133432 | |
secynic/nfsinkhole | refs/heads/master | nfsinkhole/tests/docker/__init__.py | 12133432 | |
camilonova/sentry | refs/heads/master | tests/sentry/web/helpers/__init__.py | 12133432 | |
BRMWebDev/BRMFlask | refs/heads/master | brmflask/tests/utils/__init__.py | 12133432 | |
pomegranited/edx-platform | refs/heads/master | openedx/core/djangoapps/user_api/tests/__init__.py | 12133432 | |
vitan/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/tests/middleware/cond_get_urls.py | 176 | from django.conf.urls import patterns
from django.http import HttpResponse
# Single-view URLconf used by the conditional-GET middleware tests: the
# root URL simply returns a fixed plain response.
urlpatterns = patterns('',
    (r'^$', lambda request: HttpResponse('root is here')),
)
|
abhattad4/Digi-Menu | refs/heads/master | django/contrib/gis/geometry/backend/__init__.py | 742 | from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Name of the geometry backend: one of the backends bundled with
# contrib.gis ('geos' by default) or a dotted path to a user module.
geom_backend = getattr(settings, 'GEOMETRY_BACKEND', 'geos')

try:
    # First try a backend bundled under django.contrib.gis.geometry.backend.
    module = import_module('django.contrib.gis.geometry.backend.%s' % geom_backend)
except ImportError:
    try:
        # Fall back to treating the setting as a full dotted module path.
        module = import_module(geom_backend)
    except ImportError:
        raise ImproperlyConfigured('Could not import user-defined GEOMETRY_BACKEND '
                                   '"%s".' % geom_backend)

try:
    # Re-export the backend's interface at this package level.
    Geometry = module.Geometry
    GeometryException = module.GeometryException
except AttributeError:
    raise ImproperlyConfigured('Cannot import Geometry from the "%s" '
                               'geometry backend.' % geom_backend)
|
shsingh/ansible | refs/heads/devel | test/units/modules/network/fortios/test_fortios_firewall_vip.py | 21 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_firewall_vip
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Automatically replace the module's Connection class with a mock."""
    mocked_connection_class = mocker.patch('ansible.modules.network.fortios.fortios_firewall_vip.Connection')
    return mocked_connection_class


fos_instance = FortiOSHandler(connection_mock)
def test_firewall_vip_creation(mocker):
    """A successful creation (POST) must report changed without error.

    FortiOSHandler.set is patched to answer HTTP 200; the call assertion
    checks that every snake_case playbook key reaches the API under its
    hyphenated FortiOS name with the value unchanged.
    """
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_vip': {
            'arp_reply': 'disable',
            'color': '4',
            'comment': 'Comment.',
            'dns_mapping_ttl': '6',
            'extintf': 'test_value_7',
            'extip': 'test_value_8',
            'extport': 'test_value_9',
            'gratuitous_arp_interval': '10',
            'http_cookie_age': '11',
            'http_cookie_domain': 'test_value_12',
            'http_cookie_domain_from_host': 'disable',
            'http_cookie_generation': '14',
            'http_cookie_path': 'test_value_15',
            'http_cookie_share': 'disable',
            'http_ip_header': 'enable',
            'http_ip_header_name': 'test_value_18',
            'http_multiplex': 'enable',
            'https_cookie_secure': 'disable',
            'id': '21',
            'ldb_method': 'static',
            'mapped_addr': 'test_value_23',
            'mappedport': 'test_value_24',
            'max_embryonic_connections': '25',
            'name': 'default_name_26',
            'nat_source_vip': 'disable',
            'outlook_web_access': 'disable',
            'persistence': 'none',
            'portforward': 'disable',
            'portmapping_type': '1-to-1',
            'protocol': 'tcp',
            'server_type': 'http',
            'ssl_algorithm': 'high',
            'ssl_certificate': 'test_value_35',
            'ssl_client_fallback': 'disable',
            'ssl_client_renegotiation': 'allow',
            'ssl_client_session_state_max': '38',
            'ssl_client_session_state_timeout': '39',
            'ssl_client_session_state_type': 'disable',
            'ssl_dh_bits': '768',
            'ssl_hpkp': 'disable',
            'ssl_hpkp_age': '43',
            'ssl_hpkp_backup': 'test_value_44',
            'ssl_hpkp_include_subdomains': 'disable',
            'ssl_hpkp_primary': 'test_value_46',
            'ssl_hpkp_report_uri': 'test_value_47',
            'ssl_hsts': 'disable',
            'ssl_hsts_age': '49',
            'ssl_hsts_include_subdomains': 'disable',
            'ssl_http_location_conversion': 'enable',
            'ssl_http_match_host': 'enable',
            'ssl_max_version': 'ssl-3.0',
            'ssl_min_version': 'ssl-3.0',
            'ssl_mode': 'half',
            'ssl_pfs': 'require',
            'ssl_send_empty_frags': 'enable',
            'ssl_server_algorithm': 'high',
            'ssl_server_max_version': 'ssl-3.0',
            'ssl_server_min_version': 'ssl-3.0',
            'ssl_server_session_state_max': '61',
            'ssl_server_session_state_timeout': '62',
            'ssl_server_session_state_type': 'disable',
            'type': 'static-nat',
            'uuid': 'test_value_65',
            'weblogic_server': 'disable',
            'websphere_server': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_firewall_vip.fortios_firewall(input_data, fos_instance)

    # The expected API payload is the firewall_vip section with each
    # snake_case key rewritten to its hyphenated FortiOS form; the values
    # are forwarded untouched (dict equality ignores key order).
    expected_data = {key.replace('_', '-'): value
                     for key, value in input_data['firewall_vip'].items()}

    set_method_mock.assert_called_with('firewall', 'vip', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_firewall_vip_creation_fails(mocker):
    """A failed creation (HTTP 500 from the API) must report an error.

    FortiOSHandler.set is patched to answer HTTP 500; the module must pass
    the translated payload through unchanged and surface the error without
    claiming a change.
    """
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_vip': {
            'arp_reply': 'disable',
            'color': '4',
            'comment': 'Comment.',
            'dns_mapping_ttl': '6',
            'extintf': 'test_value_7',
            'extip': 'test_value_8',
            'extport': 'test_value_9',
            'gratuitous_arp_interval': '10',
            'http_cookie_age': '11',
            'http_cookie_domain': 'test_value_12',
            'http_cookie_domain_from_host': 'disable',
            'http_cookie_generation': '14',
            'http_cookie_path': 'test_value_15',
            'http_cookie_share': 'disable',
            'http_ip_header': 'enable',
            'http_ip_header_name': 'test_value_18',
            'http_multiplex': 'enable',
            'https_cookie_secure': 'disable',
            'id': '21',
            'ldb_method': 'static',
            'mapped_addr': 'test_value_23',
            'mappedport': 'test_value_24',
            'max_embryonic_connections': '25',
            'name': 'default_name_26',
            'nat_source_vip': 'disable',
            'outlook_web_access': 'disable',
            'persistence': 'none',
            'portforward': 'disable',
            'portmapping_type': '1-to-1',
            'protocol': 'tcp',
            'server_type': 'http',
            'ssl_algorithm': 'high',
            'ssl_certificate': 'test_value_35',
            'ssl_client_fallback': 'disable',
            'ssl_client_renegotiation': 'allow',
            'ssl_client_session_state_max': '38',
            'ssl_client_session_state_timeout': '39',
            'ssl_client_session_state_type': 'disable',
            'ssl_dh_bits': '768',
            'ssl_hpkp': 'disable',
            'ssl_hpkp_age': '43',
            'ssl_hpkp_backup': 'test_value_44',
            'ssl_hpkp_include_subdomains': 'disable',
            'ssl_hpkp_primary': 'test_value_46',
            'ssl_hpkp_report_uri': 'test_value_47',
            'ssl_hsts': 'disable',
            'ssl_hsts_age': '49',
            'ssl_hsts_include_subdomains': 'disable',
            'ssl_http_location_conversion': 'enable',
            'ssl_http_match_host': 'enable',
            'ssl_max_version': 'ssl-3.0',
            'ssl_min_version': 'ssl-3.0',
            'ssl_mode': 'half',
            'ssl_pfs': 'require',
            'ssl_send_empty_frags': 'enable',
            'ssl_server_algorithm': 'high',
            'ssl_server_max_version': 'ssl-3.0',
            'ssl_server_min_version': 'ssl-3.0',
            'ssl_server_session_state_max': '61',
            'ssl_server_session_state_timeout': '62',
            'ssl_server_session_state_type': 'disable',
            'type': 'static-nat',
            'uuid': 'test_value_65',
            'weblogic_server': 'disable',
            'websphere_server': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_firewall_vip.fortios_firewall(input_data, fos_instance)

    # Same snake_case -> hyphenated key translation as the success case.
    expected_data = {key.replace('_', '-'): value
                     for key, value in input_data['firewall_vip'].items()}

    set_method_mock.assert_called_with('firewall', 'vip', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_firewall_vip_removal(mocker):
    """A successful deletion (state=absent) must report changed without error.

    FortiOSHandler.delete is patched to answer HTTP 200.  The fixture dict
    below is the standard generated payload; for deletion only the mkey
    (derived internally by the handler) matters, hence the ANY matcher.
    """
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'firewall_vip': {
            'arp_reply': 'disable',
            'color': '4',
            'comment': 'Comment.',
            'dns_mapping_ttl': '6',
            'extintf': 'test_value_7',
            'extip': 'test_value_8',
            'extport': 'test_value_9',
            'gratuitous_arp_interval': '10',
            'http_cookie_age': '11',
            'http_cookie_domain': 'test_value_12',
            'http_cookie_domain_from_host': 'disable',
            'http_cookie_generation': '14',
            'http_cookie_path': 'test_value_15',
            'http_cookie_share': 'disable',
            'http_ip_header': 'enable',
            'http_ip_header_name': 'test_value_18',
            'http_multiplex': 'enable',
            'https_cookie_secure': 'disable',
            'id': '21',
            'ldb_method': 'static',
            'mapped_addr': 'test_value_23',
            'mappedport': 'test_value_24',
            'max_embryonic_connections': '25',
            'name': 'default_name_26',
            'nat_source_vip': 'disable',
            'outlook_web_access': 'disable',
            'persistence': 'none',
            'portforward': 'disable',
            'portmapping_type': '1-to-1',
            'protocol': 'tcp',
            'server_type': 'http',
            'ssl_algorithm': 'high',
            'ssl_certificate': 'test_value_35',
            'ssl_client_fallback': 'disable',
            'ssl_client_renegotiation': 'allow',
            'ssl_client_session_state_max': '38',
            'ssl_client_session_state_timeout': '39',
            'ssl_client_session_state_type': 'disable',
            'ssl_dh_bits': '768',
            'ssl_hpkp': 'disable',
            'ssl_hpkp_age': '43',
            'ssl_hpkp_backup': 'test_value_44',
            'ssl_hpkp_include_subdomains': 'disable',
            'ssl_hpkp_primary': 'test_value_46',
            'ssl_hpkp_report_uri': 'test_value_47',
            'ssl_hsts': 'disable',
            'ssl_hsts_age': '49',
            'ssl_hsts_include_subdomains': 'disable',
            'ssl_http_location_conversion': 'enable',
            'ssl_http_match_host': 'enable',
            'ssl_max_version': 'ssl-3.0',
            'ssl_min_version': 'ssl-3.0',
            'ssl_mode': 'half',
            'ssl_pfs': 'require',
            'ssl_send_empty_frags': 'enable',
            'ssl_server_algorithm': 'high',
            'ssl_server_max_version': 'ssl-3.0',
            'ssl_server_min_version': 'ssl-3.0',
            'ssl_server_session_state_max': '61',
            'ssl_server_session_state_timeout': '62',
            'ssl_server_session_state_type': 'disable',
            'type': 'static-nat',
            'uuid': 'test_value_65',
            'weblogic_server': 'disable',
            'websphere_server': 'disable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_firewall_vip.fortios_firewall(input_data, fos_instance)

    # The mkey is computed inside the handler from the payload, so only the
    # endpoint and vdom can be pinned exactly here.
    delete_method_mock.assert_called_with('firewall', 'vip', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_firewall_vip_deletion_fails(mocker):
    """Deleting a firewall VIP surfaces an API failure (HTTP 500).

    FortiOSHandler.delete is patched to return an error response; the module
    must report is_error, leave `changed` False, and pass the backend status
    through unchanged.  `fos_instance` and `fortios_firewall_vip` are
    module-level fixtures defined earlier in this file.
    """
    # Patch the schema lookup so we can assert it is never consulted on delete.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
    # Full module argument payload; state=absent drives the delete path.
    input_data = {
        'username': 'admin',
        'state': 'absent',
        'firewall_vip': {
            'arp_reply': 'disable',
            'color': '4',
            'comment': 'Comment.',
            'dns_mapping_ttl': '6',
            'extintf': 'test_value_7',
            'extip': 'test_value_8',
            'extport': 'test_value_9',
            'gratuitous_arp_interval': '10',
            'http_cookie_age': '11',
            'http_cookie_domain': 'test_value_12',
            'http_cookie_domain_from_host': 'disable',
            'http_cookie_generation': '14',
            'http_cookie_path': 'test_value_15',
            'http_cookie_share': 'disable',
            'http_ip_header': 'enable',
            'http_ip_header_name': 'test_value_18',
            'http_multiplex': 'enable',
            'https_cookie_secure': 'disable',
            'id': '21',
            'ldb_method': 'static',
            'mapped_addr': 'test_value_23',
            'mappedport': 'test_value_24',
            'max_embryonic_connections': '25',
            'name': 'default_name_26',
            'nat_source_vip': 'disable',
            'outlook_web_access': 'disable',
            'persistence': 'none',
            'portforward': 'disable',
            'portmapping_type': '1-to-1',
            'protocol': 'tcp',
            'server_type': 'http',
            'ssl_algorithm': 'high',
            'ssl_certificate': 'test_value_35',
            'ssl_client_fallback': 'disable',
            'ssl_client_renegotiation': 'allow',
            'ssl_client_session_state_max': '38',
            'ssl_client_session_state_timeout': '39',
            'ssl_client_session_state_type': 'disable',
            'ssl_dh_bits': '768',
            'ssl_hpkp': 'disable',
            'ssl_hpkp_age': '43',
            'ssl_hpkp_backup': 'test_value_44',
            'ssl_hpkp_include_subdomains': 'disable',
            'ssl_hpkp_primary': 'test_value_46',
            'ssl_hpkp_report_uri': 'test_value_47',
            'ssl_hsts': 'disable',
            'ssl_hsts_age': '49',
            'ssl_hsts_include_subdomains': 'disable',
            'ssl_http_location_conversion': 'enable',
            'ssl_http_match_host': 'enable',
            'ssl_max_version': 'ssl-3.0',
            'ssl_min_version': 'ssl-3.0',
            'ssl_mode': 'half',
            'ssl_pfs': 'require',
            'ssl_send_empty_frags': 'enable',
            'ssl_server_algorithm': 'high',
            'ssl_server_max_version': 'ssl-3.0',
            'ssl_server_min_version': 'ssl-3.0',
            'ssl_server_session_state_max': '61',
            'ssl_server_session_state_timeout': '62',
            'ssl_server_session_state_type': 'disable',
            'type': 'static-nat',
            'uuid': 'test_value_65',
            'weblogic_server': 'disable',
            'websphere_server': 'disable'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_firewall_vip.fortios_firewall(input_data, fos_instance)
    # Deletion only needs the mkey, so the exact key value is irrelevant (ANY).
    delete_method_mock.assert_called_with('firewall', 'vip', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    # A failed delete must be reported as an error with no state change.
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_firewall_vip_idempotent(mocker):
    """Re-applying an identical firewall VIP is a no-op (idempotency).

    FortiOSHandler.set is patched to answer with status=error / HTTP 404,
    which the module interprets as "object already in desired state":
    neither an error nor a change is reported.  Also verifies that the
    underscore-keyed module arguments are translated to the hyphenated
    FortiOS API field names before being sent.
    """
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    # error/404 from `set` is the idempotent "nothing to do" signal.
    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    # Module-side payload: keys use underscores (Ansible argument style).
    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_vip': {
            'arp_reply': 'disable',
            'color': '4',
            'comment': 'Comment.',
            'dns_mapping_ttl': '6',
            'extintf': 'test_value_7',
            'extip': 'test_value_8',
            'extport': 'test_value_9',
            'gratuitous_arp_interval': '10',
            'http_cookie_age': '11',
            'http_cookie_domain': 'test_value_12',
            'http_cookie_domain_from_host': 'disable',
            'http_cookie_generation': '14',
            'http_cookie_path': 'test_value_15',
            'http_cookie_share': 'disable',
            'http_ip_header': 'enable',
            'http_ip_header_name': 'test_value_18',
            'http_multiplex': 'enable',
            'https_cookie_secure': 'disable',
            'id': '21',
            'ldb_method': 'static',
            'mapped_addr': 'test_value_23',
            'mappedport': 'test_value_24',
            'max_embryonic_connections': '25',
            'name': 'default_name_26',
            'nat_source_vip': 'disable',
            'outlook_web_access': 'disable',
            'persistence': 'none',
            'portforward': 'disable',
            'portmapping_type': '1-to-1',
            'protocol': 'tcp',
            'server_type': 'http',
            'ssl_algorithm': 'high',
            'ssl_certificate': 'test_value_35',
            'ssl_client_fallback': 'disable',
            'ssl_client_renegotiation': 'allow',
            'ssl_client_session_state_max': '38',
            'ssl_client_session_state_timeout': '39',
            'ssl_client_session_state_type': 'disable',
            'ssl_dh_bits': '768',
            'ssl_hpkp': 'disable',
            'ssl_hpkp_age': '43',
            'ssl_hpkp_backup': 'test_value_44',
            'ssl_hpkp_include_subdomains': 'disable',
            'ssl_hpkp_primary': 'test_value_46',
            'ssl_hpkp_report_uri': 'test_value_47',
            'ssl_hsts': 'disable',
            'ssl_hsts_age': '49',
            'ssl_hsts_include_subdomains': 'disable',
            'ssl_http_location_conversion': 'enable',
            'ssl_http_match_host': 'enable',
            'ssl_max_version': 'ssl-3.0',
            'ssl_min_version': 'ssl-3.0',
            'ssl_mode': 'half',
            'ssl_pfs': 'require',
            'ssl_send_empty_frags': 'enable',
            'ssl_server_algorithm': 'high',
            'ssl_server_max_version': 'ssl-3.0',
            'ssl_server_min_version': 'ssl-3.0',
            'ssl_server_session_state_max': '61',
            'ssl_server_session_state_timeout': '62',
            'ssl_server_session_state_type': 'disable',
            'type': 'static-nat',
            'uuid': 'test_value_65',
            'weblogic_server': 'disable',
            'websphere_server': 'disable'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_firewall_vip.fortios_firewall(input_data, fos_instance)
    # API-side payload: same values, keys translated to hyphenated form.
    expected_data = {
        'arp-reply': 'disable',
        'color': '4',
        'comment': 'Comment.',
        'dns-mapping-ttl': '6',
        'extintf': 'test_value_7',
        'extip': 'test_value_8',
        'extport': 'test_value_9',
        'gratuitous-arp-interval': '10',
        'http-cookie-age': '11',
        'http-cookie-domain': 'test_value_12',
        'http-cookie-domain-from-host': 'disable',
        'http-cookie-generation': '14',
        'http-cookie-path': 'test_value_15',
        'http-cookie-share': 'disable',
        'http-ip-header': 'enable',
        'http-ip-header-name': 'test_value_18',
        'http-multiplex': 'enable',
        'https-cookie-secure': 'disable',
        'id': '21',
        'ldb-method': 'static',
        'mapped-addr': 'test_value_23',
        'mappedport': 'test_value_24',
        'max-embryonic-connections': '25',
        'name': 'default_name_26',
        'nat-source-vip': 'disable',
        'outlook-web-access': 'disable',
        'persistence': 'none',
        'portforward': 'disable',
        'portmapping-type': '1-to-1',
        'protocol': 'tcp',
        'server-type': 'http',
        'ssl-algorithm': 'high',
        'ssl-certificate': 'test_value_35',
        'ssl-client-fallback': 'disable',
        'ssl-client-renegotiation': 'allow',
        'ssl-client-session-state-max': '38',
        'ssl-client-session-state-timeout': '39',
        'ssl-client-session-state-type': 'disable',
        'ssl-dh-bits': '768',
        'ssl-hpkp': 'disable',
        'ssl-hpkp-age': '43',
        'ssl-hpkp-backup': 'test_value_44',
        'ssl-hpkp-include-subdomains': 'disable',
        'ssl-hpkp-primary': 'test_value_46',
        'ssl-hpkp-report-uri': 'test_value_47',
        'ssl-hsts': 'disable',
        'ssl-hsts-age': '49',
        'ssl-hsts-include-subdomains': 'disable',
        'ssl-http-location-conversion': 'enable',
        'ssl-http-match-host': 'enable',
        'ssl-max-version': 'ssl-3.0',
        'ssl-min-version': 'ssl-3.0',
        'ssl-mode': 'half',
        'ssl-pfs': 'require',
        'ssl-send-empty-frags': 'enable',
        'ssl-server-algorithm': 'high',
        'ssl-server-max-version': 'ssl-3.0',
        'ssl-server-min-version': 'ssl-3.0',
        'ssl-server-session-state-max': '61',
        'ssl-server-session-state-timeout': '62',
        'ssl-server-session-state-type': 'disable',
        'type': 'static-nat',
        'uuid': 'test_value_65',
        'weblogic-server': 'disable',
        'websphere-server': 'disable'
    }
    set_method_mock.assert_called_with('firewall', 'vip', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    # Idempotent outcome: no error, no change, backend status passed through.
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_firewall_vip_filter_foreign_attributes(mocker):
    """Unknown attributes in the module payload are stripped before the API call.

    The input contains 'random_attribute_not_valid', which is not part of the
    firewall VIP schema; the payload sent to FortiOSHandler.set must not
    include it, while all recognised fields are forwarded (translated to
    hyphenated API key names).
    """
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
    input_data = {
        'username': 'admin',
        'state': 'present',
        'firewall_vip': {
            # Foreign key: must be filtered out of the request payload.
            'random_attribute_not_valid': 'tag',
            'arp_reply': 'disable',
            'color': '4',
            'comment': 'Comment.',
            'dns_mapping_ttl': '6',
            'extintf': 'test_value_7',
            'extip': 'test_value_8',
            'extport': 'test_value_9',
            'gratuitous_arp_interval': '10',
            'http_cookie_age': '11',
            'http_cookie_domain': 'test_value_12',
            'http_cookie_domain_from_host': 'disable',
            'http_cookie_generation': '14',
            'http_cookie_path': 'test_value_15',
            'http_cookie_share': 'disable',
            'http_ip_header': 'enable',
            'http_ip_header_name': 'test_value_18',
            'http_multiplex': 'enable',
            'https_cookie_secure': 'disable',
            'id': '21',
            'ldb_method': 'static',
            'mapped_addr': 'test_value_23',
            'mappedport': 'test_value_24',
            'max_embryonic_connections': '25',
            'name': 'default_name_26',
            'nat_source_vip': 'disable',
            'outlook_web_access': 'disable',
            'persistence': 'none',
            'portforward': 'disable',
            'portmapping_type': '1-to-1',
            'protocol': 'tcp',
            'server_type': 'http',
            'ssl_algorithm': 'high',
            'ssl_certificate': 'test_value_35',
            'ssl_client_fallback': 'disable',
            'ssl_client_renegotiation': 'allow',
            'ssl_client_session_state_max': '38',
            'ssl_client_session_state_timeout': '39',
            'ssl_client_session_state_type': 'disable',
            'ssl_dh_bits': '768',
            'ssl_hpkp': 'disable',
            'ssl_hpkp_age': '43',
            'ssl_hpkp_backup': 'test_value_44',
            'ssl_hpkp_include_subdomains': 'disable',
            'ssl_hpkp_primary': 'test_value_46',
            'ssl_hpkp_report_uri': 'test_value_47',
            'ssl_hsts': 'disable',
            'ssl_hsts_age': '49',
            'ssl_hsts_include_subdomains': 'disable',
            'ssl_http_location_conversion': 'enable',
            'ssl_http_match_host': 'enable',
            'ssl_max_version': 'ssl-3.0',
            'ssl_min_version': 'ssl-3.0',
            'ssl_mode': 'half',
            'ssl_pfs': 'require',
            'ssl_send_empty_frags': 'enable',
            'ssl_server_algorithm': 'high',
            'ssl_server_max_version': 'ssl-3.0',
            'ssl_server_min_version': 'ssl-3.0',
            'ssl_server_session_state_max': '61',
            'ssl_server_session_state_timeout': '62',
            'ssl_server_session_state_type': 'disable',
            'type': 'static-nat',
            'uuid': 'test_value_65',
            'weblogic_server': 'disable',
            'websphere_server': 'disable'
        },
        'vdom': 'root'}
    is_error, changed, response = fortios_firewall_vip.fortios_firewall(input_data, fos_instance)
    # Expected payload: hyphenated keys, foreign attribute absent.
    expected_data = {
        'arp-reply': 'disable',
        'color': '4',
        'comment': 'Comment.',
        'dns-mapping-ttl': '6',
        'extintf': 'test_value_7',
        'extip': 'test_value_8',
        'extport': 'test_value_9',
        'gratuitous-arp-interval': '10',
        'http-cookie-age': '11',
        'http-cookie-domain': 'test_value_12',
        'http-cookie-domain-from-host': 'disable',
        'http-cookie-generation': '14',
        'http-cookie-path': 'test_value_15',
        'http-cookie-share': 'disable',
        'http-ip-header': 'enable',
        'http-ip-header-name': 'test_value_18',
        'http-multiplex': 'enable',
        'https-cookie-secure': 'disable',
        'id': '21',
        'ldb-method': 'static',
        'mapped-addr': 'test_value_23',
        'mappedport': 'test_value_24',
        'max-embryonic-connections': '25',
        'name': 'default_name_26',
        'nat-source-vip': 'disable',
        'outlook-web-access': 'disable',
        'persistence': 'none',
        'portforward': 'disable',
        'portmapping-type': '1-to-1',
        'protocol': 'tcp',
        'server-type': 'http',
        'ssl-algorithm': 'high',
        'ssl-certificate': 'test_value_35',
        'ssl-client-fallback': 'disable',
        'ssl-client-renegotiation': 'allow',
        'ssl-client-session-state-max': '38',
        'ssl-client-session-state-timeout': '39',
        'ssl-client-session-state-type': 'disable',
        'ssl-dh-bits': '768',
        'ssl-hpkp': 'disable',
        'ssl-hpkp-age': '43',
        'ssl-hpkp-backup': 'test_value_44',
        'ssl-hpkp-include-subdomains': 'disable',
        'ssl-hpkp-primary': 'test_value_46',
        'ssl-hpkp-report-uri': 'test_value_47',
        'ssl-hsts': 'disable',
        'ssl-hsts-age': '49',
        'ssl-hsts-include-subdomains': 'disable',
        'ssl-http-location-conversion': 'enable',
        'ssl-http-match-host': 'enable',
        'ssl-max-version': 'ssl-3.0',
        'ssl-min-version': 'ssl-3.0',
        'ssl-mode': 'half',
        'ssl-pfs': 'require',
        'ssl-send-empty-frags': 'enable',
        'ssl-server-algorithm': 'high',
        'ssl-server-max-version': 'ssl-3.0',
        'ssl-server-min-version': 'ssl-3.0',
        'ssl-server-session-state-max': '61',
        'ssl-server-session-state-timeout': '62',
        'ssl-server-session-state-type': 'disable',
        'type': 'static-nat',
        'uuid': 'test_value_65',
        'weblogic-server': 'disable',
        'websphere-server': 'disable'
    }
    set_method_mock.assert_called_with('firewall', 'vip', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
robynbergeron/ansible-modules-extras | refs/heads/devel | monitoring/bigpanda.py | 74 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module documentation consumed by `ansible-doc`.  Fix: the module accepts
# `message` and `source_system` parameters (see argument_spec in main()) but
# they were missing from the options list below — they are now documented.
DOCUMENTATION = '''
---
module: bigpanda
author: "Hagai Kariti (@hkariti)"
short_description: Notify BigPanda about deployments
version_added: "1.8"
description:
- Notify BigPanda when deployments start and end (successfully or not). Returns a deployment object containing all the parameters for future module calls.
options:
  component:
    description:
      - "The name of the component being deployed. Ex: billing"
    required: true
    alias: name
  version:
    description:
      - The deployment version.
    required: true
  token:
    description:
      - API token.
    required: true
  state:
    description:
      - State of the deployment.
    required: true
    choices: ['started', 'finished', 'failed']
  hosts:
    description:
      - Name of affected host name. Can be a list.
    required: false
    default: machine's hostname
    alias: host
  env:
    description:
      - The environment name, typically 'production', 'staging', etc.
    required: false
  owner:
    description:
      - The person responsible for the deployment.
    required: false
  description:
    description:
      - Free text description of the deployment.
    required: false
  message:
    description:
      - Error message attached to a C(failed) deployment.
    required: false
  source_system:
    description:
      - Source system reported to BigPanda.
    required: false
    default: ansible
  url:
    description:
      - Base URL of the API server.
    required: False
    default: https://api.bigpanda.io
  validate_certs:
    description:
      - If C(no), SSL certificates for the target url will not be validated. This should only be used
        on personally controlled sites using self-signed certificates.
    required: false
    default: 'yes'
    choices: ['yes', 'no']

# informational: requirements for nodes
requirements: [ ]
'''

EXAMPLES = '''
- bigpanda: component=myapp version=1.3 token={{ bigpanda_token }} state=started
...
- bigpanda: component=myapp version=1.3 token={{ bigpanda_token }} state=finished

or using a deployment object:
- bigpanda: component=myapp version=1.3 token={{ bigpanda_token }} state=started
  register: deployment

- bigpanda: state=finished
  args: deployment

If outside servers aren't reachable from your machine, use local_action and pass the hostname:
- local_action: bigpanda component=myapp version=1.3 hosts={{ansible_hostname}} token={{ bigpanda_token }} state=started
  register: deployment
...
- local_action: bigpanda state=finished
  args: deployment
'''
# ===========================================
# Module execution.
#
import socket
def main():
    """Entry point: report a deployment start/end event to the BigPanda API.

    Builds the event payload from module parameters, posts it to the
    /data/events/deployments/{start,end} endpoint selected by `state`, and
    exits with a `deployment` fact object that later tasks can feed back in.
    """
    module = AnsibleModule(
        argument_spec=dict(
            component=dict(required=True, aliases=['name']),
            version=dict(required=True),
            token=dict(required=True),
            state=dict(required=True, choices=['started', 'finished', 'failed']),
            # Default to the local hostname when no hosts are given.
            hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
            env=dict(required=False),
            owner=dict(required=False),
            description=dict(required=False),
            message=dict(required=False),
            source_system=dict(required=False, default='ansible'),
            validate_certs=dict(default='yes', type='bool'),
            url=dict(required=False, default='https://api.bigpanda.io'),
        ),
        supports_check_mode=True,
        # Extra keys are tolerated so a registered deployment object can be
        # passed back wholesale via `args:`.
        check_invalid_arguments=False,
    )

    token = module.params['token']
    state = module.params['state']
    url = module.params['url']

    # Build the common request body
    body = dict()
    for k in ('component', 'version', 'hosts'):
        v = module.params[k]
        if v is not None:
            body[k] = v

    # The API expects a list of hosts; wrap a scalar value.
    if not isinstance(body['hosts'], list):
        body['hosts'] = [body['hosts']]

    # Insert state-specific attributes to body
    if state == 'started':
        for k in ('source_system', 'env', 'owner', 'description'):
            v = module.params[k]
            if v is not None:
                body[k] = v
        request_url = url + '/data/events/deployments/start'
    else:
        message = module.params['message']
        if message is not None:
            # The end-of-deployment endpoint uses 'errorMessage' for failures.
            body['errorMessage'] = message

        if state == 'finished':
            body['status'] = 'success'
        else:
            body['status'] = 'failure'

        request_url = url + '/data/events/deployments/end'

    # Build the deployment object we return
    deployment = dict(token=token, url=url)
    deployment.update(body)
    if 'errorMessage' in deployment:
        # Surface the failure text under the module's own 'message' key.
        message = deployment.pop('errorMessage')
        deployment['message'] = message

    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True, **deployment)

    # Send the data to bigpanda
    data = json.dumps(body)
    headers = {'Authorization':'Bearer %s' % token, 'Content-Type':'application/json'}
    try:
        response, info = fetch_url(module, request_url, data=data, headers=headers)
        if info['status'] == 200:
            module.exit_json(changed=True, **deployment)
        else:
            # Non-200: expose the fetch_url info dict as the failure message.
            module.fail_json(msg=json.dumps(info))
    except Exception, e:
        module.fail_json(msg=str(e))
# import module snippets
# Star imports are the legacy Ansible convention: they inject AnsibleModule,
# json and fetch_url into this module's namespace at runtime.
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

# Run the module only when Ansible executes this file as a script.
if __name__ == '__main__':
    main()
|
bobellis/ghost_blog | refs/heads/master | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/hdl.py | 363 | # -*- coding: utf-8 -*-
"""
pygments.lexers.hdl
~~~~~~~~~~~~~~~~~~~
Lexers for hardware descriptor languages.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, include, using, this
from pygments.token import \
Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \
Error
__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
class VerilogLexer(RegexLexer):
    """
    For verilog source code with preprocessor directives.

    *New in Pygments 1.4.*
    """
    name = 'verilog'
    aliases = ['verilog', 'v']
    filenames = ['*.v']
    mimetypes = ['text/x-verilog']

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'

    tokens = {
        'root': [
            (r'^\s*`define', Comment.Preproc, 'macro'),
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
            (r'[{}#@]', Punctuation),
            (r'L?"', String, 'string'),
            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),  # should be binary
            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
            (r'\'[01xz]', Number),
            (r'\d+[Ll]?', Number.Integer),
            (r'\*/', Error),
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\],.;\']', Punctuation),
            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),
            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
             'import'),
            (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|'
             r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|'
             r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|'
             r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|'
             r'endtable|endtask|enum|event|final|for|force|forever|fork|function|'
             r'generate|genvar|highz0|highz1|if|initial|inout|input|'
             r'integer|join|large|localparam|macromodule|medium|module|'
             r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|'
             r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|'
             r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|'
             r'rtranif1|scalared|signed|small|specify|specparam|strength|'
             r'string|strong0|strong1|struct|table|task|'
             r'tran|tranif0|tranif1|type|typedef|'
             r'unsigned|var|vectored|void|wait|weak0|weak1|while|'
             r'xnor|xor)\b', Keyword),

            (r'`(accelerate|autoexpand_vectornets|celldefine|default_nettype|'
             r'else|elsif|endcelldefine|endif|endprotect|endprotected|'
             r'expand_vectornets|ifdef|ifndef|include|noaccelerate|noexpand_vectornets|'
             r'noremove_gatenames|noremove_netnames|nounconnected_drive|'
             r'protect|protected|remove_gatenames|remove_netnames|resetall|'
             r'timescale|unconnected_drive|undef)\b', Comment.Preproc),

            (r'\$(bits|bitstoreal|bitstoshortreal|countdrivers|display|fclose|'
             r'fdisplay|finish|floor|fmonitor|fopen|fstrobe|fwrite|'
             r'getpattern|history|incsave|input|itor|key|list|log|'
             r'monitor|monitoroff|monitoron|nokey|nolog|printtimescale|'
             r'random|readmemb|readmemh|realtime|realtobits|reset|reset_count|'
             r'reset_value|restart|rtoi|save|scale|scope|shortrealtobits|'
             r'showscopes|showvariables|showvars|sreadmemb|sreadmemh|'
             r'stime|stop|strobe|time|timeformat|write)\b', Name.Builtin),

            # BUG FIX: a '|' was missing after 'wor', which fused it with
            # 'shortreal' into the bogus alternative 'worshortreal', so the
            # 'wor' net type and the 'shortreal' type were never highlighted.
            (r'(byte|shortint|int|longint|integer|time|'
             r'bit|logic|reg|'
             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
             r'shortreal|real|realtime)\b', Keyword.Type),
            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ],
        'macro': [
            (r'[^/\n]+', Comment.Preproc),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'//.*?\n', Comment.Single, '#pop'),
            (r'/', Comment.Preproc),
            (r'(?<=\\)\n', Comment.Preproc),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'import': [
            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
        ]
    }

    def get_tokens_unprocessed(self, text):
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
            # Convention: mark all upper case names as constants
            if token is Name:
                if value.isupper():
                    token = Name.Constant
            yield index, token, value
class SystemVerilogLexer(RegexLexer):
    """
    Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
    1800-2009 standard.

    *New in Pygments 1.5.*
    """
    name = 'systemverilog'
    aliases = ['systemverilog', 'sv']
    filenames = ['*.sv', '*.svh']
    mimetypes = ['text/x-systemverilog']

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'

    tokens = {
        'root': [
            (r'^\s*`define', Comment.Preproc, 'macro'),
            (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
            (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'),

            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
            (r'[{}#@]', Punctuation),
            (r'L?"', String, 'string'),
            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),

            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),

            (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
            (r'([0-9]+)|(\'b)[0-1]+', Number.Hex),  # should be binary
            (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
            (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
            (r'\'[01xz]', Number),
            (r'\d+[Ll]?', Number.Integer),

            (r'\*/', Error),

            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r'[()\[\],.;\']', Punctuation),
            (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant),

            (r'(accept_on|alias|always|always_comb|always_ff|always_latch|'
             r'and|assert|assign|assume|automatic|before|begin|bind|bins|'
             r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|'
             r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|'
             r'context|continue|cover|covergroup|coverpoint|cross|deassign|'
             r'default|defparam|design|disable|dist|do|edge|else|end|endcase|'
             r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|'
             r'endgroup|endinterface|endmodule|endpackage|endprimitive|'
             r'endprogram|endproperty|endsequence|endspecify|endtable|'
             r'endtask|enum|event|eventually|expect|export|extends|extern|'
             r'final|first_match|for|force|foreach|forever|fork|forkjoin|'
             r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|'
             r'ignore_bins|illegal_bins|implies|import|incdir|include|'
             r'initial|inout|input|inside|instance|int|integer|interface|'
             r'intersect|join|join_any|join_none|large|let|liblist|library|'
             r'local|localparam|logic|longint|macromodule|matches|medium|'
             r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|'
             r'not|notif0|notif1|null|or|output|package|packed|parameter|'
             r'pmos|posedge|primitive|priority|program|property|protected|'
             r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|'
             r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|'
             r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|'
             r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|'
             r's_until|s_until_with|scalared|sequence|shortint|shortreal|'
             r'showcancelled|signed|small|solve|specify|specparam|static|'
             r'string|strong|strong0|strong1|struct|super|supply0|supply1|'
             r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|'
             r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|'
             r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|'
             r'unsigned|until|until_with|untyped|use|uwire|var|vectored|'
             r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|'
             r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword ),

            (r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|'
             r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|'
             r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|'
             r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b',
             Comment.Preproc ),

            (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|'
             r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|'
             r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|'
             r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|'
             r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|'
             r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|'
             r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|'
             r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|'
             r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|'
             r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|'
             r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|'
             r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|'
             r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|'
             r'\$writememh|\$writeo)\b' , Name.Builtin ),

            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),

            # BUG FIX: a '|' was missing after 'wor', which fused it with
            # 'shortreal' into the bogus alternative 'worshortreal', so the
            # 'wor' net type and the 'shortreal' type were never highlighted.
            (r'(byte|shortint|int|longint|integer|time|'
             r'bit|logic|reg|'
             r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor|'
             r'shortreal|real|realtime)\b', Keyword.Type),

            ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'classname': [
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ],
        'macro': [
            (r'[^/\n]+', Comment.Preproc),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'//.*?\n', Comment.Single, '#pop'),
            (r'/', Comment.Preproc),
            (r'(?<=\\)\n', Comment.Preproc),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'import': [
            (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop')
        ]
    }

    def get_tokens_unprocessed(self, text):
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text):
            # Convention: mark all upper case names as constants
            if token is Name:
                if value.isupper():
                    token = Name.Constant
            yield index, token, value

    def analyse_text(text):
        if text.startswith('//') or text.startswith('/*'):
            return 0.5
class VhdlLexer(RegexLexer):
    """
    For VHDL source code.

    *New in Pygments 1.5.*
    """
    name = 'vhdl'
    aliases = ['vhdl']
    filenames = ['*.vhdl', '*.vhd']
    mimetypes = ['text/x-vhdl']
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single),
            (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
            (r'[~!%^&*+=|?:<>/-]', Operator),
            (r"'[a-zA-Z_][a-zA-Z0-9_]*", Name.Attribute),
            (r'[()\[\],.;\']', Punctuation),
            (r'"[^\n\\]*"', String),

            (r'(library)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
            (r'(use)(\s+)([a-zA-Z_][\.a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Namespace)),
            (r'(entity|component)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Class)),
            (r'(architecture|configuration)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)'
             r'(of)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)(is)',
             bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
                      Name.Class, Text, Keyword)),

            (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),

            include('types'),
            include('keywords'),
            include('numbers'),

            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'endblock': [
            include('keywords'),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class),
            (r'(\s+)', Text),
            (r';', Punctuation, '#pop'),
        ],
        'types': [
            (r'(boolean|bit|character|severity_level|integer|time|delay_length|'
             r'natural|positive|string|bit_vector|file_open_kind|'
             r'file_open_status|std_ulogic|std_ulogic_vector|std_logic|'
             r'std_logic_vector)\b', Keyword.Type),
        ],
        'keywords': [
            (r'(abs|access|after|alias|all|and|'
             r'architecture|array|assert|attribute|begin|block|'
             r'body|buffer|bus|case|component|configuration|'
             r'constant|disconnect|downto|else|elsif|end|'
             r'entity|exit|file|for|function|generate|'
             r'generic|group|guarded|if|impure|in|'
             r'inertial|inout|is|label|library|linkage|'
             r'literal|loop|map|mod|nand|new|'
             r'next|nor|not|null|of|on|'
             r'open|or|others|out|package|port|'
             r'postponed|procedure|process|pure|range|record|'
             r'register|reject|return|rol|ror|select|'
             r'severity|signal|shared|sla|sli|sra|'
             r'srl|subtype|then|to|transport|type|'
             r'units|until|use|variable|wait|when|'
             r'while|with|xnor|xor)\b', Keyword),
        ],
        'numbers': [
            (r'\d{1,2}#[0-9a-fA-F_]+#?', Number.Integer),
            (r'[0-1_]+(\.[0-1_])', Number.Integer),
            (r'\d+', Number.Integer),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
            # BUG FIX: hex (H"...") and binary (B"...") bit-string literals
            # were both tagged Number.Oct; use the matching token subtypes.
            (r'H"[0-9a-fA-F_]+"', Number.Hex),
            (r'O"[0-7_]+"', Number.Oct),
            (r'B"[0-1_]+"', Number.Bin),
        ],
    }
|
aetilley/revscoring | refs/heads/master | revscoring/scorer_models/nb.py | 2 | """
.. autoclass:: revscoring.scorer_models.nb.GaussianNB
:members:
:member-order:
.. autoclass:: revscoring.scorer_models.nb.MultinomialNB
:members:
:member-order:
.. autoclass:: revscoring.scorer_models.nb.BernoulliNB
:members:
:member-order:
"""
import logging
from sklearn import naive_bayes
from .scorer_model import ScikitLearnClassifier
# Module-level logger shared by all Naive Bayes scorer models below.
logger = logging.getLogger("revscoring.scorers.nb")
class NB(ScikitLearnClassifier):
    """
    Common base for the Naive Bayes scorer models in this module.

    When a pre-built estimator is supplied via *nb*, it is used as-is;
    otherwise one is constructed by calling *sklearn_class* with the
    remaining keyword arguments.
    """

    def __init__(self, features, *, language=None, version=None, nb=None,
                 sklearn_class=None, **kwargs):
        # Build the underlying sklearn estimator only when none was given.
        estimator = nb if nb is not None else sklearn_class(**kwargs)
        super().__init__(features, classifier_model=estimator,
                         language=language, version=version)


NBModel = NB
"Alias for backwards compatibility"
class GaussianNB(NBModel):
    """
    A Gaussian Naive Bayes scorer model.

    :Params:
        features : `collection` of :class:`~revscoring.features.feature.Feature`
            Features the model is trained on
        language : :class:`~revscoring.languages.language.Language`
            Language context used during feature extraction
        version : str
            Version string identifying this model
        `**kwargs`
            Forwarded to :class:`sklearn.naive_bayes.GaussianNB`
    """

    def __init__(self, *args, **kwargs):
        # Bind the concrete sklearn estimator class; NB does the rest.
        estimator_class = naive_bayes.GaussianNB
        super().__init__(*args, sklearn_class=estimator_class, **kwargs)


GaussianNBModel = GaussianNB
"Alias for backwards compatibility"
class MultinomialNB(NBModel):
    """
    A Naive Bayes model with multinomial likelihoods.

    :Params:
        features : `collection` of :class:`~revscoring.features.feature.Feature`
            The features that the model will be trained on
        language : :class:`~revscoring.languages.language.Language`
            The language context applied when extracting features.
        version : str
            A version string representing the version of the model
        `**kwargs`
            Passed to :class:`sklearn.naive_bayes.MultinomialNB`
    """
    def __init__(self, *args, **kwargs):
        # Pin the sklearn estimator class; everything else passes through.
        estimator_class = naive_bayes.MultinomialNB
        super().__init__(*args, sklearn_class=estimator_class, **kwargs)

MultinomialNBModel = MultinomialNB
"Alias for backwards compatibility"
class BernoulliNB(NBModel):
    """
    A Naive Bayes model with Bernoulli likelihoods.

    :Params:
        features : `collection` of :class:`~revscoring.features.feature.Feature`
            The features that the model will be trained on
        language : :class:`~revscoring.languages.language.Language`
            The language context applied when extracting features.
        version : str
            A version string representing the version of the model
        `**kwargs`
            Passed to :class:`sklearn.naive_bayes.BernoulliNB`
    """
    def __init__(self, *args, **kwargs):
        # Pin the sklearn estimator class; everything else passes through.
        estimator_class = naive_bayes.BernoulliNB
        super().__init__(*args, sklearn_class=estimator_class, **kwargs)

BernoulliNBModel = BernoulliNB
"Alias for backwards compatibility"
|
ulope/django | refs/heads/master | django/db/backends/oracle/compiler.py | 22 | from django.db.models.sql import compiler
class SQLCompiler(compiler.SQLCompiler):
    """SELECT compiler that emulates LIMIT/OFFSET using Oracle's ROWNUM."""

    def as_sql(self, with_limits=True, with_col_aliases=False):
        """
        Creates the SQL for this query. Returns the SQL string and list
        of parameters. This is overridden from the original Query class
        to handle the additional SQL Oracle requires to emulate LIMIT
        and OFFSET.

        If 'with_limits' is False, any limit/offset information is not
        included in the query.
        """
        # A slice like [n:n] selects nothing: short-circuit with an empty
        # statement instead of round-tripping to the database.
        if with_limits and self.query.low_mark == self.query.high_mark:
            return '', ()

        # The `do_offset` flag indicates whether we need to construct
        # the SQL needed to use limit/offset with Oracle.
        do_offset = with_limits and (self.query.high_mark is not None
                                     or self.query.low_mark)
        if not do_offset:
            sql, params = super(SQLCompiler, self).as_sql(
                with_limits=False, with_col_aliases=with_col_aliases)
        else:
            # Column aliases are forced on so the inner query's columns
            # cannot collide with the "_RN" column added by the wrapper.
            sql, params = super(SQLCompiler, self).as_sql(
                with_limits=False, with_col_aliases=True)

            # Wrap the base query in an outer SELECT * with boundaries on
            # the "_RN" column. This is the canonical way to emulate LIMIT
            # and OFFSET on Oracle.
            high_where = ''
            if self.query.high_mark is not None:
                high_where = 'WHERE ROWNUM <= %d' % (self.query.high_mark,)
            sql = (
                'SELECT * FROM (SELECT "_SUB".*, ROWNUM AS "_RN" FROM (%s) '
                '"_SUB" %s) WHERE "_RN" > %d' % (sql, high_where, self.query.low_mark)
            )

        return sql, params
# The remaining statement types need no Oracle-specific SQL beyond what
# SQLCompiler provides; they inherit its as_sql() via multiple inheritance.

class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
    pass


class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
    pass


class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
    pass


class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
    pass


class SQLDateCompiler(compiler.SQLDateCompiler, SQLCompiler):
    pass


class SQLDateTimeCompiler(compiler.SQLDateTimeCompiler, SQLCompiler):
    pass
|
rtindru/django | refs/heads/master | django/conf/locale/id/formats.py | 504 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j N Y'
DATETIME_FORMAT = "j N Y, G.i"
TIME_FORMAT = 'G.i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y G.i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d-%m-%y', '%d/%m/%y',  # '25-10-09', '25/10/09'
    '%d-%m-%Y', '%d/%m/%Y',  # '25-10-2009', '25/10/2009'
    '%d %b %Y',              # '25 Oct 2006'
    '%d %B %Y',              # '25 October 2006'
]
TIME_INPUT_FORMATS = [
    '%H.%M.%S',  # '14.30.59'
    '%H.%M',     # '14.30'
]
DATETIME_INPUT_FORMATS = [
    '%d-%m-%Y %H.%M.%S',     # '25-10-2009 14.30.59'
    '%d-%m-%Y %H.%M.%S.%f',  # '25-10-2009 14.30.59.000200'
    '%d-%m-%Y %H.%M',        # '25-10-2009 14.30'
    '%d-%m-%Y',              # '25-10-2009'
    '%d-%m-%y %H.%M.%S',     # '25-10-09 14.30.59'
    '%d-%m-%y %H.%M.%S.%f',  # '25-10-09 14.30.59.000200'
    '%d-%m-%y %H.%M',        # '25-10-09 14.30'
    '%d-%m-%y',              # '25-10-09'
    '%m/%d/%y %H.%M.%S',     # '10/25/06 14.30.59'
    '%m/%d/%y %H.%M.%S.%f',  # '10/25/06 14.30.59.000200'
    '%m/%d/%y %H.%M',        # '10/25/06 14.30'
    '%m/%d/%y',              # '10/25/06'
    '%m/%d/%Y %H.%M.%S',     # '10/25/2009 14.30.59'
    '%m/%d/%Y %H.%M.%S.%f',  # '10/25/2009 14.30.59.000200'
    '%m/%d/%Y %H.%M',        # '10/25/2009 14.30'
    '%m/%d/%Y',              # '10/25/2009'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
DolphinDream/sverchok | refs/heads/master | nodes/spatial/voronoi3d.py | 2 | # This file is part of project Sverchok. It's copyrighted by the contributors
# recorded in the version control history of the file, available from
# its original location https://github.com/nortikin/sverchok/commit/master
#
# SPDX-License-Identifier: GPL3
# License-Filename: LICENSE
from collections import defaultdict
import bpy
from bpy.props import FloatProperty, EnumProperty, BoolProperty, IntProperty
import bmesh
from mathutils import Matrix
import sverchok
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, zip_long_repeat, throttle_and_update_node, get_data_nesting_level
from sverchok.utils.sv_mesh_utils import polygons_to_edges, mesh_join
from sverchok.utils.sv_bmesh_utils import pydata_from_bmesh, bmesh_from_pydata, bmesh_clip
from sverchok.utils.geom import calc_bounds
from sverchok.utils.logging import info, exception
from sverchok.utils.dummy_nodes import add_dummy
from sverchok.dependencies import scipy
if scipy is None:
add_dummy('SvExVoronoi3DNode', "Voronoi 3D", 'scipy')
else:
from scipy.spatial import Voronoi
class SvExVoronoi3DNode(bpy.types.Node, SverchCustomTreeNode):
    """
    Triggers: Voronoi 3D
    Tooltip: Generate 3D Voronoi diagram
    """
    bl_idname = 'SvExVoronoi3DNode'
    bl_label = 'Voronoi 3D'
    bl_icon = 'OUTLINER_OB_EMPTY'
    sv_icon = 'SV_VORONOI'

    # Output mode: either the ridge faces that separate neighbouring
    # sites, or one mesh region per site.
    out_modes = [
        ('RIDGES', "Ridges", "Ridges", 0),
        ('REGIONS', "Regions", "Regions", 1)
    ]

    out_mode : EnumProperty(
        name = "Output",
        items = out_modes,
        default = 'REGIONS',
        update = updateNode)

    # Join all per-site / per-ridge meshes into a single mesh.
    join : BoolProperty(
        name = "Join",
        default = False,
        update = updateNode)

    # Skip regions that extend to infinity (touch a -1 ridge vertex).
    closed_only : BoolProperty(
        name = "Closed regions only",
        default = True,
        update = updateNode)

    # Recalculate face normals of the generated meshes.
    normals : BoolProperty(
        name = "Correct normals",
        default = True,
        update = updateNode)

    @throttle_and_update_node
    def update_sockets(self, context):
        # The Clipping input is only meaningful while clipping is enabled.
        self.inputs['Clipping'].hide_safe = not self.do_clip

    do_clip : BoolProperty(
        name = "Clip",
        default = True,
        update = update_sockets)

    # Margin added around the sites' bounding box when clipping.
    clipping : FloatProperty(
        name = "Clipping",
        default = 1.0,
        min = 0.0,
        update = updateNode)

    def sv_init(self, context):
        """Create this node's input and output sockets."""
        self.inputs.new('SvVerticesSocket', "Vertices")
        self.inputs.new('SvStringsSocket', "Clipping").prop_name = 'clipping'
        self.outputs.new('SvVerticesSocket', "Vertices")
        self.outputs.new('SvStringsSocket', "Edges")
        self.outputs.new('SvStringsSocket', "Faces")
        self.update_sockets(context)

    def draw_buttons(self, context, layout):
        """Draw the node's UI controls in the node editor."""
        layout.prop(self, "out_mode", expand=True)
        if self.out_mode == 'REGIONS':
            layout.prop(self, "closed_only")
            layout.prop(self, "normals")
        layout.prop(self, "do_clip")
        layout.prop(self, "join")

    def make_regions(self, diagram):
        """Build one mesh per Voronoi site from *diagram*'s finite ridges.

        Returns ``(verts, edges, faces)`` as lists of per-region mesh
        data.  A site adjacent to an infinite ridge (a ridge whose vertex
        list contains -1) is considered "open" and is skipped when
        `closed_only` is enabled.
        """
        faces_per_site = defaultdict(list)
        nsites = len(diagram.point_region)
        nridges = len(diagram.ridge_points)

        open_sites = set()
        for ridge_idx in range(nridges):
            site_idx_1, site_idx_2 = diagram.ridge_points[ridge_idx]
            face = diagram.ridge_vertices[ridge_idx]
            if -1 in face:
                # Ridge extends to infinity: both adjoining sites are open.
                open_sites.add(site_idx_1)
                open_sites.add(site_idx_2)
                continue
            # Each finite ridge bounds the regions of both of its sites.
            faces_per_site[site_idx_1].append(face)
            faces_per_site[site_idx_2].append(face)

        new_verts = []
        new_edges = []
        new_faces = []
        for site_idx in sorted(faces_per_site.keys()):
            if self.closed_only and site_idx in open_sites:
                continue
            # Deduplicate diagram vertices per region while building bmesh.
            done_verts = dict()
            bm = bmesh.new()
            new_vert = bm.verts.new
            new_face = bm.faces.new
            for face in faces_per_site[site_idx]:
                face_bm_verts = []
                for vertex_idx in face:
                    if vertex_idx not in done_verts:
                        bm_vert = new_vert(diagram.vertices[vertex_idx])
                        done_verts[vertex_idx] = bm_vert
                    else:
                        bm_vert = done_verts[vertex_idx]
                    face_bm_verts.append(bm_vert)
                new_face(face_bm_verts)
            bm.verts.index_update()
            bm.verts.ensure_lookup_table()
            bm.faces.index_update()
            bm.edges.index_update()

            # Boundary vertices mean the region is not watertight.
            if self.closed_only and any(v.is_boundary for v in bm.verts):
                bm.free()
                continue

            if self.normals:
                bm.normal_update()
                bmesh.ops.recalc_face_normals(bm, faces=bm.faces[:])

            region_verts, region_edges, region_faces = pydata_from_bmesh(bm)
            bm.free()
            new_verts.append(region_verts)
            new_edges.append(region_edges)
            new_faces.append(region_faces)
        return new_verts, new_edges, new_faces

    def split_ridges(self, vertices, edges, faces):
        """Turn each ridge face into its own independent single-face mesh.

        Vertices are duplicated per face, so the resulting meshes share
        no data with one another.
        """
        result_verts = []
        result_edges = []
        result_faces = []
        for face in faces:
            bm = bmesh.new()
            new_vert = bm.verts.new
            new_face = bm.faces.new
            face_bm_verts = []
            for vertex_idx in face:
                vertex = vertices[vertex_idx]
                bm_vert = new_vert(vertex)
                face_bm_verts.append(bm_vert)
            new_face(face_bm_verts)
            bm.verts.index_update()
            bm.verts.ensure_lookup_table()
            bm.faces.index_update()
            bm.edges.index_update()
            # NOTE(review): bm is not freed here, unlike in make_regions /
            # clip_mesh — looks like a bmesh leak; confirm and add bm.free().
            ridge_verts, ridge_edges, ridge_faces = pydata_from_bmesh(bm)
            result_verts.append(ridge_verts)
            result_edges.append(ridge_edges)
            result_faces.append(ridge_faces)
        return result_verts, result_edges, result_faces

    def clip_mesh(self, bounds, vertices, edges, faces, fill=False, iterate=None):
        """Clip mesh data against *bounds*.

        When *iterate* is true (or the data is nested one level deeper
        than a single mesh), each contained mesh is clipped separately
        and empty results are dropped.  When *fill* is true, holes cut
        by the clipping are filled with new faces.
        """
        if iterate is None:
            # Nesting level > 2 means a list of meshes rather than one mesh.
            iterate = get_data_nesting_level(vertices) > 2
        if iterate:
            vertices_result = []
            edges_result = []
            faces_result = []
            for vertices_item, edges_item, faces_item in zip(vertices, edges, faces):
                new_vertices, new_edges, new_faces = self.clip_mesh(bounds, vertices_item, edges_item, faces_item, fill=fill, iterate=False)
                if new_vertices:
                    vertices_result.append(new_vertices)
                    edges_result.append(new_edges)
                    faces_result.append(new_faces)
            return vertices_result, edges_result, faces_result
        else:
            bm = bmesh_from_pydata(vertices, edges, faces)
            bmesh_clip(bm, bounds, fill)
            vertices, edges, faces = pydata_from_bmesh(bm)
            bm.free()
            return vertices, edges, faces

    def process(self):
        """Compute the Voronoi diagram(s) and fill the output sockets."""
        if not any(socket.is_linked for socket in self.outputs):
            return

        vertices_s = self.inputs['Vertices'].sv_get()
        clipping_s = self.inputs['Clipping'].sv_get()

        verts_out = []
        edges_out = []
        faces_out = []
        for sites, clipping in zip_long_repeat(vertices_s, clipping_s):
            if isinstance(clipping, (list, tuple)):
                clipping = clipping[0]

            diagram = Voronoi(sites)
            if self.do_clip:
                # Bounding box of the sites, expanded by the clipping margin.
                bounds = calc_bounds(sites, clipping)

            if self.out_mode == 'RIDGES':
                # Ridges: the faces that separate neighbouring sites.
                new_verts = diagram.vertices.tolist()
                new_faces = [e for e in diagram.ridge_vertices if not -1 in e]
                new_edges = polygons_to_edges([new_faces], True)[0]

                if self.join:
                    if self.do_clip:
                        new_verts, new_edges, new_faces = self.clip_mesh(bounds, new_verts, new_edges, new_faces, fill=False)
                    verts_out.append(new_verts)
                    edges_out.append(new_edges)
                    faces_out.append(new_faces)
                else:
                    new_verts, new_edges, new_faces = self.split_ridges(new_verts, new_edges, new_faces)
                    if self.do_clip:
                        new_verts, new_edges, new_faces = self.clip_mesh(bounds, new_verts, new_edges, new_faces, fill=False, iterate=True)
                    verts_out.extend(new_verts)
                    edges_out.extend(new_edges)
                    faces_out.extend(new_faces)
            else: # REGIONS
                new_verts, new_edges, new_faces = self.make_regions(diagram)
                if self.join:
                    new_verts, new_edges, new_faces = mesh_join(new_verts, new_edges, new_faces)
                    new_verts = [new_verts]
                    new_edges = [new_edges]
                    new_faces = [new_faces]
                if self.do_clip:
                    # Regions are closed volumes, so fill the clip cuts.
                    new_verts, new_edges, new_faces = self.clip_mesh(bounds, new_verts, new_edges, new_faces, fill=True)
                verts_out.extend(new_verts)
                edges_out.extend(new_edges)
                faces_out.extend(new_faces)

        self.outputs['Vertices'].sv_set(verts_out)
        self.outputs['Edges'].sv_set(edges_out)
        self.outputs['Faces'].sv_set(faces_out)
def register():
    """Register the node class with Blender (no-op when scipy is missing)."""
    if scipy is None:
        return
    bpy.utils.register_class(SvExVoronoi3DNode)


def unregister():
    """Unregister the node class from Blender (no-op when scipy is missing)."""
    if scipy is None:
        return
    bpy.utils.unregister_class(SvExVoronoi3DNode)
|
treemo/circuits | refs/heads/master | tests/node/test_node.py | 3 | #!/usr/bin/env python
from pytest import fixture, skip, PLATFORM
if PLATFORM == 'win32':
skip('Broken on Windows')
from circuits import Component, Event
from circuits.net.events import close
from circuits.node import Node, remote
from circuits.net.sockets import UDPServer
class App(Component):
    """Test component recording node lifecycle events through flags."""

    # NOTE(review): the `ready` class attribute is shadowed by the `ready`
    # handler method defined below; the handler then rebinds `self.ready`
    # to an instance attribute.  Circuits dispatches events by method
    # name, so this looks intentional — confirm before renaming either.
    ready = False
    value = False
    disconnected = False

    def foo(self):
        # Handler for the 'foo' event; its return value travels back to
        # the remote caller.
        return 'Hello World!'

    def ready(self, *args):
        # Fired once the node/server is ready to accept connections.
        self.ready = True

    def disconnect(self, component):
        # Fired when a peer disconnects.
        self.disconnected = True

    def remote_value_changed(self, value):
        # Fired when a remote call's return value arrives.
        self.value = True
@fixture()
def bind(request, manager, watcher):
    """Obtain a free (host, port) pair by binding a throwaway UDP server."""
    server = UDPServer(0).register(manager)  # port 0: let the OS pick
    assert watcher.wait('ready')

    host, port = server.host, server.port

    # Release the port again so the node under test can bind to it.
    server.fire(close())
    assert watcher.wait('closed')

    server.unregister()
    assert watcher.wait('unregistered')

    return host, port
@fixture()
def app(request, manager, watcher, bind):
    """Start a parent Node plus a child Node process connected via *bind*."""
    app = App().register(manager)
    node = Node().register(app)
    watcher.wait('ready')

    # The child runs in a separate process and listens on the bind address.
    child = (App() + Node(port=bind[1], server_ip=bind[0]))
    child.start(process=True)

    # Connect the parent node to the child.
    node.add('child', *bind)
    watcher.wait('connected')

    def finalizer():
        # Ensure the child process is stopped after the test.
        child.stop()
    request.addfinalizer(finalizer)

    return app
def test_return_value(app, watcher):
    """A remote event's return value must reach the calling node."""
    event = Event.create('foo')
    event.notify = True

    # Wrap the event so it is dispatched on the 'child' node.
    remote_event = remote(event, 'child')
    remote_event.notify = True

    value = app.fire(remote_event)
    assert watcher.wait('remote_value_changed')
    assert value.value == 'Hello World!'
|
cloudbase/cinder | refs/heads/master | cinder/tests/unit/db/test_cluster.py | 5 | # Copyright (c) 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for cluster table related operations."""
import datetime
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from sqlalchemy.orm import exc
from cinder import db
from cinder import exception
from cinder.tests.unit import test_db_api
CONF = cfg.CONF
class ClusterTestCase(test_db_api.BaseTest):
    """Unit tests for cinder.db.api.cluster_*."""

    def _default_cluster_values(self):
        """Return the default field values used for test clusters."""
        return {
            'name': 'cluster_name',
            'binary': 'cinder-volume',
            'disabled': False,
            'disabled_reason': None,
            'deleted': False,
            'updated_at': None,
            'deleted_at': None,
        }

    def _create_cluster(self, **values):
        """Create a cluster merging *values* over the defaults."""
        create_values = self._default_cluster_values()
        create_values.update(values)
        cluster = db.cluster_create(self.ctxt, create_values)
        return db.cluster_get(self.ctxt, cluster.id, services_summary=True)

    def _create_populated_cluster(self, num_services, num_down_svcs=0,
                                  **values):
        """Helper method that creates a cluster with up and down services."""
        up_time = timeutils.utcnow()
        # A service counts as "down" when its heartbeat is older than
        # CONF.service_down_time seconds.
        down_time = (up_time -
                     datetime.timedelta(seconds=CONF.service_down_time + 1))
        cluster = self._create_cluster(**values)
        # The first num_down_svcs services get the stale heartbeat.
        svcs = [
            db.service_create(
                self.ctxt,
                {'cluster_name': cluster.name,
                 'updated_at': down_time if i < num_down_svcs else up_time})
            for i in range(num_services)
        ]
        return cluster, svcs

    def test_cluster_create_and_get(self):
        """Basic cluster creation test."""
        values = self._default_cluster_values()
        cluster = db.cluster_create(self.ctxt, values)
        values['last_heartbeat'] = None
        self.assertEqual(0, cluster.race_preventer)
        for k, v in values.items():
            self.assertEqual(v, getattr(cluster, k))

        db_cluster = db.cluster_get(self.ctxt, cluster.id,
                                    services_summary=True)
        for k, v in values.items():
            self.assertEqual(v, getattr(db_cluster, k))
        self.assertEqual(0, db_cluster.race_preventer)

    def test_cluster_create_cfg_disabled(self):
        """Test that create uses enable_new_services configuration option."""
        self.override_config('enable_new_services', False)
        cluster = self._create_cluster(disabled=None)
        self.assertTrue(cluster.disabled)

    def test_cluster_create_disabled_preference(self):
        """Test that provided disabled value has highest priority on create."""
        self.override_config('enable_new_services', False)
        cluster = self._create_cluster()
        self.assertFalse(cluster.disabled)

    def test_cluster_create_duplicate(self):
        """Test that unique constraints are working.

        To remove potential races on creation we have a constraint set on name
        and race_preventer fields, and we set value on creation to 0, so 2
        clusters with the same name will fail this constraint. On deletion we
        change this field to the same value as the id which will be unique and
        will not conflict with the creation of another cluster with the same
        name.
        """
        cluster = self._create_cluster()
        self.assertRaises(exception.ClusterExists,
                          self._create_cluster,
                          name=cluster.name)

    def test_cluster_create_not_duplicate(self):
        """Test that unique constraints will work with delete operation.

        To remove potential races on creation we have a constraint set on name
        and race_preventer fields, and we set value on creation to 0, so 2
        clusters with the same name will fail this constraint. On deletion we
        change this field to the same value as the id which will be unique and
        will not conflict with the creation of another cluster with the same
        name.
        """
        cluster = self._create_cluster()
        self.assertIsNone(db.cluster_destroy(self.ctxt, cluster.id))
        self.assertIsNotNone(self._create_cluster(name=cluster.name))

    def test_cluster_get_fail(self):
        """Test that cluster get will fail if the cluster doesn't exists."""
        self._create_cluster(name='cluster@backend')
        self.assertRaises(exception.ClusterNotFound,
                          db.cluster_get, self.ctxt, 'name=cluster@backend2')

    def test_cluster_get_by_name(self):
        """Getting a cluster by name will include backends if not specified."""
        cluster = self._create_cluster(name='cluster@backend')
        # Get without the backend
        db_cluster = db.cluster_get(self.ctxt, name='cluster')
        self.assertEqual(cluster.id, db_cluster.id)
        # Get with the backend detail
        db_cluster = db.cluster_get(self.ctxt, name='cluster@backend')
        self.assertEqual(cluster.id, db_cluster.id)

    def test_cluster_get_without_summary(self):
        """Test getting cluster without summary information."""
        cluster = self._create_cluster()
        db_cluster = db.cluster_get(self.ctxt, cluster.id)
        # Summary columns are not loaded, so touching them must fail.
        self.assertRaises(exc.DetachedInstanceError,
                          getattr, db_cluster, 'num_hosts')
        self.assertRaises(exc.DetachedInstanceError,
                          getattr, db_cluster, 'num_down_hosts')
        self.assertIsNone(db_cluster.last_heartbeat)

    def test_cluster_get_with_summary_empty_cluster(self):
        """Test getting empty cluster with summary information."""
        cluster = self._create_cluster()
        db_cluster = db.cluster_get(self.ctxt, cluster.id,
                                    services_summary=True)
        self.assertEqual(0, db_cluster.num_hosts)
        self.assertEqual(0, db_cluster.num_down_hosts)
        self.assertIsNone(db_cluster.last_heartbeat)

    def test_cluster_get_with_summary(self):
        """Test getting cluster with summary information."""
        cluster, svcs = self._create_populated_cluster(3, 1)
        db_cluster = db.cluster_get(self.ctxt, cluster.id,
                                    services_summary=True)
        self.assertEqual(3, db_cluster.num_hosts)
        self.assertEqual(1, db_cluster.num_down_hosts)
        self.assertEqual(svcs[1].updated_at, db_cluster.last_heartbeat)

    def test_cluster_get_is_up_on_empty_cluster(self):
        """Test is_up filter works on empty clusters."""
        cluster = self._create_cluster()
        db_cluster = db.cluster_get(self.ctxt, cluster.id, is_up=False)
        self.assertEqual(cluster.id, db_cluster.id)
        self.assertRaises(exception.ClusterNotFound,
                          db.cluster_get, self.ctxt, cluster.id, is_up=True)

    def test_cluster_get_services_on_empty_cluster(self):
        """Test get_services filter works on empty clusters."""
        cluster = self._create_cluster()
        db_cluster = db.cluster_get(self.ctxt, cluster.id, get_services=True)
        self.assertEqual(cluster.id, db_cluster.id)
        self.assertListEqual([], db_cluster.services)

    def test_cluster_get_services(self):
        """Test services is properly populated on non empty cluster."""
        # We create another cluster to see we do the selection correctly
        self._create_populated_cluster(2, name='cluster2')
        # We create our cluster with 2 up nodes and 1 down
        cluster, svcs = self._create_populated_cluster(3, 1)
        # Add a deleted service to the cluster
        db.service_create(self.ctxt,
                          {'cluster_name': cluster.name,
                           'deleted': True})
        db_cluster = db.cluster_get(self.ctxt, name=cluster.name,
                                    get_services=True)
        # The deleted service must not be listed.
        self.assertEqual(3, len(db_cluster.services))
        self.assertSetEqual({svc.id for svc in svcs},
                            {svc.id for svc in db_cluster.services})

    def test_cluster_get_is_up_all_are_down(self):
        """Test that is_up filter works when all services are down."""
        cluster, svcs = self._create_populated_cluster(3, 3)
        self.assertRaises(exception.ClusterNotFound,
                          db.cluster_get, self.ctxt, cluster.id, is_up=True)
        db_cluster = db.cluster_get(self.ctxt, name=cluster.name, is_up=False)
        self.assertEqual(cluster.id, db_cluster.id)

    def test_cluster_get_by_num_down_hosts(self):
        """Test cluster_get by subquery field num_down_hosts."""
        cluster, svcs = self._create_populated_cluster(3, 2)
        result = db.cluster_get(self.ctxt, num_down_hosts=2)
        self.assertEqual(cluster.id, result.id)

    def test_cluster_get_by_num_hosts(self):
        """Test cluster_get by subquery field num_hosts."""
        cluster, svcs = self._create_populated_cluster(3, 2)
        result = db.cluster_get(self.ctxt, num_hosts=3)
        self.assertEqual(cluster.id, result.id)

    def test_cluster_destroy(self):
        """Test basic cluster destroy."""
        cluster = self._create_cluster()
        # On creation race_preventer is marked with a 0
        self.assertEqual(0, cluster.race_preventer)
        db.cluster_destroy(self.ctxt, cluster.id)
        db_cluster = db.cluster_get(self.ctxt, cluster.id, read_deleted='yes')
        self.assertTrue(db_cluster.deleted)
        self.assertIsNotNone(db_cluster.deleted_at)
        # On deletion race_preventer is marked with the id
        self.assertEqual(cluster.id, db_cluster.race_preventer)

    def test_cluster_destroy_non_existent(self):
        """Test destroying non existent cluster."""
        self.assertRaises(exception.ClusterNotFound,
                          db.cluster_destroy, self.ctxt, 0)

    def test_cluster_destroy_has_services(self):
        """Test that we cannot delete a cluster with non deleted services."""
        cluster, svcs = self._create_populated_cluster(3, 1)
        self.assertRaises(exception.ClusterHasHosts,
                          db.cluster_destroy, self.ctxt, cluster.id)

    def test_cluster_update_non_existent(self):
        """Test that we raise an exception on updating non existent cluster."""
        self.assertRaises(exception.ClusterNotFound,
                          db.cluster_update, self.ctxt, 0, {'disabled': True})

    def test_cluster_update(self):
        """Test basic cluster update."""
        cluster = self._create_cluster()
        self.assertFalse(cluster.disabled)
        db.cluster_update(self.ctxt, cluster.id, {'disabled': True})
        db_cluster = db.cluster_get(self.ctxt, cluster.id)
        self.assertTrue(db_cluster.disabled)

    def test_cluster_get_all_empty(self):
        """Test basic empty cluster get_all."""
        self.assertListEqual([], db.cluster_get_all(self.ctxt))

    def test_cluster_get_all_matches(self):
        """Basic test of get_all with a matching filter."""
        cluster1, svcs = self._create_populated_cluster(3, 1)
        cluster2, svcs = self._create_populated_cluster(3, 2, name='cluster2')
        # cluster3 has all services down, so it must not match is_up=True.
        cluster3, svcs = self._create_populated_cluster(3, 3, name='cluster3')

        expected = {cluster1.id, cluster2.id}
        result = db.cluster_get_all(self.ctxt, is_up=True)
        self.assertEqual(len(expected), len(result))
        self.assertSetEqual(expected, {cluster.id for cluster in result})

    def test_cluster_get_all_no_match(self):
        """Basic test of get_all with a non matching filter."""
        cluster1, svcs = self._create_populated_cluster(3, 3)
        result = db.cluster_get_all(self.ctxt, is_up=True)
        self.assertListEqual([], result)

    @mock.patch('cinder.db.sqlalchemy.api._cluster_query')
    def test_cluster_get_all_passes_parameters(self, cluster_query_mock):
        """Test that get_all passes all parameters.

        Since we have already tested all filters and parameters with
        cluster_get method all we have to do for get_all is to check that we
        are passing them to the query building method.
        """
        args = (mock.sentinel.read_deleted, mock.sentinel.get_services,
                mock.sentinel.services_summary, mock.sentinel.is_up,
                mock.sentinel.name_match_level)
        filters = {'session': mock.sentinel.session,
                   'name': mock.sentinel.name,
                   'disabled': mock.sentinel.disabled,
                   'disabled_reason': mock.sentinel.disabled_reason,
                   'race_preventer': mock.sentinel.race_preventer,
                   'last_heartbeat': mock.sentinel.last_heartbeat,
                   'num_hosts': mock.sentinel.num_hosts,
                   'num_down_hosts': mock.sentinel.num_down_hosts}
        db.cluster_get_all(self.ctxt, *args, **filters)
        cluster_query_mock.assert_called_once_with(self.ctxt, *args, **filters)
|
qiqipipioioi/django-radius-my | refs/heads/master | userWeb/admin.py | 1 | from django.contrib import admin
from userWeb.models import News, userlist
class NewsAdmin(admin.ModelAdmin):
    """Admin configuration for News entries."""
    list_display = ('title', 'created_time', 'status')  # changelist columns
    list_filter = ('status',)
    ordering = ('-created_time',)  # newest entries first


class userlistAdmin(admin.ModelAdmin):
    """Admin configuration for user list entries."""
    # NOTE(review): 'statu' presumably mirrors the model's field name —
    # confirm it is not a typo before "fixing" it here.
    list_display = ('username', 'statu')
    list_filter = ('statu',)


admin.site.register(News, NewsAdmin)
admin.site.register(userlist, userlistAdmin)
# Register your models here.
|
dvliman/jaikuengine | refs/heads/master | .google_appengine/lib/django-1.5/django/contrib/gis/db/backends/mysql/operations.py | 100 | from django.db.backends.mysql.base import DatabaseOperations
from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations
from django.utils import six
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
    """Spatial backend operations for MySQL.

    MySQL only offers MBR (minimum bounding rectangle) based spatial
    predicates, so every supported lookup below maps to an MBR*
    function, and spatial transformations are unavailable.
    """
    compiler_module = 'django.contrib.gis.db.backends.mysql.compiler'
    mysql = True
    name = 'mysql'
    select = 'AsText(%s)'
    from_wkb = 'GeomFromWKB'
    from_text = 'GeomFromText'

    Adapter = WKTAdapter
    Adaptor = Adapter  # Backwards-compatibility alias.

    # Mapping of ORM lookup names to MySQL's MBR-based functions.
    geometry_functions = {
        'bbcontains' : 'MBRContains', # For consistency w/PostGIS API
        'bboverlaps' : 'MBROverlaps', # .. ..
        'contained' : 'MBRWithin', # .. ..
        'contains' : 'MBRContains',
        'disjoint' : 'MBRDisjoint',
        'equals' : 'MBREqual',
        'exact' : 'MBREqual',
        'intersects' : 'MBRIntersects',
        'overlaps' : 'MBROverlaps',
        'same_as' : 'MBREqual',
        'touches' : 'MBRTouches',
        'within' : 'MBRWithin',
    }

    # All supported lookup terms plus 'isnull'; only the keys matter
    # (values are unused), so dict.fromkeys is the idiomatic builder.
    gis_terms = dict.fromkeys(list(geometry_functions) + ['isnull'])

    def geo_db_type(self, f):
        """Return the database column type for the geometry field *f*."""
        return f.geom_type

    def get_geom_placeholder(self, value, srid):
        """
        The placeholder here has to include MySQL's WKT constructor. Because
        MySQL does not support spatial transformations, there is no need to
        modify the placeholder based on the contents of the given value.
        """
        if hasattr(value, 'expression'):
            placeholder = self.get_expression_column(value)
        else:
            placeholder = '%s(%%s)' % self.from_text
        return placeholder

    def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
        """Return the SQL fragment implementing a spatial lookup."""
        alias, col, db_type = lvalue

        geo_col = '%s.%s' % (qn(alias), qn(col))

        lookup_info = self.geometry_functions.get(lookup_type, False)
        if lookup_info:
            return "%s(%s, %s)" % (lookup_info, geo_col,
                                   self.get_geom_placeholder(value, field.srid))

        # TODO: Is this really necessary? MySQL can't handle NULL geometries
        # in its spatial indexes anyways.
        if lookup_type == 'isnull':
            return "%s IS %sNULL" % (geo_col, (not value and 'NOT ' or ''))

        raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
|
googleapis/python-talent | refs/heads/master | google/cloud/talent_v4beta1/__init__.py | 1 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.application_service import ApplicationServiceClient
from .services.application_service import ApplicationServiceAsyncClient
from .services.company_service import CompanyServiceClient
from .services.company_service import CompanyServiceAsyncClient
from .services.completion import CompletionClient
from .services.completion import CompletionAsyncClient
from .services.event_service import EventServiceClient
from .services.event_service import EventServiceAsyncClient
from .services.job_service import JobServiceClient
from .services.job_service import JobServiceAsyncClient
from .services.profile_service import ProfileServiceClient
from .services.profile_service import ProfileServiceAsyncClient
from .services.tenant_service import TenantServiceClient
from .services.tenant_service import TenantServiceAsyncClient
from .types.application import Application
from .types.application_service import CreateApplicationRequest
from .types.application_service import DeleteApplicationRequest
from .types.application_service import GetApplicationRequest
from .types.application_service import ListApplicationsRequest
from .types.application_service import ListApplicationsResponse
from .types.application_service import UpdateApplicationRequest
from .types.common import BatchOperationMetadata
from .types.common import Certification
from .types.common import CompensationInfo
from .types.common import CustomAttribute
from .types.common import DeviceInfo
from .types.common import Interview
from .types.common import Location
from .types.common import Rating
from .types.common import RequestMetadata
from .types.common import ResponseMetadata
from .types.common import Skill
from .types.common import SpellingCorrection
from .types.common import TimestampRange
from .types.common import AvailabilitySignalType
from .types.common import CommuteMethod
from .types.common import CompanySize
from .types.common import ContactInfoUsage
from .types.common import DegreeType
from .types.common import EmploymentType
from .types.common import HtmlSanitization
from .types.common import JobBenefit
from .types.common import JobCategory
from .types.common import JobLevel
from .types.common import Outcome
from .types.common import PostingRegion
from .types.common import SkillProficiencyLevel
from .types.common import Visibility
from .types.company import Company
from .types.company_service import CreateCompanyRequest
from .types.company_service import DeleteCompanyRequest
from .types.company_service import GetCompanyRequest
from .types.company_service import ListCompaniesRequest
from .types.company_service import ListCompaniesResponse
from .types.company_service import UpdateCompanyRequest
from .types.completion_service import CompleteQueryRequest
from .types.completion_service import CompleteQueryResponse
from .types.event import ClientEvent
from .types.event import JobEvent
from .types.event import ProfileEvent
from .types.event_service import CreateClientEventRequest
from .types.filters import ApplicationDateFilter
from .types.filters import ApplicationJobFilter
from .types.filters import ApplicationOutcomeNotesFilter
from .types.filters import AvailabilityFilter
from .types.filters import CandidateAvailabilityFilter
from .types.filters import CommuteFilter
from .types.filters import CompensationFilter
from .types.filters import EducationFilter
from .types.filters import EmployerFilter
from .types.filters import JobQuery
from .types.filters import JobTitleFilter
from .types.filters import LocationFilter
from .types.filters import PersonNameFilter
from .types.filters import ProfileQuery
from .types.filters import SkillFilter
from .types.filters import TimeFilter
from .types.filters import WorkExperienceFilter
from .types.histogram import HistogramQuery
from .types.histogram import HistogramQueryResult
from .types.job import Job
from .types.job_service import BatchCreateJobsRequest
from .types.job_service import BatchDeleteJobsRequest
from .types.job_service import BatchUpdateJobsRequest
from .types.job_service import CreateJobRequest
from .types.job_service import DeleteJobRequest
from .types.job_service import GetJobRequest
from .types.job_service import JobOperationResult
from .types.job_service import ListJobsRequest
from .types.job_service import ListJobsResponse
from .types.job_service import SearchJobsRequest
from .types.job_service import SearchJobsResponse
from .types.job_service import UpdateJobRequest
from .types.job_service import JobView
from .types.profile import Activity
from .types.profile import AdditionalContactInfo
from .types.profile import Address
from .types.profile import AvailabilitySignal
from .types.profile import Degree
from .types.profile import EducationRecord
from .types.profile import Email
from .types.profile import EmploymentRecord
from .types.profile import Patent
from .types.profile import PersonalUri
from .types.profile import PersonName
from .types.profile import Phone
from .types.profile import Profile
from .types.profile import Publication
from .types.profile import Resume
from .types.profile_service import CreateProfileRequest
from .types.profile_service import DeleteProfileRequest
from .types.profile_service import GetProfileRequest
from .types.profile_service import ListProfilesRequest
from .types.profile_service import ListProfilesResponse
from .types.profile_service import SearchProfilesRequest
from .types.profile_service import SearchProfilesResponse
from .types.profile_service import SummarizedProfile
from .types.profile_service import UpdateProfileRequest
from .types.tenant import Tenant
from .types.tenant_service import CreateTenantRequest
from .types.tenant_service import DeleteTenantRequest
from .types.tenant_service import GetTenantRequest
from .types.tenant_service import ListTenantsRequest
from .types.tenant_service import ListTenantsResponse
from .types.tenant_service import UpdateTenantRequest
# Explicit public API of this package: every name listed here is
# re-exported at package level (clients, request/response messages,
# resource types and enums).
__all__ = (
    "ApplicationServiceAsyncClient",
    "CompanyServiceAsyncClient",
    "CompletionAsyncClient",
    "EventServiceAsyncClient",
    "JobServiceAsyncClient",
    "ProfileServiceAsyncClient",
    "TenantServiceAsyncClient",
    "Activity",
    "AdditionalContactInfo",
    "Address",
    "Application",
    "ApplicationDateFilter",
    "ApplicationJobFilter",
    "ApplicationOutcomeNotesFilter",
    "ApplicationServiceClient",
    "AvailabilityFilter",
    "AvailabilitySignal",
    "AvailabilitySignalType",
    "BatchCreateJobsRequest",
    "BatchDeleteJobsRequest",
    "BatchOperationMetadata",
    "BatchUpdateJobsRequest",
    "CandidateAvailabilityFilter",
    "Certification",
    "ClientEvent",
    "CommuteFilter",
    "CommuteMethod",
    "Company",
    "CompanyServiceClient",
    "CompanySize",
    "CompensationFilter",
    "CompensationInfo",
    "CompleteQueryRequest",
    "CompleteQueryResponse",
    "CompletionClient",
    "ContactInfoUsage",
    "CreateApplicationRequest",
    "CreateClientEventRequest",
    "CreateCompanyRequest",
    "CreateJobRequest",
    "CreateProfileRequest",
    "CreateTenantRequest",
    "CustomAttribute",
    "Degree",
    "DegreeType",
    "DeleteApplicationRequest",
    "DeleteCompanyRequest",
    "DeleteJobRequest",
    "DeleteProfileRequest",
    "DeleteTenantRequest",
    "DeviceInfo",
    "EducationFilter",
    "EducationRecord",
    "Email",
    "EmployerFilter",
    "EmploymentRecord",
    "EmploymentType",
    "EventServiceClient",
    "GetApplicationRequest",
    "GetCompanyRequest",
    "GetJobRequest",
    "GetProfileRequest",
    "GetTenantRequest",
    "HistogramQuery",
    "HistogramQueryResult",
    "HtmlSanitization",
    "Interview",
    "Job",
    "JobBenefit",
    "JobCategory",
    "JobEvent",
    "JobLevel",
    "JobOperationResult",
    "JobQuery",
    "JobServiceClient",
    "JobTitleFilter",
    "JobView",
    "ListApplicationsRequest",
    "ListApplicationsResponse",
    "ListCompaniesRequest",
    "ListCompaniesResponse",
    "ListJobsRequest",
    "ListJobsResponse",
    "ListProfilesRequest",
    "ListProfilesResponse",
    "ListTenantsRequest",
    "ListTenantsResponse",
    "Location",
    "LocationFilter",
    "Outcome",
    "Patent",
    "PersonName",
    "PersonNameFilter",
    "PersonalUri",
    "Phone",
    "PostingRegion",
    "Profile",
    "ProfileEvent",
    "ProfileQuery",
    "ProfileServiceClient",
    "Publication",
    "Rating",
    "RequestMetadata",
    "ResponseMetadata",
    "Resume",
    "SearchJobsRequest",
    "SearchJobsResponse",
    "SearchProfilesRequest",
    "SearchProfilesResponse",
    "Skill",
    "SkillFilter",
    "SkillProficiencyLevel",
    "SpellingCorrection",
    "SummarizedProfile",
    "Tenant",
    "TenantServiceClient",
    "TimeFilter",
    "TimestampRange",
    "UpdateApplicationRequest",
    "UpdateCompanyRequest",
    "UpdateJobRequest",
    "UpdateProfileRequest",
    "UpdateTenantRequest",
    "Visibility",
    "WorkExperienceFilter",
)
|
GbalsaC/bitnamiP | refs/heads/master | pika/pika/exceptions.py | 1 | """Pika specific exceptions"""
class AMQPError(Exception):
    """Base class for all AMQP errors raised by this module."""

    def __repr__(self):
        return 'An unspecified AMQP error has occurred'


class AMQPConnectionError(AMQPError):
    """Connection-level failure.

    Raised either with a single argument (the number of connection
    attempts that were made) or with two arguments (a reply code and a
    reply text).
    """

    def __repr__(self):
        if len(self.args) == 1:
            if self.args[0] == 1:
                return ('No connection could be opened after 1 connection attempt')
            else:
                return ('No connection could be opened after %s connection attempts' %
                        self.args[0])
        elif len(self.args) == 2:
            return '%s: %s' % (self.args[0], self.args[1])
        # Bug fix: the original fell off the end here and implicitly
        # returned None for any other argument count, which made repr()
        # raise "TypeError: __repr__ returned non-string".
        return 'An unspecified AMQP connection error has occurred'


class IncompatibleProtocolError(AMQPConnectionError):
    """The broker announced an AMQP protocol this client does not speak."""

    def __repr__(self):
        return 'The protocol returned by the server is not supported'


class AuthenticationError(AMQPConnectionError):
    """Client and broker found no mutually supported auth mechanism.

    args[0] is the name of the mechanism that was attempted.
    """

    def __repr__(self):
        return ('Server and client could not negotiate use of the %s '
                'authentication mechanism' % self.args[0])


class ProbableAuthenticationError(AMQPConnectionError):
    """Disconnected during authentication; credentials were likely bad."""

    def __repr__(self):
        return ('Client was disconnected at a connection stage indicating a '
                'probable authentication error')


class ProbableAccessDeniedError(AMQPConnectionError):
    """Disconnected while opening the virtual host; access likely denied."""

    def __repr__(self):
        return ('Client was disconnected at a connection stage indicating a '
                'probable denial of access to the specified virtual host')


class NoFreeChannels(AMQPConnectionError):
    """Every channel number on the connection is already in use."""

    def __repr__(self):
        return 'The connection has run out of free channels'


class ConnectionClosed(AMQPConnectionError):
    """The connection was closed; args are (reply_code, reply_text)."""

    def __repr__(self):
        return 'The AMQP connection was closed (%s) %s' % (self.args[0],
                                                           self.args[1])


class AMQPChannelError(AMQPError):
    """Base class for channel-level errors."""

    def __repr__(self):
        return 'An unspecified AMQP channel error has occurred'


class ChannelClosed(AMQPChannelError):
    """An operation was attempted on a closed channel."""

    def __repr__(self):
        return 'The channel is closed'


class DuplicateConsumerTag(AMQPChannelError):
    """A consumer tag (args[0]) is already registered on this channel."""

    def __repr__(self):
        return ('The consumer tag specified already exists for this '
                'channel: %s' % self.args[0])


class ConsumerCancelled(AMQPChannelError):
    """The broker cancelled the consumer.

    args[0] is the broker's method frame, carrying ``reply_code`` and
    ``reply_text`` attributes.
    """

    def __repr__(self):
        return 'Server cancelled consumer (%s): %s' % (self.args[0].reply_code,
                                                       self.args[0].reply_text)


class InvalidChannelNumber(AMQPError):
    """A channel number (args[0]) outside the valid range was used."""

    def __repr__(self):
        return 'An invalid channel number has been specified: %s' % self.args[0]


class ProtocolSyntaxError(AMQPError):
    """Base class for errors in the wire-level protocol exchange."""

    def __repr__(self):
        return 'An unspecified protocol syntax error occurred'


class UnexpectedFrameError(ProtocolSyntaxError):
    """A frame (args[0]) arrived out of the expected sequence."""

    def __repr__(self):
        return 'Received a frame out of sequence: %r' % self.args[0]


class ProtocolVersionMismatch(ProtocolSyntaxError):
    """Client and broker protocol versions (args[0], args[1]) differ."""

    def __repr__(self):
        return 'Protocol versions did not match: %r vs %r' % (self.args[0],
                                                              self.args[1])


class BodyTooLongError(ProtocolSyntaxError):
    """More body bytes arrived (args[0]) than were announced (args[1])."""

    def __repr__(self):
        return ('Received too many bytes for a message delivery: '
                'Received %i, expected %i' % (self.args[0], self.args[1]))


class InvalidFrameError(ProtocolSyntaxError):
    """A frame (args[0]) could not be decoded."""

    def __repr__(self):
        return 'Invalid frame received: %r' % self.args[0]


class InvalidFieldTypeException(ProtocolSyntaxError):
    """An AMQP field-table entry used an unsupported type code (args[0])."""

    def __repr__(self):
        return 'Unsupported field kind %s' % self.args[0]


class UnspportedAMQPFieldException(ProtocolSyntaxError):
    """A Python value (args[1]) has no AMQP field-table encoding.

    NOTE: the misspelled class name is kept as-is; renaming it would break
    existing callers that catch this exception.
    """

    def __repr__(self):
        return 'Unsupported field kind %s' % type(self.args[1])


class MethodNotImplemented(AMQPError):
    """A received AMQP method has no handler implemented."""


class ChannelError(Exception):
    """Generic, unspecified channel problem."""

    def __repr__(self):
        return 'An unspecified error occurred with the Channel'


class InvalidMinimumFrameSize(ProtocolSyntaxError):
    """A negotiated frame size below the AMQP minimum was requested."""

    def __repr__(self):
        return 'AMQP Minimum Frame Size is 4096 Bytes'


class InvalidMaximumFrameSize(ProtocolSyntaxError):
    """A negotiated frame size above the AMQP maximum was requested."""

    def __repr__(self):
        return 'AMQP Maximum Frame Size is 131072 Bytes'
|
kaltsimon/youtube-dl | refs/heads/master | youtube_dl/extractor/mlb.py | 142 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
parse_iso8601,
)
class MLBIE(InfoExtractor):
    """Extractor for videos hosted on mlb.com (and team subdomains).

    URLs either carry an explicit numeric content id (captured as ``id``)
    or point at an article/page (captured as ``path``) from which the id
    is scraped.
    """
    # Verbose (?x) pattern: the first alternative matches video pages,
    # embed pages and play/index.jsp URLs that carry a content id; the
    # second captures an arbitrary trailing path for id-less article URLs.
    _VALID_URL = r'''(?x)
                    https?://
                        (?:[\da-z_-]+\.)*mlb\.com/
                        (?:
                            (?:
                                (?:.*?/)?video/(?:topic/[\da-z_-]+/)?v|
                                (?:
                                    shared/video/embed/(?:embed|m-internal-embed)\.html|
                                    (?:[^/]+/)+(?:play|index)\.jsp|
                                )\?.*?\bcontent_id=
                            )
                            (?P<id>n?\d+)|
                            (?:[^/]+/)*(?P<path>[^/]+)
                        )
                    '''
    _TESTS = [
        {
            'url': 'http://m.mlb.com/sea/video/topic/51231442/v34698933/nymsea-ackley-robs-a-home-run-with-an-amazing-catch/?c_id=sea',
            'md5': 'ff56a598c2cf411a9a38a69709e97079',
            'info_dict': {
                'id': '34698933',
                'ext': 'mp4',
                'title': "Ackley's spectacular catch",
                'description': 'md5:7f5a981eb4f3cbc8daf2aeffa2215bf0',
                'duration': 66,
                'timestamp': 1405980600,
                'upload_date': '20140721',
                'thumbnail': 're:^https?://.*\.jpg$',
            },
        },
        {
            'url': 'http://m.mlb.com/video/topic/81536970/v34496663/mianym-stanton-practices-for-the-home-run-derby',
            'md5': 'd9c022c10d21f849f49c05ae12a8a7e9',
            'info_dict': {
                'id': '34496663',
                'ext': 'mp4',
                'title': 'Stanton prepares for Derby',
                'description': 'md5:d00ce1e5fd9c9069e9c13ab4faedfa57',
                'duration': 46,
                'timestamp': 1405105800,
                'upload_date': '20140711',
                'thumbnail': 're:^https?://.*\.jpg$',
            },
        },
        {
            'url': 'http://m.mlb.com/video/topic/vtp_hrd_sponsor/v34578115/hrd-cespedes-wins-2014-gillette-home-run-derby',
            'md5': '0e6e73d509321e142409b695eadd541f',
            'info_dict': {
                'id': '34578115',
                'ext': 'mp4',
                'title': 'Cespedes repeats as Derby champ',
                'description': 'md5:08df253ce265d4cf6fb09f581fafad07',
                'duration': 488,
                'timestamp': 1405399936,
                'upload_date': '20140715',
                'thumbnail': 're:^https?://.*\.jpg$',
            },
        },
        {
            'url': 'http://m.mlb.com/video/v34577915/bautista-on-derby-captaining-duties-his-performance',
            'md5': 'b8fd237347b844365d74ea61d4245967',
            'info_dict': {
                'id': '34577915',
                'ext': 'mp4',
                'title': 'Bautista on Home Run Derby',
                'description': 'md5:b80b34031143d0986dddc64a8839f0fb',
                'duration': 52,
                'timestamp': 1405390722,
                'upload_date': '20140715',
                'thumbnail': 're:^https?://.*\.jpg$',
            },
        },
        {
            'url': 'http://m.mlb.com/news/article/118550098/blue-jays-kevin-pillar-goes-spidey-up-the-wall-to-rob-tim-beckham-of-a-homer',
            'md5': 'b190e70141fb9a1552a85426b4da1b5d',
            'info_dict': {
                'id': '75609783',
                'ext': 'mp4',
                'title': 'Must C: Pillar climbs for catch',
                'description': '4/15/15: Blue Jays outfielder Kevin Pillar continues his defensive dominance by climbing the wall in left to rob Tim Beckham of a home run',
                'timestamp': 1429124820,
                'upload_date': '20150415',
            }
        },
        {
            'url': 'http://m.mlb.com/shared/video/embed/embed.html?content_id=35692085&topic_id=6479266&width=400&height=224&property=mlb',
            'only_matching': True,
        },
        {
            'url': 'http://mlb.mlb.com/shared/video/embed/embed.html?content_id=36599553',
            'only_matching': True,
        },
        {
            'url': 'http://mlb.mlb.com/es/video/play.jsp?content_id=36599553',
            'only_matching': True,
        },
        {
            'url': 'http://m.cardinals.mlb.com/stl/video/v51175783/atlstl-piscotty-makes-great-sliding-catch-on-line/?partnerId=as_mlb_20150321_42500876&adbid=579409712979910656&adbpl=tw&adbpr=52847728',
            'only_matching': True,
        },
        {
            # From http://m.mlb.com/news/article/118550098/blue-jays-kevin-pillar-goes-spidey-up-the-wall-to-rob-tim-beckham-of-a-homer
            'url': 'http://mlb.mlb.com/shared/video/embed/m-internal-embed.html?content_id=75609783&property=mlb&autoplay=true&hashmode=false&siteSection=mlb/multimedia/article_118550098/article_embed&club=mlb',
            'only_matching': True,
        },
        {
            'url': 'http://washington.nationals.mlb.com/mlb/gameday/index.jsp?c_id=was&gid=2015_05_09_atlmlb_wasmlb_1&lang=en&content_id=108309983&mode=video#',
            'only_matching': True,
        }
    ]
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        # URL did not carry a content id: download the page and scrape the
        # id out of the markup instead.
        if not video_id:
            video_path = mobj.group('path')
            webpage = self._download_webpage(url, video_path)
            video_id = self._search_regex(
                [r'data-video-?id="(\d+)"', r'content_id=(\d+)'], webpage, 'video id')
        # The metadata XML is sharded under a path built from the last
        # three digits of the content id.
        detail = self._download_xml(
            'http://m.mlb.com/gen/multimedia/detail/%s/%s/%s/%s.xml'
            % (video_id[-3], video_id[-2], video_id[-1], video_id), video_id)
        title = detail.find('./headline').text
        description = detail.find('./big-blurb').text
        duration = parse_duration(detail.find('./duration').text)
        # The date attribute carries a trailing numeric UTC offset that
        # parse_iso8601 is not given ([:-5] strips it).
        timestamp = parse_iso8601(detail.attrib['date'][:-5])
        thumbnails = [{
            'url': thumbnail.text,
        } for thumbnail in detail.findall('./thumbnailScenarios/thumbnailScenario')]
        formats = []
        for media_url in detail.findall('./url'):
            playback_scenario = media_url.attrib['playback_scenario']
            fmt = {
                'url': media_url.text,
                'format_id': playback_scenario,
            }
            # Scenario names such as "1200K_640X360" encode bitrate (kbps)
            # and resolution; decode them when present.
            m = re.search(r'(?P<vbr>\d+)K_(?P<width>\d+)X(?P<height>\d+)', playback_scenario)
            if m:
                fmt.update({
                    'vbr': int(m.group('vbr')) * 1000,
                    'width': int(m.group('width')),
                    'height': int(m.group('height')),
                })
            formats.append(fmt)
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'duration': duration,
            'timestamp': timestamp,
            'formats': formats,
            'thumbnails': thumbnails,
        }
|
mad-lab/transit | refs/heads/master | src/pytransit/fileDisplay.py | 1 | # Copyright 2015.
# Michael A. DeJesus, Chaitra Ambadipudi, and Thomas R. Ioerger.
#
#
# This file is part of TRANSIT.
#
# TRANSIT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License.
#
#
# TRANSIT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TRANSIT. If not, see <http://www.gnu.org/licenses/>.
# wxPython is an optional dependency: any failure to import it is
# swallowed and recorded in hasWx, so importing this module never
# requires a GUI toolkit to be installed.
try:
    import wx
    import wx.xrc
    import wx.lib.mixins.listctrl as listmix
    hasWx = True
except Exception as e:
    hasWx = False
import ntpath
import subprocess
import os
import sys
from functools import partial
import pytransit.trash
import wx.grid
import pytransit
import pytransit.analysis
########################################################################
class SortableListCtrl(wx.ListCtrl):
    """Thin wx.ListCtrl subclass; adds no behavior of its own."""

    #----------------------------------------------------------------------
    def __init__(self, parent, ID=wx.ID_ANY, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=0):
        # Forward construction straight to the wx.ListCtrl base class.
        super(SortableListCtrl, self).__init__(parent, ID, pos, size, style)
#########################################
#menu_titles = [ "Display Histogram",
# "Display Tracks",]
#
#menu_title_by_id = {}
#for title in menu_titles:
# menu_title_by_id[ wx.NewId() ] = title
#
##########################################
class ImgFrame(wx.Frame):
    """Top-level frame that displays a single image file.

    The image path is used as the window title, and the frame is sized
    to fit the loaded bitmap.
    """
    def __init__(self, parent, filePath):
        # filePath: path to an image in any format wx.Image can decode.
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = "%s" % (filePath), pos = wx.DefaultPosition, size = wx.Size( 1150,740 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
        self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
        bSizer1 = wx.BoxSizer( wx.VERTICAL )
        # Placeholder bitmap; replaced with the real image below.
        self.m_bitmap1 = wx.StaticBitmap( self, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
        bSizer1.Add( self.m_bitmap1, 1, wx.ALL|wx.EXPAND, 5 )
        self.SetSizer( bSizer1 )
        self.Layout()
        self.Centre( wx.BOTH )
        # Load the image from disk and swap it into the static bitmap.
        img = wx.Image(filePath, wx.BITMAP_TYPE_ANY)
        self.m_bitmap1.SetBitmap(wx.BitmapFromImage(img))
        self.Refresh()
        self.Fit()
def unknownColNames(path):
    """Infer column names for a results file of unknown type.

    The last comment line (one starting with "#") is assumed to hold the
    tab-separated column names.  If the last data line has more columns
    than the comment supplied, generic names ("Col0", "Col1", ...) are
    generated instead.

    Note: names are returned exactly as they appear in the file -- the
    leading "#" on the first name and the trailing newline on the last
    are deliberately not stripped (existing callers expect this).

    Arguments:
        path (str): path to the results file.
    Returns:
        list[str]: the inferred column names.
    """
    colNames = []
    final_line = ""
    # 'with' guarantees the handle is closed; the original iterated over a
    # bare open(path) and leaked the file object.
    with open(path) as handle:
        for line in handle:
            if line.startswith("#"):
                colNames = line.split("\t")
            else:
                final_line = line
    if not final_line:
        print("Error: file appears to be empty")
    tmp = final_line.split("\t")
    if len(colNames) < len(tmp):
        colNames = ["Col%d" % (i) for i in range(len(tmp))]
    return colNames
def unknownTableData(path, colnames):
    """Read the rows of a results file of unknown type.

    Comment lines (starting with "#") are skipped and do not count toward
    row numbering.  Each remaining line is split on tabs, the trailing
    newline stripped from the last field, and the first len(colnames)
    fields paired with the given column names.

    Arguments:
        path (str): path to the results file.
        colnames (list[str]): column names to key each row dict by.
    Returns:
        list[tuple[int, dict]]: (row_number, {colname: value}) pairs.
    """
    data = []
    # 'with' guarantees the handle is closed; the original iterated over a
    # bare open(path) and leaked the file object.
    with open(path) as handle:
        row = 0
        for line in handle:
            if line.startswith("#"):
                continue
            tmp = line.split("\t")
            tmp[-1] = tmp[-1].strip()
            rowdict = dict([(colnames[i], tmp[i]) for i in range(len(colnames))])
            data.append((row, rowdict))
            row += 1
    return data
def unknownFileHeaderText(path):
    """Return the header text shown for files whose type is unrecognized.

    The *path* argument exists only for interface parity with the
    type-specific header readers; it is not consulted.
    """
    return "Unknown results file."
def getInfoFromFileType(X):
    """Find the analysis method whose file-type handler identifies X.

    Each registered analysis method exposes a list of file-type classes;
    the first instantiated handler whose ``identifier`` equals X wins.
    Falls back to ("unknown", generic TransitFile handler) when nothing
    matches.
    """
    for method_name in pytransit.analysis.methods:
        for filetype_class in pytransit.analysis.methods[method_name].filetypes:
            handler = filetype_class()
            if handler.identifier == X:
                return (method_name, handler)
    return ("unknown", pytransit.analysis.base.TransitFile())
class TransitTable(wx.grid.GridTableBase):
    """
    A custom wx.Grid table backed by user-supplied row data, with
    click-to-sort support on any column.
    """
    def __init__(self, data, colnames):
        """Store the table contents.

        Arguments:
            data: list of (rowname, rowdict) pairs, where
                rowdict.get(colname) yields the cell value for *colname*.
            colnames: ordered list of column names to display.
        """
        # The base class must be initialized *first*
        wx.grid.GridTableBase.__init__(self)
        self.data = data
        self.colnames = colnames
        # XXX
        # we need to store the row length and column length to
        # see if the table has changed size
        self._rows = self.GetNumberRows()
        self._cols = self.GetNumberCols()
        # Last column sorted and its direction (False = ascending);
        # used to toggle the direction on a repeated sort.
        self.sorted_col = None
        self.sorted_dir = None
    def GetNumberCols(self):
        # Column count comes from the label list, not the data.
        return len(self.colnames)
    def GetNumberRows(self):
        return len(self.data)
    def GetColLabelValue(self, col):
        return self.colnames[col]
    def GetRowLabelValue(self, row):
        # Row labels show the stored row number, which survives sorting.
        return "%d" % int(self.data[row][0])
    def GetValue(self, row, col):
        # Cells are rendered as strings; missing keys display as "".
        return str(self.data[row][1].get(self.GetColLabelValue(col), ""))
    def GetRawValue(self, row, col):
        # Like GetValue but without the str() coercion.
        return self.data[row][1].get(self.GetColLabelValue(col), "")
    def SetValue(self, row, col, value):
        self.data[row][1][self.GetColLabelValue(col)] = value
    def SortColumn(self, col):
        """Sort the backing data by column *col* in place.

        Sorting the same column twice in a row flips the direction.
        Values that parse as floats are compared numerically; others are
        compared as-is.
        """
        if self.sorted_col == col:
            self.sorted_dir = not self.sorted_dir
        else:
            self.sorted_col = col
            self.sorted_dir = False
        name = self.colnames[col]
        tempdata = []
        for row in self.data:
            rowname, entry = row
            try:
                tempval = float(entry.get(name, None))
            except:
                tempval = entry.get(name, None)
            tempdata.append((tempval, row))
        # NOTE(review): ties fall through to comparing the row tuples
        # themselves, and mixed float/str keys can occur; both rely on
        # Python 2 ordering rules and would raise TypeError on Python 3
        # -- confirm against the project's supported interpreter.
        tempdata.sort(reverse=self.sorted_dir)
        self.data = []
        for sortvalue, row in tempdata:
            self.data.append(row)
class TransitGridFrame(wx.Frame):
    """Frame that displays a results file in a sortable, read-only grid.

    The file's type is detected from its first line; the matching
    handler supplies the column names, row data, header text and the
    right-click menu entries.
    """
    def __init__(self, parent, path, size=(-1,-1)):
        # parent: main TRANSIT window (provides selections/annotation).
        # path: results file to display.
        wx.Frame.__init__(self, parent, size=size)
        self.SetTitle(path)
        bSizer1 = wx.BoxSizer( wx.VERTICAL )
        sbSizer1 = wx.StaticBoxSizer( wx.StaticBox( self, wx.ID_ANY, u"Information" ), wx.HORIZONTAL )
        self.parent = parent
        self.path = path
        # Last right-clicked cell; updated in OnCellRightClicked.
        self.col = 0
        self.row = 0
        # Snapshot the parent's current selections for use by menu actions.
        self.ctrldata = self.parent.ctrlSelected()
        self.expdata = self.parent.expSelected()
        self.annotation = self.parent.annotation
        # The first line of the file identifies its type.
        line = open(self.path).readline().strip()
        (method, FT) = getInfoFromFileType(line)
        self.filetype = FT
        # Unknown files get their column names guessed from the contents.
        if self.filetype.identifier == "#Unknown":
            self.columnlabels = unknownColNames(self.path)
        else:
            self.columnlabels = self.filetype.colnames
        data = self.filetype.getData(self.path, self.columnlabels)
        # Header text shown in the "Information" box above the grid.
        wxheader_list = []
        text = self.filetype.getHeader(self.path)
        wxheader_list.append(wx.StaticText( self, wx.ID_ANY, text, wx.DefaultPosition, wx.DefaultSize, 0 ))
        wxheader_list[-1].Wrap( -1 )
        sbSizer1.Add( wxheader_list[-1], 0, wx.ALL, 5 )
        self.grid = wx.grid.Grid(self, -1)
        bSizer1.Add( sbSizer1, 0, wx.EXPAND, 5 )
        bSizer1.Add( self.grid, 1, wx.EXPAND, 5 )
        self.SetSizer( bSizer1 )
        self.Centre( wx.BOTH )
        # Double-clicking a column label sorts; right-clicking a cell
        # opens the file-type-specific menu.
        self.Bind(wx.grid.EVT_GRID_LABEL_LEFT_DCLICK, self.OnLabelDoubleClicked)
        self.Bind(wx.grid.EVT_GRID_CELL_RIGHT_CLICK, self.OnCellRightClicked)
        mytable = TransitTable(data, self.columnlabels)
        self.grid.SetTable(mytable, True)
        self.grid.EnableEditing(False)
        self.grid.AdjustScrollbars()
        self.grid.SetColLabelSize(wx.grid.GRID_AUTOSIZE)
        self.grid.AutoSizeColumns()
        self.AutoResizeCols()
        self.grid.ForceRefresh()
        # Size the frame to its contents, clamped to a sane maximum.
        (width, height) = bSizer1.GetMinSize()
        max_width = 1500
        max_height = 800
        width = min(width+50, max_width)
        height = min(height, max_height)
        self.SetMinSize((width, height))
        self.Layout()
        #self.Show()
    def AutoResizeCols(self):
        """Clamp every auto-sized column width to [100, 200] pixels."""
        max_column_size = 200
        min_column_size = 100
        self.grid.AutoSizeColumns(False)
        for i,label in enumerate(self.columnlabels):
            size = self.grid.GetColSize(i)
            if size > max_column_size:
                self.grid.SetColSize(i, max_column_size)
            elif size < min_column_size:
                self.grid.SetColSize(i, min_column_size)
    def OnLabelDoubleClicked(self, evt):
        """Sort the grid by the double-clicked column label."""
        col = evt.GetCol()
        # col == -1 means the corner/row label was clicked, not a column.
        if col != -1:
            self.grid.GetTable().SortColumn(col)
            self.grid.ForceRefresh()
    def OnCellRightClicked(self, evt):
        """Show the file-type-specific context menu for the clicked row."""
        menu = wx.Menu()
        id1 = wx.NewId()
        sortID = wx.NewId()
        xo, yo = evt.GetPosition()
        # Remember which row was clicked so menu handlers can use it.
        self.row = self.grid.YToRow(yo) - 1
        self.col = 0
        val = self.grid.GetCellValue(self.row, 0)
        self.Refresh()
        # The file-type handler supplies (label, callback) menu entries;
        # each callback is bound with this frame as its first argument.
        for (menuname, menufunc) in self.filetype.getMenus():
            newid = wx.NewId()
            menu.Append(newid, menuname)
            newmenufunc = partial(menufunc, self)
            self.Bind(wx.EVT_MENU, newmenufunc, id=newid)
        self.PopupMenu(menu)
        menu.Destroy()
|
shrikantgond/ecView | refs/heads/master | node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py | 1835 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""New implementation of Visual Studio project generation."""
import os
import random
import gyp.common
# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
# preserving 2.4 compatibility.
try:
import hashlib
_new_md5 = hashlib.md5
except ImportError:
import md5
_new_md5 = md5.new
# Initialize random number generator (no argument: seeded from OS entropy
# or the current time)
random.seed()
# GUIDs for project types: fixed Visual Studio type GUIDs that are written
# into the .sln file to mark each entry as a buildable project or a
# solution folder (see MSVSSolution.Write).
ENTRY_TYPE_GUIDS = {
    'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
    'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}
#------------------------------------------------------------------------------
# Helper functions
def MakeGuid(name, seed='msvs_new'):
  """Returns a GUID for the specified target name.

  Args:
    name: Target name.
    seed: Seed for MD5 hash.
  Returns:
    A GUID-line string calculated from the name and seed.

  This generates something which looks like a GUID, but depends only on the
  name and seed.  This means the same name/seed will always generate the same
  GUID, so that projects and solutions which refer to each other can explicitly
  determine the GUID to refer to explicitly.  It also means that the GUID will
  not change when the project for a target is rebuilt.
  """
  # Calculate a MD5 signature for the seed and name.  The input is encoded
  # explicitly: Python 3's md5() rejects str, and .encode('utf-8') is a
  # no-op for the ASCII str inputs this receives on Python 2.
  d = _new_md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper()
  # Convert most of the signature to GUID form (discard the rest)
  guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
          + '-' + d[20:32] + '}')
  return guid
#------------------------------------------------------------------------------
class MSVSSolutionEntry(object):
  """Base class for things that can appear in a solution (projects and
  folders); provides the ordering used when entries are sorted."""
  def __cmp__(self, other):
    # Sort by name then guid (so things are in order on vs2008).
    # NOTE(review): __cmp__ and the cmp() builtin exist only on Python 2;
    # sorting entries under Python 3 would need __lt__/__eq__ instead.
    return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
class MSVSFolder(MSVSSolutionEntry):
  """Folder in a Visual Studio project or solution."""

  def __init__(self, path, name = None, entries = None,
               guid = None, items = None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder; defaults to the last component of path.
      entries: Folder or Project objects nested inside this folder, if any.
      guid: GUID to use for folder, if not None.
      items: Solution items to include in the folder project, if any.
    """
    self.path = path
    # Default the display name to the last path component.
    self.name = name if name else os.path.basename(path)
    self.guid = guid
    # Copy the caller's sequences so they are never aliased.
    self.entries = sorted(entries or [])
    self.items = list(items or [])
    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']

  def get_guid(self):
    """Return the folder's GUID, deriving a stable one from its path on
    first use (consistent guids keep regenerated solutions unchanged)."""
    if self.guid is None:
      self.guid = MakeGuid(self.path, seed='msvs_folder')
    return self.guid
#------------------------------------------------------------------------------
class MSVSProject(MSVSSolutionEntry):
  """Visual Studio project."""

  def __init__(self, path, name = None, dependencies = None, guid = None,
               spec = None, build_file = None, config_platform_overrides = None,
               fixpath_prefix = None):
    """Initializes the project.

    Args:
      path: Absolute path to the project file.
      name: Name of project; defaults to the project file's base name.
      dependencies: Other Project objects this project depends upon, if any.
      guid: GUID to use for project, if not None.
      spec: Dictionary specifying how to build this project.
      build_file: Filename of the .gyp file that the vcproj file comes from.
      config_platform_overrides: optional dict of configuration platforms to
          use in place of the default for this target.
      fixpath_prefix: the path used to adjust the behavior of _fixpath.
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file
    # Use project filename if name not specified.
    self.name = name or os.path.splitext(os.path.basename(path))[0]
    # Copy the caller's list so it is never aliased.
    self.dependencies = list(dependencies or [])
    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
    self.config_platform_overrides = config_platform_overrides or {}
    self.fixpath_prefix = fixpath_prefix
    self.msbuild_toolset = None

  def set_dependencies(self, dependencies):
    """Replace the dependency list (copied, never aliased)."""
    self.dependencies = list(dependencies or [])

  def get_guid(self):
    """Return the project's GUID, deriving it from the project name on
    first use.

    Deriving from the name keeps the GUID identical no matter which
    solution includes the project or how often it is regenerated.
    TODO: this is fragile -- two projects sharing a base name (e.g.
    foo/unittest.vcproj and bar/unittest.vcproj) collide; deriving from a
    $SOURCE_ROOT-relative path, or reading pre-built project files, would
    be more robust.
    """
    if self.guid is None:
      self.guid = MakeGuid(self.name)
    return self.guid

  def set_msbuild_toolset(self, msbuild_toolset):
    self.msbuild_toolset = msbuild_toolset
#------------------------------------------------------------------------------
class MSVSSolution(object):
  """Visual Studio solution."""
  def __init__(self, path, version, entries=None, variants=None,
               websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution.  May contain Folder or Project
          objects.  May be None, if the folder is empty.
      variants: List of build variant strings.  If none, a default list will
          be used.
      websiteProperties: Flag to decide if the website properties section
          is generated.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version
    # Copy passed lists (or set to empty lists)
    self.entries = list(entries or [])
    if variants:
      # Copy passed list
      self.variants = variants[:]
    else:
      # Use default
      self.variants = ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): Need to be able to handle a mapping of solution config
    # to project config.  Should we be able to handle variants being a dict,
    # or add a separate variant_map variable?  If it's a dict, we can't
    # guarantee the order of variants since dict keys aren't ordered.
    # TODO(rspangler): Automatically write to disk for now; should delay until
    # node-evaluation time.
    self.Write()
  def Write(self, writer=gyp.common.WriteOnDiff):
    """Writes the solution file to disk.

    Args:
      writer: callable invoked with the solution path; must return a
          file-like object with write() and close().  Defaults to
          gyp.common.WriteOnDiff.

    Note: all output uses CRLF ("\\r\\n") line endings, as required by the
    Visual Studio .sln format, regardless of host platform.

    Raises:
      IndexError: An entry appears multiple times.
    """
    # Walk the entry tree and collect all the folders and projects.
    all_entries = set()
    entries_to_check = self.entries[:]
    while entries_to_check:
      e = entries_to_check.pop(0)
      # If this entry has been visited, nothing to do.
      if e in all_entries:
        continue
      all_entries.add(e)
      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries
    # NOTE(review): sorting relies on MSVSSolutionEntry.__cmp__, which is
    # Python 2 only -- confirm against the project's interpreter.
    all_entries = sorted(all_entries)
    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())
    # Project entries
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
      relative_path = gyp.common.RelativePath(e.path, sln_root)
      # msbuild does not accept an empty folder_name.
      # use '.' in case relative_path is empty.
      folder_name = relative_path.replace('/', '\\') or '.'
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,          # Entry type GUID
          e.name,                     # Folder name
          folder_name,                # Folder name (again)
          e.get_guid(),               # Entry GUID
      ))
      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')
      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')
      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')
      f.write('EndProject\r\n')
    # Global section
    f.write('Global\r\n')
    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')
    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()
    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        # Use the per-project override for this variant when one exists.
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))
        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')
    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')
    # Folder mappings
    # Omit this section if there are no folders
    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
      for e in all_entries:
        if not isinstance(e, MSVSFolder):
          continue        # Does not apply to projects, only folders
        for subentry in e.entries:
          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
      f.write('\tEndGlobalSection\r\n')
    f.write('EndGlobal\r\n')
    f.close()
|
wevote/WeVoteServer | refs/heads/develop | apis_v1/documentation_source/organization_stop_ignoring_doc.py | 1 | # apis_v1/documentation_source/organization_stop_ignoring_doc.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
def organization_stop_ignoring_doc_template_values(url_root):
    """
    Build the template context documenting the organizationStopIgnoring API.

    :param url_root: Root URL the documentation template prepends to the "try it now" link.
    :return: dict of values consumed by the API documentation page template.
    """
    required_query_parameter_list = [
        {
            'name': 'voter_device_id',
            'value': 'string',  # boolean, integer, long, string
            'description': 'An 88 character unique identifier linked to a voter record on the server',
        },
        {
            'name': 'organization_id',
            'value': 'integer',  # boolean, integer, long, string
            'description': 'Internal database unique identifier for organization',
        },
        {
            'name': 'organization_we_vote_id',
            'value': 'string',  # boolean, integer, long, string
            'description': 'The unique identifier for this organization across all networks '
                           '(either organization_id OR organization_we_vote_id required -- not both.) '
                           'NOTE: In the future we '
                           'might support other identifiers used in the industry.',
        },
        {
            'name': 'api_key',
            'value': 'string (from post, cookie, or get (in that order))',  # boolean, integer, long, string
            'description': 'The unique key provided to any organization using the WeVoteServer APIs',
        },
    ]
    optional_query_parameter_list = [
    ]
    potential_status_codes_list = [
        {
            'code': 'VALID_VOTER_DEVICE_ID_MISSING',
            'description': 'A valid voter_device_id parameter was not included. Cannot proceed.',
        },
        {
            'code': 'VALID_VOTER_ID_MISSING',
            'description': 'A valid voter_id was not found from voter_device_id. Cannot proceed.',
        },
        {
            'code': 'VALID_ORGANIZATION_ID_MISSING',
            'description': 'A valid organization_id was not found. Cannot proceed.',
        },
        {
            # NOTE(review): this code contains an embedded space, and the STOPPED_FOLLOWING entry below
            # says "following" even though this endpoint is about ignoring -- both look copied from the
            # stop-following doc. Left unchanged because these strings must match what the server view
            # actually returns; verify against the organizationStopIgnoring view before editing.
            'code': 'ORGANIZATION_NOT_FOUND_ON_CREATE STOP_FOLLOWING',
            'description': 'An organization with that organization_id was not found. Cannot proceed.',
        },
        {
            'code': 'STOPPED_FOLLOWING',
            'description': 'Successfully stopped following this organization',
        },
    ]
    try_now_link_variables_dict = {
        'organization_id': '1',
    }
    api_response = '{\n' \
                   ' "status": string,\n' \
                   ' "success": boolean,\n' \
                   ' "voter_device_id": string (88 characters long),\n' \
                   ' "organization_id": integer,\n' \
                   ' "organization_we_vote_id": string,\n' \
                   '}'
    template_values = {
        'api_name': 'organizationStopIgnoring',
        'api_slug': 'organizationStopIgnoring',
        'api_introduction':
            # Bug fix: the two implicitly-concatenated fragments previously ran together as
            # "equivalentto" because the first fragment lacked a trailing space.
            "Call this to save that the voter has decided to stop ignoring this organization. Logically equivalent "
            "to never ignoring in the first place, but leaves a record in the database.",
        'try_now_link': 'apis_v1:organizationStopIgnoringView',
        'try_now_link_variables_dict': try_now_link_variables_dict,
        'url_root': url_root,
        'get_or_post': 'GET',
        'required_query_parameter_list': required_query_parameter_list,
        'optional_query_parameter_list': optional_query_parameter_list,
        'api_response': api_response,
        'api_response_notes':
            "",
        'potential_status_codes_list': potential_status_codes_list,
    }
    return template_values
|
ysu-hust/cosmodel | refs/heads/master | collectors/collectors.py | 1 | #!/usr/bin/env python
from utils import default_config
from onlinemetrics import online_metrics
import time
import redis
import json
# Attributes of the shared online-metrics object that are snapshotted on every
# collection round, in the order they appear in the output JSON record.
_METRIC_ATTRS = (
    "slostatus_obj", "slomeetcounter_obj", "sloviolatecounter_obj",
    "slostatus_proxy", "slomeetcounter_proxy", "sloviolatecounter_proxy",
    "devices_incoming_req_count_rate", "devices_req_count_rate",
    "devices_req_size_rate", "devices_req_read_rate",
    "devices_diskio_reads", "devices_diskio_readkbs",
    "devices_service_time_measure",
    "partitions_devices_hit_ratio", "partitions_devices_hit_count",
    "partitions_devices_miss_count",
    "partitions_devices_metahit_ratio", "partitions_devices_metahit_count",
    "partitions_devices_metamiss_count",
    "partitions_devices_openhit_ratio", "partitions_devices_openhit_count",
    "partitions_devices_openmiss_count",
)


def save_metrics_measure_and_predict(metrics, f):
    """
    Write one JSON line to *f* snapshotting the current online metrics.

    The record contains every attribute named in _METRIC_ATTRS (copied from
    *metrics* via getattr) plus a 'status_collecting_time' wall-clock timestamp.
    Exceptions are caught and printed so a failed snapshot cannot kill the
    collector loop that calls this each round.

    :param metrics: object exposing the attributes listed in _METRIC_ATTRS.
    :param f: writable file-like object; one newline-terminated JSON line is appended.
    """
    try:
        print("###### start saving current online metrics ######")
        # Data-driven copy replaces 22 hand-written assignment lines.
        cur_metrics = dict((name, getattr(metrics, name)) for name in _METRIC_ATTRS)
        cur_metrics["status_collecting_time"] = time.time()
        f.write(json.dumps(cur_metrics) + "\n")
        print("##### end saving current metrics ########")
    except Exception as e:
        # Best-effort: log and continue rather than propagate into the loop.
        print(e)
def collect_online_metrics(params, online_metrics_file = "/tmp/online_metrics.log"):
    """
    Collector main loop: once per scheduling interval, refresh the shared
    online-metrics object and append one JSON snapshot line to
    *online_metrics_file*.  Runs forever; never returns.

    :param params: dict; params["metrics"] is the shared online-metrics object
        whose update_* methods are driven here.
    :param online_metrics_file: path of the snapshot log.
    """
    metrics = params["metrics"]
    # NOTE(review): mode 'w' truncates any previous log on restart -- confirm intended.
    with open(online_metrics_file, 'w') as f:
        round_n = 0
        while True:
            try:
                start_t = time.time()
                print "###### start update status ######"
                # Refresh workload, proxy SLO status, and per-device hit ratios.
                metrics.update_devices_workload(default_config.devices)
                metrics.update_proxy_slostatus(default_config.proxy_servers)
                for device in default_config.devices:
                    metrics.update_partitions_devices_hit_ratio(device)
                save_metrics_measure_and_predict(metrics, f)
            except Exception as e:
                print 'exception happend at round_n %d' % (round_n)
                print str(e)
            # Sleep out the remainder of the interval so rounds keep a fixed cadence.
            sleep_t = default_config.schedule_interval - (time.time() - start_t)
            if sleep_t > 0:
                time.sleep(sleep_t)
            round_n += 1
def get_proclat(obj_redis_clients, proxy_redis_clients):
    """
    Drain the per-object-server request-duration samples buffered in redis.

    Each redis instance may hold a list under the key
    '<object_server>:6000_duration'.  Every such list is read in full and then
    deleted (a destructive, read-once drain).  Samples found via the
    object-side clients are returned under '<object_server>_obj'; samples
    found via the proxy-side clients under '<object_server>_proxy'.

    :param obj_redis_clients: redis clients connected to the object servers.
    :param proxy_redis_clients: redis clients connected to the proxy servers.
    :return: dict mapping '<server>_obj' / '<server>_proxy' to lists of samples.
    """
    def _drain(redis_clients, suffix, collected):
        # Shared drain loop -- previously duplicated verbatim for the object
        # and proxy client lists.
        # NOTE(review): both passes iterate default_config.object_servers (the
        # proxy servers' own host names are never used as keys).  This mirrors
        # the original behavior -- confirm it is intentional.
        for rc in redis_clients:
            for object_s in default_config.object_servers:
                key = '%s:6000_duration' % (object_s)
                if rc.exists(key):
                    lats = rc.lrange(key, 0, -1)
                    rc.delete(key)
                    out_key = object_s + '_' + suffix
                    collected[out_key] = collected.get(out_key, []) + lats

    proxy_obj_lats = {}
    _drain(obj_redis_clients, 'obj', proxy_obj_lats)
    _drain(proxy_redis_clients, 'proxy', proxy_obj_lats)
    return proxy_obj_lats
def collect_proclat(params, proclat_file = '/tmp/processing_latencies.log'):
    """
    Collector main loop: every scheduling interval, drain the buffered
    request-duration samples from the object/proxy redis instances (via
    get_proclat) and append them to *proclat_file* as one JSON line per round.
    Runs forever; never returns.

    :param params: unused here; kept for a uniform collector signature.
    :param proclat_file: path of the latency log (truncated on start).
    """
    obj_redis_clients = []
    proxy_redis_clients = []
    try:
        # One redis client per object server and per proxy server (db 0).
        for object_s in default_config.object_servers:
            rc = redis.StrictRedis(host=object_s, port=6379, db=0)
            obj_redis_clients.append(rc)
        for proxy_s in default_config.proxy_servers:
            rc = redis.StrictRedis(host=proxy_s, port=6379, db=0)
            proxy_redis_clients.append(rc)
    except Exception as e:
        # Best-effort: a failed client build is logged; the loop below may then
        # run with a partial client list.
        print str(e)
        print 'exception during build redis clients'
    try:
        with open(proclat_file, 'w') as f:
            while True:
                start_t = time.time()
                lats = get_proclat(obj_redis_clients, proxy_redis_clients)
                f.write(json.dumps(lats) + '\n')
                # Sleep out the remainder of the interval for a fixed cadence.
                sleep_t = default_config.schedule_interval - (time.time() - start_t)
                print sleep_t
                if sleep_t > 0:
                    time.sleep(sleep_t)
    except Exception as e:
        print 'exception during collect data'
        print str(e)
xiandiancloud/edxplaltfom-xusong | refs/heads/master | cms/djangoapps/contentstore/views/tests/test_item.py | 2 | """Tests for items views."""
import os
import json
from datetime import datetime, timedelta
import ddt
from unittest import skipUnless
from mock import patch
from pytz import UTC
from webob import Response
from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
from contentstore.utils import reverse_usage_url, reverse_course_url
from contentstore.views.preview import StudioUserService
from contentstore.views.component import (
component_handler, get_component_templates
)
from contentstore.views.item import create_xblock_info, ALWAYS, VisibilityState, _xblock_type_and_display_name
from contentstore.tests.utils import CourseTestCase
from student.tests.factories import UserFactory
from xmodule.capa_module import CapaDescriptor
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import ItemFactory
from xmodule.x_module import STUDIO_VIEW, STUDENT_VIEW
from xblock.exceptions import NoSuchHandlerError
from opaque_keys.edx.keys import UsageKey, CourseKey
from opaque_keys.edx.locations import Location
from xmodule.partitions.partitions import Group, UserPartition
class ItemTest(CourseTestCase):
    """ Base test class for create, save, and delete """

    def setUp(self):
        super(ItemTest, self).setUp()
        self.course_key = self.course.id
        self.usage_key = self.course.location

    def get_item_from_modulestore(self, usage_key, verify_is_draft=False):
        """
        Fetch the item *usage_key* refers to from the modulestore, optionally
        asserting that it is a draft.
        """
        item = self.store.get_item(usage_key)
        if verify_is_draft:
            self.assertTrue(getattr(item, 'is_draft', False))
        return item

    def response_usage_key(self, response):
        """
        Verify the response status was 200 and extract the UsageKey from its
        JSON payload.
        """
        payload = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        usage_key = UsageKey.from_string(payload['locator'])
        # Old-style locators lack a run; map them into the full course key.
        if usage_key.course_key.run is None:
            usage_key = usage_key.map_into_course(CourseKey.from_string(payload['courseKey']))
        return usage_key

    def create_xblock(self, parent_usage_key=None, display_name=None, category=None, boilerplate=None):
        """POST an xblock-creation request; return the raw response."""
        parent = self.usage_key if parent_usage_key is None else parent_usage_key
        payload = {
            'parent_locator': unicode(parent),
            'category': category
        }
        # Optional fields are only sent when explicitly supplied.
        for field, value in (('display_name', display_name), ('boilerplate', boilerplate)):
            if value is not None:
                payload[field] = value
        return self.client.ajax_post(reverse('contentstore.views.xblock_handler'), json.dumps(payload))

    def _create_vertical(self, parent_usage_key=None):
        """
        Create a vertical under *parent_usage_key*, returning its UsageKey.
        """
        response = self.create_xblock(category='vertical', parent_usage_key=parent_usage_key)
        self.assertEqual(response.status_code, 200)
        return self.response_usage_key(response)
class GetItem(ItemTest):
    """Tests for '/xblock' GET url."""

    def _get_container_preview(self, usage_key):
        """
        Returns the HTML and resources required for the xblock at the specified UsageKey
        """
        preview_url = reverse_usage_url("xblock_view_handler", usage_key, {'view_name': 'container_preview'})
        resp = self.client.get(preview_url, HTTP_ACCEPT='application/json')
        self.assertEqual(resp.status_code, 200)
        resp_content = json.loads(resp.content)
        html = resp_content['html']
        self.assertTrue(html)
        resources = resp_content['resources']
        self.assertIsNotNone(resources)
        return html, resources

    def test_get_vertical(self):
        """GET on a newly created vertical returns 200."""
        # Add a vertical
        resp = self.create_xblock(category='vertical')
        usage_key = self.response_usage_key(resp)
        # Retrieve it
        resp = self.client.get(reverse_usage_url('xblock_handler', usage_key))
        self.assertEqual(resp.status_code, 200)

    def test_get_empty_container_fragment(self):
        """An empty vertical renders without the Studio wrapper markup."""
        root_usage_key = self._create_vertical()
        html, __ = self._get_container_preview(root_usage_key)
        # Verify that the Studio wrapper is not added
        self.assertNotIn('wrapper-xblock', html)
        # Verify that the header and article tags are still added
        self.assertIn('<header class="xblock-header xblock-header-vertical">', html)
        self.assertIn('<article class="xblock-render">', html)

    def test_get_container_fragment(self):
        """A vertical with nested children renders both nesting and element wrappers."""
        root_usage_key = self._create_vertical()
        # Add a problem beneath a child vertical
        child_vertical_usage_key = self._create_vertical(parent_usage_key=root_usage_key)
        resp = self.create_xblock(parent_usage_key=child_vertical_usage_key, category='problem', boilerplate='multiplechoice.yaml')
        self.assertEqual(resp.status_code, 200)
        # Get the preview HTML
        html, __ = self._get_container_preview(root_usage_key)
        # Verify that the Studio nesting wrapper has been added
        self.assertIn('level-nesting', html)
        self.assertIn('<header class="xblock-header xblock-header-vertical">', html)
        self.assertIn('<article class="xblock-render">', html)
        # Verify that the Studio element wrapper has been added
        self.assertIn('level-element', html)

    def test_get_container_nested_container_fragment(self):
        """
        Test the case of the container page containing a link to another container page.
        """
        # Add a wrapper with child beneath a child vertical
        root_usage_key = self._create_vertical()
        resp = self.create_xblock(parent_usage_key=root_usage_key, category="wrapper")
        self.assertEqual(resp.status_code, 200)
        wrapper_usage_key = self.response_usage_key(resp)
        resp = self.create_xblock(parent_usage_key=wrapper_usage_key, category='problem', boilerplate='multiplechoice.yaml')
        self.assertEqual(resp.status_code, 200)
        # Get the preview HTML and verify the View -> link is present.
        html, __ = self._get_container_preview(root_usage_key)
        self.assertIn('wrapper-xblock', html)
        self.assertRegexpMatches(
            html,
            # The instance of the wrapper class will have an auto-generated ID. Allow any
            # characters after wrapper.
            (r'"/container/i4x://MITx/999/wrapper/\w+" class="action-button">\s*'
             '<span class="action-button-text">View</span>')
        )

    def test_split_test(self):
        """
        Test that a split_test module renders all of its children in Studio.
        """
        root_usage_key = self._create_vertical()
        resp = self.create_xblock(category='split_test', parent_usage_key=root_usage_key)
        split_test_usage_key = self.response_usage_key(resp)
        resp = self.create_xblock(parent_usage_key=split_test_usage_key, category='html', boilerplate='announcement.yaml')
        self.assertEqual(resp.status_code, 200)
        resp = self.create_xblock(parent_usage_key=split_test_usage_key, category='html', boilerplate='zooming_image.yaml')
        self.assertEqual(resp.status_code, 200)
        # Both children (identified by their boilerplate content) must appear.
        html, __ = self._get_container_preview(split_test_usage_key)
        self.assertIn('Announcement', html)
        self.assertIn('Zooming', html)

    @skipUnless(os.environ.get('FEATURE_GROUP_CONFIGURATIONS'), 'Tests Group Configurations feature')
    def test_split_test_edited(self):
        """
        Test that rename of a group changes display name of child vertical.
        """
        # Attach a user partition with groups 'alpha' and 'beta' to the course.
        self.course.user_partitions = [UserPartition(
            0, 'first_partition', 'First Partition',
            [Group("0", 'alpha'), Group("1", 'beta')]
        )]
        self.store.update_item(self.course, self.user.id)
        root_usage_key = self._create_vertical()
        resp = self.create_xblock(category='split_test', parent_usage_key=root_usage_key)
        split_test_usage_key = self.response_usage_key(resp)
        self.client.ajax_post(
            reverse_usage_url("xblock_handler", split_test_usage_key),
            data={'metadata': {'user_partition_id': str(0)}}
        )
        html, __ = self._get_container_preview(split_test_usage_key)
        self.assertIn('alpha', html)
        self.assertIn('beta', html)
        # Rename groups in group configuration
        GROUP_CONFIGURATION_JSON = {
            u'id': 0,
            u'name': u'first_partition',
            u'description': u'First Partition',
            u'version': 1,
            u'groups': [
                {u'id': 0, u'name': u'New_NAME_A', u'version': 1},
                {u'id': 1, u'name': u'New_NAME_B', u'version': 1},
            ],
        }
        response = self.client.put(
            reverse_course_url('group_configurations_detail_handler', self.course.id, kwargs={'group_configuration_id': 0}),
            data=json.dumps(GROUP_CONFIGURATION_JSON),
            content_type="application/json",
            HTTP_ACCEPT="application/json",
            HTTP_X_REQUESTED_WITH="XMLHttpRequest",
        )
        self.assertEqual(response.status_code, 201)
        # The preview must now show the renamed groups, not the old names.
        html, __ = self._get_container_preview(split_test_usage_key)
        self.assertNotIn('alpha', html)
        self.assertNotIn('beta', html)
        self.assertIn('New_NAME_A', html)
        self.assertIn('New_NAME_B', html)
class DeleteItem(ItemTest):
    """Tests for '/xblock' DELETE url."""

    def test_delete_static_page(self):
        """A static tab can be created and then deleted through the xblock handler."""
        # Create the static tab first.
        creation_response = self.create_xblock(category='static_tab')
        tab_usage_key = self.response_usage_key(creation_response)
        # Deleting used to fail because static tabs do not exist in the draft modulestore.
        delete_response = self.client.delete(reverse_usage_url('xblock_handler', tab_usage_key))
        self.assertEqual(delete_response.status_code, 204)
class TestCreateItem(ItemTest):
    """
    Test the create_item handler thoroughly
    """

    def test_create_nicely(self):
        """
        Try the straightforward use cases
        """
        # create a chapter
        display_name = 'Nicely created'
        resp = self.create_xblock(display_name=display_name, category='chapter')
        # get the new item and check its category and display_name
        chap_usage_key = self.response_usage_key(resp)
        new_obj = self.get_item_from_modulestore(chap_usage_key)
        self.assertEqual(new_obj.scope_ids.block_type, 'chapter')
        self.assertEqual(new_obj.display_name, display_name)
        self.assertEqual(new_obj.location.org, self.course.location.org)
        self.assertEqual(new_obj.location.course, self.course.location.course)
        # get the course and ensure it now points to this one
        course = self.get_item_from_modulestore(self.usage_key)
        self.assertIn(chap_usage_key, course.children)
        # use default display name
        resp = self.create_xblock(parent_usage_key=chap_usage_key, category='vertical')
        vert_usage_key = self.response_usage_key(resp)
        # create problem w/ boilerplate
        template_id = 'multiplechoice.yaml'
        resp = self.create_xblock(
            parent_usage_key=vert_usage_key,
            category='problem',
            boilerplate=template_id
        )
        prob_usage_key = self.response_usage_key(resp)
        problem = self.get_item_from_modulestore(prob_usage_key, verify_is_draft=True)
        # check against the template
        template = CapaDescriptor.get_template(template_id)
        self.assertEqual(problem.data, template['data'])
        self.assertEqual(problem.display_name, template['metadata']['display_name'])
        self.assertEqual(problem.markdown, template['metadata']['markdown'])

    def test_create_item_negative(self):
        """
        Negative tests for create_item
        """
        # non-existent boilerplate: creates a default
        resp = self.create_xblock(category='problem', boilerplate='nosuchboilerplate.yaml')
        self.assertEqual(resp.status_code, 200)

    def test_create_with_future_date(self):
        """A child created in a course that starts in the future inherits that start date."""
        self.assertEqual(self.course.start, datetime(2030, 1, 1, tzinfo=UTC))
        resp = self.create_xblock(category='chapter')
        usage_key = self.response_usage_key(resp)
        obj = self.get_item_from_modulestore(usage_key)
        self.assertEqual(obj.start, datetime(2030, 1, 1, tzinfo=UTC))

    def test_static_tabs_initialization(self):
        """
        Test that static tab display names are not being initialized as None.
        """
        # Add a new static tab with no explicit name
        resp = self.create_xblock(category='static_tab')
        usage_key = self.response_usage_key(resp)
        # Check that its name is not None
        new_tab = self.get_item_from_modulestore(usage_key)
        self.assertEquals(new_tab.display_name, 'Empty')
class TestDuplicateItem(ItemTest):
    """
    Test the duplicate method.
    """

    def setUp(self):
        """ Creates the test course structure and a few components to 'duplicate'. """
        super(TestDuplicateItem, self).setUp()
        # Create a parent chapter (for testing children of children).
        resp = self.create_xblock(parent_usage_key=self.usage_key, category='chapter')
        self.chapter_usage_key = self.response_usage_key(resp)
        # create a sequential containing a problem and an html component
        resp = self.create_xblock(parent_usage_key=self.chapter_usage_key, category='sequential')
        self.seq_usage_key = self.response_usage_key(resp)
        # create problem and an html component
        resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='problem', boilerplate='multiplechoice.yaml')
        self.problem_usage_key = self.response_usage_key(resp)
        resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='html')
        self.html_usage_key = self.response_usage_key(resp)
        # Create a second sequential just (testing children of children)
        self.create_xblock(parent_usage_key=self.chapter_usage_key, category='sequential2')

    def test_duplicate_equality(self):
        """
        Tests that a duplicated xblock is identical to the original,
        except for location and display name.
        """
        def duplicate_and_verify(source_usage_key, parent_usage_key):
            # Duplicate the block, then deep-compare the copy against the source.
            usage_key = self._duplicate_item(parent_usage_key, source_usage_key)
            self.assertTrue(check_equality(source_usage_key, usage_key), "Duplicated item differs from original")

        def check_equality(source_usage_key, duplicate_usage_key):
            # Recursive equality check: locations must differ, everything else must match.
            original_item = self.get_item_from_modulestore(source_usage_key)
            duplicated_item = self.get_item_from_modulestore(duplicate_usage_key)
            self.assertNotEqual(
                original_item.location,
                duplicated_item.location,
                "Location of duplicate should be different from original"
            )
            # Set the location and display name to be the same so we can make sure the rest of the duplicate is equal.
            duplicated_item.location = original_item.location
            duplicated_item.display_name = original_item.display_name
            # Children will also be duplicated, so for the purposes of testing equality, we will set
            # the children to the original after recursively checking the children.
            if original_item.has_children:
                self.assertEqual(
                    len(original_item.children),
                    len(duplicated_item.children),
                    "Duplicated item differs in number of children"
                )
                for i in xrange(len(original_item.children)):
                    if not check_equality(original_item.children[i], duplicated_item.children[i]):
                        return False
                duplicated_item.children = original_item.children
            return original_item == duplicated_item

        duplicate_and_verify(self.problem_usage_key, self.seq_usage_key)
        duplicate_and_verify(self.html_usage_key, self.seq_usage_key)
        duplicate_and_verify(self.seq_usage_key, self.chapter_usage_key)
        duplicate_and_verify(self.chapter_usage_key, self.usage_key)

    def test_ordering(self):
        """
        Tests the a duplicated xblock appears immediately after its source
        (if duplicate and source share the same parent), else at the
        end of the children of the parent.
        """
        def verify_order(source_usage_key, parent_usage_key, source_position=None):
            usage_key = self._duplicate_item(parent_usage_key, source_usage_key)
            parent = self.get_item_from_modulestore(parent_usage_key)
            children = parent.children
            if source_position is None:
                # Duplicated into a different parent: expect the copy appended at the end.
                self.assertFalse(source_usage_key in children, 'source item not expected in children array')
                self.assertEqual(
                    children[len(children) - 1],
                    usage_key,
                    "duplicated item not at end"
                )
            else:
                # Same parent: expect the copy right after the source.
                self.assertEqual(
                    children[source_position],
                    source_usage_key,
                    "source item at wrong position"
                )
                self.assertEqual(
                    children[source_position + 1],
                    usage_key,
                    "duplicated item not ordered after source item"
                )

        verify_order(self.problem_usage_key, self.seq_usage_key, 0)
        # 2 because duplicate of problem should be located before.
        verify_order(self.html_usage_key, self.seq_usage_key, 2)
        verify_order(self.seq_usage_key, self.chapter_usage_key, 0)
        # Test duplicating something into a location that is not the parent of the original item.
        # Duplicated item should appear at the end.
        verify_order(self.html_usage_key, self.usage_key)

    def test_display_name(self):
        """
        Tests the expected display name for the duplicated xblock.
        """
        def verify_name(source_usage_key, parent_usage_key, expected_name, display_name=None):
            usage_key = self._duplicate_item(parent_usage_key, source_usage_key, display_name)
            duplicated_item = self.get_item_from_modulestore(usage_key)
            self.assertEqual(duplicated_item.display_name, expected_name)
            return usage_key

        # Display name comes from template.
        dupe_usage_key = verify_name(self.problem_usage_key, self.seq_usage_key, "Duplicate of 'Multiple Choice'")
        # Test dupe of dupe.
        verify_name(dupe_usage_key, self.seq_usage_key, "Duplicate of 'Duplicate of 'Multiple Choice''")
        # Uses default display_name of 'Text' from HTML component.
        verify_name(self.html_usage_key, self.seq_usage_key, "Duplicate of 'Text'")
        # The sequence does not have a display_name set, so category is shown.
        verify_name(self.seq_usage_key, self.chapter_usage_key, "Duplicate of sequential")
        # Now send a custom display name for the duplicate.
        verify_name(self.seq_usage_key, self.chapter_usage_key, "customized name", display_name="customized name")

    def _duplicate_item(self, parent_usage_key, source_usage_key, display_name=None):
        """
        POST a duplicate request for *source_usage_key* under *parent_usage_key*;
        return the new block's UsageKey.
        """
        data = {
            'parent_locator': unicode(parent_usage_key),
            'duplicate_source_locator': unicode(source_usage_key)
        }
        if display_name is not None:
            data['display_name'] = display_name
        resp = self.client.ajax_post(reverse('contentstore.views.xblock_handler'), json.dumps(data))
        return self.response_usage_key(resp)
class TestEditItem(ItemTest):
"""
Test xblock update.
"""
    def setUp(self):
        """ Creates the test course structure and a couple problems to 'edit'. """
        super(TestEditItem, self).setUp()
        # create a chapter
        display_name = 'chapter created'
        resp = self.create_xblock(display_name=display_name, category='chapter')
        chap_usage_key = self.response_usage_key(resp)
        # Sequential under the chapter; its update URL is reused by most tests.
        resp = self.create_xblock(parent_usage_key=chap_usage_key, category='sequential')
        self.seq_usage_key = self.response_usage_key(resp)
        self.seq_update_url = reverse_usage_url("xblock_handler", self.seq_usage_key)
        # create problem w/ boilerplate
        template_id = 'multiplechoice.yaml'
        resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='problem', boilerplate=template_id)
        self.problem_usage_key = self.response_usage_key(resp)
        self.problem_update_url = reverse_usage_url("xblock_handler", self.problem_usage_key)
        # Update URL for the course itself (used by child add/remove tests).
        self.course_update_url = reverse_usage_url("xblock_handler", self.usage_key)
    def test_delete_field(self):
        """
        Setting a metadata field to None removes the explicit value, reverting
        the field to its default ('never' for rerandomize).
        """
        # Explicitly set the field first.
        self.client.ajax_post(
            self.problem_update_url,
            data={'metadata': {'rerandomize': 'onreset'}}
        )
        problem = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertEqual(problem.rerandomize, 'onreset')
        # Sending None deletes the explicit setting.
        self.client.ajax_post(
            self.problem_update_url,
            data={'metadata': {'rerandomize': None}}
        )
        problem = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertEqual(problem.rerandomize, 'never')
    def test_null_field(self):
        """
        Sending null in for a field 'deletes' it
        """
        problem = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertIsNotNone(problem.markdown)
        # 'nullout' names fields to be cleared outright on the xblock.
        self.client.ajax_post(
            self.problem_update_url,
            data={'nullout': ['markdown']}
        )
        problem = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertIsNone(problem.markdown)
    def test_date_fields(self):
        """
        Test setting due & start dates on sequential
        """
        sequential = self.get_item_from_modulestore(self.seq_usage_key)
        self.assertIsNone(sequential.due)
        self.client.ajax_post(
            self.seq_update_url,
            data={'metadata': {'due': '2010-11-22T04:00Z'}}
        )
        sequential = self.get_item_from_modulestore(self.seq_usage_key)
        self.assertEqual(sequential.due, datetime(2010, 11, 22, 4, 0, tzinfo=UTC))
        self.client.ajax_post(
            self.seq_update_url,
            data={'metadata': {'start': '2010-09-12T14:00Z'}}
        )
        sequential = self.get_item_from_modulestore(self.seq_usage_key)
        # Setting 'start' must not clobber the previously set 'due' date.
        self.assertEqual(sequential.due, datetime(2010, 11, 22, 4, 0, tzinfo=UTC))
        self.assertEqual(sequential.start, datetime(2010, 9, 12, 14, 0, tzinfo=UTC))
    def test_delete_child(self):
        """
        Test deleting a child.
        """
        # Create 2 children of main course.
        resp_1 = self.create_xblock(display_name='child 1', category='chapter')
        resp_2 = self.create_xblock(display_name='child 2', category='chapter')
        chapter1_usage_key = self.response_usage_key(resp_1)
        chapter2_usage_key = self.response_usage_key(resp_2)
        course = self.get_item_from_modulestore(self.usage_key)
        self.assertIn(chapter1_usage_key, course.children)
        self.assertIn(chapter2_usage_key, course.children)
        # Remove one child from the course.
        # Posting a 'children' list that omits a child removes it.
        resp = self.client.ajax_post(
            self.course_update_url,
            data={'children': [unicode(chapter2_usage_key)]}
        )
        self.assertEqual(resp.status_code, 200)
        # Verify that the child is removed.
        course = self.get_item_from_modulestore(self.usage_key)
        self.assertNotIn(chapter1_usage_key, course.children)
        self.assertIn(chapter2_usage_key, course.children)
    def test_reorder_children(self):
        """
        Test reordering children that can be in the draft store.
        """
        # Create 2 child units and re-order them. There was a bug about @draft getting added
        # to the IDs.
        unit_1_resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='vertical')
        unit_2_resp = self.create_xblock(parent_usage_key=self.seq_usage_key, category='vertical')
        unit1_usage_key = self.response_usage_key(unit_1_resp)
        unit2_usage_key = self.response_usage_key(unit_2_resp)
        # The sequential already has a child defined in the setUp (a problem).
        # Children must be on the sequential to reproduce the original bug,
        # as it is important that the parent (sequential) NOT be in the draft store.
        children = self.get_item_from_modulestore(self.seq_usage_key).children
        self.assertEqual(unit1_usage_key, children[1])
        self.assertEqual(unit2_usage_key, children[2])
        # Post the children back in a swapped order.
        resp = self.client.ajax_post(
            self.seq_update_url,
            data={'children': [unicode(self.problem_usage_key), unicode(unit2_usage_key), unicode(unit1_usage_key)]}
        )
        self.assertEqual(resp.status_code, 200)
        children = self.get_item_from_modulestore(self.seq_usage_key).children
        self.assertEqual(self.problem_usage_key, children[0])
        self.assertEqual(unit1_usage_key, children[2])
        self.assertEqual(unit2_usage_key, children[1])
    def _is_location_published(self, location):
        """
        Returns whether or not the item with given location has a published version.
        """
        return modulestore().has_item(location, revision=ModuleStoreEnum.RevisionOption.published_only)
    def _verify_published_with_no_draft(self, location):
        """
        Verifies the item with given location has a published version and no draft (unpublished changes).
        """
        self.assertTrue(self._is_location_published(location))
        self.assertFalse(modulestore().has_changes(modulestore().get_item(location)))
    def _verify_published_with_draft(self, location):
        """
        Verifies the item with given location has a published version and also a draft version (unpublished changes).
        """
        self.assertTrue(self._is_location_published(location))
        self.assertTrue(modulestore().has_changes(modulestore().get_item(location)))
    def test_make_public(self):
        """ Test making a private problem public (publishing it). """
        # When the problem is first created, it is only in draft (because of its category).
        self.assertFalse(self._is_location_published(self.problem_usage_key))
        self.client.ajax_post(
            self.problem_update_url,
            data={'publish': 'make_public'}
        )
        # After publishing there is a published version and no pending draft changes.
        self._verify_published_with_no_draft(self.problem_usage_key)
    def test_make_draft(self):
        """ Test creating a draft version of a public problem. """
        # The shared helper publishes the problem, then edits the draft copy.
        self._make_draft_content_different_from_published()
    def test_revert_to_published(self):
        """ Test reverting draft content to published """
        self._make_draft_content_different_from_published()
        # 'discard_changes' throws away the draft edits.
        self.client.ajax_post(
            self.problem_update_url,
            data={'publish': 'discard_changes'}
        )
        self._verify_published_with_no_draft(self.problem_usage_key)
        # The published version must still have the original (unset) due date.
        published = modulestore().get_item(self.problem_usage_key, revision=ModuleStoreEnum.RevisionOption.published_only)
        self.assertIsNone(published.due)
    def test_republish(self):
        """ Test republishing an item. """
        new_display_name = 'New Display Name'
        # When the problem is first created, it is only in draft (because of its category).
        self.assertFalse(self._is_location_published(self.problem_usage_key))
        # Republishing when only in draft will update the draft but not cause a public item to be created.
        self.client.ajax_post(
            self.problem_update_url,
            data={
                'publish': 'republish',
                'metadata': {
                    'display_name': new_display_name
                }
            }
        )
        self.assertFalse(self._is_location_published(self.problem_usage_key))
        draft = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertEqual(draft.display_name, new_display_name)
        # Publish the item
        self.client.ajax_post(
            self.problem_update_url,
            data={'publish': 'make_public'}
        )
        # Now republishing should update the published version
        new_display_name_2 = 'New Display Name 2'
        self.client.ajax_post(
            self.problem_update_url,
            data={
                'publish': 'republish',
                'metadata': {
                    'display_name': new_display_name_2
                }
            }
        )
        self._verify_published_with_no_draft(self.problem_usage_key)
        published = modulestore().get_item(
            self.problem_usage_key,
            revision=ModuleStoreEnum.RevisionOption.published_only
        )
        self.assertEqual(published.display_name, new_display_name_2)
def test_direct_only_categories_not_republished(self):
"""Verify that republish is ignored for items in DIRECT_ONLY_CATEGORIES"""
# Create a vertical child with published and unpublished versions.
# If the parent sequential is not re-published, then the child problem should also not be re-published.
resp = self.create_xblock(parent_usage_key=self.seq_usage_key, display_name='vertical', category='vertical')
vertical_usage_key = self.response_usage_key(resp)
vertical_update_url = reverse_usage_url('xblock_handler', vertical_usage_key)
self.client.ajax_post(vertical_update_url, data={'publish': 'make_public'})
self.client.ajax_post(vertical_update_url, data={'metadata': {'display_name': 'New Display Name'}})
self._verify_published_with_draft(self.seq_usage_key)
self.client.ajax_post(self.seq_update_url, data={'publish': 'republish'})
self._verify_published_with_draft(self.seq_usage_key)
    def _make_draft_content_different_from_published(self):
        """
        Helper method to create different draft and published versions of a problem.

        Publishes the problem, then sets a due date on the draft only, and
        asserts that the published version's due date remains unset.
        """
        # Make problem public.
        self.client.ajax_post(
            self.problem_update_url,
            data={'publish': 'make_public'}
        )
        self._verify_published_with_no_draft(self.problem_usage_key)
        published = modulestore().get_item(self.problem_usage_key, revision=ModuleStoreEnum.RevisionOption.published_only)
        # Update the draft version and check that published is different.
        self.client.ajax_post(
            self.problem_update_url,
            data={'metadata': {'due': '2077-10-10T04:00Z'}}
        )
        updated_draft = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertEqual(updated_draft.due, datetime(2077, 10, 10, 4, 0, tzinfo=UTC))
        # The previously-fetched published item must not have picked up the due date.
        self.assertIsNone(published.due)
        # Fetch the published version again to make sure the due date is still unset.
        published = modulestore().get_item(published.location, revision=ModuleStoreEnum.RevisionOption.published_only)
        self.assertIsNone(published.due)
def test_make_public_with_update(self):
""" Update a problem and make it public at the same time. """
self.client.ajax_post(
self.problem_update_url,
data={
'metadata': {'due': '2077-10-10T04:00Z'},
'publish': 'make_public'
}
)
published = self.get_item_from_modulestore(self.problem_usage_key)
self.assertEqual(published.due, datetime(2077, 10, 10, 4, 0, tzinfo=UTC))
    def test_published_and_draft_contents_with_update(self):
        """ Create a draft and publish it then modify the draft and check that published content is not modified """
        # Make problem public.
        self.client.ajax_post(
            self.problem_update_url,
            data={'publish': 'make_public'}
        )
        self._verify_published_with_no_draft(self.problem_usage_key)
        published = modulestore().get_item(self.problem_usage_key, revision=ModuleStoreEnum.RevisionOption.published_only)
        # Now make a draft
        self.client.ajax_post(
            self.problem_update_url,
            data={
                'id': unicode(self.problem_usage_key),
                'metadata': {},
                'data': "<p>Problem content draft.</p>"
            }
        )
        # Both published and draft content should be different
        draft = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertNotEqual(draft.data, published.data)
        # Get problem by 'xblock_handler'
        view_url = reverse_usage_url("xblock_view_handler", self.problem_usage_key, {"view_name": STUDENT_VIEW})
        resp = self.client.get(view_url, HTTP_ACCEPT='application/json')
        self.assertEqual(resp.status_code, 200)
        # Activate the editing view
        view_url = reverse_usage_url("xblock_view_handler", self.problem_usage_key, {"view_name": STUDIO_VIEW})
        resp = self.client.get(view_url, HTTP_ACCEPT='application/json')
        self.assertEqual(resp.status_code, 200)
        # Both published and draft content should still be different:
        # rendering the student/studio views must not publish the draft.
        draft = self.get_item_from_modulestore(self.problem_usage_key, verify_is_draft=True)
        self.assertNotEqual(draft.data, published.data)
        # Fetch the published version again to make sure the data is correct.
        published = modulestore().get_item(published.location, revision=ModuleStoreEnum.RevisionOption.published_only)
        self.assertNotEqual(draft.data, published.data)
def test_publish_states_of_nested_xblocks(self):
""" Test publishing of a unit page containing a nested xblock """
resp = self.create_xblock(parent_usage_key=self.seq_usage_key, display_name='Test Unit', category='vertical')
unit_usage_key = self.response_usage_key(resp)
resp = self.create_xblock(parent_usage_key=unit_usage_key, category='wrapper')
wrapper_usage_key = self.response_usage_key(resp)
resp = self.create_xblock(parent_usage_key=wrapper_usage_key, category='html')
html_usage_key = self.response_usage_key(resp)
# The unit and its children should be private initially
unit_update_url = reverse_usage_url('xblock_handler', unit_usage_key)
self.assertFalse(self._is_location_published(unit_usage_key))
self.assertFalse(self._is_location_published(html_usage_key))
# Make the unit public and verify that the problem is also made public
resp = self.client.ajax_post(
unit_update_url,
data={'publish': 'make_public'}
)
self.assertEqual(resp.status_code, 200)
self._verify_published_with_no_draft(unit_usage_key)
self._verify_published_with_no_draft(html_usage_key)
# Make a draft for the unit and verify that the problem also has a draft
resp = self.client.ajax_post(
unit_update_url,
data={
'id': unicode(unit_usage_key),
'metadata': {},
}
)
self.assertEqual(resp.status_code, 200)
self._verify_published_with_draft(unit_usage_key)
self._verify_published_with_draft(html_usage_key)
class TestEditSplitModule(ItemTest):
    """
    Tests around editing instances of the split_test module.
    """
    def setUp(self):
        super(TestEditSplitModule, self).setUp()
        # Two group configurations: the first with 2 groups, the second with 3.
        self.course.user_partitions = [
            UserPartition(
                0, 'first_partition', 'First Partition',
                [Group("0", 'alpha'), Group("1", 'beta')]
            ),
            UserPartition(
                1, 'second_partition', 'Second Partition',
                [Group("0", 'Group 0'), Group("1", 'Group 1'), Group("2", 'Group 2')]
            )
        ]
        self.store.update_item(self.course, self.user.id)
        # Create a split_test block inside a fresh vertical.
        root_usage_key = self._create_vertical()
        resp = self.create_xblock(category='split_test', parent_usage_key=root_usage_key)
        self.split_test_usage_key = self.response_usage_key(resp)
        self.split_test_update_url = reverse_usage_url("xblock_handler", self.split_test_usage_key)
        self.request_factory = RequestFactory()
        self.request = self.request_factory.get('/dummy-url')
        self.request.user = self.user
    def _update_partition_id(self, partition_id):
        """
        Helper method that sets the user_partition_id to the supplied value.

        The updated split_test instance is returned.
        """
        self.client.ajax_post(
            self.split_test_update_url,
            # Even though user_partition_id is Scope.content, it will get saved by the Studio editor as
            # metadata. The code in item.py will update the field correctly, even though it is not the
            # expected scope.
            data={'metadata': {'user_partition_id': str(partition_id)}}
        )
        # Verify the partition_id was saved.
        split_test = self.get_item_from_modulestore(self.split_test_usage_key, verify_is_draft=True)
        self.assertEqual(partition_id, split_test.user_partition_id)
        return split_test
    def _assert_children(self, expected_number):
        """
        Verifies the number of children of the split_test instance.

        Returns the freshly-fetched split_test draft.
        """
        split_test = self.get_item_from_modulestore(self.split_test_usage_key, True)
        self.assertEqual(expected_number, len(split_test.children))
        return split_test
    def test_create_groups(self):
        """
        Test that verticals are created for the configuration groups when
        a split test module is edited.
        """
        split_test = self.get_item_from_modulestore(self.split_test_usage_key, verify_is_draft=True)
        # Initially, no user_partition_id is set, and the split_test has no children.
        self.assertEqual(-1, split_test.user_partition_id)
        self.assertEqual(0, len(split_test.children))
        # Set the user_partition_id to 0.
        split_test = self._update_partition_id(0)
        # Verify that child verticals have been set to match the groups
        self.assertEqual(2, len(split_test.children))
        vertical_0 = self.get_item_from_modulestore(split_test.children[0], verify_is_draft=True)
        vertical_1 = self.get_item_from_modulestore(split_test.children[1], verify_is_draft=True)
        self.assertEqual("vertical", vertical_0.category)
        self.assertEqual("vertical", vertical_1.category)
        self.assertEqual("Group ID 0", vertical_0.display_name)
        self.assertEqual("Group ID 1", vertical_1.display_name)
        # Verify that the group_id_to_child mapping is correct.
        self.assertEqual(2, len(split_test.group_id_to_child))
        self.assertEqual(vertical_0.location, split_test.group_id_to_child['0'])
        self.assertEqual(vertical_1.location, split_test.group_id_to_child['1'])
    def test_change_user_partition_id(self):
        """
        Test what happens when the user_partition_id is changed to a different
        group configuration.
        """
        # Set to first group configuration.
        split_test = self._update_partition_id(0)
        self.assertEqual(2, len(split_test.children))
        initial_vertical_0_location = split_test.children[0]
        initial_vertical_1_location = split_test.children[1]
        # Set to second group configuration
        split_test = self._update_partition_id(1)
        # We don't remove existing children.
        self.assertEqual(5, len(split_test.children))
        self.assertEqual(initial_vertical_0_location, split_test.children[0])
        self.assertEqual(initial_vertical_1_location, split_test.children[1])
        vertical_0 = self.get_item_from_modulestore(split_test.children[2], verify_is_draft=True)
        vertical_1 = self.get_item_from_modulestore(split_test.children[3], verify_is_draft=True)
        vertical_2 = self.get_item_from_modulestore(split_test.children[4], verify_is_draft=True)
        # Verify that the group_id_to_child mapping is correct.
        self.assertEqual(3, len(split_test.group_id_to_child))
        self.assertEqual(vertical_0.location, split_test.group_id_to_child['0'])
        self.assertEqual(vertical_1.location, split_test.group_id_to_child['1'])
        self.assertEqual(vertical_2.location, split_test.group_id_to_child['2'])
        # The mapping must point at the new verticals, not the originals.
        self.assertNotEqual(initial_vertical_0_location, vertical_0.location)
        self.assertNotEqual(initial_vertical_1_location, vertical_1.location)
    def test_change_same_user_partition_id(self):
        """
        Test that nothing happens when the user_partition_id is set to the same value twice.
        """
        # Set to first group configuration.
        split_test = self._update_partition_id(0)
        self.assertEqual(2, len(split_test.children))
        initial_group_id_to_child = split_test.group_id_to_child
        # Set again to first group configuration.
        split_test = self._update_partition_id(0)
        self.assertEqual(2, len(split_test.children))
        self.assertEqual(initial_group_id_to_child, split_test.group_id_to_child)
    def test_change_non_existent_user_partition_id(self):
        """
        Test that nothing happens when the user_partition_id is set to a value that doesn't exist.

        The user_partition_id will be updated, but children and group_id_to_child map will not change.
        """
        # Set to first group configuration.
        split_test = self._update_partition_id(0)
        self.assertEqual(2, len(split_test.children))
        initial_group_id_to_child = split_test.group_id_to_child
        # Set to a group configuration that doesn't exist.
        split_test = self._update_partition_id(-50)
        self.assertEqual(2, len(split_test.children))
        self.assertEqual(initial_group_id_to_child, split_test.group_id_to_child)
    def test_delete_children(self):
        """
        Test that deleting a child in the group_id_to_child map updates the map.

        Also test that deleting a child not in the group_id_to_child_map behaves properly.
        """
        # Set to first group configuration.
        self._update_partition_id(0)
        split_test = self._assert_children(2)
        vertical_1_usage_key = split_test.children[1]
        # Add an extra child to the split_test
        resp = self.create_xblock(category='html', parent_usage_key=self.split_test_usage_key)
        extra_child_usage_key = self.response_usage_key(resp)
        self._assert_children(3)
        # Remove the first child (which is part of the group configuration).
        # Posting a 'children' list replaces the block's children wholesale.
        resp = self.client.ajax_post(
            self.split_test_update_url,
            data={'children': [unicode(vertical_1_usage_key), unicode(extra_child_usage_key)]}
        )
        self.assertEqual(resp.status_code, 200)
        split_test = self._assert_children(2)
        # Check that group_id_to_child was updated appropriately
        group_id_to_child = split_test.group_id_to_child
        self.assertEqual(1, len(group_id_to_child))
        self.assertEqual(vertical_1_usage_key, group_id_to_child['1'])
        # Remove the "extra" child and make sure that group_id_to_child did not change.
        resp = self.client.ajax_post(
            self.split_test_update_url,
            data={'children': [unicode(vertical_1_usage_key)]}
        )
        self.assertEqual(resp.status_code, 200)
        split_test = self._assert_children(1)
        self.assertEqual(group_id_to_child, split_test.group_id_to_child)
    def test_add_groups(self):
        """
        Test the "fix up behavior" when groups are missing (after a group is added to a group configuration).

        This test actually belongs over in common, but it relies on a mutable modulestore.
        TODO: move tests that can go over to common after the mixed modulestore work is done.  # pylint: disable=fixme
        """
        # Set to first group configuration.
        split_test = self._update_partition_id(0)
        # Add a group to the first group configuration.
        split_test.user_partitions = [
            UserPartition(
                0, 'first_partition', 'First Partition',
                [Group("0", 'alpha'), Group("1", 'beta'), Group("2", 'pie')]
            )
        ]
        self.store.update_item(split_test, self.user.id)
        # group_id_to_child and children have not changed yet.
        split_test = self._assert_children(2)
        group_id_to_child = split_test.group_id_to_child.copy()
        self.assertEqual(2, len(group_id_to_child))
        # Test environment and Studio use different module systems
        # (CachingDescriptorSystem is used in tests, PreviewModuleSystem in Studio).
        # CachingDescriptorSystem doesn't have user service, that's needed for
        # SplitTestModule. So, in this line of code we add this service manually.
        split_test.runtime._services['user'] = StudioUserService(self.request)  # pylint: disable=protected-access
        # Call add_missing_groups method to add the missing group.
        split_test.add_missing_groups(self.request)
        split_test = self._assert_children(3)
        self.assertNotEqual(group_id_to_child, split_test.group_id_to_child)
        group_id_to_child = split_test.group_id_to_child
        # The new third child corresponds to the newly-added group "2".
        self.assertEqual(split_test.children[2], group_id_to_child["2"])
        # Call add_missing_groups again -- it should be a no-op.
        split_test.add_missing_groups(self.request)
        split_test = self._assert_children(3)
        self.assertEqual(group_id_to_child, split_test.group_id_to_child)
@ddt.ddt
class TestComponentHandler(TestCase):
    """
    Tests for the component_handler view, backed by a mocked modulestore.
    """
    def setUp(self):
        self.request_factory = RequestFactory()
        modulestore_patcher = patch('contentstore.views.component.modulestore')
        self.modulestore = modulestore_patcher.start()
        self.addCleanup(modulestore_patcher.stop)
        # component_handler calls modulestore.get_item to get the descriptor of the requested xBlock.
        # Here, we mock the return value of modulestore.get_item so it can be used to mock the handler
        # of the xBlock descriptor.
        self.descriptor = self.modulestore.return_value.get_item.return_value
        self.usage_key_string = unicode(
            Location('dummy_org', 'dummy_course', 'dummy_run', 'dummy_category', 'dummy_name')
        )
        self.user = UserFactory()
        self.request = self.request_factory.get('/dummy-url')
        self.request.user = self.user
    def test_invalid_handler(self):
        # An unknown handler name should surface to the client as a 404.
        self.descriptor.handle.side_effect = NoSuchHandlerError
        with self.assertRaises(Http404):
            component_handler(self.request, self.usage_key_string, 'invalid_handler')
    @ddt.data('GET', 'POST', 'PUT', 'DELETE')
    def test_request_method(self, method):
        def check_handler(handler, request, suffix):
            # The HTTP method of the incoming request must be passed through unchanged.
            self.assertEquals(request.method, method)
            return Response()
        self.descriptor.handle = check_handler
        # Have to use the right method to create the request to get the HTTP method that we want
        request = getattr(self.request_factory, method.lower())('/dummy-url')
        request.user = self.user
        component_handler(request, self.usage_key_string, 'dummy_handler')
    @ddt.data(200, 404, 500)
    def test_response_code(self, status_code):
        def create_response(handler, request, suffix):
            return Response(status_code=status_code)
        self.descriptor.handle = create_response
        # The status code returned by the xBlock handler must be propagated.
        response = component_handler(self.request, self.usage_key_string, 'dummy_handler')
        self.assertEquals(response.status_code, status_code)
class TestComponentTemplates(CourseTestCase):
    """
    Unit tests for the generation of the component templates for a course.
    """
    def setUp(self):
        super(TestComponentTemplates, self).setUp()
        self.templates = get_component_templates(self.course)
    def get_templates_of_type(self, template_type):
        """
        Returns the templates for the specified type, or None if none is found.
        """
        for template_dict in self.templates:
            if template_dict.get('type') == template_type:
                return template_dict.get('templates')
        return None
    def get_template(self, templates, display_name):
        """
        Returns the template which has the specified display name.
        """
        for template in templates:
            if template.get('display_name') == display_name:
                return template
        return None
    def test_basic_components(self):
        """
        Test the handling of the basic component templates.
        """
        # The standard component types are always present...
        for expected_type in ('discussion', 'html', 'problem', 'video'):
            self.assertIsNotNone(self.get_templates_of_type(expected_type))
        # ...but 'advanced' only appears once advanced modules are enabled.
        self.assertIsNone(self.get_templates_of_type('advanced'))
    def test_advanced_components(self):
        """
        Test the handling of advanced component templates.
        """
        self.course.advanced_modules.append('word_cloud')
        self.templates = get_component_templates(self.course)
        advanced_templates = self.get_templates_of_type('advanced')
        self.assertEqual(len(advanced_templates), 1)
        word_cloud_template = advanced_templates[0]
        self.assertEqual(word_cloud_template.get('category'), 'word_cloud')
        self.assertEqual(word_cloud_template.get('display_name'), u'Word cloud')
        self.assertIsNone(word_cloud_template.get('boilerplate_name', None))
        # Verify that non-advanced components are not added twice
        self.course.advanced_modules.append('video')
        self.course.advanced_modules.append('openassessment')
        self.templates = get_component_templates(self.course)
        advanced_templates = self.get_templates_of_type('advanced')
        self.assertEqual(len(advanced_templates), 1)
        only_template = advanced_templates[0]
        self.assertNotEqual(only_template.get('category'), 'video')
        self.assertNotEqual(only_template.get('category'), 'openassessment')
    def test_advanced_components_without_display_name(self):
        """
        Test that advanced components without display names display their category instead.
        """
        self.course.advanced_modules.append('graphical_slider_tool')
        self.templates = get_component_templates(self.course)
        first_advanced = self.get_templates_of_type('advanced')[0]
        self.assertEqual(first_advanced.get('display_name'), 'graphical_slider_tool')
    def test_advanced_problems(self):
        """
        Test the handling of advanced problem templates.
        """
        problem_templates = self.get_templates_of_type('problem')
        # Peer assessment is delivered as an 'openassessment' template under problems.
        ora_template = self.get_template(problem_templates, u'Peer Assessment')
        self.assertIsNotNone(ora_template)
        self.assertEqual(ora_template.get('category'), 'openassessment')
        self.assertIsNone(ora_template.get('boilerplate_name', None))
class TestXBlockInfo(ItemTest):
    """
    Unit tests for XBlock's outline handling.
    """
    def setUp(self):
        super(TestXBlockInfo, self).setUp()
        user_id = self.user.id
        # Course structure under test: chapter -> sequential -> vertical -> video.
        self.chapter = ItemFactory.create(
            parent_location=self.course.location, category='chapter', display_name="Week 1", user_id=user_id
        )
        self.sequential = ItemFactory.create(
            parent_location=self.chapter.location, category='sequential', display_name="Lesson 1", user_id=user_id
        )
        self.vertical = ItemFactory.create(
            parent_location=self.sequential.location, category='vertical', display_name='Unit 1', user_id=user_id
        )
        self.video = ItemFactory.create(
            parent_location=self.vertical.location, category='video', display_name='My Video', user_id=user_id
        )
    def test_json_responses(self):
        # The outline handler should return course xblock info as JSON.
        outline_url = reverse_usage_url('xblock_outline_handler', self.usage_key)
        resp = self.client.get(outline_url, HTTP_ACCEPT='application/json')
        json_response = json.loads(resp.content)
        self.validate_course_xblock_info(json_response, course_outline=True)
    def test_chapter_xblock_info(self):
        chapter = modulestore().get_item(self.chapter.location)
        xblock_info = create_xblock_info(
            chapter,
            include_child_info=True,
            include_children_predicate=ALWAYS,
        )
        self.validate_chapter_xblock_info(xblock_info)
    def test_sequential_xblock_info(self):
        sequential = modulestore().get_item(self.sequential.location)
        xblock_info = create_xblock_info(
            sequential,
            include_child_info=True,
            include_children_predicate=ALWAYS,
        )
        self.validate_sequential_xblock_info(xblock_info)
    def test_vertical_xblock_info(self):
        vertical = modulestore().get_item(self.vertical.location)
        # Verticals additionally request ancestor info for breadcrumb display.
        xblock_info = create_xblock_info(
            vertical,
            include_child_info=True,
            include_children_predicate=ALWAYS,
            include_ancestor_info=True
        )
        self.validate_vertical_xblock_info(xblock_info)
    def test_component_xblock_info(self):
        video = modulestore().get_item(self.video.location)
        xblock_info = create_xblock_info(
            video,
            include_child_info=True,
            include_children_predicate=ALWAYS
        )
        self.validate_component_xblock_info(xblock_info)
    def validate_course_xblock_info(self, xblock_info, has_child_info=True, course_outline=False):
        """
        Validate that the xblock info is correct for the test course.
        """
        self.assertEqual(xblock_info['category'], 'course')
        self.assertEqual(xblock_info['id'], 'i4x://MITx/999/course/Robot_Super_Course')
        self.assertEqual(xblock_info['display_name'], 'Robot Super Course')
        self.assertTrue(xblock_info['published'])
        # Finally, validate the entire response for consistency
        self.validate_xblock_info_consistency(xblock_info, has_child_info=has_child_info, course_outline=course_outline)
    def validate_chapter_xblock_info(self, xblock_info, has_child_info=True):
        """
        Validate that the xblock info is correct for the test chapter.
        """
        self.assertEqual(xblock_info['category'], 'chapter')
        self.assertEqual(xblock_info['id'], 'i4x://MITx/999/chapter/Week_1')
        self.assertEqual(xblock_info['display_name'], 'Week 1')
        self.assertTrue(xblock_info['published'])
        self.assertIsNone(xblock_info.get('edited_by', None))
        self.assertEqual(xblock_info['course_graders'], '["Homework", "Lab", "Midterm Exam", "Final Exam"]')
        self.assertEqual(xblock_info['start'], '2030-01-01T00:00:00Z')
        self.assertEqual(xblock_info['graded'], False)
        self.assertEqual(xblock_info['due'], None)
        self.assertEqual(xblock_info['format'], None)
        # Finally, validate the entire response for consistency
        self.validate_xblock_info_consistency(xblock_info, has_child_info=has_child_info)
    def validate_sequential_xblock_info(self, xblock_info, has_child_info=True):
        """
        Validate that the xblock info is correct for the test sequential.
        """
        self.assertEqual(xblock_info['category'], 'sequential')
        self.assertEqual(xblock_info['id'], 'i4x://MITx/999/sequential/Lesson_1')
        self.assertEqual(xblock_info['display_name'], 'Lesson 1')
        self.assertTrue(xblock_info['published'])
        self.assertIsNone(xblock_info.get('edited_by', None))
        # Finally, validate the entire response for consistency
        self.validate_xblock_info_consistency(xblock_info, has_child_info=has_child_info)
    def validate_vertical_xblock_info(self, xblock_info):
        """
        Validate that the xblock info is correct for the test vertical.
        """
        self.assertEqual(xblock_info['category'], 'vertical')
        self.assertEqual(xblock_info['id'], 'i4x://MITx/999/vertical/Unit_1')
        self.assertEqual(xblock_info['display_name'], 'Unit 1')
        self.assertTrue(xblock_info['published'])
        self.assertEqual(xblock_info['edited_by'], 'testuser')
        # Validate that the correct ancestor info has been included
        # (ancestors are ordered nearest-first: sequential, chapter, course).
        ancestor_info = xblock_info.get('ancestor_info', None)
        self.assertIsNotNone(ancestor_info)
        ancestors = ancestor_info['ancestors']
        self.assertEqual(len(ancestors), 3)
        self.validate_sequential_xblock_info(ancestors[0], has_child_info=True)
        self.validate_chapter_xblock_info(ancestors[1], has_child_info=False)
        self.validate_course_xblock_info(ancestors[2], has_child_info=False)
        # Finally, validate the entire response for consistency
        self.validate_xblock_info_consistency(xblock_info, has_child_info=True, has_ancestor_info=True)
    def validate_component_xblock_info(self, xblock_info):
        """
        Validate that the xblock info is correct for the test component.
        """
        self.assertEqual(xblock_info['category'], 'video')
        self.assertEqual(xblock_info['id'], 'i4x://MITx/999/video/My_Video')
        self.assertEqual(xblock_info['display_name'], 'My Video')
        self.assertTrue(xblock_info['published'])
        self.assertIsNone(xblock_info.get('edited_by', None))
        # Finally, validate the entire response for consistency
        self.validate_xblock_info_consistency(xblock_info)
    def validate_xblock_info_consistency(self, xblock_info, has_ancestor_info=False, has_child_info=False,
                                         course_outline=False):
        """
        Validate that the xblock info is internally consistent.
        """
        self.assertIsNotNone(xblock_info['display_name'])
        self.assertIsNotNone(xblock_info['id'])
        self.assertIsNotNone(xblock_info['category'])
        self.assertTrue(xblock_info['published'])
        if has_ancestor_info:
            self.assertIsNotNone(xblock_info.get('ancestor_info', None))
            ancestors = xblock_info['ancestor_info']['ancestors']
            for ancestor in xblock_info['ancestor_info']['ancestors']:
                # Recursively validate each ancestor.
                self.validate_xblock_info_consistency(
                    ancestor,
                    has_child_info=(ancestor == ancestors[0]),  # Only the direct ancestor includes children
                    course_outline=course_outline
                )
        else:
            self.assertIsNone(xblock_info.get('ancestor_info', None))
        if has_child_info:
            self.assertIsNotNone(xblock_info.get('child_info', None))
            if xblock_info['child_info'].get('children', None):
                for child_response in xblock_info['child_info']['children']:
                    # Recursively validate each child; a child has child_info
                    # only if it has children of its own.
                    self.validate_xblock_info_consistency(
                        child_response,
                        has_child_info=(not child_response.get('child_info', None) is None),
                        course_outline=course_outline
                    )
        else:
            self.assertIsNone(xblock_info.get('child_info', None))
        if xblock_info['category'] == 'vertical' and not course_outline:
            self.assertEqual(xblock_info['edited_by'], 'testuser')
        else:
            self.assertIsNone(xblock_info.get('edited_by', None))
class TestXBlockPublishingInfo(ItemTest):
    """
    Unit tests for XBlock's outline handling.
    """
    # Paths are lists of child indexes used to address nested items inside an
    # xblock_info structure (see _get_child_xblock_info / _verify_xblock_info_state).
    FIRST_SUBSECTION_PATH = [0]
    FIRST_UNIT_PATH = [0, 0]
    SECOND_UNIT_PATH = [0, 1]
def _create_child(self, parent, category, display_name, publish_item=False, staff_only=False):
"""
Creates a child xblock for the given parent.
"""
child = ItemFactory.create(
parent_location=parent.location, category=category, display_name=display_name,
user_id=self.user.id, publish_item=publish_item
)
if staff_only:
self._enable_staff_only(child.location)
return child
def _get_child_xblock_info(self, xblock_info, index):
"""
Returns the child xblock info at the specified index.
"""
children = xblock_info['child_info']['children']
self.assertTrue(len(children) > index)
return children[index]
    def _get_xblock_info(self, location):
        """
        Returns the xblock info for the specified location.

        Child info is always included so tests can traverse the full tree.
        """
        return create_xblock_info(
            modulestore().get_item(location),
            include_child_info=True,
            include_children_predicate=ALWAYS,
        )
    def _get_xblock_outline_info(self, location):
        """
        Returns the xblock info for the specified location as needed for the course outline page.
        """
        return create_xblock_info(
            modulestore().get_item(location),
            include_child_info=True,
            include_children_predicate=ALWAYS,
            course_outline=True
        )
def _set_release_date(self, location, start):
"""
Sets the release date for the specified xblock.
"""
xblock = modulestore().get_item(location)
xblock.start = start
self.store.update_item(xblock, self.user.id)
def _enable_staff_only(self, location):
"""
Enables staff only for the specified xblock.
"""
xblock = modulestore().get_item(location)
xblock.visible_to_staff_only = True
self.store.update_item(xblock, self.user.id)
def _set_display_name(self, location, display_name):
"""
Sets the display name for the specified xblock.
"""
xblock = modulestore().get_item(location)
xblock.display_name = display_name
self.store.update_item(xblock, self.user.id)
def _verify_xblock_info_state(self, xblock_info, xblock_info_field, expected_state, path=None, should_equal=True):
"""
Verify the state of an xblock_info field. If no path is provided then the root item will be verified.
If should_equal is True, assert that the current state matches the expected state, otherwise assert that they
do not match.
"""
if path:
direct_child_xblock_info = self._get_child_xblock_info(xblock_info, path[0])
remaining_path = path[1:] if len(path) > 1 else None
self._verify_xblock_info_state(direct_child_xblock_info, xblock_info_field, expected_state, remaining_path, should_equal)
else:
if should_equal:
self.assertEqual(xblock_info[xblock_info_field], expected_state)
else:
self.assertNotEqual(xblock_info[xblock_info_field], expected_state)
    def _verify_has_staff_only_message(self, xblock_info, expected_state, path=None):
        """
        Verify the staff_only_message field of xblock_info (at the optional child path).
        """
        self._verify_xblock_info_state(xblock_info, 'staff_only_message', expected_state, path)
    def _verify_visibility_state(self, xblock_info, expected_state, path=None, should_equal=True):
        """
        Verify the publish state (visibility_state field) of an item in the xblock_info.
        """
        self._verify_xblock_info_state(xblock_info, 'visibility_state', expected_state, path, should_equal)
    def _verify_explicit_staff_lock_state(self, xblock_info, expected_state, path=None, should_equal=True):
        """
        Verify the explicit staff lock state (has_explicit_staff_lock field) of an item in the xblock_info.
        """
        self._verify_xblock_info_state(xblock_info, 'has_explicit_staff_lock', expected_state, path, should_equal)
    def _verify_staff_lock_from_state(self, xblock_info, expected_state, path=None, should_equal=True):
        """
        Verify the staff_lock_from state of an item in the xblock_info.
        """
        self._verify_xblock_info_state(xblock_info, 'staff_lock_from', expected_state, path, should_equal)
def test_empty_chapter(self):
empty_chapter = self._create_child(self.course, 'chapter', "Empty Chapter")
xblock_info = self._get_xblock_info(empty_chapter.location)
self._verify_visibility_state(xblock_info, VisibilityState.unscheduled)
def test_empty_sequential(self):
chapter = self._create_child(self.course, 'chapter', "Test Chapter")
self._create_child(chapter, 'sequential', "Empty Sequential")
xblock_info = self._get_xblock_info(chapter.location)
self._verify_visibility_state(xblock_info, VisibilityState.unscheduled)
self._verify_visibility_state(xblock_info, VisibilityState.unscheduled, path=self.FIRST_SUBSECTION_PATH)
    def test_published_unit(self):
        """
        Tests the visibility state of a published unit with release date in the future.
        """
        # NOTE: creation order matters — the path constants address children by index.
        chapter = self._create_child(self.course, 'chapter', "Test Chapter")
        sequential = self._create_child(chapter, 'sequential', "Test Sequential")
        self._create_child(sequential, 'vertical', "Published Unit", publish_item=True)
        self._create_child(sequential, 'vertical', "Staff Only Unit", staff_only=True)
        # Future release date: published content is "ready", not yet "live".
        self._set_release_date(chapter.location, datetime.now(UTC) + timedelta(days=1))
        xblock_info = self._get_xblock_info(chapter.location)
        self._verify_visibility_state(xblock_info, VisibilityState.ready)
        self._verify_visibility_state(xblock_info, VisibilityState.ready, path=self.FIRST_SUBSECTION_PATH)
        self._verify_visibility_state(xblock_info, VisibilityState.ready, path=self.FIRST_UNIT_PATH)
        self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.SECOND_UNIT_PATH)
    def test_released_unit(self):
        """
        Tests the visibility state of a published unit with release date in the past.
        """
        # NOTE: creation order matters — the path constants address children by index.
        chapter = self._create_child(self.course, 'chapter', "Test Chapter")
        sequential = self._create_child(chapter, 'sequential', "Test Sequential")
        self._create_child(sequential, 'vertical', "Published Unit", publish_item=True)
        self._create_child(sequential, 'vertical', "Staff Only Unit", staff_only=True)
        # Past release date: published content is "live".
        self._set_release_date(chapter.location, datetime.now(UTC) - timedelta(days=1))
        xblock_info = self._get_xblock_info(chapter.location)
        self._verify_visibility_state(xblock_info, VisibilityState.live)
        self._verify_visibility_state(xblock_info, VisibilityState.live, path=self.FIRST_SUBSECTION_PATH)
        self._verify_visibility_state(xblock_info, VisibilityState.live, path=self.FIRST_UNIT_PATH)
        self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.SECOND_UNIT_PATH)
def test_unpublished_changes(self):
    """
    A published unit with pending draft edits should flag 'needs_attention'.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    published_unit = self._create_child(subsection, 'vertical', "Published Unit", publish_item=True)
    self._create_child(subsection, 'vertical', "Staff Only Unit", staff_only=True)
    # Editing the display name creates a draft revision of the unit.
    self._set_display_name(published_unit.location, 'Updated Unit')
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.needs_attention)
    self._verify_visibility_state(xblock_info, VisibilityState.needs_attention, path=self.FIRST_SUBSECTION_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.needs_attention, path=self.FIRST_UNIT_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.SECOND_UNIT_PATH)
def test_partially_released_section(self):
    """
    A section whose subsections are part released and part scheduled should
    itself report 'ready'.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    live_subsection = self._create_child(section, 'sequential', "Released Sequential")
    self._create_child(live_subsection, 'vertical', "Released Unit", publish_item=True)
    self._create_child(live_subsection, 'vertical', "Staff Only Unit", staff_only=True)
    self._set_release_date(section.location, datetime.now(UTC) - timedelta(days=1))
    ready_subsection = self._create_child(section, 'sequential', "Published Sequential")
    self._create_child(ready_subsection, 'vertical', "Published Unit", publish_item=True)
    self._create_child(ready_subsection, 'vertical', "Staff Only Unit", staff_only=True)
    self._set_release_date(ready_subsection.location, datetime.now(UTC) + timedelta(days=1))
    xblock_info = self._get_xblock_info(section.location)
    # The already-released subsection and its published unit are live.
    self._verify_visibility_state(xblock_info, VisibilityState.live, path=[0])
    self._verify_visibility_state(xblock_info, VisibilityState.live, path=[0, 0])
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=[0, 1])
    # The future-dated subsection and its published unit are merely ready.
    self._verify_visibility_state(xblock_info, VisibilityState.ready, path=[1])
    self._verify_visibility_state(xblock_info, VisibilityState.ready, path=[1, 0])
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=[1, 1])
    # The section as a whole is only 'ready' until everything has released.
    self._verify_visibility_state(xblock_info, VisibilityState.ready)
def test_staff_only_section(self):
    """
    An explicitly locked section makes itself and all descendants visible to
    staff only, while only the section itself carries the explicit lock.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter", staff_only=True)
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    self._create_child(subsection, 'vertical', "Unit")
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_SUBSECTION_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_UNIT_PATH)
    # Only the section was locked explicitly; the children inherit.
    self._verify_explicit_staff_lock_state(xblock_info, True)
    self._verify_explicit_staff_lock_state(xblock_info, False, path=self.FIRST_SUBSECTION_PATH)
    self._verify_explicit_staff_lock_state(xblock_info, False, path=self.FIRST_UNIT_PATH)
    self._verify_staff_lock_from_state(xblock_info, _xblock_type_and_display_name(section), path=self.FIRST_UNIT_PATH)
def test_no_staff_only_section(self):
    """
    A section containing one locked and one visible subsection is not itself
    staff-only.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    self._create_child(section, 'sequential', "Test Visible Sequential")
    self._create_child(section, 'sequential', "Test Staff Locked Sequential", staff_only=True)
    xblock_info = self._get_xblock_info(section.location)
    # Neither the section nor the visible subsection is staff-only...
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, should_equal=False)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=[0], should_equal=False)
    # ...but the locked subsection is.
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=[1])
def test_staff_only_subsection(self):
    """
    Locking a subsection makes it and its unit staff-only; the parent section
    becomes staff-only too because all of its children are locked, yet only
    the subsection carries the explicit lock.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential", staff_only=True)
    self._create_child(subsection, 'vertical', "Unit")
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_SUBSECTION_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_UNIT_PATH)
    self._verify_explicit_staff_lock_state(xblock_info, False)
    self._verify_explicit_staff_lock_state(xblock_info, True, path=self.FIRST_SUBSECTION_PATH)
    self._verify_explicit_staff_lock_state(xblock_info, False, path=self.FIRST_UNIT_PATH)
    # The unit inherits its lock from the subsection.
    self._verify_staff_lock_from_state(xblock_info, _xblock_type_and_display_name(subsection), path=self.FIRST_UNIT_PATH)
def test_no_staff_only_subsection(self):
    """
    A subsection with one locked and one visible unit is not itself
    staff-only.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    self._create_child(subsection, 'vertical', "Unit")
    self._create_child(subsection, 'vertical', "Locked Unit", staff_only=True)
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_SUBSECTION_PATH, should_equal=False)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_UNIT_PATH, should_equal=False)
    # Only the explicitly locked unit is staff-only.
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.SECOND_UNIT_PATH)
def test_staff_only_unit(self):
    """
    Locking a lone unit propagates staff-only visibility up through its
    ancestors, while the explicit lock stays on the unit alone.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    locked_unit = self._create_child(subsection, 'vertical', "Unit", staff_only=True)
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_SUBSECTION_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.FIRST_UNIT_PATH)
    self._verify_explicit_staff_lock_state(xblock_info, False)
    self._verify_explicit_staff_lock_state(xblock_info, False, path=self.FIRST_SUBSECTION_PATH)
    self._verify_explicit_staff_lock_state(xblock_info, True, path=self.FIRST_UNIT_PATH)
    self._verify_staff_lock_from_state(xblock_info, _xblock_type_and_display_name(locked_unit), path=self.FIRST_UNIT_PATH)
def test_unscheduled_section_with_live_subsection(self):
    """
    An unscheduled section containing an already-live subsection needs
    attention.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    self._create_child(subsection, 'vertical', "Published Unit", publish_item=True)
    self._create_child(subsection, 'vertical', "Staff Only Unit", staff_only=True)
    # Only the subsection gets a (past) release date; the section has none.
    self._set_release_date(subsection.location, datetime.now(UTC) - timedelta(days=1))
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.needs_attention)
    self._verify_visibility_state(xblock_info, VisibilityState.live, path=self.FIRST_SUBSECTION_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.live, path=self.FIRST_UNIT_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.SECOND_UNIT_PATH)
def test_unreleased_section_with_live_subsection(self):
    """
    A future-dated section containing an already-live subsection needs
    attention.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    self._create_child(subsection, 'vertical', "Published Unit", publish_item=True)
    self._create_child(subsection, 'vertical', "Staff Only Unit", staff_only=True)
    # The section releases in the future but its subsection is already live.
    self._set_release_date(section.location, datetime.now(UTC) + timedelta(days=1))
    self._set_release_date(subsection.location, datetime.now(UTC) - timedelta(days=1))
    xblock_info = self._get_xblock_info(section.location)
    self._verify_visibility_state(xblock_info, VisibilityState.needs_attention)
    self._verify_visibility_state(xblock_info, VisibilityState.live, path=self.FIRST_SUBSECTION_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.live, path=self.FIRST_UNIT_PATH)
    self._verify_visibility_state(xblock_info, VisibilityState.staff_only, path=self.SECOND_UNIT_PATH)
def test_locked_section_staff_only_message(self):
    """
    Only the locked section itself shows the staff-only message, not its
    descendants.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter", staff_only=True)
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    self._create_child(subsection, 'vertical', "Unit")
    xblock_info = self._get_xblock_outline_info(section.location)
    self._verify_has_staff_only_message(xblock_info, True)
    self._verify_has_staff_only_message(xblock_info, False, path=self.FIRST_SUBSECTION_PATH)
    self._verify_has_staff_only_message(xblock_info, False, path=self.FIRST_UNIT_PATH)
def test_locked_unit_staff_only_message(self):
    """
    A lone locked unit shows the staff-only message on itself and on every
    ancestor.
    """
    section = self._create_child(self.course, 'chapter', "Test Chapter")
    subsection = self._create_child(section, 'sequential', "Test Sequential")
    self._create_child(subsection, 'vertical', "Unit", staff_only=True)
    xblock_info = self._get_xblock_outline_info(section.location)
    self._verify_has_staff_only_message(xblock_info, True)
    self._verify_has_staff_only_message(xblock_info, True, path=self.FIRST_SUBSECTION_PATH)
    self._verify_has_staff_only_message(xblock_info, True, path=self.FIRST_UNIT_PATH)
|
OWASP/django-DefectDojo | refs/heads/master | dojo/unittests/test_php_symfony_security_check_parser.py | 2 | from django.test import TestCase
from dojo.tools.php_symfony_security_check.parser import PhpSymfonySecurityCheckParser
from dojo.models import Test
class TestPhpSymfonySecurityCheckerParser(TestCase):
    """Checks the finding counts produced by PhpSymfonySecurityCheckParser."""

    # Directory holding the sample scan reports used by these tests.
    _SCAN_DIR = "dojo/unittests/scans/php_symfony_security_check_sample/"

    def _parse(self, filename):
        """Parse the given sample report and return the parser's findings.

        Uses a context manager so the file handle is closed even if the
        parser raises (the previous code leaked the handle on error).
        """
        with open(self._SCAN_DIR + filename) as testfile:
            parser = PhpSymfonySecurityCheckParser(testfile, Test())
        return parser.items

    def test_php_symfony_security_check_parser_without_file_has_no_findings(self):
        parser = PhpSymfonySecurityCheckParser(None, Test())
        self.assertEqual(0, len(parser.items))

    def test_php_symfony_security_check_parser_with_no_vuln_has_no_findings(self):
        self.assertEqual(0, len(self._parse("php_symfony_no_vuln.json")))

    def test_php_symfony_security_check_parser_with_one_criticle_vuln_has_one_findings(self):
        # (sic) "criticle" -- the method name is kept so test discovery and
        # any external references to it remain stable.
        self.assertEqual(1, len(self._parse("php_symfony_one_vuln.json")))

    def test_php_symfony_security_check_parser_with_many_vuln_has_many_findings(self):
        self.assertEqual(8, len(self._parse("php_symfony_many_vuln.json")))
|
zhanghui9700/eonboard | refs/heads/master | eoncloud_web/eoncloud_web/formats/__init__.py | 12133432 | |
zdary/intellij-community | refs/heads/master | python/helpers/tests/generator3_tests/data/FileSystemUtil/copy_skeletons_failed_version_stamps_ignored/dst/after/generated.py | 12133432 | |
nju520/django | refs/heads/master | django/conf/locale/zh_Hant/__init__.py | 12133432 | |
CSC301H-Fall2013/JuakStore | refs/heads/master | site-packages/build/lib/django/contrib/auth/management/commands/__init__.py | 12133432 | |
vrv/tensorflow | refs/heads/master | tensorflow/tools/docs/doc_generator_visitor.py | 17 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `traverse` visitor for processing documentation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.util import tf_inspect
class DocGeneratorVisitor(object):
  """A visitor that generates docs for a python object when __call__ed."""

  def __init__(self, root_name=''):
    """Make a visitor.

    As this visitor is starting its traversal at a module or class, it will
    not hold the name of that object during traversal. `root_name` is the
    name it should use for that object, effectively prefixing all names with
    "root_name.".

    Args:
      root_name: The name of the root module/class.
    """
    self.set_root_name(root_name)
    self._index = {}
    self._tree = {}
    # The three fields below are computed lazily by _maybe_find_duplicates().
    self._reverse_index = None
    self._duplicates = None
    self._duplicate_of = None

  def set_root_name(self, root_name):
    """Sets the root name for subsequent __call__s."""
    self._root_name = root_name or ''
    self._prefix = (root_name + '.') if root_name else ''

  @property
  def index(self):
    """A map from fully qualified names to objects to be documented.

    The index is filled when the visitor is passed to `traverse`.

    Returns:
      The index filled by traversal.
    """
    return self._index

  @property
  def tree(self):
    """A map from fully qualified names to all its child names for traversal.

    The full name to member names map is filled when the visitor is passed to
    `traverse`.

    Returns:
      The full name to member name map filled by traversal.
    """
    return self._tree

  @property
  def reverse_index(self):
    """A map from `id(object)` to the preferred fully qualified name.

    This map only contains non-primitive objects (no numbers or strings)
    present in `index` (for primitive objects, `id()` doesn't quite do the
    right thing). It is computed when it, `duplicate_of`, or `duplicates`
    are first accessed.

    Returns:
      The `id(object)` to full name map.
    """
    self._maybe_find_duplicates()
    return self._reverse_index

  @property
  def duplicate_of(self):
    """A map from duplicate full names to a preferred fully qualified name.

    This map only contains names that are not themselves a preferred name.

    It is computed when it, `reverse_index`, or `duplicates` are first
    accessed.

    Returns:
      The map from duplicate name to preferred name.
    """
    self._maybe_find_duplicates()
    return self._duplicate_of

  @property
  def duplicates(self):
    """A map from preferred full names to a list of all names for this symbol.

    This function returns a map from preferred (master) name for a symbol to
    a lexicographically sorted list of all aliases for that name (incl. the
    master name). Symbols without duplicate names do not appear in this map.

    It is computed when it, `reverse_index`, or `duplicate_of` are first
    accessed.

    Returns:
      The map from master name to list of all duplicate names.
    """
    self._maybe_find_duplicates()
    return self._duplicates

  def _add_prefix(self, name):
    """Adds the root name to a name."""
    return self._prefix + name if name else self._root_name

  def __call__(self, parent_name, parent, children):
    """Visitor interface, see `tensorflow/tools/common:traverse` for details.

    This method is called for each symbol found in a traversal using
    `tensorflow/tools/common:traverse`. It should not be called directly in
    user code.

    Args:
      parent_name: The fully qualified name of a symbol found during
        traversal.
      parent: The Python object referenced by `parent_name`.
      children: A list of `(name, py_object)` pairs enumerating, in
        alphabetical order, the children (as determined by
        `tf_inspect.getmembers`) of `parent`. `name` is the local name of
        `py_object` in `parent`. Entries that should not be documented are
        removed from this list in place.

    Raises:
      RuntimeError: If this visitor is called with a `parent` that is not a
        class or module.
    """
    parent_name = self._add_prefix(parent_name)
    self._index[parent_name] = parent
    self._tree[parent_name] = []

    if not (tf_inspect.ismodule(parent) or tf_inspect.isclass(parent)):
      raise RuntimeError('Unexpected type in visitor -- %s: %r' % (parent_name,
                                                                   parent))

    # Record the children we keep and filter out the rest. The previous
    # implementation deleted entries by their index in a *copy* of the list,
    # which removes the wrong elements once an earlier deletion has shifted
    # the remaining indices; rebuilding the list avoids that.
    kept = []
    for name, child in children:
      # Don't document __metaclass__.
      if name == '__metaclass__':
        continue
      full_name = '.'.join([parent_name, name]) if parent_name else name
      self._index[full_name] = child
      self._tree[parent_name].append(name)
      kept.append((name, child))
    # Mutate in place so the caller observes the filtered list.
    children[:] = kept

  def _maybe_find_duplicates(self):
    """Compute data structures containing information about duplicates.

    Find duplicates in `index` and decide on one to be the "master" name.

    Computes a reverse_index mapping each object id to its master name.

    Also computes a map `duplicate_of` from aliases to their master name (the
    master name itself has no entry in this map), and a map `duplicates` from
    master names to a lexicographically sorted list of all aliases for that
    name (incl. the master name).

    All these are computed and set as fields if they haven't already.
    """
    if self._reverse_index is not None:
      return

    # We cannot use the duplicate mechanism for primitive constants or the
    # (interned) empty tuple, since e.g. id(c1) == id(c2) with c1=1, c2=1.
    # This is unproblematic since such constants have no usable docstring
    # and won't be documented automatically. (On Python 3 these builtins
    # cover the integer/string type sets previously taken from `six`; the
    # old `py_object is not ()` identity test relied on CPython interning.)
    primitive_types = (int, float, complex, bool, str, bytes)

    # Maps the id of a symbol to its fully qualified name. For symbols that
    # have several aliases, this map contains the first one found.
    # We use id(py_object) to get a hashable value for py_object. Note all
    # objects in _index are in memory at the same time so this is safe.
    reverse_index = {}

    # Make a preliminary duplicates map. For all sets of duplicate names, it
    # maps the first name found to a list of all duplicate names.
    raw_duplicates = {}
    for full_name, py_object in self._index.items():
      if py_object is None or isinstance(py_object, primitive_types):
        continue
      if isinstance(py_object, tuple) and not py_object:
        continue  # the interned empty tuple
      object_id = id(py_object)
      if object_id in reverse_index:
        master_name = reverse_index[object_id]
        if master_name in raw_duplicates:
          raw_duplicates[master_name].append(full_name)
        else:
          raw_duplicates[master_name] = [master_name, full_name]
      else:
        reverse_index[object_id] = full_name

    # Decide on master names, rewire duplicates and make a duplicate_of map
    # mapping all non-master duplicates to the master name. The master
    # symbol does not have an entry in this map.
    duplicate_of = {}

    # Duplicates maps the main symbols to the set of all duplicates of that
    # symbol (incl. itself).
    duplicates = {}

    for names in raw_duplicates.values():
      names = sorted(names)

      # Choose the lexicographically first name with the minimum number of
      # submodules. This will prefer highest level namespace for any symbol.
      master_name = min(names, key=lambda name: name.count('.'))

      duplicates[master_name] = names
      for name in names:
        if name != master_name:
          duplicate_of[name] = master_name

      # Set the reverse index to the canonical name.
      reverse_index[id(self._index[master_name])] = master_name

    self._duplicate_of = duplicate_of
    self._duplicates = duplicates
    self._reverse_index = reverse_index
|
machinalis/machinalis-movie-reviews | refs/heads/master | movie_recommendations/factories.py | 1 | from faker import Faker
faker = Faker()
def get_fake_user_data():
    """Return a dict of randomly generated sign-up data for a test user.

    The password is fixed; name, username and email are produced by Faker.
    """
    return dict(
        name=faker.name(),
        username=faker.user_name(),
        email=faker.email(),
        password='s3cr4t$',
    )
|
pannarale/pycbc | refs/heads/master | pycbc/noise/gaussian.py | 6 | # Copyright (C) 2012 Alex Nitz
#
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# =============================================================================
#
# Preamble
#
# =============================================================================
#
"""This module contains functions to generate gaussian noise colored with a
noise spectrum.
"""
from pycbc.types import TimeSeries, zeros
from pycbc.types import complex_same_precision_as, FrequencySeries
from lalsimulation import SimNoise
import lal
import numpy.random
def frequency_noise_from_psd(psd, seed=None):
    """ Create noise with a given psd.

    Return noise coloured with the given psd. The returned noise
    FrequencySeries has the same length and frequency step as the given psd.
    Note that if unique noise is desired a unique seed should be provided.

    Parameters
    ----------
    psd : FrequencySeries
        The noise weighting to color the noise.
    seed : {0, int} or None
        The seed to generate the noise. If None specified,
        the seed will not be reset.

    Returns
    --------
    noise : FrequencySeries
        A FrequencySeries containing gaussian noise colored by the given psd.
    """
    # Per-bin standard deviation. The 0.5 factor splits the power between
    # the real and imaginary components drawn below.
    sigma = 0.5 * (psd / psd.delta_f) ** (0.5)
    if seed is not None:
        numpy.random.seed(seed)
    sigma = sigma.numpy()
    dtype = complex_same_precision_as(psd)
    # Only draw noise where the PSD is non-zero; zero-PSD bins stay zero.
    not_zero = (sigma != 0)
    sigma_red = sigma[not_zero]
    noise_re = numpy.random.normal(0, sigma_red)
    noise_co = numpy.random.normal(0, sigma_red)
    noise_red = noise_re + 1j * noise_co
    noise = numpy.zeros(len(sigma), dtype=dtype)
    noise[not_zero] = noise_red
    return FrequencySeries(noise,
                           delta_f=psd.delta_f,
                           dtype=dtype)
def noise_from_psd(length, delta_t, psd, seed=None):
    """ Create noise with a given psd.

    Return noise with a given psd. Note that if unique noise is desired
    a unique seed should be provided.

    Parameters
    ----------
    length : int
        The length of noise to generate in samples.
    delta_t : float
        The time step of the noise.
    psd : FrequencySeries
        The noise weighting to color the noise.
    seed : {0, int}
        The seed to generate the noise.

    Returns
    --------
    noise : TimeSeries
        A TimeSeries containing gaussian noise colored by the given psd.

    Raises
    ------
    ValueError
        If the PSD is too short for the frequency resolution implied by
        `delta_t` and its own `delta_f`.
    """
    noise_ts = TimeSeries(zeros(length), delta_t=delta_t)
    if seed is None:
        seed = numpy.random.randint(2**32)
    randomness = lal.gsl_rng("ranlux", seed)
    # Segment length in time samples implied by the PSD's frequency
    # resolution, and the corresponding one-sided frequency-series length.
    N = int (1.0 / delta_t / psd.delta_f)
    n = N//2+1
    stride = N//2
    if n > len(psd):
        raise ValueError("PSD not compatible with requested delta_t")
    psd = (psd[0:n]).lal()
    psd.data.data[n-1] = 0  # zero out the final (Nyquist) bin
    segment = TimeSeries(zeros(N), delta_t=delta_t).lal()
    length_generated = 0
    # Generate the noise in half-overlapping segments, copying out `stride`
    # samples at a time until the requested length is filled.
    SimNoise(segment, 0, psd, randomness)
    while (length_generated < length):
        if (length_generated + stride) < length:
            noise_ts.data[length_generated:length_generated+stride] = segment.data.data[0:stride]
        else:
            noise_ts.data[length_generated:length] = segment.data.data[0:length-length_generated]
        length_generated += stride
        SimNoise(segment, stride, psd, randomness)
    return noise_ts
def noise_from_string(psd_name, length, delta_t, seed=None, low_frequency_cutoff=10.0):
    """ Create noise from an analytic PSD.

    Return noise from the chosen PSD. Note that if unique noise is desired
    a unique seed should be provided.

    Parameters
    ----------
    psd_name : str
        Name of the analytic PSD to use.
    length : int
        The length of noise to generate in samples.
    delta_t : float
        The time step of the noise.
    seed : {None, int}
        The seed to generate the noise.
    low_frequency_cutoff : {10.0, float}
        The low frequency cutoff to pass to the PSD generation.

    Returns
    --------
    noise : TimeSeries
        A TimeSeries containing gaussian noise colored by the given psd.
    """
    import pycbc.psd
    # We just need enough resolution to resolve lines
    delta_f = 1.0 / 8
    flen = int(.5 / delta_t / delta_f) + 1
    psd = pycbc.psd.from_string(psd_name, flen, delta_f, low_frequency_cutoff)
    return noise_from_psd(int(length), delta_t, psd, seed=seed)
|
plxaye/chromium | refs/heads/master | src/tools/valgrind/test_suppressions.py | 5 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from collections import defaultdict
import os
import re
import sys
import suppressions
def ReadReportsFromFile(filename):
  """Returns a list of (report_hash, report) and the URL of the report.

  The input file interleaves "Suppression (error hash=#...#):" headers with
  brace-delimited suppression blocks; its last line is assumed to hold the
  URL of the report on the waterfall.

  Args:
    filename: Path of the report file to parse.

  Returns:
    A (reports, url) pair where reports is a list of [hash, report] pairs
    and url is the (stripped) last line of the file. For an empty file,
    reports is [] and url is "" (the old code raised NameError here and
    also never closed the file handle).
  """
  reports = []
  in_suppression = False
  cur_supp = []
  # This stores the last error hash found while reading the file.
  last_hash = ""
  line = ""
  with open(filename, 'r') as input_file:
    for line in input_file:
      line = line.strip()
      line = line.replace("</span><span class=\"stdout\">", "")
      line = line.replace("</span><span class=\"stderr\">", "")
      # Un-escape HTML entities; the previous no-op replaces ("<" -> "<")
      # look like a mangling of these entity substitutions.
      line = line.replace("&lt;", "<")
      line = line.replace("&gt;", ">")
      if in_suppression:
        if line == "}":
          cur_supp += ["}"]
          reports += [[last_hash, "\n".join(cur_supp)]]
          in_suppression = False
          cur_supp = []
          last_hash = ""
        else:
          cur_supp += [" "*3 + line]
      elif line == "{":
        in_suppression = True
        cur_supp = ["{"]
      elif line.find("Suppression (error hash=#") == 0:
        # The 16-character hash sits between the "#" delimiters.
        last_hash = line[25:41]
  # The line at the end of the file is assumed to store the URL of the report.
  return reports, line
def main(argv):
  """Checks every report in the given files against the known suppressions.

  Args:
    argv: List of report-file paths (as produced for the waterfall pages).

  Prints each report that no suppression matches, then a summary count.
  NOTE: this is Python 2 code (print statements).
  """
  supp = suppressions.GetSuppressions()
  # all_reports is a map {report: list of urls containing this report}
  all_reports = defaultdict(list)
  report_hashes = {}
  for f in argv:
    f_reports, url = ReadReportsFromFile(f)
    for (hash, report) in f_reports:
      all_reports[report] += [url]
      report_hashes[report] = hash
  reports_count = 0
  for r in all_reports:
    cur_supp = supp['common_suppressions']
    # Platform/tool-specific suppressions only apply when *every* URL the
    # report was seen at matches that platform or tool.
    if all([re.search("%20Mac%20|mac_valgrind", url)
            for url in all_reports[r]]):
      # Include mac suppressions if the report is only present on Mac
      cur_supp += supp['mac_suppressions']
    elif all([re.search("Windows%20", url) for url in all_reports[r]]):
      # Include win32 suppressions if the report is only present on Windows
      cur_supp += supp['win_suppressions']
    elif all([re.search("Linux%20", url) for url in all_reports[r]]):
      cur_supp += supp['linux_suppressions']
    elif all([re.search("%20Heapcheck", url)
              for url in all_reports[r]]):
      cur_supp += supp['heapcheck_suppressions']
    if all(["DrMemory" in url for url in all_reports[r]]):
      cur_supp += supp['drmem_suppressions']
    if all(["DrMemory%20full" in url for url in all_reports[r]]):
      cur_supp += supp['drmem_full_suppressions']
    # A report is "handled" as soon as any suppression matches it.
    match = False
    for s in cur_supp:
      if s.Match(r.split("\n")):
        match = True
        break
    if not match:
      reports_count += 1
      print "==================================="
      print "This report observed at"
      for url in all_reports[r]:
        print "  %s" % url
      print "didn't match any suppressions:"
      print "Suppression (error hash=#%s#):" % (report_hashes[r])
      print r
      print "==================================="
  if reports_count > 0:
    print ("%d unique reports don't match any of the suppressions" %
           reports_count)
  else:
    print "Congratulations! All reports are suppressed!"
    # TODO(timurrrr): also make sure none of the old suppressions
    # were narrowed too much.

if __name__ == "__main__":
  main(sys.argv[1:])
|
ice9js/servo | refs/heads/master | tests/wpt/css-tests/tools/manifest/vcs.py | 287 | import os
import subprocess
def get_git_func(repo_path):
    """Return a helper that runs ``git <cmd> ...`` inside *repo_path*.

    The returned callable captures stderr together with stdout and raises
    ``subprocess.CalledProcessError`` on a non-zero exit status.
    """
    def git(cmd, *args):
        command_line = ["git", cmd] + list(args)
        return subprocess.check_output(command_line, cwd=repo_path,
                                       stderr=subprocess.STDOUT)
    return git
def is_git_repo(tests_root):
    """Return True if *tests_root* contains a ``.git`` entry."""
    git_path = os.path.join(tests_root, ".git")
    return os.path.exists(git_path)
# Memoized result of get_repo_root(); computed at most once per process.
_repo_root = None

def get_repo_root(initial_dir=None):
    """Return the top-level directory of the git repository containing
    *initial_dir* (default: this module's directory).

    The answer comes from ``git rev-parse --show-toplevel`` and is cached in
    the module-level ``_repo_root``, so subsequent calls ignore the argument.
    """
    global _repo_root
    if initial_dir is None:
        initial_dir = os.path.dirname(__file__)
    if _repo_root is None:
        git = get_git_func(initial_dir)
        _repo_root = git("rev-parse", "--show-toplevel").rstrip()
    return _repo_root
|
sils1297/PyGithub | refs/heads/master | github/tests/GitTree.py | 39 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class GitTree(Framework.TestCase):
    """Replay-based checks of the attributes exposed by a fetched git tree."""

    _TREE_SHA = "f492784d8ca837779650d1fb406a1a3587a764ad"

    def setUp(self):
        Framework.TestCase.setUp(self)
        repo = self.g.get_user().get_repo("PyGithub")
        self.tree = repo.get_git_tree(self._TREE_SHA)

    def testAttributes(self):
        self.assertEqual(self.tree.sha, self._TREE_SHA)
        entries = self.tree.tree
        self.assertEqual(len(entries), 11)
        # First entry: a regular-file blob.
        blob = entries[0]
        self.assertEqual(blob.mode, "100644")
        self.assertEqual(blob.path, ".gitignore")
        self.assertEqual(blob.sha, "8a9af1462c3f4e3358315c2d2e6ef1e7334c59dd")
        self.assertEqual(blob.size, 53)
        self.assertEqual(blob.type, "blob")
        self.assertEqual(blob.url, "https://api.github.com/repos/jacquev6/PyGithub/git/blobs/8a9af1462c3f4e3358315c2d2e6ef1e7334c59dd")
        # Seventh entry: a sub-tree (directory); trees carry no size.
        subtree = entries[6]
        self.assertEqual(subtree.mode, "040000")
        self.assertEqual(subtree.path, "ReplayDataForIntegrationTest")
        self.assertEqual(subtree.sha, "60b4602b2c2070246c5df078fb7a5150b45815eb")
        self.assertEqual(subtree.size, None)
        self.assertEqual(subtree.type, "tree")
        self.assertEqual(subtree.url, "https://api.github.com/repos/jacquev6/PyGithub/git/trees/60b4602b2c2070246c5df078fb7a5150b45815eb")
        self.assertEqual(self.tree.url, "https://api.github.com/repos/jacquev6/PyGithub/git/trees/f492784d8ca837779650d1fb406a1a3587a764ad")
|
cwgreene/Nanostructure-Simulator | refs/heads/master | photocurrent.py | 1 | #import driftscatter as ds
import move_particles_c as mpc
import montecarlo_mockup as mc
import dolfin_util as du
import kdtree_c
import time
import numpy as np
import ctypes
import constants
#import bandstructure as bs
def reap_list(full, remove_ids):
    """Remove the particles at the given indices from *full* (in place) and
    renumber the survivors' ``part_id`` fields to their new positions.

    Note: *remove_ids* is sorted in place, as in the original implementation.
    """
    remove_ids.sort()
    # Each removal shifts the later indices down by one, hence the offset.
    for offset, dead_id in enumerate(remove_ids):
        full.pop(dead_id - offset)
    for new_id, particle in enumerate(full):
        particle.part_id = new_id
def random_list(list, num):
    """Return *num* elements drawn uniformly at random (with replacement).

    Fixes two defects in the original: the accumulator was overwritten on
    every draw (so a single element, not a list, was returned), and
    ``np.random.random_integers`` (deprecated) includes its upper bound, so
    an index one past the end of the sequence could be produced.

    Parameters
    ----------
    list : sequence
        Source to sample from (name kept for interface compatibility).
    num : int
        Number of elements to draw.
    """
    items = list
    if len(items) == 0:
        return []
    # randint's upper bound is exclusive, so every index is in range.
    indices = np.random.randint(0, len(items), num)
    return [items[index] for index in indices]
def run_simulation(mesh, e_field):
    """Advance every particle on the mesh by one step and tally exit current.

    NOTE(review): this calls ``ds.randomElectronMovement`` but the
    ``import driftscatter as ds`` at the top of the file is commented out,
    so calling this function raises NameError -- confirm whether this
    Python-side path is still used (the C kernel in generate_photo_current
    appears to have superseded it).
    """
    reaper=[]
    current = 0
    # Drift/scatter every particle; helpers may mark particles dead and
    # append their ids to `reaper`.
    for particle in mesh.particles:
        ds.randomElectronMovement(particle,e_field,
                    None,mesh,reaper)
    for index in xrange(len(mesh.particles)):
        p = mesh.particles[index]
        if p.dead == False: #if we didn't kill it.
            if(du.out_of_bounds(mesh,p.pos)):
                #need to figure out exit
                # Particle left the device: reap it and count its current.
                reaper.append(index)
                p.dead = True
                current += mc.current_exit(p,mesh)
            else:
                # Still inside: refresh its nearest-mesh-point id.
                p.id = kdtree_c.find_point_id(mesh.kdt,p.pos)
    # Drop reaped particles and renumber the survivors.
    reap_list(mesh.particles,reaper)
    return current
def clean_mesh(mesh):
    """Reset the mesh's particle bookkeeping: empty the global particle list
    and clear every per-point particle list in place.

    Fixes the original loop, which rebound the loop variable (`point = []`)
    and therefore never actually cleared any of the per-point lists.
    """
    mesh.particles = []
    for point_particles in mesh.particles_point:
        # clear in place so any other references to these lists see them empty
        del point_particles[:]
def init_photo_pair(mesh, pos, energy):
    """Seed a photo-generated electron/hole pair at `pos` with the given energy.

    NOTE(review): the sampled momentum is computed but never used and no
    particles are created -- this function appears unfinished.
    """
    #create electron, and corresponding hole.
    momentum = mesh.bandstructure.random_momentum(mesh, pos, energy)
def new_particle(mesh_pos, particles, problem, charge_sign, mesh):
    """Create one charge carrier at mesh vertex `mesh_pos` via the C particle library.

    :param mesh_pos: index of the mesh vertex the particle starts at.
    :param particles: CParticles wrapper around the C-side particle arrays.
    :param problem: supplies the combined carrier density handed to C.
    :param charge_sign: -1 selects the electron mass, +1 the hole mass.
        NOTE(review): charge_sign == 0 leaves `mass` unbound -> NameError;
        presumably callers only ever pass +/-1 -- confirm.
    :param mesh: dolfin mesh augmented with material data and a C handle.
    """
    coord = tuple(mesh.coordinates()[mesh_pos])
    material = mesh.material[coord]
    if charge_sign < 0:
        mass = material.electron_mass
    if charge_sign > 0:
        mass = material.hole_mass
    dim = mesh.geometry().dim()
    # snapshot of the combined carrier density, passed to C as a raw int buffer
    nextDensity = problem.density_funcs.combined_density.vector().array().astype('int')
    index = mpc.lib.create_particleC(ctypes.c_int(mesh_pos),
                                     particles.ptr,
                                     nextDensity.ctypes.data,
                                     ctypes.c_int(charge_sign),
                                     ctypes.c_double(mass),
                                     mesh.c_mesh)
    # zero the momentum slots (dim..2*dim) of the new particle's state vector
    particles.pos[index][dim:2*dim] = [0.]*dim  #bottom of gap
def photons_per_watt(wavelength_nm):
    """Number of photons per joule for light of the given wavelength.

    NOTE(review): despite the `_nm` name, the caller in this file passes the
    wavelength in metres (400*10**-9) -- confirm the intended unit.
    """
    #1 photon per X number of joules
    energy_per_photon = constants.h * constants.c / wavelength_nm
    return 1. / energy_per_photon
def generate_photo_current(mesh, e_field, problem):
    """Estimate the photocurrent from one electron/hole pair per mesh vertex.

    Seeds a -/+ carrier pair at every mesh coordinate, lets the C kernel
    (`mpc.lib.photocurrentC`) step all particles for 300 iterations while
    accumulating exited charge, then scales charge-per-photon by the photon
    flux of a 1 kW/m^2 source at 400 nm to obtain a current.

    :param mesh: mesh with C handle plus `dt`, `dim` and `length_scale`.
    :param e_field: field object, flattened via `mc.pre_compute_field`.
    :param problem: supplies the combined carrier density function.
    :return: estimated photocurrent.
    """
    current = 0
    accumulated_charge = 0.
    # one photo-generated pair per mesh vertex
    total_photons = len(mesh.coordinates())
    particles = mpc.CParticles(2000, mesh.c_mesh, mesh.dim)
    e_field = np.array(mc.pre_compute_field(mesh, e_field))
    nextDensity = problem.density_funcs.combined_density.vector().array().astype('int')
    for point in xrange(len(mesh.coordinates())):
        new_particle(point, particles, problem, -1, mesh)
        new_particle(point, particles, problem, +1, mesh)
    # fixed 300 timesteps; the C routine returns charge exiting per step
    for rep in xrange(300):
        accumulated_charge += mpc.lib.photocurrentC(particles.ptr,
                                                    nextDensity.ctypes.data,
                                                    e_field.ctypes.data,
                                                    mesh.c_mesh,
                                                    ctypes.c_double(mesh.dt),
                                                    ctypes.c_double(mesh.length_scale))
    print "accumulated_charge", accumulated_charge, total_photons
    charge_per_photon = constants.eC*accumulated_charge/total_photons
    power = 1000.  #per meter squared
    # NOTE(review): 400*10**-9 is metres despite photons_per_watt's
    # `wavelength_nm` parameter name -- confirm units are consistent.
    photons_sec = power*photons_per_watt(400*10**-9)
    current = charge_per_photon*photons_sec
    print "photocurrent:", current
    return current
|
tttthemanCorp/CardmeleonAppEngine | refs/heads/master | django/conf/locale/en_GB/formats.py | 234 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# NOTE(review): the verbose formats below ('N j, Y', 'P', ...) render in US
# style even though this is the en_GB locale; confirm which convention this
# project expects before relying on localized rendering.
DATE_FORMAT = 'N j, Y'                  # 'Oct. 25, 2006'
TIME_FORMAT = 'P'                       # '2:30 pm'
DATETIME_FORMAT = 'N j, Y, P'           # 'Oct. 25, 2006, 2:30 pm'
YEAR_MONTH_FORMAT = 'F Y'               # 'October 2006'
MONTH_DAY_FORMAT = 'F j'                # 'October 25'
SHORT_DATE_FORMAT = 'd/m/Y'             # '25/10/2006' (day-first, UK style)
SHORT_DATETIME_FORMAT = 'd/m/Y P'       # '25/10/2006 2:30 pm'
# NOTE(review): 0 means Sunday; UK calendars conventionally start on Monday
# (1) -- confirm before relying on calendar-widget rendering.
FIRST_DAY_OF_WEEK = 0 # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
    '%Y-%m-%d', # '2006-10-25'
    # '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
    '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M', # '25/10/2006 14:30'
    '%d/%m/%Y', # '25/10/2006'
    '%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M', # '25/10/06 14:30'
    '%d/%m/%y', # '25/10/06'
)
# Number localization: '1,234,567.89' style grouping.
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
|
clumsy/intellij-community | refs/heads/master | python/testData/stubs/DunderAll.py | 83 | __all__ = ['foo', 'bar']
|
andfoy/margffoy-tuay-server | refs/heads/master | env/lib/python2.7/site-packages/django_contrib_comments-1.6.1-py2.7.egg/tests/custom_comments/views.py | 12 | from django.http import HttpResponse
def custom_submit_comment(request):
    """Custom submit view that returns a fixed marker response."""
    body = "Hello from the custom submit comment view."
    return HttpResponse(body)


def custom_flag_comment(request, comment_id):
    """Custom flag view that returns a fixed marker response."""
    body = "Hello from the custom flag view."
    return HttpResponse(body)


def custom_delete_comment(request, comment_id):
    """Custom delete view that returns a fixed marker response."""
    body = "Hello from the custom delete view."
    return HttpResponse(body)


def custom_approve_comment(request, comment_id):
    """Custom approve view that returns a fixed marker response."""
    body = "Hello from the custom approve view."
    return HttpResponse(body)
|
fusion809/fusion809.github.io-old | refs/heads/master | vendor/bundle/ruby/2.3.0/gems/pygments.rb-0.6.3/vendor/pygments-main/pygments/formatters/_mapping.py | 47 | # -*- coding: utf-8 -*-
"""
pygments.formatters._mapping
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter mapping definitions. This file is generated by itself. Everytime
you change something on a builtin formatter definition, run this script from
the formatters folder to update it.
Do not alter the FORMATTERS dictionary by hand.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
try:
import pygments
except ImportError:
# This block makes this mapping work like the lexer one -- not requiring
# that Pygments already be on your PYTHONPATH.
import os.path, sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
# start
from pygments.formatters.bbcode import BBCodeFormatter
from pygments.formatters.html import HtmlFormatter
from pygments.formatters.img import BmpImageFormatter
from pygments.formatters.img import GifImageFormatter
from pygments.formatters.img import ImageFormatter
from pygments.formatters.img import JpgImageFormatter
from pygments.formatters.latex import LatexFormatter
from pygments.formatters.other import NullFormatter
from pygments.formatters.other import RawTokenFormatter
from pygments.formatters.other import TestcaseFormatter
from pygments.formatters.rtf import RtfFormatter
from pygments.formatters.svg import SvgFormatter
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
# Generated mapping: formatter class -> (human-readable name, alias tuple,
# filename-pattern tuple, docstring headline).  Regenerated by running this
# module directly; per the module docstring, do not edit entries by hand.
FORMATTERS = {
    BBCodeFormatter: ('BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
    BmpImageFormatter: ('img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    GifImageFormatter: ('img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    HtmlFormatter: ('HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
    ImageFormatter: ('img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    JpgImageFormatter: ('img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
    LatexFormatter: ('LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
    NullFormatter: ('Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
    RawTokenFormatter: ('Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
    RtfFormatter: ('RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft\xc2\xae Word\xc2\xae documents.'),
    SvgFormatter: ('SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
    Terminal256Formatter: ('Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
    TerminalFormatter: ('Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
    TestcaseFormatter: ('Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
}
if __name__ == '__main__':
    # Self-regeneration mode: scan the formatters package, rebuild the import
    # list and the FORMATTERS dict, and rewrite this very file in place.
    # The '# start' marker and the "if __name__ == '__main__':" line delimit
    # the generated region -- do not rename or remove them.
    import sys
    import os

    # lookup formatters
    found_formatters = []
    imports = []
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
    from pygments.util import docstring_headline

    for filename in os.listdir('.'):
        # every non-private module in the formatters folder is inspected
        if filename.endswith('.py') and not filename.startswith('_'):
            module_name = 'pygments.formatters.%s' % filename[:-3]
            print(module_name)
            module = __import__(module_name, None, None, [''])
            for formatter_name in module.__all__:
                imports.append((module_name, formatter_name))
                formatter = getattr(module, formatter_name)
                found_formatters.append(
                    '%s: %r' % (formatter_name,
                                (formatter.name,
                                 tuple(formatter.aliases),
                                 tuple(formatter.filenames),
                                 docstring_headline(formatter))))
    # sort them, that should make the diff files for svn smaller
    found_formatters.sort()
    imports.sort()

    # extract useful sourcecode from this file
    f = open(__file__)
    try:
        content = f.read()
    finally:
        f.close()
    # header: everything before the generated region; footer: this script
    header = content[:content.find('# start')]
    footer = content[content.find("if __name__ == '__main__':"):]

    # write new file
    f = open(__file__, 'w')
    f.write(header)
    f.write('# start\n')
    f.write('\n'.join(['from %s import %s' % imp for imp in imports]))
    f.write('\n\n')
    f.write('FORMATTERS = {\n    %s\n}\n\n' % ',\n    '.join(found_formatters))
    f.write(footer)
    f.close()
|
RichHelle/data-science-from-scratch | refs/heads/master | first-edition/code/naive_bayes.py | 60 | from __future__ import division
from collections import Counter, defaultdict
from machine_learning import split_data
import math, random, re, glob
def tokenize(message):
    """Lowercase *message* and return the set of its distinct word tokens.

    Tokens are maximal runs of lowercase letters, digits and apostrophes.
    """
    return set(re.findall("[a-z0-9']+", message.lower()))
def count_words(training_set):
    """Tally per-word [spam_count, ham_count] pairs.

    :param training_set: iterable of (message, is_spam) pairs.
    :return: defaultdict mapping word -> [occurrences in spam, in ham].
    """
    counts = defaultdict(lambda: [0, 0])
    for message, is_spam in training_set:
        slot = 0 if is_spam else 1
        for word in tokenize(message):
            counts[word][slot] += 1
    return counts
def word_probabilities(counts, total_spams, total_non_spams, k=0.5):
    """Turn raw word counts into smoothed conditional probabilities.

    :param counts: mapping word -> [spam_count, ham_count].
    :param k: additive-smoothing pseudocount.
    :return: list of (word, P(word | spam), P(word | not spam)) triplets.
    """
    triplets = []
    for word, (spam_count, ham_count) in counts.items():
        p_given_spam = (spam_count + k) / (total_spams + 2 * k)
        p_given_ham = (ham_count + k) / (total_non_spams + 2 * k)
        triplets.append((word, p_given_spam, p_given_ham))
    return triplets
def spam_probability(word_probs, message):
    """Combine per-word probabilities (naive-Bayes style) into P(spam | message).

    Sums log-probabilities: for words present in the message, the probability
    of seeing them; for absent words, the probability of *not* seeing them.
    """
    words_present = tokenize(message)
    log_spam = 0.0
    log_ham = 0.0
    for word, p_spam, p_ham in word_probs:
        if word in words_present:
            log_spam += math.log(p_spam)
            log_ham += math.log(p_ham)
        else:
            log_spam += math.log(1.0 - p_spam)
            log_ham += math.log(1.0 - p_ham)
    spam_likelihood = math.exp(log_spam)
    ham_likelihood = math.exp(log_ham)
    return spam_likelihood / (spam_likelihood + ham_likelihood)
class NaiveBayesClassifier:
    """Naive-Bayes spam classifier over tokenized message subjects."""

    def __init__(self, k=0.5):
        # k is the additive-smoothing pseudocount used during training
        self.k = k
        self.word_probs = []

    def train(self, training_set):
        """Fit per-word probabilities from (message, is_spam) pairs."""
        spam_total = sum(1 for _, is_spam in training_set if is_spam)
        ham_total = len(training_set) - spam_total
        # run training data through our "pipeline"
        counts = count_words(training_set)
        self.word_probs = word_probabilities(counts, spam_total, ham_total,
                                             self.k)

    def classify(self, message):
        """Return the estimated probability that *message* is spam."""
        return spam_probability(self.word_probs, message)
def get_subject_data(path):
    """Collect (subject, is_spam) pairs from every file matching *path*.

    A file counts as spam unless its name contains "ham".  Each line starting
    with "Subject:" contributes one pair, with the prefix and surrounding
    whitespace stripped.
    """
    # regex for stripping out the leading "Subject:" and any spaces after it
    subject_pattern = re.compile(r"^Subject:\s+")
    data = []
    # glob.glob returns every filename that matches the wildcarded path
    for filename in glob.glob(path):
        spam_flag = "ham" not in filename
        with open(filename, 'r') as handle:
            for line in handle:
                if line.startswith("Subject:"):
                    data.append((subject_pattern.sub("", line).strip(),
                                 spam_flag))
    return data
def p_spam_given_word(word_prob):
    """P(spam | word) via Bayes' rule, assuming equal class priors.

    :param word_prob: (word, P(word | spam), P(word | not spam)) triplet.
    """
    _, prob_spam, prob_ham = word_prob
    return prob_spam / (prob_spam + prob_ham)
def train_and_test_model(path):
    """Train on 75% of the subject corpus, then report test-set diagnostics.

    Prints a confusion Counter (cutoff 0.5), the most confidently
    misclassified messages, and the most spam/ham-indicative words.
    Python 2 code: print statements; `filter` here returns a list.

    :param path: glob pattern locating the spam/ham corpus files.
    """
    data = get_subject_data(path)
    random.seed(0)      # just so you get the same answers as me
    train_data, test_data = split_data(data, 0.75)

    classifier = NaiveBayesClassifier()
    classifier.train(train_data)

    # triplets (subject, actual label, predicted spam probability)
    classified = [(subject, is_spam, classifier.classify(subject))
              for subject, is_spam in test_data]

    # confusion counts keyed by (actual, predicted)
    counts = Counter((is_spam, spam_probability > 0.5) # (actual, predicted)
                     for _, is_spam, spam_probability in classified)

    print counts

    # sort ascending by predicted spam probability
    classified.sort(key=lambda row: row[2])
    # hams the model was most confident were spam, and vice versa
    spammiest_hams = filter(lambda row: not row[1], classified)[-5:]
    hammiest_spams = filter(lambda row: row[1], classified)[:5]

    print "spammiest_hams", spammiest_hams
    print "hammiest_spams", hammiest_spams

    # words ranked by P(spam | word)
    words = sorted(classifier.word_probs, key=p_spam_given_word)

    spammiest_words = words[-5:]
    hammiest_words = words[:5]

    print "spammiest_words", spammiest_words
    print "hammiest_words", hammiest_words

if __name__ == "__main__":
    train_and_test_model(r"c:\spam\*\*")
0x46616c6b/ansible | refs/heads/devel | lib/ansible/modules/cloud/azure/azure_rm_deployment.py | 16 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = '''
---
module: azure_rm_deployment
short_description: Create or destroy Azure Resource Manager template deployments
version_added: "2.1"
description:
- "Create or destroy Azure Resource Manager template deployments via the Azure SDK for Python.
You can find some quick start templates in GitHub here https://github.com/azure/azure-quickstart-templates.
    For more information on Azure resource manager templates see https://azure.microsoft.com/en-us/documentation/articles/resource-group-template-deploy/."
options:
resource_group_name:
description:
- The resource group name to use or create to host the deployed template
required: true
location:
description:
- The geo-locations in which the resource group will be located.
required: false
default: westus
deployment_mode:
description:
- In incremental mode, resources are deployed without deleting existing resources that are not included in the template.
In complete mode resources are deployed and existing resources in the resource group not included in the template are deleted.
required: false
default: incremental
choices:
- complete
- incremental
state:
description:
- If state is "present", template will be created. If state is "present" and if deployment exists, it will be
updated. If state is "absent", stack will be removed.
default: present
required: false
choices:
- present
- absent
template:
description:
- A hash containing the templates inline. This parameter is mutually exclusive with 'template_link'.
Either one of them is required if "state" parameter is "present".
required: false
default: null
template_link:
description:
- Uri of file containing the template body. This parameter is mutually exclusive with 'template'. Either one
of them is required if "state" parameter is "present".
required: false
default: null
parameters:
description:
- A hash of all the required template variables for the deployment template. This parameter is mutually exclusive
with 'parameters_link'. Either one of them is required if "state" parameter is "present".
required: false
default: null
parameters_link:
description:
- Uri of file containing the parameters body. This parameter is mutually exclusive with 'parameters'. Either
one of them is required if "state" parameter is "present".
required: false
default: null
deployment_name:
description:
- The name of the deployment to be tracked in the resource group deployment history. Re-using a deployment name
will overwrite the previous value in the resource group's deployment history.
default: ansible-arm
wait_for_deployment_completion:
description:
- Whether or not to block until the deployment has completed.
default: yes
choices: ['yes', 'no']
wait_for_deployment_polling_period:
description:
- Time (in seconds) to wait between polls when waiting for deployment completion.
default: 10
extends_documentation_fragment:
- azure
author:
- David Justice (@devigned)
- Laurent Mazuel (@lmazuel)
- Andre Price (@obsoleted)
'''
EXAMPLES = '''
# Destroy a template deployment
- name: Destroy Azure Deploy
azure_rm_deployment:
state: absent
subscription_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
resource_group_name: dev-ops-cle
# Create or update a template deployment based on uris using parameter and template links
- name: Create Azure Deploy
azure_rm_deployment:
state: present
resource_group_name: dev-ops-cle
template_link: 'https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vm-simple-linux/azuredeploy.json'
parameters_link: 'https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vm-simple-linux/azuredeploy.parameters.json'
# Create or update a template deployment based on a uri to the template and parameters specified inline.
# This deploys a VM with SSH support for a given public key, then stores the result in 'azure_vms'. The result is then
# used to create a new host group. This host group is then used to wait for each instance to respond to the public IP SSH.
---
- hosts: localhost
connection: local
gather_facts: no
tasks:
- name: Destroy Azure Deploy
azure_rm_deployment:
state: absent
subscription_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
resource_group_name: dev-ops-cle
- name: Create Azure Deploy
azure_rm_deployment:
state: present
subscription_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
resource_group_name: dev-ops-cle
parameters:
newStorageAccountName:
value: devopsclestorage1
adminUsername:
value: devopscle
dnsNameForPublicIP:
value: devopscleazure
location:
value: West US
vmSize:
value: Standard_A2
vmName:
value: ansibleSshVm
sshKeyData:
value: YOUR_SSH_PUBLIC_KEY
template_link: 'https://raw.githubusercontent.com/Azure/azure-quickstart-templates/master/101-vm-sshkey/azuredeploy.json'
register: azure
- name: Add new instance to host group
add_host:
hostname: "{{ item['ips'][0].public_ip }}"
groupname: azure_vms
with_items: "{{ azure.deployment.instances }}"
- hosts: azure_vms
user: devopscle
tasks:
- name: Wait for SSH to come up
wait_for:
port: 22
timeout: 2000
state: started
- name: echo the hostname of the vm
shell: hostname
# Deploy an Azure WebApp running a hello world'ish node app
- name: Create Azure WebApp Deployment at http://devopscleweb.azurewebsites.net/hello.js
azure_rm_deployment:
state: present
subscription_id: cbbdaed0-fea9-4693-bf0c-d446ac93c030
resource_group_name: dev-ops-cle-webapp
parameters:
repoURL:
value: 'https://github.com/devigned/az-roadshow-oss.git'
siteName:
value: devopscleweb
hostingPlanName:
value: someplan
siteLocation:
value: westus
sku:
value: Standard
template_link: 'https://raw.githubusercontent.com/azure/azure-quickstart-templates/master/201-web-app-github-deploy/azuredeploy.json'
# Create or update a template deployment based on an inline template and parameters
- name: Create Azure Deploy
azure_rm_deployment:
state: present
subscription_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
resource_group_name: dev-ops-cle
template:
$schema: "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"
contentVersion: "1.0.0.0"
parameters:
newStorageAccountName:
type: "string"
metadata:
description: "Unique DNS Name for the Storage Account where the Virtual Machine's disks will be placed."
adminUsername:
type: "string"
metadata:
description: "User name for the Virtual Machine."
adminPassword:
type: "securestring"
metadata:
description: "Password for the Virtual Machine."
dnsNameForPublicIP:
type: "string"
metadata:
description: "Unique DNS Name for the Public IP used to access the Virtual Machine."
ubuntuOSVersion:
type: "string"
defaultValue: "14.04.2-LTS"
allowedValues:
- "12.04.5-LTS"
- "14.04.2-LTS"
- "15.04"
metadata:
description: "The Ubuntu version for the VM. This will pick a fully patched image of this given Ubuntu version. Allowed values: 12.04.5-LTS, 14.04.2-LTS, 15.04."
variables:
location: "West US"
imagePublisher: "Canonical"
imageOffer: "UbuntuServer"
OSDiskName: "osdiskforlinuxsimple"
nicName: "myVMNic"
addressPrefix: "192.0.2.0/24"
subnetName: "Subnet"
subnetPrefix: "10.0.0.0/24"
storageAccountType: "Standard_LRS"
publicIPAddressName: "myPublicIP"
publicIPAddressType: "Dynamic"
vmStorageAccountContainerName: "vhds"
vmName: "MyUbuntuVM"
vmSize: "Standard_D1"
virtualNetworkName: "MyVNET"
vnetID: "[resourceId('Microsoft.Network/virtualNetworks',variables('virtualNetworkName'))]"
subnetRef: "[concat(variables('vnetID'),'/subnets/',variables('subnetName'))]"
resources:
- type: "Microsoft.Storage/storageAccounts"
name: "[parameters('newStorageAccountName')]"
apiVersion: "2015-05-01-preview"
location: "[variables('location')]"
properties:
accountType: "[variables('storageAccountType')]"
- apiVersion: "2015-05-01-preview"
type: "Microsoft.Network/publicIPAddresses"
name: "[variables('publicIPAddressName')]"
location: "[variables('location')]"
properties:
publicIPAllocationMethod: "[variables('publicIPAddressType')]"
dnsSettings:
domainNameLabel: "[parameters('dnsNameForPublicIP')]"
- type: "Microsoft.Network/virtualNetworks"
apiVersion: "2015-05-01-preview"
name: "[variables('virtualNetworkName')]"
location: "[variables('location')]"
properties:
addressSpace:
addressPrefixes:
- "[variables('addressPrefix')]"
subnets:
-
name: "[variables('subnetName')]"
properties:
addressPrefix: "[variables('subnetPrefix')]"
- type: "Microsoft.Network/networkInterfaces"
apiVersion: "2015-05-01-preview"
name: "[variables('nicName')]"
location: "[variables('location')]"
dependsOn:
- "[concat('Microsoft.Network/publicIPAddresses/', variables('publicIPAddressName'))]"
- "[concat('Microsoft.Network/virtualNetworks/', variables('virtualNetworkName'))]"
properties:
ipConfigurations:
-
name: "ipconfig1"
properties:
privateIPAllocationMethod: "Dynamic"
publicIPAddress:
id: "[resourceId('Microsoft.Network/publicIPAddresses',variables('publicIPAddressName'))]"
subnet:
id: "[variables('subnetRef')]"
- type: "Microsoft.Compute/virtualMachines"
apiVersion: "2015-06-15"
name: "[variables('vmName')]"
location: "[variables('location')]"
dependsOn:
- "[concat('Microsoft.Storage/storageAccounts/', parameters('newStorageAccountName'))]"
- "[concat('Microsoft.Network/networkInterfaces/', variables('nicName'))]"
properties:
hardwareProfile:
vmSize: "[variables('vmSize')]"
osProfile:
computername: "[variables('vmName')]"
adminUsername: "[parameters('adminUsername')]"
adminPassword: "[parameters('adminPassword')]"
storageProfile:
imageReference:
publisher: "[variables('imagePublisher')]"
offer: "[variables('imageOffer')]"
sku: "[parameters('ubuntuOSVersion')]"
version: "latest"
osDisk:
name: "osdisk"
vhd:
uri: "[concat('http://',parameters('newStorageAccountName'),'.blob.core.windows.net/',variables('vmStorageAccountContainerName'),'/',variables('OSDiskName'),'.vhd')]"
caching: "ReadWrite"
createOption: "FromImage"
networkProfile:
networkInterfaces:
-
id: "[resourceId('Microsoft.Network/networkInterfaces',variables('nicName'))]"
diagnosticsProfile:
bootDiagnostics:
enabled: "true"
storageUri: "[concat('http://',parameters('newStorageAccountName'),'.blob.core.windows.net')]"
parameters:
newStorageAccountName:
value: devopsclestorage
adminUsername:
value: devopscle
adminPassword:
value: Password1!
dnsNameForPublicIP:
value: devopscleazure
'''
RETURN = '''
deployment:
description: Deployment details
type: dict
returned: always
sample:
group_name:
description: Name of the resource group
type: string
returned: always
id:
description: The Azure ID of the deployment
type: string
returned: always
instances:
description: Provides the public IP addresses for each VM instance.
type: list
returned: always
name:
description: Name of the deployment
type: string
returned: always
outputs:
description: Dictionary of outputs received from the deployment
type: dict
returned: always
'''
# Prerequisite imports: record (rather than raise) any failure so that
# exec_module can report it through self.fail() with a helpful message.
PREREQ_IMPORT_ERROR = None
try:
    import time
    import yaml
except ImportError as exc:
    # BUG FIX: this previously assigned to IMPORT_ERROR, so the check on
    # PREREQ_IMPORT_ERROR in exec_module never fired and import failures
    # were silently ignored.
    PREREQ_IMPORT_ERROR = "Error importing module prerequisites: %s" % exc
from ansible.module_utils.azure_rm_common import *
try:
from itertools import chain
from azure.common.credentials import ServicePrincipalCredentials
from azure.common.exceptions import CloudError
from azure.mgmt.resource.resources.models import (DeploymentProperties,
ParametersLink,
TemplateLink,
Deployment,
ResourceGroup,
Dependency)
from azure.mgmt.resource.resources import ResourceManagementClient
from azure.mgmt.network import NetworkManagementClient
except ImportError:
# This is handled in azure_rm_common
pass
class AzureRMDeploymentManager(AzureRMModuleBase):
    def __init__(self):
        """Declare the module's argument spec and initialize instance state.

        All declared arguments are copied onto the instance (as None here,
        populated later in exec_module), and the base class wires up the
        Ansible module plumbing.
        """
        self.module_arg_spec = dict(
            resource_group_name=dict(type='str', required=True, aliases=['resource_group']),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            template=dict(type='dict', default=None),
            parameters=dict(type='dict', default=None),
            template_link=dict(type='str', default=None),
            parameters_link=dict(type='str', default=None),
            location=dict(type='str', default="westus"),
            deployment_mode=dict(type='str', default='incremental', choices=['complete', 'incremental']),
            deployment_name=dict(type='str', default="ansible-arm"),
            wait_for_deployment_completion=dict(type='bool', default=True),
            wait_for_deployment_polling_period=dict(type='int', default=10)
        )
        # inline template/parameters and their *_link URI forms are exclusive
        mutually_exclusive = [('template', 'template_link'),
                              ('parameters', 'parameters_link')]

        self.resource_group_name = None
        self.state = None
        self.template = None
        self.parameters = None
        self.template_link = None
        self.parameters_link = None
        self.location = None
        self.deployment_mode = None
        self.deployment_name = None
        self.wait_for_deployment_completion = None
        self.wait_for_deployment_polling_period = None
        self.tags = None

        self.results = dict(
            deployment=dict(),
            changed=False,
            msg=""
        )

        super(AzureRMDeploymentManager, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                       mutually_exclusive=mutually_exclusive,
                                                       supports_check_mode=False)
    def exec_module(self, **kwargs):
        """Entry point invoked by the Ansible runner with validated parameters.

        Deploys the template (state=present) or deletes the resource group
        (state=absent) and returns the standard Ansible result dict.
        """
        if PREREQ_IMPORT_ERROR:
            self.fail(PREREQ_IMPORT_ERROR)
        # copy every declared argument (plus tags) onto the instance
        # NOTE(review): dict.keys() + list is Python-2-only; Python 3 would
        # need list(self.module_arg_spec.keys()) here.
        for key in self.module_arg_spec.keys() + ['tags']:
            setattr(self, key, kwargs[key])

        if self.state == 'present':
            deployment = self.deploy_template()
            self.results['deployment'] = dict(
                name=deployment.name,
                group_name=self.resource_group_name,
                id=deployment.id,
                outputs=deployment.properties.outputs,
                instances=self._get_instances(deployment)
            )
            self.results['changed'] = True
            self.results['msg'] = 'deployment succeeded'
        else:
            # only attempt deletion when the group exists; a missing group is
            # treated as already absent
            if self.resource_group_exists(self.resource_group_name):
                self.destroy_resource_group()
                self.results['changed'] = True
                self.results['msg'] = "deployment deleted"

        return self.results
    def deploy_template(self):
        """
        Create/update the resource group, then create/update the deployment
        from either the inline template/parameters or their *_link URIs,
        optionally polling until provisioning reaches a terminal state.

        :return: the deployment result object; calls self.fail() on any
                 CloudError or on a non-'Succeeded' terminal state.
        """
        deploy_parameter = DeploymentProperties(self.deployment_mode)
        # prefer the inline form; fall back to the URI-link form (the two are
        # declared mutually exclusive in __init__)
        if not self.parameters_link:
            deploy_parameter.parameters = self.parameters
        else:
            deploy_parameter.parameters_link = ParametersLink(
                uri=self.parameters_link
            )
        if not self.template_link:
            deploy_parameter.template = self.template
        else:
            deploy_parameter.template_link = TemplateLink(
                uri=self.template_link
            )

        params = ResourceGroup(location=self.location, tags=self.tags)

        try:
            # ensure the target resource group exists before deploying into it
            self.rm_client.resource_groups.create_or_update(self.resource_group_name, params)
        except CloudError as exc:
            self.fail("Resource group create_or_update failed with status code: %s and message: %s" %
                      (exc.status_code, exc.message))
        try:
            result = self.rm_client.deployments.create_or_update(self.resource_group_name,
                                                                 self.deployment_name,
                                                                 deploy_parameter)

            deployment_result = self.get_poller_result(result)
            if self.wait_for_deployment_completion:
                # poll until Azure reports a terminal provisioning state
                while deployment_result.properties is None or deployment_result.properties.provisioning_state not in ['Canceled', 'Failed', 'Deleted',
                                                                                                                      'Succeeded']:
                    time.sleep(self.wait_for_deployment_polling_period)
                    deployment_result = self.rm_client.deployments.get(self.resource_group_name, self.deployment_name)
        except CloudError as exc:
            # surface the per-operation failure details alongside the error
            failed_deployment_operations = self._get_failed_deployment_operations(self.deployment_name)
            self.log("Deployment failed %s: %s" % (exc.status_code, exc.message))
            self.fail("Deployment failed with status code: %s and message: %s" % (exc.status_code, exc.message),
                      failed_deployment_operations=failed_deployment_operations)

        if self.wait_for_deployment_completion and deployment_result.properties.provisioning_state != 'Succeeded':
            self.log("provisioning state: %s" % deployment_result.properties.provisioning_state)
            failed_deployment_operations = self._get_failed_deployment_operations(self.deployment_name)
            self.fail('Deployment failed. Deployment id: %s' % deployment_result.id,
                      failed_deployment_operations=failed_deployment_operations)

        return deployment_result
    def destroy_resource_group(self):
        """
        Destroy the targeted resource group, blocking until deletion finishes.

        404/204 responses are treated as already-deleted and ignored; any
        other CloudError fails the module.
        """
        try:
            result = self.rm_client.resource_groups.delete(self.resource_group_name)
            result.wait() # Blocking wait till the delete is finished
        except CloudError as e:
            if e.status_code == 404 or e.status_code == 204:
                # group is already gone -- nothing to do
                return
            else:
                self.fail("Delete resource group and deploy failed with status code: %s and message: %s" %
                          (e.status_code, e.message))
    def resource_group_exists(self, resource_group):
        '''
        Return True/False based on existence of requested resource group.

        EAFP: a successful GET means the group exists; any CloudError
        (including not-found) is reported as non-existence.

        :param resource_group: string. Name of a resource group.
        :return: boolean
        '''
        try:
            self.rm_client.resource_groups.get(resource_group)
        except CloudError:
            return False
        return True
def _get_failed_nested_operations(self, current_operations):
new_operations = []
for operation in current_operations:
if operation.properties.provisioning_state == 'Failed':
new_operations.append(operation)
if operation.properties.target_resource and \
'Microsoft.Resources/deployments' in operation.properties.target_resource.id:
nested_deployment = operation.properties.target_resource.resource_name
try:
nested_operations = self.rm_client.deployment_operations.list(self.resource_group_name,
nested_deployment)
except CloudError as exc:
self.fail("List nested deployment operations failed with status code: %s and message: %s" %
(e.status_code, e.message))
new_nested_operations = self._get_failed_nested_operations(nested_operations)
new_operations += new_nested_operations
return new_operations
def _get_failed_deployment_operations(self, deployment_name):
results = []
# time.sleep(15) # there is a race condition between when we ask for deployment status and when the
# # status is available.
try:
operations = self.rm_client.deployment_operations.list(self.resource_group_name, deployment_name)
except CloudError as exc:
self.fail("Get deployment failed with status code: %s and message: %s" %
(exc.status_code, exc.message))
try:
results = [
dict(
id=op.id,
operation_id=op.operation_id,
status_code=op.properties.status_code,
status_message=op.properties.status_message,
target_resource=dict(
id=op.properties.target_resource.id,
resource_name=op.properties.target_resource.resource_name,
resource_type=op.properties.target_resource.resource_type
) if op.properties.target_resource else None,
provisioning_state=op.properties.provisioning_state,
)
for op in self._get_failed_nested_operations(operations)
]
except:
# If we fail here, the original error gets lost and user receives wrong error message/stacktrace
pass
self.log(dict(failed_deployment_operations=results), pretty_print=True)
return results
    def _get_instances(self, deployment):
        """Summarize the VMs created by *deployment* and their public IPs.

        Builds the dependency hierarchy from the deployment result, finds
        every ``Microsoft.Compute/virtualMachines`` node, resolves the public
        IPs reachable through each VM's network interfaces, and returns one
        dict per VM that has at least one public IP.

        :param deployment: deployment result object exposing
            ``properties.dependencies``.
        :return: list of dicts like ``{'vm_name': ..., 'ips': [ip_dict, ...]}``.
        """
        dep_tree = self._build_hierarchy(deployment.properties.dependencies)
        vms = self._get_dependencies(dep_tree, resource_type="Microsoft.Compute/virtualMachines")
        # Pair each VM node with the NIC nodes found beneath it in the tree.
        vms_and_nics = [(vm, self._get_dependencies(vm['children'], "Microsoft.Network/networkInterfaces"))
                        for vm in vms]
        vms_and_ips = [(vm['dep'], self._nic_to_public_ips_instance(nics))
                       for vm, nics in vms_and_nics]
        # VMs without any public IP are omitted from the result.
        return [dict(vm_name=vm.resource_name, ips=[self._get_ip_dict(ip)
                                                    for ip in ips]) for vm, ips in vms_and_ips if len(ips) > 0]
def _get_dependencies(self, dep_tree, resource_type):
matches = [value for value in dep_tree.values() if value['dep'].resource_type == resource_type]
for child_tree in [value['children'] for value in dep_tree.values()]:
matches += self._get_dependencies(child_tree, resource_type)
return matches
    def _build_hierarchy(self, dependencies, tree=None):
        """Build a nested resource-dependency tree from a flat dependency list.

        Each node is ``{'dep': dependency object, 'children': subtree}``. On
        the outermost call (marked by the 'top' sentinel key), top-level
        entries that also appear as children of another top-level entry are
        re-parented under it and removed from the top level.

        :param dependencies: iterable of deployment dependency objects.
        :param tree: internal accumulator used during recursion; callers
            should not pass it.
        :return: dict mapping resource_name -> node.
        """
        tree = dict(top=True) if tree is None else tree
        for dep in dependencies:
            if dep.resource_name not in tree:
                tree[dep.resource_name] = dict(dep=dep, children=dict())
            # NOTE(review): Dependency is presumably the Azure SDK dependency
            # type imported elsewhere in this module — only such objects carry
            # a depends_on list to recurse into. Confirm against the imports.
            if isinstance(dep, Dependency) and dep.depends_on is not None and len(dep.depends_on) > 0:
                self._build_hierarchy(dep.depends_on, tree[dep.resource_name]['children'])
        if 'top' in tree:
            # Outermost call: strip the sentinel, then re-parent duplicates —
            # if key1 is a child of key2, move key1's full node under key2 and
            # drop the redundant top-level entry.
            tree.pop('top', None)
            keys = list(tree.keys())
            for key1 in keys:
                for key2 in keys:
                    if key2 in tree and key1 in tree[key2]['children'] and key1 in tree:
                        tree[key2]['children'][key1] = tree[key1]
                        tree.pop(key1)
        return tree
def _get_ip_dict(self, ip):
ip_dict = dict(name=ip.name,
id=ip.id,
public_ip=ip.ip_address,
public_ip_allocation_method=str(ip.public_ip_allocation_method)
)
if ip.dns_settings:
ip_dict['dns_settings'] = {
'domain_name_label':ip.dns_settings.domain_name_label,
'fqdn':ip.dns_settings.fqdn
}
return ip_dict
def _nic_to_public_ips_instance(self, nics):
return [self.network_client.public_ip_addresses.get(public_ip_id.split('/')[4], public_ip_id.split('/')[-1])
for nic_obj in [self.network_client.network_interfaces.get(self.resource_group_name,
nic['dep'].resource_name) for nic in nics]
for public_ip_id in [ip_conf_instance.public_ip_address.id
for ip_conf_instance in nic_obj.ip_configurations
if ip_conf_instance.public_ip_address]]
def main():
    # Entry point: constructing the manager runs the whole module —
    # argument handling, deployment/teardown and exit happen inside
    # AzureRMDeploymentManager's constructor.
    AzureRMDeploymentManager()
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
|
googleapis/python-aiplatform | refs/heads/master | google/cloud/aiplatform_v1beta1/types/data_labeling_job.py | 1 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1beta1.types import encryption_spec as gca_encryption_spec
from google.cloud.aiplatform_v1beta1.types import job_state
from google.protobuf import struct_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
from google.type import money_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1beta1",
manifest={
"DataLabelingJob",
"ActiveLearningConfig",
"SampleConfig",
"TrainingConfig",
},
)
class DataLabelingJob(proto.Message):
r"""DataLabelingJob is used to trigger a human labeling job on
unlabeled data from the following Dataset:
Attributes:
name (str):
Output only. Resource name of the
DataLabelingJob.
display_name (str):
Required. The user-defined name of the
DataLabelingJob. The name can be up to 128
characters long and can be consist of any UTF-8
characters.
Display name of a DataLabelingJob.
datasets (Sequence[str]):
Required. Dataset resource names. Right now we only support
labeling from a single Dataset. Format:
``projects/{project}/locations/{location}/datasets/{dataset}``
annotation_labels (Sequence[google.cloud.aiplatform_v1beta1.types.DataLabelingJob.AnnotationLabelsEntry]):
Labels to assign to annotations generated by
this DataLabelingJob.
Label keys and values can be no longer than 64
characters (Unicode codepoints), can only
contain lowercase letters, numeric characters,
underscores and dashes. International characters
are allowed. See https://goo.gl/xmQnxf for more
information and examples of labels. System
reserved label keys are prefixed with
"aiplatform.googleapis.com/" and are immutable.
labeler_count (int):
Required. Number of labelers to work on each
DataItem.
instruction_uri (str):
Required. The Google Cloud Storage location
of the instruction pdf. This pdf is shared with
labelers, and provides detailed description on
how to label DataItems in Datasets.
inputs_schema_uri (str):
Required. Points to a YAML file stored on
Google Cloud Storage describing the config for a
specific type of DataLabelingJob. The schema
files that can be used here are found in the
https://storage.googleapis.com/google-cloud-
aiplatform bucket in the
/schema/datalabelingjob/inputs/ folder.
inputs (google.protobuf.struct_pb2.Value):
Required. Input config parameters for the
DataLabelingJob.
state (google.cloud.aiplatform_v1beta1.types.JobState):
Output only. The detailed state of the job.
labeling_progress (int):
Output only. Current labeling job progress percentage scaled
in interval [0, 100], indicating the percentage of DataItems
that has been finished.
current_spend (google.type.money_pb2.Money):
Output only. Estimated cost(in US dollars)
that the DataLabelingJob has incurred to date.
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Timestamp when this
DataLabelingJob was created.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Timestamp when this
DataLabelingJob was updated most recently.
error (google.rpc.status_pb2.Status):
Output only. DataLabelingJob errors. It is only populated
when job's state is ``JOB_STATE_FAILED`` or
``JOB_STATE_CANCELLED``.
labels (Sequence[google.cloud.aiplatform_v1beta1.types.DataLabelingJob.LabelsEntry]):
The labels with user-defined metadata to organize your
DataLabelingJobs.
Label keys and values can be no longer than 64 characters
(Unicode codepoints), can only contain lowercase letters,
numeric characters, underscores and dashes. International
characters are allowed.
See https://goo.gl/xmQnxf for more information and examples
of labels. System reserved label keys are prefixed with
"aiplatform.googleapis.com/" and are immutable. Following
system labels exist for each DataLabelingJob:
- "aiplatform.googleapis.com/schema": output only, its
value is the
[inputs_schema][google.cloud.aiplatform.v1beta1.DataLabelingJob.inputs_schema_uri]'s
title.
specialist_pools (Sequence[str]):
The SpecialistPools' resource names
associated with this job.
encryption_spec (google.cloud.aiplatform_v1beta1.types.EncryptionSpec):
Customer-managed encryption key spec for a
DataLabelingJob. If set, this DataLabelingJob
will be secured by this key.
Note: Annotations created in the DataLabelingJob
are associated with the EncryptionSpec of the
Dataset they are exported to.
active_learning_config (google.cloud.aiplatform_v1beta1.types.ActiveLearningConfig):
Parameters that configure the active learning
pipeline. Active learning will label the data
incrementally via several iterations. For every
iteration, it will select a batch of data based
on the sampling strategy.
"""
name = proto.Field(proto.STRING, number=1,)
display_name = proto.Field(proto.STRING, number=2,)
datasets = proto.RepeatedField(proto.STRING, number=3,)
annotation_labels = proto.MapField(proto.STRING, proto.STRING, number=12,)
labeler_count = proto.Field(proto.INT32, number=4,)
instruction_uri = proto.Field(proto.STRING, number=5,)
inputs_schema_uri = proto.Field(proto.STRING, number=6,)
inputs = proto.Field(proto.MESSAGE, number=7, message=struct_pb2.Value,)
state = proto.Field(proto.ENUM, number=8, enum=job_state.JobState,)
labeling_progress = proto.Field(proto.INT32, number=13,)
current_spend = proto.Field(proto.MESSAGE, number=14, message=money_pb2.Money,)
create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,)
update_time = proto.Field(
proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,
)
error = proto.Field(proto.MESSAGE, number=22, message=status_pb2.Status,)
labels = proto.MapField(proto.STRING, proto.STRING, number=11,)
specialist_pools = proto.RepeatedField(proto.STRING, number=16,)
encryption_spec = proto.Field(
proto.MESSAGE, number=20, message=gca_encryption_spec.EncryptionSpec,
)
active_learning_config = proto.Field(
proto.MESSAGE, number=21, message="ActiveLearningConfig",
)
class ActiveLearningConfig(proto.Message):
r"""Parameters that configure the active learning pipeline.
Active learning will label the data incrementally by several
iterations. For every iteration, it will select a batch of data
based on the sampling strategy.
Attributes:
max_data_item_count (int):
Max number of human labeled DataItems.
max_data_item_percentage (int):
Max percent of total DataItems for human
labeling.
sample_config (google.cloud.aiplatform_v1beta1.types.SampleConfig):
Active learning data sampling config. For
every active learning labeling iteration, it
will select a batch of data based on the
sampling strategy.
training_config (google.cloud.aiplatform_v1beta1.types.TrainingConfig):
CMLE training config. For every active
learning labeling iteration, system will train a
machine learning model on CMLE. The trained
model will be used by data sampling algorithm to
select DataItems.
"""
max_data_item_count = proto.Field(
proto.INT64, number=1, oneof="human_labeling_budget",
)
max_data_item_percentage = proto.Field(
proto.INT32, number=2, oneof="human_labeling_budget",
)
sample_config = proto.Field(proto.MESSAGE, number=3, message="SampleConfig",)
training_config = proto.Field(proto.MESSAGE, number=4, message="TrainingConfig",)
class SampleConfig(proto.Message):
r"""Active learning data sampling config. For every active
learning labeling iteration, it will select a batch of data
based on the sampling strategy.
Attributes:
initial_batch_sample_percentage (int):
The percentage of data needed to be labeled
in the first batch.
following_batch_sample_percentage (int):
The percentage of data needed to be labeled
in each following batch (except the first
batch).
sample_strategy (google.cloud.aiplatform_v1beta1.types.SampleConfig.SampleStrategy):
Field to choose sampling strategy. Sampling
strategy will decide which data should be
selected for human labeling in every batch.
"""
class SampleStrategy(proto.Enum):
r"""Sample strategy decides which subset of DataItems should be
selected for human labeling in every batch.
"""
SAMPLE_STRATEGY_UNSPECIFIED = 0
UNCERTAINTY = 1
initial_batch_sample_percentage = proto.Field(
proto.INT32, number=1, oneof="initial_batch_sample_size",
)
following_batch_sample_percentage = proto.Field(
proto.INT32, number=3, oneof="following_batch_sample_size",
)
sample_strategy = proto.Field(proto.ENUM, number=5, enum=SampleStrategy,)
class TrainingConfig(proto.Message):
r"""CMLE training config. For every active learning labeling
iteration, system will train a machine learning model on CMLE.
The trained model will be used by data sampling algorithm to
select DataItems.
Attributes:
timeout_training_milli_hours (int):
The timeout hours for the CMLE training job,
expressed in milli hours i.e. 1,000 value in
this field means 1 hour.
"""
timeout_training_milli_hours = proto.Field(proto.INT64, number=1,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
osvalr/odoo | refs/heads/8.0 | openerp/addons/base/tests/__init__.py | 286 | import test_acl
import test_api
import test_base
import test_basecase
import test_db_cursor
import test_expression
import test_func
import test_ir_actions
import test_ir_attachment
import test_ir_filters
import test_ir_sequence
import test_ir_values
import test_mail
import test_menu
import test_orm
import test_osv
import test_qweb
import test_res_config
import test_res_lang
import test_search
import test_translate
#import test_uninstall
import test_view_validation
import test_views
import test_xmlrpc
|
Afnarel/django-forms-builder | refs/heads/master | forms_builder/example_project/settings.py | 9 | from __future__ import absolute_import, unicode_literals
import os, sys
DEBUG = True
SITE_ID = 1
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIRNAME = PROJECT_ROOT.split(os.sep)[-1]
STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
MEDIA_URL = STATIC_URL + "media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
ADMIN_MEDIA_PREFIX = STATIC_URL + "admin/"
ROOT_URLCONF = "%s.urls" % PROJECT_DIRNAME
TEMPLATE_DIRS = (os.path.join(PROJECT_ROOT, "templates"),)
SECRET_KEY = "asdfa4wtW#$Gse4aGdfs"
ADMINS = ()
MANAGERS = ADMINS
if "test" not in sys.argv:
LOGIN_URL = "/admin/"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev.db',
}
}
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.static",
"django.core.context_processors.media",
"django.core.context_processors.request",
)
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'forms_builder.forms',
)
from django import VERSION
if VERSION < (1, 7):
try:
import south
except ImportError:
pass
else:
INSTALLED_APPS += ("south",)
FORMS_BUILDER_EXTRA_FIELDS = (
(100, "django.forms.BooleanField", "My cool checkbox"),
)
try:
from local_settings import *
except ImportError:
pass
TEMPLATE_DEBUG = DEBUG
|
xamanu/osm2gtfs | refs/heads/master | osm2gtfs/core/creator_factory.py | 4 | # coding=utf-8
import importlib
import logging
from osm2gtfs.creators.agency_creator import AgencyCreator
from osm2gtfs.creators.feed_info_creator import FeedInfoCreator
from osm2gtfs.creators.routes_creator import RoutesCreator
from osm2gtfs.creators.stops_creator import StopsCreator
from osm2gtfs.creators.schedule_creator import ScheduleCreator
from osm2gtfs.creators.trips_creator import TripsCreator
class CreatorFactory(object):
    """Instantiate GTFS creator objects for a configured provider selector.

    Each ``get_*_creator`` method tries to import a provider-specific
    override module (``osm2gtfs.creators.<selector>.<kind>_creator_<selector>``)
    and falls back to the generic creator class when no override exists.
    The six getters previously duplicated this lookup verbatim; they now
    share the private ``_make_creator`` helper.
    """

    def __init__(self, config):
        """
        :param config: configuration object; its ``data`` dict may contain a
            'selector' key naming the provider-specific creators to use.
        """
        self.config = config
        if 'selector' in self.config.data:
            self.selector = self.config.data['selector']
        else:
            self.selector = None

    def __repr__(self):
        rep = ""
        if self.config is not None:
            rep += str(self.config) + " | "
        if self.selector is not None:
            rep += self.selector
        return rep

    def _make_creator(self, kind, label, default_class):
        """Return a provider-specific creator instance, or the default.

        :param kind: snake_case module/class stem, e.g. 'agency' or 'feed_info'.
        :param label: human-readable name used in log messages ('Agency', ...).
        :param default_class: generic creator class used when no override
            module can be imported for the current selector.
        """
        selector = self.selector
        try:
            module = importlib.import_module(
                ".creators." + selector + "." + kind + "_creator_" + selector,
                package="osm2gtfs")
            # Override classes are named e.g. AgencyCreatorAccraGhana.
            creator_override = getattr(
                module,
                self._generate_class_name(kind) + "Creator" +
                self._generate_class_name(selector))
            logging.info("%s creator: %s", label, selector)
            return creator_override(self.config)
        except ImportError:
            logging.info("%s creator: Default", label)
            return default_class(self.config)

    def get_agency_creator(self):
        """Return the agency creator for the configured selector."""
        return self._make_creator("agency", "Agency", AgencyCreator)

    def get_feed_info_creator(self):
        """Return the feed info creator for the configured selector."""
        return self._make_creator("feed_info", "Feed info", FeedInfoCreator)

    def get_routes_creator(self):
        """Return the routes creator for the configured selector."""
        return self._make_creator("routes", "Routes", RoutesCreator)

    def get_stops_creator(self):
        """Return the stops creator for the configured selector."""
        return self._make_creator("stops", "Stops", StopsCreator)

    def get_schedule_creator(self):
        """Return the schedule creator for the configured selector."""
        return self._make_creator("schedule", "Schedule", ScheduleCreator)

    def get_trips_creator(self):
        """Return the trips creator for the configured selector."""
        return self._make_creator("trips", "Trips", TripsCreator)

    @staticmethod
    def _generate_class_name(selector):
        """
        Converts the underscore selector into class names sticking to Python's
        naming convention.
        """
        if "_" in selector:
            return "".join(part.capitalize() for part in selector.split("_"))
        return selector.capitalize()
|
darktears/chromium-crosswalk | refs/heads/master | tools/telemetry/third_party/modulegraph/modulegraph_tests/testpkg-packages/main_script.py | 805 | import sys
|
LennonChin/Django-Practices | refs/heads/master | MxShop/MxShop/wsgi.py | 1 | """
WSGI config for MxShop project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "MxShop.settings")
application = get_wsgi_application()
|
skyoo/jumpserver | refs/heads/master | apps/terminal/migrations/0004_session_remote_addr.py | 6 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-01-05 10:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('terminal', '0003_auto_20171230_0308'),
]
operations = [
migrations.AddField(
model_name='session',
name='remote_addr',
field=models.CharField(blank=True, max_length=15, null=True, verbose_name='Remote addr'),
),
]
|
agaurav/ansible | refs/heads/devel | test/integration/consul_running.py | 106 | ''' Checks that the consul agent is running locally. '''
if __name__ == '__main__':
    # Best-effort probe: print "True" only when a local consul agent
    # answers on 0.0.0.0:8500; otherwise produce no output at all.
    try:
        import consul
        consul = consul.Consul(host='0.0.0.0', port=8500)
        consul.catalog.nodes()
        print "True"
    except:
        # Deliberately silent: missing python-consul or an unreachable agent
        # both mean "not running". NOTE(review): the bare except also hides
        # unexpected errors — consider narrowing to 'except Exception'.
        pass
|
gboudreau/CouchPotato | refs/heads/master | cherrypy/lib/__init__.py | 83 | """CherryPy Library"""
# Deprecated in CherryPy 3.2 -- remove in CherryPy 3.3
from cherrypy.lib.reprconf import unrepr, modules, attributes
class file_generator(object):
    """Iterator yielding a file-like object's contents in fixed-size chunks.

    Default chunk size is 64 KiB. When the underlying read returns a falsy
    value (EOF), the input is closed (if it supports ``close()``) and the
    iteration stops. (Core)
    """

    def __init__(self, input, chunkSize=65536):
        # Keep the historical attribute names; other CherryPy code may use them.
        self.input = input
        self.chunkSize = chunkSize

    def __iter__(self):
        return self

    def __next__(self):
        data = self.input.read(self.chunkSize)
        if not data:
            # EOF: release the underlying file before ending iteration.
            close = getattr(self.input, 'close', None)
            if close is not None:
                close()
            raise StopIteration()
        return data

    # Python 2 iterator protocol compatibility.
    next = __next__
def file_generator_limited(fileobj, count, chunk_size=65536):
    """Yield chunks from *fileobj*, emitting at most *count* bytes in total.

    Each read requests ``min(chunk_size, bytes still owed)``. An empty read
    (premature EOF) ends the generator early. Default chunk size is
    64kB. (Core)
    """
    left = count
    while left > 0:
        piece = fileobj.read(min(chunk_size, left))
        if not piece:
            # Underlying file exhausted before `count` bytes were produced.
            break
        left -= len(piece)
        yield piece
def set_vary_header(response, header_name):
    """Ensure *header_name* appears in the response's Vary header.

    The current comma-separated Vary value is split into stripped tokens,
    the name is appended only when not already present, and the header is
    written back joined with ", ".
    """
    current = response.headers.get("Vary", "")
    tokens = []
    for part in current.split(","):
        part = part.strip()
        if part:
            tokens.append(part)
    if header_name not in tokens:
        tokens.append(header_name)
    response.headers['Vary'] = ", ".join(tokens)
|
codewarrior0/pytest | refs/heads/master | testing/python/fixture.py | 2 | import pytest, py, sys
from _pytest import python as funcargs
from _pytest.python import FixtureLookupError
from _pytest.pytester import get_public_names
from textwrap import dedent
def test_getfuncargnames():
    # Fixture-argument names are the parameters without default values.
    def f(): pass
    assert not funcargs.getfuncargnames(f)
    def g(arg): pass
    assert funcargs.getfuncargnames(g) == ('arg',)
    def h(arg1, arg2="hello"): pass
    assert funcargs.getfuncargnames(h) == ('arg1',)
    # Trailing defaulted parameters are excluded; earlier ones are kept.
    def h(arg1, arg2, arg3="hello"): pass
    assert funcargs.getfuncargnames(h) == ('arg1', 'arg2')
    class A:
        def f(self, arg1, arg2="hello"):
            pass
    # Bound methods drop the implicit 'self' as well as defaulted params.
    assert funcargs.getfuncargnames(A().f) == ('arg1',)
    if sys.version_info < (3,0):
        # Python 2 unbound methods also strip 'self'.
        assert funcargs.getfuncargnames(A.f) == ('arg1',)
class TestFillFixtures:
def test_fillfuncargs_exposed(self):
# used by oejskit, kept for compatibility
assert pytest._fillfuncargs == funcargs.fillfixtures
def test_funcarg_lookupfails(self, testdir):
testdir.makepyfile("""
def pytest_funcarg__xyzsomething(request):
return 42
def test_func(some):
pass
""")
result = testdir.runpytest() # "--collect-only")
assert result.ret != 0
result.stdout.fnmatch_lines([
"*def test_func(some)*",
"*fixture*some*not found*",
"*xyzsomething*",
])
def test_funcarg_basic(self, testdir):
item = testdir.getitem("""
def pytest_funcarg__some(request):
return request.function.__name__
def pytest_funcarg__other(request):
return 42
def test_func(some, other):
pass
""")
funcargs.fillfixtures(item)
del item.funcargs["request"]
assert len(get_public_names(item.funcargs)) == 2
assert item.funcargs['some'] == "test_func"
assert item.funcargs['other'] == 42
def test_funcarg_lookup_modulelevel(self, testdir):
testdir.makepyfile("""
def pytest_funcarg__something(request):
return request.function.__name__
class TestClass:
def test_method(self, something):
assert something == "test_method"
def test_func(something):
assert something == "test_func"
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=2)
def test_funcarg_lookup_classlevel(self, testdir):
p = testdir.makepyfile("""
class TestClass:
def pytest_funcarg__something(self, request):
return request.instance
def test_method(self, something):
assert something is self
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*1 passed*"
])
def test_conftest_funcargs_only_available_in_subdir(self, testdir):
sub1 = testdir.mkpydir("sub1")
sub2 = testdir.mkpydir("sub2")
sub1.join("conftest.py").write(py.code.Source("""
import pytest
def pytest_funcarg__arg1(request):
pytest.raises(Exception, "request.getfuncargvalue('arg2')")
"""))
sub2.join("conftest.py").write(py.code.Source("""
import pytest
def pytest_funcarg__arg2(request):
pytest.raises(Exception, "request.getfuncargvalue('arg1')")
"""))
sub1.join("test_in_sub1.py").write("def test_1(arg1): pass")
sub2.join("test_in_sub2.py").write("def test_2(arg2): pass")
result = testdir.runpytest("-v")
result.assert_outcomes(passed=2)
def test_extend_fixture_module_class(self, testdir):
testfile = testdir.makepyfile("""
import pytest
@pytest.fixture
def spam():
return 'spam'
class TestSpam:
@pytest.fixture
def spam(self, spam):
return spam * 2
def test_spam(self, spam):
assert spam == 'spamspam'
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*1 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_extend_fixture_conftest_module(self, testdir):
testdir.makeconftest("""
import pytest
@pytest.fixture
def spam():
return 'spam'
""")
testfile = testdir.makepyfile("""
import pytest
@pytest.fixture
def spam(spam):
return spam * 2
def test_spam(spam):
assert spam == 'spamspam'
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*1 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_extend_fixture_conftest_conftest(self, testdir):
testdir.makeconftest("""
import pytest
@pytest.fixture
def spam():
return 'spam'
""")
pkg = testdir.mkpydir("pkg")
pkg.join("conftest.py").write(py.code.Source("""
import pytest
@pytest.fixture
def spam(spam):
return spam * 2
"""))
testfile = pkg.join("test_spam.py")
testfile.write(py.code.Source("""
def test_spam(spam):
assert spam == "spamspam"
"""))
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*1 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_extend_fixture_conftest_plugin(self, testdir):
testdir.makepyfile(testplugin="""
import pytest
@pytest.fixture
def foo():
return 7
""")
testdir.syspathinsert()
testdir.makeconftest("""
import pytest
pytest_plugins = 'testplugin'
@pytest.fixture
def foo(foo):
return foo + 7
""")
testdir.makepyfile("""
def test_foo(foo):
assert foo == 14
""")
result = testdir.runpytest('-s')
assert result.ret == 0
def test_extend_fixture_plugin_plugin(self, testdir):
# Two plugins should extend each order in loading order
testdir.makepyfile(testplugin0="""
import pytest
@pytest.fixture
def foo():
return 7
""")
testdir.makepyfile(testplugin1="""
import pytest
@pytest.fixture
def foo(foo):
return foo + 7
""")
testdir.syspathinsert()
testdir.makepyfile("""
pytest_plugins = ['testplugin0', 'testplugin1']
def test_foo(foo):
assert foo == 14
""")
result = testdir.runpytest()
assert result.ret == 0
def test_override_parametrized_fixture_conftest_module(self, testdir):
"""Test override of the parametrized fixture with non-parametrized one on the test module level."""
testdir.makeconftest("""
import pytest
@pytest.fixture(params=[1, 2, 3])
def spam(request):
return request.param
""")
testfile = testdir.makepyfile("""
import pytest
@pytest.fixture
def spam():
return 'spam'
def test_spam(spam):
assert spam == 'spam'
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*1 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_override_parametrized_fixture_conftest_conftest(self, testdir):
"""Test override of the parametrized fixture with non-parametrized one on the conftest level."""
testdir.makeconftest("""
import pytest
@pytest.fixture(params=[1, 2, 3])
def spam(request):
return request.param
""")
subdir = testdir.mkpydir('subdir')
subdir.join("conftest.py").write(py.code.Source("""
import pytest
@pytest.fixture
def spam():
return 'spam'
"""))
testfile = subdir.join("test_spam.py")
testfile.write(py.code.Source("""
def test_spam(spam):
assert spam == "spam"
"""))
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*1 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*1 passed*"])
def test_override_non_parametrized_fixture_conftest_module(self, testdir):
"""Test override of the non-parametrized fixture with parametrized one on the test module level."""
testdir.makeconftest("""
import pytest
@pytest.fixture
def spam():
return 'spam'
""")
testfile = testdir.makepyfile("""
import pytest
@pytest.fixture(params=[1, 2, 3])
def spam(request):
return request.param
params = {'spam': 1}
def test_spam(spam):
assert spam == params['spam']
params['spam'] += 1
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*3 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*3 passed*"])
def test_override_non_parametrized_fixture_conftest_conftest(self, testdir):
"""Test override of the non-parametrized fixture with parametrized one on the conftest level."""
testdir.makeconftest("""
import pytest
@pytest.fixture
def spam():
return 'spam'
""")
subdir = testdir.mkpydir('subdir')
subdir.join("conftest.py").write(py.code.Source("""
import pytest
@pytest.fixture(params=[1, 2, 3])
def spam(request):
return request.param
"""))
testfile = subdir.join("test_spam.py")
testfile.write(py.code.Source("""
params = {'spam': 1}
def test_spam(spam):
assert spam == params['spam']
params['spam'] += 1
"""))
result = testdir.runpytest()
result.stdout.fnmatch_lines(["*3 passed*"])
result = testdir.runpytest(testfile)
result.stdout.fnmatch_lines(["*3 passed*"])
def test_autouse_fixture_plugin(self, testdir):
# A fixture from a plugin has no baseid set, which screwed up
# the autouse fixture handling.
testdir.makepyfile(testplugin="""
import pytest
@pytest.fixture(autouse=True)
def foo(request):
request.function.foo = 7
""")
testdir.syspathinsert()
testdir.makepyfile("""
pytest_plugins = 'testplugin'
def test_foo(request):
assert request.function.foo == 7
""")
result = testdir.runpytest()
assert result.ret == 0
def test_funcarg_lookup_error(self, testdir):
testdir.makepyfile("""
def test_lookup_error(unknown):
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*ERROR*test_lookup_error*",
"*def test_lookup_error(unknown):*",
"*fixture*unknown*not found*",
"*available fixtures*",
"*1 error*",
])
assert "INTERNAL" not in result.stdout.str()
def test_fixture_excinfo_leak(self, testdir):
# on python2 sys.excinfo would leak into fixture executions
testdir.makepyfile("""
import sys
import traceback
import pytest
@pytest.fixture
def leak():
if sys.exc_info()[0]: # python3 bug :)
traceback.print_exc()
#fails
assert sys.exc_info() == (None, None, None)
def test_leak(leak):
if sys.exc_info()[0]: # python3 bug :)
traceback.print_exc()
assert sys.exc_info() == (None, None, None)
""")
result = testdir.runpytest()
assert result.ret == 0
class TestRequestBasic:
    """Behavioral tests for the ``FixtureRequest`` object: attribute
    access, ``getfuncargvalue`` lookup and caching, finalizer
    registration, and the ``fixturenames``/``funcargnames`` views."""
    def test_request_attributes(self, testdir):
        """A request for a module-level test exposes function, keywords,
        module and config; ``cls`` is None outside a class."""
        item = testdir.getitem("""
            def pytest_funcarg__something(request): pass
            def test_func(something): pass
        """)
        req = funcargs.FixtureRequest(item)
        assert req.function == item.obj
        assert req.keywords == item.keywords
        assert hasattr(req.module, 'test_func')
        assert req.cls is None
        assert req.function.__name__ == "test_func"
        assert req.config == item.config
        # repr should mention the test function name
        assert repr(req).find(req.function.__name__) != -1
    def test_request_attributes_method(self, testdir):
        """For a test method the request exposes the class and instance."""
        item, = testdir.getitems("""
            class TestB:
                def pytest_funcarg__something(self, request):
                    return 1
                def test_func(self, something):
                    pass
        """)
        req = item._request
        assert req.cls.__name__ == "TestB"
        assert req.instance.__class__ == req.cls
    def XXXtest_request_contains_funcarg_arg2fixturedefs(self, testdir):
        """Disabled (XXX prefix keeps it from being collected): inspect
        the request's internal _arg2fixturedefs mapping."""
        modcol = testdir.getmodulecol("""
            def pytest_funcarg__something(request):
                pass
            class TestClass:
                def test_method(self, something):
                    pass
        """)
        item1, = testdir.genitems([modcol])
        assert item1.name == "test_method"
        arg2fixturedefs = funcargs.FixtureRequest(item1)._arg2fixturedefs
        assert len(arg2fixturedefs) == 1
        assert arg2fixturedefs[0].__name__ == "pytest_funcarg__something"
    def test_getfuncargvalue_recursive(self, testdir):
        """A module-level factory may shadow a conftest factory and call
        getfuncargvalue to delegate to the shadowed one."""
        testdir.makeconftest("""
            def pytest_funcarg__something(request):
                return 1
        """)
        testdir.makepyfile("""
            def pytest_funcarg__something(request):
                return request.getfuncargvalue("something") + 1
            def test_func(something):
                assert something == 2
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_getfuncargvalue(self, testdir):
        """getfuncargvalue caches per fixture name and raises
        FixtureLookupError for unknown names."""
        item = testdir.getitem("""
            l = [2]
            def pytest_funcarg__something(request): return 1
            def pytest_funcarg__other(request):
                return l.pop()
            def test_func(something): pass
        """)
        req = item._request
        pytest.raises(FixtureLookupError, req.getfuncargvalue, "notexists")
        val = req.getfuncargvalue("something")
        assert val == 1
        val = req.getfuncargvalue("something")
        assert val == 1
        val2 = req.getfuncargvalue("other")
        assert val2 == 2
        val2 = req.getfuncargvalue("other") # see about caching
        assert val2 == 2
        pytest._fillfuncargs(item)
        assert item.funcargs["something"] == 1
        # "something" plus the implicitly-present "request" entry
        assert len(get_public_names(item.funcargs)) == 2
        assert "request" in item.funcargs
        #assert item.funcargs == {'something': 1, "other": 2}
    def test_request_addfinalizer(self, testdir):
        """Finalizers registered via request.addfinalizer run when the
        item is torn down."""
        item = testdir.getitem("""
            teardownlist = []
            def pytest_funcarg__something(request):
                request.addfinalizer(lambda: teardownlist.append(1))
            def test_func(something): pass
        """)
        item.session._setupstate.prepare(item)
        pytest._fillfuncargs(item)
        # successively check finalization calls
        teardownlist = item.getparent(pytest.Module).obj.teardownlist
        ss = item.session._setupstate
        assert not teardownlist
        ss.teardown_exact(item, None)
        print(ss.stack)
        assert teardownlist == [1]
    def test_request_addfinalizer_failing_setup(self, testdir):
        """A finalizer registered before the fixture body fails must
        still run."""
        testdir.makepyfile("""
            import pytest
            l = [1]
            @pytest.fixture
            def myfix(request):
                request.addfinalizer(l.pop)
                assert 0
            def test_fix(myfix):
                pass
            def test_finalizer_ran():
                assert not l
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(failed=1, passed=1)
    def test_request_addfinalizer_failing_setup_module(self, testdir):
        """Module-scope fixture finalizers run despite a failing setup."""
        testdir.makepyfile("""
            import pytest
            l = [1, 2]
            @pytest.fixture(scope="module")
            def myfix(request):
                request.addfinalizer(l.pop)
                request.addfinalizer(l.pop)
                assert 0
            def test_fix(myfix):
                pass
        """)
        reprec = testdir.inline_run("-s")
        mod = reprec.getcalls("pytest_runtest_setup")[0].item.module
        assert not mod.l
    def test_request_addfinalizer_partial_setup_failure(self, testdir):
        """Finalizers registered before a later fixture lookup error
        still run on teardown."""
        p = testdir.makepyfile("""
            l = []
            def pytest_funcarg__something(request):
                request.addfinalizer(lambda: l.append(None))
            def test_func(something, missingarg):
                pass
            def test_second():
                assert len(l) == 1
        """)
        result = testdir.runpytest(p)
        result.stdout.fnmatch_lines([
            "*1 error*" # XXX the whole module collection fails
        ])
    def test_request_getmodulepath(self, testdir):
        """request.fspath matches the collected module's path."""
        modcol = testdir.getmodulecol("def test_somefunc(): pass")
        item, = testdir.genitems([modcol])
        req = funcargs.FixtureRequest(item)
        assert req.fspath == modcol.fspath
    def test_request_fixturenames(self, testdir):
        """fixturenames includes directly requested, transitively
        required and autouse fixtures."""
        testdir.makepyfile("""
            import pytest
            from _pytest.pytester import get_public_names
            @pytest.fixture()
            def arg1():
                pass
            @pytest.fixture()
            def farg(arg1):
                pass
            @pytest.fixture(autouse=True)
            def sarg(tmpdir):
                pass
            def test_function(request, farg):
                assert set(get_public_names(request.fixturenames)) == \
                    set(["tmpdir", "sarg", "arg1", "request", "farg",
                       "tmpdir_factory"])
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_funcargnames_compatattr(self, testdir):
        """funcargnames remains a backward-compatible alias of
        fixturenames on metafunc, items and the request."""
        testdir.makepyfile("""
            def pytest_generate_tests(metafunc):
                assert metafunc.funcargnames == metafunc.fixturenames
            def pytest_funcarg__fn(request):
                assert request._pyfuncitem.funcargnames == \
                       request._pyfuncitem.fixturenames
                return request.funcargnames, request.fixturenames
            def test_hello(fn):
                assert fn[0] == fn[1]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_setupdecorator_and_xunit(self, testdir):
        """Autouse fixtures reusing xunit-style names (setup_module,
        setup_function, ...) run with their declared scopes."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope='module', autouse=True)
            def setup_module():
                l.append("module")
            @pytest.fixture(autouse=True)
            def setup_function():
                l.append("function")
            def test_func():
                pass
            class TestClass:
                @pytest.fixture(scope="class", autouse=True)
                def setup_class(self):
                    l.append("class")
                @pytest.fixture(autouse=True)
                def setup_method(self):
                    l.append("method")
                def test_method(self):
                    pass
            def test_all():
                assert l == ["module", "function", "class",
                     "function", "method", "function"]
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=3)
    def test_fixtures_sub_subdir_normalize_sep(self, testdir):
        """--fixtures resolves fixtures from a nested conftest; nodeid
        path separators are normalized for the lookup."""
        # this tests that normalization of nodeids takes place
        b = testdir.mkdir("tests").mkdir("unit")
        b.join("conftest.py").write(py.code.Source("""
            def pytest_funcarg__arg1():
                pass
        """))
        p = b.join("test_module.py")
        p.write("def test_func(arg1): pass")
        result = testdir.runpytest(p, "--fixtures")
        assert result.ret == 0
        result.stdout.fnmatch_lines("""
            *fixtures defined*conftest*
            *arg1*
        """)
    def test_show_fixtures_color_yes(self, testdir):
        """--fixtures output contains ANSI color codes with --color=yes."""
        testdir.makepyfile("def test_this(): assert 1")
        result = testdir.runpytest('--color=yes', '--fixtures')
        # \x1b[32m is the ANSI green escape preceding the fixture name
        assert '\x1b[32mtmpdir' in result.stdout.str()
    def test_newstyle_with_request(self, testdir):
        """A @pytest.fixture function may take the request object."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture()
            def arg(request):
                pass
            def test_1(arg):
                pass
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_setupcontext_no_param(self, testdir):
        """A non-parametrized fixture's request object must not expose a
        .param attribute, even when a parametrized fixture is active."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(params=[1,2])
            def arg(request):
                return request.param
            @pytest.fixture(autouse=True)
            def mysetup(request, arg):
                assert not hasattr(request, "param")
            def test_1(arg):
                assert arg in (1,2)
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
class TestRequestMarking:
    """Tests for applying marks via the request object
    (request.applymarker) and reading them back via request.keywords."""
    def test_applymarker(self, testdir):
        """applymarker adds a mark to the current item only and rejects
        non-mark arguments with ValueError."""
        item1,item2 = testdir.getitems("""
            def pytest_funcarg__something(request):
                pass
            class TestClass:
                def test_func1(self, something):
                    pass
                def test_func2(self, something):
                    pass
        """)
        req1 = funcargs.FixtureRequest(item1)
        assert 'xfail' not in item1.keywords
        req1.applymarker(pytest.mark.xfail)
        assert 'xfail' in item1.keywords
        assert 'skipif' not in item1.keywords
        req1.applymarker(pytest.mark.skipif)
        assert 'skipif' in item1.keywords
        # 42 is not a marker object -> ValueError
        pytest.raises(ValueError, "req1.applymarker(42)")
    def test_accesskeywords(self, testdir):
        """request.keywords exposes markers applied to the test."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture()
            def keywords(request):
                return request.keywords
            @pytest.mark.XYZ
            def test_function(keywords):
                assert keywords["XYZ"]
                assert "abc" not in keywords
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_accessmarker_dynamic(self, testdir):
        """Markers applied dynamically by an autouse fixture are visible
        through request.keywords in every test."""
        testdir.makeconftest("""
            import pytest
            @pytest.fixture()
            def keywords(request):
                return request.keywords
            @pytest.fixture(scope="class", autouse=True)
            def marking(request):
                request.applymarker(pytest.mark.XYZ("hello"))
        """)
        testdir.makepyfile("""
            import pytest
            def test_fun1(keywords):
                assert keywords["XYZ"] is not None
                assert "abc" not in keywords
            def test_fun2(keywords):
                assert keywords["XYZ"] is not None
                assert "abc" not in keywords
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
class TestRequestCachedSetup:
    """Tests for the request.cached_setup API: scoping, extrakey-based
    cache slots, cache invalidation on teardown, and teardown errors."""
    def test_request_cachedsetup_defaultmodule(self, testdir):
        """cached_setup with module scope runs the setup only once per
        module (mysetup pops a single-element list)."""
        reprec = testdir.inline_runsource("""
            mysetup = ["hello",].pop
            def pytest_funcarg__something(request):
                return request.cached_setup(mysetup, scope="module")
            def test_func1(something):
                assert something == "hello"
            class TestClass:
                def test_func1a(self, something):
                    assert something == "hello"
        """)
        reprec.assertoutcome(passed=2)
    def test_request_cachedsetup_class(self, testdir):
        """Class scope caches per class context; module-level tests each
        get their own setup call (values are popped in reverse)."""
        reprec = testdir.inline_runsource("""
            mysetup = ["hello", "hello2", "hello3"].pop
            def pytest_funcarg__something(request):
                return request.cached_setup(mysetup, scope="class")
            def test_func1(something):
                assert something == "hello3"
            def test_func2(something):
                assert something == "hello2"
            class TestClass:
                def test_func1a(self, something):
                    assert something == "hello"
                def test_func2b(self, something):
                    assert something == "hello"
        """)
        reprec.assertoutcome(passed=4)
    def test_request_cachedsetup_extrakey(self, testdir):
        """Different extrakey values get independent cache slots; equal
        extrakeys reuse the cached value."""
        item1 = testdir.getitem("def test_func(): pass")
        req1 = funcargs.FixtureRequest(item1)
        l = ["hello", "world"]
        def setup():
            return l.pop()
        ret1 = req1.cached_setup(setup, extrakey=1)
        ret2 = req1.cached_setup(setup, extrakey=2)
        assert ret2 == "hello"
        assert ret1 == "world"
        ret1b = req1.cached_setup(setup, extrakey=1)
        ret2b = req1.cached_setup(setup, extrakey=2)
        assert ret1 == ret1b
        assert ret2 == ret2b
    def test_request_cachedsetup_cache_deletion(self, testdir):
        """Running the finalizers clears the cache so a subsequent
        cached_setup call re-runs the setup function."""
        item1 = testdir.getitem("def test_func(): pass")
        req1 = funcargs.FixtureRequest(item1)
        l = []
        def setup():
            l.append("setup")
        def teardown(val):
            l.append("teardown")
        req1.cached_setup(setup, teardown, scope="function")
        assert l == ['setup']
        # artificial call of finalizer
        setupstate = req1._pyfuncitem.session._setupstate
        setupstate._callfinalizers(item1)
        assert l == ["setup", "teardown"]
        req1.cached_setup(setup, teardown, scope="function")
        assert l == ["setup", "teardown", "setup"]
        setupstate._callfinalizers(item1)
        assert l == ["setup", "teardown", "setup", "teardown"]
    def test_request_cached_setup_two_args(self, testdir):
        """Two different factories get distinct cache entries."""
        testdir.makepyfile("""
            def pytest_funcarg__arg1(request):
                return request.cached_setup(lambda: 42)
            def pytest_funcarg__arg2(request):
                return request.cached_setup(lambda: 17)
            def test_two_different_setups(arg1, arg2):
                assert arg1 != arg2
        """)
        result = testdir.runpytest("-v")
        result.stdout.fnmatch_lines([
            "*1 passed*"
        ])
    def test_request_cached_setup_getfuncargvalue(self, testdir):
        """cached_setup composes with getfuncargvalue lookups inside a
        factory."""
        testdir.makepyfile("""
            def pytest_funcarg__arg1(request):
                arg1 = request.getfuncargvalue("arg2")
                return request.cached_setup(lambda: arg1 + 1)
            def pytest_funcarg__arg2(request):
                return request.cached_setup(lambda: 10)
            def test_two_funcarg(arg1):
                assert arg1 == 11
        """)
        result = testdir.runpytest("-v")
        result.stdout.fnmatch_lines([
            "*1 passed*"
        ])
    def test_request_cached_setup_functional(self, testdir):
        """Setup runs once per module; teardown runs after the module is
        done (verified from a second module importing the first)."""
        testdir.makepyfile(test_0="""
            l = []
            def pytest_funcarg__something(request):
                val = request.cached_setup(fsetup, fteardown)
                return val
            def fsetup(mycache=[1]):
                l.append(mycache.pop())
                return l
            def fteardown(something):
                l.remove(something[0])
                l.append(2)
            def test_list_once(something):
                assert something == [1]
            def test_list_twice(something):
                assert something == [1]
        """)
        testdir.makepyfile(test_1="""
            import test_0 # should have run already
            def test_check_test0_has_teardown_correct():
                assert test_0.l == [2]
        """)
        result = testdir.runpytest("-v")
        result.stdout.fnmatch_lines([
            "*3 passed*"
        ])
    def test_issue117_sessionscopeteardown(self, testdir):
        """An exception raised in a session-scope teardown (here a
        ZeroDivisionError) is reported and fails the run."""
        testdir.makepyfile("""
            def pytest_funcarg__app(request):
                app = request.cached_setup(
                    scope='session',
                    setup=lambda: 0,
                    teardown=lambda x: 3/x)
                return app
            def test_func(app):
                pass
        """)
        result = testdir.runpytest()
        assert result.ret != 0
        result.stdout.fnmatch_lines([
            "*3/x*",
            "*ZeroDivisionError*",
        ])
class TestFixtureUsages:
    """Tests for how fixtures are requested and combined: fixture
    chaining, scope mismatches, usefixtures, and parametrization."""
    def test_noargfixturedec(self, testdir):
        """@pytest.fixture works without parentheses."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture
            def arg1():
                return 1
            def test_func(arg1):
                assert arg1 == 1
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_receives_funcargs(self, testdir):
        """A fixture may depend on another fixture by naming it as a
        parameter."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture()
            def arg1():
                return 1
            @pytest.fixture()
            def arg2(arg1):
                return arg1 + 1
            def test_add(arg2):
                assert arg2 == 2
            def test_all(arg1, arg2):
                assert arg1 == 1
                assert arg2 == 2
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
    def test_receives_funcargs_scope_mismatch(self, testdir):
        """A module-scope fixture depending on a function-scope fixture
        is a ScopeMismatch error naming both factories."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="function")
            def arg1():
                return 1
            @pytest.fixture(scope="module")
            def arg2(arg1):
                return arg1 + 1
            def test_add(arg2):
                assert arg2 == 2
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*ScopeMismatch*involved factories*",
            "* def arg2*",
            "* def arg1*",
            "*1 error*"
        ])
    def test_receives_funcargs_scope_mismatch_issue660(self, testdir):
        """The mismatch is detected even when the test also requests the
        narrower fixture directly (issue660)."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="function")
            def arg1():
                return 1
            @pytest.fixture(scope="module")
            def arg2(arg1):
                return arg1 + 1
            def test_add(arg1, arg2):
                assert arg2 == 2
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*ScopeMismatch*involved factories*",
            "* def arg2*",
            "*1 error*"
        ])
    def test_funcarg_parametrized_and_used_twice(self, testdir):
        """A parametrized fixture requested both directly and through a
        dependent fixture is instantiated once per param."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(params=[1,2])
            def arg1(request):
                l.append(1)
                return request.param
            @pytest.fixture()
            def arg2(arg1):
                return arg1 + 1
            def test_add(arg1, arg2):
                assert arg2 == arg1 + 1
                assert len(l) == arg1
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*2 passed*"
        ])
    def test_factory_uses_unknown_funcarg_as_dependency_error(self, testdir):
        """A lookup error deep in a fixture dependency chain reports the
        whole chain of involved factories."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture()
            def fail(missing):
                return
            @pytest.fixture()
            def call_fail(fail):
                return
            def test_missing(call_fail):
                pass
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines("""
            *pytest.fixture()*
            *def call_fail(fail)*
            *pytest.fixture()*
            *def fail*
            *fixture*'missing'*not found*
        """)
    def test_factory_setup_as_classes_fails(self, testdir):
        """Decorating a class with pytest.fixture() is a collection
        error."""
        testdir.makepyfile("""
            import pytest
            class arg1:
                def __init__(self, request):
                    self.x = 1
            arg1 = pytest.fixture()(arg1)
        """)
        reprec = testdir.inline_run()
        l = reprec.getfailedcollections()
        assert len(l) == 1
    def test_request_can_be_overridden(self, testdir):
        """A user fixture named 'request' may override the builtin one."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture()
            def request(request):
                request.a = 1
                return request
            def test_request(request):
                assert request.a == 1
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_usefixtures_marker(self, testdir):
        """usefixtures applied programmatically to a class activates a
        class-scope fixture once for all its tests."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="class")
            def myfix(request):
                request.cls.hello = "world"
                l.append(1)
            class TestClass:
                def test_one(self):
                    assert self.hello == "world"
                    assert len(l) == 1
                def test_two(self):
                    assert self.hello == "world"
                    assert len(l) == 1
            pytest.mark.usefixtures("myfix")(TestClass)
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
    def test_usefixtures_ini(self, testdir):
        """The 'usefixtures' ini option activates a fixture globally."""
        testdir.makeini("""
            [pytest]
            usefixtures = myfix
        """)
        testdir.makeconftest("""
            import pytest
            @pytest.fixture(scope="class")
            def myfix(request):
                request.cls.hello = "world"
        """)
        testdir.makepyfile("""
            class TestClass:
                def test_one(self):
                    assert self.hello == "world"
                def test_two(self):
                    assert self.hello == "world"
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
    def test_usefixtures_seen_in_showmarkers(self, testdir):
        """The usefixtures marker is listed by --markers."""
        result = testdir.runpytest("--markers")
        result.stdout.fnmatch_lines("""
            *usefixtures(fixturename1*mark tests*fixtures*
        """)
    def test_request_instance_issue203(self, testdir):
        """request.instance is the test's own instance inside a fixture
        defined on the class (issue203)."""
        testdir.makepyfile("""
            import pytest
            class TestClass:
                @pytest.fixture
                def setup1(self, request):
                    assert self == request.instance
                    self.arg1 = 1
                def test_hello(self, setup1):
                    assert self.arg1 == 1
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_fixture_parametrized_with_iterator(self, testdir):
        """fixture(params=...) accepts an iterator (here a generator)."""
        testdir.makepyfile("""
            import pytest
            l = []
            def f():
                yield 1
                yield 2
            dec = pytest.fixture(scope="module", params=f())
            @dec
            def arg(request):
                return request.param
            @dec
            def arg2(request):
                return request.param
            def test_1(arg):
                l.append(arg)
            def test_2(arg2):
                l.append(arg2*10)
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=4)
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        assert l == [1,2, 10,20]
class TestFixtureManagerParseFactories:
    """Tests for the FixtureManager's parsing/lookup of funcarg
    factories defined in conftests, modules and classes."""
    def pytest_funcarg__testdir(self, request):
        """Override testdir with one whose conftest exposes 'hello' plus
        fm/item introspection helpers."""
        testdir = request.getfuncargvalue("testdir")
        testdir.makeconftest("""
            def pytest_funcarg__hello(request):
                return "conftest"
            def pytest_funcarg__fm(request):
                return request._fixturemanager
            def pytest_funcarg__item(request):
                return request._pyfuncitem
        """)
        return testdir
    def test_parsefactories_evil_objects_issue214(self, testdir):
        """Module attributes raising from __getattr__ must not break
        factory discovery (issue214)."""
        testdir.makepyfile("""
            class A:
                def __call__(self):
                    pass
                def __getattr__(self, name):
                    raise RuntimeError()
            a = A()
            def test_hello():
                pass
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1, failed=0)
    def test_parsefactories_conftest(self, testdir):
        """getfixturedefs finds exactly one factory per conftest-defined
        name and keeps the original function."""
        testdir.makepyfile("""
            def test_hello(item, fm):
                for name in ("fm", "hello", "item"):
                    faclist = fm.getfixturedefs(name, item.nodeid)
                    assert len(faclist) == 1
                    fac = faclist[0]
                    assert fac.func.__name__ == "pytest_funcarg__" + name
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=1)
    def test_parsefactories_conftest_and_module_and_class(self, testdir):
        """Factories shadow in conftest -> module -> class order and all
        three levels are returned by getfixturedefs."""
        testdir.makepyfile("""
            def pytest_funcarg__hello(request):
                return "module"
            class TestClass:
                def pytest_funcarg__hello(self, request):
                    return "class"
                def test_hello(self, item, fm):
                    faclist = fm.getfixturedefs("hello", item.nodeid)
                    print (faclist)
                    assert len(faclist) == 3
                    assert faclist[0].func(item._request) == "conftest"
                    assert faclist[1].func(item._request) == "module"
                    assert faclist[2].func(item._request) == "class"
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=1)
    def test_parsefactories_relative_node_ids(self, testdir):
        """Fixture visibility follows the package layout even when the
        run is started from an unrelated working directory."""
        # example mostly taken from:
        # https://mail.python.org/pipermail/pytest-dev/2014-September/002617.html
        runner = testdir.mkdir("runner")
        package = testdir.mkdir("package")
        package.join("conftest.py").write(dedent("""\
            import pytest
            @pytest.fixture
            def one():
                return 1
        """))
        package.join("test_x.py").write(dedent("""\
            def test_x(one):
                assert one == 1
        """))
        sub = package.mkdir("sub")
        sub.join("__init__.py").ensure()
        sub.join("conftest.py").write(dedent("""\
            import pytest
            @pytest.fixture
            def one():
                return 2
        """))
        sub.join("test_y.py").write(dedent("""\
            def test_x(one):
                assert one == 2
        """))
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
        with runner.as_cwd():
            reprec = testdir.inline_run("..")
            reprec.assertoutcome(passed=2)
class TestAutouseDiscovery:
    """Tests for discovery of autouse fixtures across conftests, modules
    and classes."""
    def pytest_funcarg__testdir(self, testdir):
        """Override testdir with one whose conftest defines two autouse
        fixtures plus fm/item introspection helpers."""
        testdir.makeconftest("""
            import pytest
            @pytest.fixture(autouse=True)
            def perfunction(request, tmpdir):
                pass
            @pytest.fixture()
            def arg1(tmpdir):
                pass
            @pytest.fixture(autouse=True)
            def perfunction2(arg1):
                pass
            def pytest_funcarg__fm(request):
                return request._fixturemanager
            def pytest_funcarg__item(request):
                return request._pyfuncitem
        """)
        return testdir
    def test_parsefactories_conftest(self, testdir):
        """_getautousenames reports exactly the autouse fixtures that
        apply to a node id."""
        testdir.makepyfile("""
            from _pytest.pytester import get_public_names
            def test_check_setup(item, fm):
                autousenames = fm._getautousenames(item.nodeid)
                assert len(get_public_names(autousenames)) == 2
                assert "perfunction2" in autousenames
                assert "perfunction" in autousenames
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=1)
    def test_two_classes_separated_autouse(self, testdir):
        """An autouse fixture defined in one class does not leak into a
        sibling class."""
        testdir.makepyfile("""
            import pytest
            class TestA:
                l = []
                @pytest.fixture(autouse=True)
                def setup1(self):
                    self.l.append(1)
                def test_setup1(self):
                    assert self.l == [1]
            class TestB:
                l = []
                @pytest.fixture(autouse=True)
                def setup2(self):
                    self.l.append(1)
                def test_setup2(self):
                    assert self.l == [1]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
    def test_setup_at_classlevel(self, testdir):
        """A class-level autouse fixture runs before each test method."""
        testdir.makepyfile("""
            import pytest
            class TestClass:
                @pytest.fixture(autouse=True)
                def permethod(self, request):
                    request.instance.funcname = request.function.__name__
                def test_method1(self):
                    assert self.funcname == "test_method1"
                def test_method2(self):
                    assert self.funcname == "test_method2"
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=2)
    @pytest.mark.xfail(reason="'enabled' feature not implemented")
    def test_setup_enabled_functionnode(self, testdir):
        """Proposed (never implemented) 'enabled' predicate to activate
        an autouse fixture per node; kept as xfail documentation."""
        testdir.makepyfile("""
            import pytest
            def enabled(parentnode, markers):
                return "needsdb" in markers
            @pytest.fixture(params=[1,2])
            def db(request):
                return request.param
            @pytest.fixture(enabled=enabled, autouse=True)
            def createdb(db):
                pass
            def test_func1(request):
                assert "db" not in request.fixturenames
            @pytest.mark.needsdb
            def test_func2(request):
                assert "db" in request.fixturenames
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=2)
    def test_callables_nocode(self, testdir):
        """
        An imported mock.call-like object used to break setup/factory
        discovery because it is callable while its __code__ attribute is
        not a real code object.
        """
        testdir.makepyfile("""
            class _call(tuple):
                def __call__(self, *k, **kw):
                    pass
                def __getattr__(self, k):
                    return self
            call = _call()
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(failed=0, passed=0)
    def test_autouse_in_conftests(self, testdir):
        """An autouse fixture from a subdirectory conftest applies only
        to tests below that directory ("a1" is a sibling of "a")."""
        a = testdir.mkdir("a")
        b = testdir.mkdir("a1")
        conftest = testdir.makeconftest("""
            import pytest
            @pytest.fixture(autouse=True)
            def hello():
                xxx
        """)
        # the failing conftest only governs directory "a"
        conftest.move(a.join(conftest.basename))
        a.join("test_something.py").write("def test_func(): pass")
        b.join("test_otherthing.py").write("def test_func(): pass")
        result = testdir.runpytest()
        result.stdout.fnmatch_lines("""
            *1 passed*1 error*
        """)
    def test_autouse_in_module_and_two_classes(self, testdir):
        """Module-level autouse applies everywhere; class-level autouse
        only inside its class (order recorded in l)."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(autouse=True)
            def append1():
                l.append("module")
            def test_x():
                assert l == ["module"]
            class TestA:
                @pytest.fixture(autouse=True)
                def append2(self):
                    l.append("A")
                def test_hello(self):
                    assert l == ["module", "module", "A"], l
            class TestA2:
                def test_world(self):
                    assert l == ["module", "module", "A", "module"], l
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=3)
class TestAutouseManagement:
    """Tests for ordering and finalization of autouse fixtures,
    including interactions with scopes and parametrization."""
    def test_autouse_conftest_mid_directory(self, testdir):
        """An autouse fixture from a package conftest applies to tests
        in a deeper subdirectory."""
        pkgdir = testdir.mkpydir("xyz123")
        pkgdir.join("conftest.py").write(py.code.Source("""
            import pytest
            @pytest.fixture(autouse=True)
            def app():
                import sys
                sys._myapp = "hello"
        """))
        t = pkgdir.ensure("tests", "test_app.py")
        t.write(py.code.Source("""
            import sys
            def test_app():
                assert sys._myapp == "hello"
        """))
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=1)
    def test_autouse_honored_for_yield(self, testdir):
        """Autouse fixtures also run for yield-style generated tests."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(autouse=True)
            def tst():
                global x
                x = 3
            def test_gen():
                def f(hello):
                    assert x == abs(hello)
                yield f, 3
                yield f, -3
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
    def test_funcarg_and_setup(self, testdir):
        """A module-scope autouse fixture depending on a module-scope
        funcarg runs after it, and both run only once."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="module")
            def arg():
                l.append(1)
                return 0
            @pytest.fixture(scope="module", autouse=True)
            def something(arg):
                l.append(2)
            def test_hello(arg):
                assert len(l) == 2
                assert l == [1,2]
                assert arg == 0
            def test_hello2(arg):
                assert len(l) == 2
                assert l == [1,2]
                assert arg == 0
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=2)
    def test_uses_parametrized_resource(self, testdir):
        """An autouse fixture depending on a parametrized fixture makes
        each test run once per param."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(params=[1,2])
            def arg(request):
                return request.param
            @pytest.fixture(autouse=True)
            def something(arg):
                l.append(arg)
            def test_hello():
                if len(l) == 1:
                    assert l == [1]
                elif len(l) == 2:
                    assert l == [1, 2]
                else:
                    0/0
        """)
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=2)
    def test_session_parametrized_function(self, testdir):
        """A function-scope autouse fixture may depend on a
        session-scope parametrized fixture."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="session", params=[1,2])
            def arg(request):
                return request.param
            @pytest.fixture(scope="function", autouse=True)
            def append(request, arg):
                if request.function.__name__ == "test_some":
                    l.append(arg)
            def test_some():
                pass
            def test_result(arg):
                assert len(l) == arg
                assert l[:arg] == [1,2][:arg]
        """)
        reprec = testdir.inline_run("-v", "-s")
        reprec.assertoutcome(passed=4)
    def test_class_function_parametrization_finalization(self, testdir):
        """Finalizers fire in the right order for combined class- and
        function-scope parametrization (recorded in the conftest's l)."""
        p = testdir.makeconftest("""
            import pytest
            import pprint
            l = []
            @pytest.fixture(scope="function", params=[1,2])
            def farg(request):
                return request.param
            @pytest.fixture(scope="class", params=list("ab"))
            def carg(request):
                return request.param
            @pytest.fixture(scope="function", autouse=True)
            def append(request, farg, carg):
                def fin():
                    l.append("fin_%s%s" % (carg, farg))
                request.addfinalizer(fin)
        """)
        testdir.makepyfile("""
            import pytest
            class TestClass:
                def test_1(self):
                    pass
            class TestClass2:
                def test_2(self):
                    pass
        """)
        reprec = testdir.inline_run("-v","-s")
        reprec.assertoutcome(passed=8)
        config = reprec.getcalls("pytest_unconfigure")[0].config
        l = config.pluginmanager._getconftestmodules(p)[0].l
        assert l == ["fin_a1", "fin_a2", "fin_b1", "fin_b2"] * 2
    def test_scope_ordering(self, testdir):
        """Autouse fixtures run in scope order: module, class, function
        — regardless of definition order."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="function", autouse=True)
            def fappend2():
                l.append(2)
            @pytest.fixture(scope="class", autouse=True)
            def classappend3():
                l.append(3)
            @pytest.fixture(scope="module", autouse=True)
            def mappend():
                l.append(1)
            class TestHallo:
                def test_method(self):
                    assert l == [1,3,2]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_parametrization_setup_teardown_ordering(self, testdir):
        """Class-scope parametrized setup/teardown brackets each param's
        group of tests."""
        testdir.makepyfile("""
            import pytest
            l = []
            def pytest_generate_tests(metafunc):
                if metafunc.cls is not None:
                    metafunc.parametrize("item", [1,2], scope="class")
            class TestClass:
                @pytest.fixture(scope="class", autouse=True)
                def addteardown(self, item, request):
                    l.append("setup-%d" % item)
                    request.addfinalizer(lambda: l.append("teardown-%d" % item))
                def test_step1(self, item):
                    l.append("step1-%d" % item)
                def test_step2(self, item):
                    l.append("step2-%d" % item)
            def test_finish():
                print (l)
                assert l == ["setup-1", "step1-1", "step2-1", "teardown-1",
                             "setup-2", "step1-2", "step2-2", "teardown-2",]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=5)
    def test_ordering_autouse_before_explicit(self, testdir):
        """Autouse fixtures run before explicitly requested ones."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(autouse=True)
            def fix1():
                l.append(1)
            @pytest.fixture()
            def arg1():
                l.append(2)
            def test_hello(arg1):
                assert l == [1,2]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    @pytest.mark.issue226
    @pytest.mark.parametrize("param1", ["", "params=[1]"], ids=["p00","p01"])
    @pytest.mark.parametrize("param2", ["", "params=[1]"], ids=["p10","p11"])
    def test_ordering_dependencies_torndown_first(self, testdir, param1, param2):
        """Dependent fixtures are torn down before their dependencies,
        with and without parametrization (issue226)."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(%(param1)s)
            def arg1(request):
                request.addfinalizer(lambda: l.append("fin1"))
                l.append("new1")
            @pytest.fixture(%(param2)s)
            def arg2(request, arg1):
                request.addfinalizer(lambda: l.append("fin2"))
                l.append("new2")
            def test_arg(arg2):
                pass
            def test_check():
                assert l == ["new1", "new2", "fin2", "fin1"]
        """ % locals())
        reprec = testdir.inline_run("-s")
        reprec.assertoutcome(passed=2)
class TestFixtureMarker:
    def test_parametrize(self, testdir):
        """A params list produces one test per value, in declared order."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(params=["a", "b", "c"])
            def arg(request):
                return request.param
            l = []
            def test_param(arg):
                l.append(arg)
            def test_result():
                assert l == list("abc")
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=4)
    def test_multiple_parametrization_issue_736(self, testdir):
        """Fixture params and mark.parametrize combine into the full
        cross product (issue736)."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(params=[1,2,3])
            def foo(request):
                return request.param
            @pytest.mark.parametrize('foobar', [4,5,6])
            def test_issue(foo, foobar):
                assert foo in [1,2,3]
                assert foobar in [4,5,6]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=9)
    def test_scope_session(self, testdir):
        """A module-scope fixture is created only once for all tests in
        the module, including class-based ones."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="module")
            def arg():
                l.append(1)
                return 1
            def test_1(arg):
                assert arg == 1
            def test_2(arg):
                assert arg == 1
                assert len(l) == 1
            class TestClass:
                def test3(self, arg):
                    assert arg == 1
                    assert len(l) == 1
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=3)
    def test_scope_session_exc(self, testdir):
        """A skip raised in a session-scope fixture skips all dependent
        tests but the fixture body runs only once."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="session")
            def fix():
                l.append(1)
                pytest.skip('skipping')
            def test_1(fix):
                pass
            def test_2(fix):
                pass
            def test_last():
                assert l == [1]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(skipped=2, passed=1)
    def test_scope_session_exc_two_fix(self, testdir):
        """When a session fixture skips, a dependent session fixture is
        never executed."""
        testdir.makepyfile("""
            import pytest
            l = []
            m = []
            @pytest.fixture(scope="session")
            def a():
                l.append(1)
                pytest.skip('skipping')
            @pytest.fixture(scope="session")
            def b(a):
                m.append(1)
            def test_1(b):
                pass
            def test_2(b):
                pass
            def test_last():
                assert l == [1]
                assert m == []
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(skipped=2, passed=1)
    def test_scope_exc(self, testdir):
        """A skipping session fixture defined in a conftest runs only
        once even across multiple test modules."""
        testdir.makepyfile(
            test_foo="""
                def test_foo(fix):
                    pass
            """,
            test_bar="""
                def test_bar(fix):
                    pass
            """,
            conftest="""
                import pytest
                reqs = []
                @pytest.fixture(scope="session")
                def fix(request):
                    reqs.append(1)
                    pytest.skip()
                @pytest.fixture
                def req_list():
                    return reqs
            """,
            test_real="""
                def test_last(req_list):
                    assert req_list == [1]
            """
        )
        reprec = testdir.inline_run()
        reprec.assertoutcome(skipped=2, passed=1)
    def test_scope_module_uses_session(self, testdir):
        """A module-scope fixture instantiates once for the module and
        is shared by function and class tests alike."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="module")
            def arg():
                l.append(1)
                return 1
            def test_1(arg):
                assert arg == 1
            def test_2(arg):
                assert arg == 1
                assert len(l) == 1
            class TestClass:
                def test3(self, arg):
                    assert arg == 1
                    assert len(l) == 1
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=3)
    def test_scope_module_and_finalizer(self, testdir):
        """A module-scope fixture is re-created per module and its
        finalizer runs when the module finishes (counts observed via the
        created/finalized helper funcargs)."""
        testdir.makeconftest("""
            import pytest
            finalized = []
            created = []
            @pytest.fixture(scope="module")
            def arg(request):
                created.append(1)
                assert request.scope == "module"
                request.addfinalizer(lambda: finalized.append(1))
            def pytest_funcarg__created(request):
                return len(created)
            def pytest_funcarg__finalized(request):
                return len(finalized)
        """)
        testdir.makepyfile(
            test_mod1="""
                def test_1(arg, created, finalized):
                    assert created == 1
                    assert finalized == 0
                def test_2(arg, created, finalized):
                    assert created == 1
                    assert finalized == 0""",
            test_mod2="""
                def test_3(arg, created, finalized):
                    assert created == 2
                    assert finalized == 1""",
            test_mode3="""
                def test_4(arg, created, finalized):
                    assert created == 3
                    assert finalized == 2
            """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=4)
    @pytest.mark.parametrize("method", [
        'request.getfuncargvalue("arg")',
        'request.cached_setup(lambda: None, scope="function")',
    ], ids=["getfuncargvalue", "cached_setup"])
    def test_scope_mismatch_various(self, testdir, method):
        """Requesting a function-scope value from inside a session-scope
        fixture is a ScopeMismatch, via either request API."""
        testdir.makeconftest("""
            import pytest
            finalized = []
            created = []
            @pytest.fixture(scope="function")
            def arg(request):
                pass
        """)
        testdir.makepyfile(
            test_mod1="""
                import pytest
                @pytest.fixture(scope="session")
                def arg(request):
                    %s
                def test_1(arg):
                    pass
            """ % method)
        result = testdir.runpytest()
        assert result.ret != 0
        result.stdout.fnmatch_lines([
            "*ScopeMismatch*You tried*function*session*request*",
        ])
    def test_register_only_with_mark(self, testdir):
        """A module fixture may shadow and extend a conftest fixture of
        the same name by requesting it as a parameter."""
        testdir.makeconftest("""
            import pytest
            @pytest.fixture()
            def arg():
                return 1
        """)
        testdir.makepyfile(
            test_mod1="""
                import pytest
                @pytest.fixture()
                def arg(arg):
                    return arg + 1
                def test_1(arg):
                    assert arg == 2
            """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_parametrize_and_scope(self, testdir):
        """A module-scope parametrized fixture yields one run per param."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="module", params=["a", "b", "c"])
            def arg(request):
                return request.param
            l = []
            def test_param(arg):
                l.append(arg)
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=3)
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        assert len(l) == 3
        assert "a" in l
        assert "b" in l
        assert "c" in l
    def test_scope_mismatch(self, testdir):
        """A session-scope fixture depending on a function-scope fixture
        of the same name errors with ScopeMismatch."""
        testdir.makeconftest("""
            import pytest
            @pytest.fixture(scope="function")
            def arg(request):
                pass
        """)
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="session")
            def arg(arg):
                pass
            def test_mismatch(arg):
                pass
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*ScopeMismatch*",
            "*1 error*",
        ])
    def test_parametrize_separated_order(self, testdir):
        """Tests are grouped by a higher-scoped fixture's param value: all
        tests run with param 1 before any run with param 2."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="module", params=[1, 2])
            def arg(request):
                return request.param
            l = []
            def test_1(arg):
                l.append(arg)
            def test_2(arg):
                l.append(arg)
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=4)
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        assert l == [1,1,2,2]
    def test_module_parametrized_ordering(self, testdir):
        """Session- and module-scoped parametrized fixtures produce a stable
        execution order: grouped first by session param, then module param."""
        testdir.makeconftest("""
            import pytest
            @pytest.fixture(scope="session", params="s1 s2".split())
            def sarg():
                pass
            @pytest.fixture(scope="module", params="m1 m2".split())
            def marg():
                pass
        """)
        testdir.makepyfile(test_mod1="""
            def test_func(sarg):
                pass
            def test_func1(marg):
                pass
        """, test_mod2="""
            def test_func2(sarg):
                pass
            def test_func3(sarg, marg):
                pass
            def test_func3b(sarg, marg):
                pass
            def test_func4(marg):
                pass
        """)
        result = testdir.runpytest("-v")
        # The exact interleaving across the two modules is part of the
        # contract being pinned here.
        result.stdout.fnmatch_lines("""
            test_mod1.py::test_func[s1] PASSED
            test_mod2.py::test_func2[s1] PASSED
            test_mod2.py::test_func3[s1-m1] PASSED
            test_mod2.py::test_func3b[s1-m1] PASSED
            test_mod2.py::test_func3[s1-m2] PASSED
            test_mod2.py::test_func3b[s1-m2] PASSED
            test_mod1.py::test_func[s2] PASSED
            test_mod2.py::test_func2[s2] PASSED
            test_mod2.py::test_func3[s2-m1] PASSED
            test_mod2.py::test_func3b[s2-m1] PASSED
            test_mod2.py::test_func4[m1] PASSED
            test_mod2.py::test_func3[s2-m2] PASSED
            test_mod2.py::test_func3b[s2-m2] PASSED
            test_mod2.py::test_func4[m2] PASSED
            test_mod1.py::test_func1[m1] PASSED
            test_mod1.py::test_func1[m2] PASSED
        """)
    def test_class_ordering(self, testdir):
        """Class-scoped params group tests per class; function-scoped params
        vary fastest within each class group."""
        testdir.makeconftest("""
            import pytest
            l = []
            @pytest.fixture(scope="function", params=[1,2])
            def farg(request):
                return request.param
            @pytest.fixture(scope="class", params=list("ab"))
            def carg(request):
                return request.param
            @pytest.fixture(scope="function", autouse=True)
            def append(request, farg, carg):
                def fin():
                    l.append("fin_%s%s" % (carg, farg))
                request.addfinalizer(fin)
        """)
        testdir.makepyfile("""
            import pytest
            class TestClass2:
                def test_1(self):
                    pass
                def test_2(self):
                    pass
            class TestClass:
                def test_3(self):
                    pass
        """)
        result = testdir.runpytest("-vs")
        result.stdout.fnmatch_lines("""
            test_class_ordering.py::TestClass2::test_1[1-a] PASSED
            test_class_ordering.py::TestClass2::test_1[2-a] PASSED
            test_class_ordering.py::TestClass2::test_2[1-a] PASSED
            test_class_ordering.py::TestClass2::test_2[2-a] PASSED
            test_class_ordering.py::TestClass2::test_1[1-b] PASSED
            test_class_ordering.py::TestClass2::test_1[2-b] PASSED
            test_class_ordering.py::TestClass2::test_2[1-b] PASSED
            test_class_ordering.py::TestClass2::test_2[2-b] PASSED
            test_class_ordering.py::TestClass::test_3[1-a] PASSED
            test_class_ordering.py::TestClass::test_3[2-a] PASSED
            test_class_ordering.py::TestClass::test_3[1-b] PASSED
            test_class_ordering.py::TestClass::test_3[2-b] PASSED
        """)
    def test_parametrize_separated_order_higher_scope_first(self, testdir):
        """Module-scoped params form the outer loop; function-scoped params
        are created/finalized inside each module-param group."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="function", params=[1, 2])
            def arg(request):
                param = request.param
                request.addfinalizer(lambda: l.append("fin:%s" % param))
                l.append("create:%s" % param)
                return request.param
            @pytest.fixture(scope="module", params=["mod1", "mod2"])
            def modarg(request):
                param = request.param
                request.addfinalizer(lambda: l.append("fin:%s" % param))
                l.append("create:%s" % param)
                return request.param
            l = []
            def test_1(arg):
                l.append("test1")
            def test_2(modarg):
                l.append("test2")
            def test_3(arg, modarg):
                l.append("test3")
            def test_4(modarg, arg):
                l.append("test4")
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=12)
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        # Full expected create/test/finalize trace across both module params.
        expected = [
            'create:1', 'test1', 'fin:1', 'create:2', 'test1',
            'fin:2', 'create:mod1', 'test2', 'create:1', 'test3',
            'fin:1', 'create:2', 'test3', 'fin:2', 'create:1',
            'test4', 'fin:1', 'create:2', 'test4', 'fin:2',
            'fin:mod1', 'create:mod2', 'test2', 'create:1', 'test3',
            'fin:1', 'create:2', 'test3', 'fin:2', 'create:1',
            'test4', 'fin:1', 'create:2', 'test4', 'fin:2',
            'fin:mod2']
        import pprint
        pprint.pprint(list(zip(l, expected)))
        assert l == expected
    def test_parametrized_fixture_teardown_order(self, testdir):
        """Class-scoped autouse fixtures tear down in reverse setup order,
        leaving the shared list empty after both param runs."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(params=[1,2], scope="class")
            def param1(request):
                return request.param
            l = []
            class TestClass:
                @classmethod
                @pytest.fixture(scope="class", autouse=True)
                def setup1(self, request, param1):
                    l.append(1)
                    request.addfinalizer(self.teardown1)
                @classmethod
                def teardown1(self):
                    assert l.pop() == 1
                @pytest.fixture(scope="class", autouse=True)
                def setup2(self, request, param1):
                    l.append(2)
                    request.addfinalizer(self.teardown2)
                @classmethod
                def teardown2(self):
                    assert l.pop() == 2
                def test(self):
                    pass
            def test_finish():
                assert not l
        """)
        result = testdir.runpytest("-v")
        result.stdout.fnmatch_lines("""
            *3 passed*
        """)
        assert "error" not in result.stdout.str()
    def test_fixture_finalizer(self, testdir):
        """A finalizer registered by a conftest fixture still runs when the
        fixture is overridden in a subdirectory test module."""
        testdir.makeconftest("""
            import pytest
            import sys
            @pytest.fixture
            def browser(request):
                def finalize():
                    sys.stdout.write('Finalized')
                request.addfinalizer(finalize)
                return {}
        """)
        b = testdir.mkdir("subdir")
        b.join("test_overriden_fixture_finalizer.py").write(dedent("""
            import pytest
            @pytest.fixture
            def browser(browser):
                browser['visited'] = True
                return browser
            def test_browser(browser):
                assert browser['visited'] is True
        """))
        reprec = testdir.runpytest("-s")
        for test in ['test_browser']:
            reprec.stdout.fnmatch_lines('*Finalized*')
    def test_class_scope_with_normal_tests(self, testdir):
        """A class-scoped fixture is instantiated once per class (and once for
        module-level tests), as shown by the incrementing counter."""
        testpath = testdir.makepyfile("""
            import pytest
            class Box:
                value = 0
            @pytest.fixture(scope='class')
            def a(request):
                Box.value += 1
                return Box.value
            def test_a(a):
                assert a == 1
            class Test1:
                def test_b(self, a):
                    assert a == 2
            class Test2:
                def test_c(self, a):
                    assert a == 3""")
        reprec = testdir.inline_run(testpath)
        for test in ['test_a', 'test_b', 'test_c']:
            assert reprec.matchreport(test).passed
    def test_request_is_clean(self, testdir):
        """Each parametrized fixture invocation gets a fresh request object:
        finalizers see their own param, not the last one."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(params=[1, 2])
            def fix(request):
                request.addfinalizer(lambda: l.append(request.param))
            def test_fix(fix):
                pass
        """)
        reprec = testdir.inline_run("-s")
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        assert l == [1,2]
    def test_parametrize_separated_lifecycle(self, testdir):
        """A module-scoped parametrized fixture is finalized between param
        groups: fin1 happens before any param-2 test runs."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope="module", params=[1, 2])
            def arg(request):
                x = request.param
                request.addfinalizer(lambda: l.append("fin%s" % x))
                return request.param
            def test_1(arg):
                l.append(arg)
            def test_2(arg):
                l.append(arg)
        """)
        reprec = testdir.inline_run("-vs")
        reprec.assertoutcome(passed=4)
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        import pprint
        pprint.pprint(l)
        #assert len(l) == 6
        assert l[0] == l[1] == 1
        assert l[2] == "fin1"
        assert l[3] == l[4] == 2
        assert l[5] == "fin2"
    def test_parametrize_function_scoped_finalizers_called(self, testdir):
        """Function-scoped parametrized fixtures are finalized after every
        single test invocation."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="function", params=[1, 2])
            def arg(request):
                x = request.param
                request.addfinalizer(lambda: l.append("fin%s" % x))
                return request.param
            l = []
            def test_1(arg):
                l.append(arg)
            def test_2(arg):
                l.append(arg)
            def test_3():
                assert len(l) == 8
                assert l == [1, "fin1", 2, "fin2", 1, "fin1", 2, "fin2"]
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=5)
    @pytest.mark.issue246
    @pytest.mark.parametrize("scope", ["session", "function", "module"])
    def test_finalizer_order_on_parametrization(self, scope, testdir):
        """Regression test for issue 246: a dependent fixture (fix2) must be
        finalized before the fixture it depends on (base), at every scope."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(scope=%(scope)r, params=["1"])
            def fix1(request):
                return request.param
            @pytest.fixture(scope=%(scope)r)
            def fix2(request, base):
                def cleanup_fix2():
                    assert not l, "base should not have been finalized"
                request.addfinalizer(cleanup_fix2)
            @pytest.fixture(scope=%(scope)r)
            def base(request, fix1):
                def cleanup_base():
                    l.append("fin_base")
                    print ("finalizing base")
                request.addfinalizer(cleanup_base)
            def test_begin():
                pass
            def test_baz(base, fix2):
                pass
            def test_other():
                pass
        """ % {"scope": scope})
        reprec = testdir.inline_run("-lvs")
        reprec.assertoutcome(passed=3)
    @pytest.mark.issue396
    def test_class_scope_parametrization_ordering(self, testdir):
        """Regression test for issue 396: a class-scoped parametrized fixture
        is finalized per class, not carried across class boundaries."""
        testdir.makepyfile("""
            import pytest
            l = []
            @pytest.fixture(params=["John", "Doe"], scope="class")
            def human(request):
                request.addfinalizer(lambda: l.append("fin %s" % request.param))
                return request.param
            class TestGreetings:
                def test_hello(self, human):
                    l.append("test_hello")
            class TestMetrics:
                def test_name(self, human):
                    l.append("test_name")
                def test_population(self, human):
                    l.append("test_population")
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=6)
        l = reprec.getcalls("pytest_runtest_call")[0].item.module.l
        assert l == ["test_hello", "fin John", "test_hello", "fin Doe",
                     "test_name", "test_population", "fin John",
                     "test_name", "test_population", "fin Doe"]
    def test_parametrize_setup_function(self, testdir):
        """A module-scoped autouse fixture depending on a parametrized fixture
        runs its setup/teardown once per param group."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="module", params=[1, 2])
            def arg(request):
                return request.param
            @pytest.fixture(scope="module", autouse=True)
            def mysetup(request, arg):
                request.addfinalizer(lambda: l.append("fin%s" % arg))
                l.append("setup%s" % arg)
            l = []
            def test_1(arg):
                l.append(arg)
            def test_2(arg):
                l.append(arg)
            def test_3():
                import pprint
                pprint.pprint(l)
                if arg == 1:
                    assert l == ["setup1", 1, 1, ]
                elif arg == 2:
                    assert l == ["setup1", 1, 1, "fin1",
                                 "setup2", 2, 2, ]
        """)
        reprec = testdir.inline_run("-v")
        reprec.assertoutcome(passed=6)
    def test_fixture_marked_function_not_collected_as_test(self, testdir):
        """A function named test_* decorated with @pytest.fixture is treated
        as a fixture, not collected as a test item."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture
            def test_app():
                return 1
            def test_something(test_app):
                assert test_app == 1
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    def test_params_and_ids(self, testdir):
        """Explicit ids= override the auto-generated ids for fixture params."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(params=[object(), object()],
                            ids=['alpha', 'beta'])
            def fix(request):
                return request.param
            def test_foo(fix):
                assert 1
        """)
        res = testdir.runpytest('-v')
        res.stdout.fnmatch_lines([
            '*test_foo*alpha*',
            '*test_foo*beta*'])
    def test_params_and_ids_yieldfixture(self, testdir):
        """Explicit ids= work the same for yield_fixture as for fixture."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture(params=[object(), object()],
                                  ids=['alpha', 'beta'])
            def fix(request):
                yield request.param
            def test_foo(fix):
                assert 1
        """)
        res = testdir.runpytest('-v')
        res.stdout.fnmatch_lines([
            '*test_foo*alpha*',
            '*test_foo*beta*'])
class TestRequestScopeAccess:
    """Which request attributes are accessible depends on the fixture scope:
    e.g. a session-scoped fixture has no fspath/cls/function/module, while a
    function-scoped one has all of them."""
    # Each row: (fixture scope, attributes that must exist,
    #            attributes whose access must raise AttributeError).
    pytestmark = pytest.mark.parametrize(("scope", "ok", "error"),[
        ["session", "", "fspath class function module"],
        ["module", "module fspath", "cls function"],
        ["class", "module fspath cls", "function"],
        ["function", "module fspath cls function", ""]
    ])
    def test_setup(self, testdir, scope, ok, error):
        """Check attribute availability from an autouse (setup-like) fixture."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope=%r, autouse=True)
            def myscoped(request):
                for x in %r:
                    assert hasattr(request, x)
                for x in %r:
                    pytest.raises(AttributeError, lambda:
                        getattr(request, x))
                assert request.session
                assert request.config
            def test_func():
                pass
        """ %(scope, ok.split(), error.split()))
        reprec = testdir.inline_run("-l")
        reprec.assertoutcome(passed=1)
    def test_funcarg(self, testdir, scope, ok, error):
        """Check attribute availability from an explicitly requested fixture."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope=%r)
            def arg(request):
                for x in %r:
                    assert hasattr(request, x)
                for x in %r:
                    pytest.raises(AttributeError, lambda:
                        getattr(request, x))
                assert request.session
                assert request.config
            def test_func(arg):
                pass
        """ %(scope, ok.split(), error.split()))
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
class TestErrors:
    """Error reporting for broken fixtures: missing dependencies and failing
    finalizers must produce clear, targeted error output."""
    def test_subfactory_missing_funcarg(self, testdir):
        """A fixture requesting an unknown fixture name errors with the
        offending source line and the missing name."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture()
            def gen(qwe123):
                return 1
            def test_something(gen):
                pass
        """)
        result = testdir.runpytest()
        assert result.ret != 0
        result.stdout.fnmatch_lines([
            "*def gen(qwe123):*",
            "*fixture*qwe123*not found*",
            "*1 error*",
        ])
    def test_issue498_fixture_finalizer_failing(self, testdir):
        """Issue 498: a raising finalizer reports a teardown error per test,
        but later tests still run and get a fresh fixture instance."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture
            def fix1(request):
                def f():
                    raise KeyError
                request.addfinalizer(f)
                return object()
            l = []
            def test_1(fix1):
                l.append(fix1)
            def test_2(fix1):
                l.append(fix1)
            def test_3():
                assert l[0] != l[1]
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines("""
            *ERROR*teardown*test_1*
            *KeyError*
            *ERROR*teardown*test_2*
            *KeyError*
            *3 pass*2 error*
        """)
    def test_setupfunc_missing_funcarg(self, testdir):
        """Same missing-fixture reporting applies to autouse fixtures."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(autouse=True)
            def gen(qwe123):
                return 1
            def test_something():
                pass
        """)
        result = testdir.runpytest()
        assert result.ret != 0
        result.stdout.fnmatch_lines([
            "*def gen(qwe123):*",
            "*fixture*qwe123*not found*",
            "*1 error*",
        ])
class TestShowFixtures:
    """Behaviour of the --fixtures (and legacy --funcargs) command-line
    reporting: which fixtures are listed and how their docs are rendered."""
    def test_funcarg_compat(self, testdir):
        """Legacy --funcargs option maps onto the showfixtures flag."""
        config = testdir.parseconfigure("--funcargs")
        assert config.option.showfixtures
    def test_show_fixtures(self, testdir):
        """--fixtures lists builtin fixtures such as tmpdir with their docs."""
        result = testdir.runpytest("--fixtures")
        result.stdout.fnmatch_lines([
                "*tmpdir*",
                "*temporary directory*",
            ]
        )
    def test_show_fixtures_verbose(self, testdir):
        """-v adds the defining file location to each fixture entry."""
        result = testdir.runpytest("--fixtures", "-v")
        result.stdout.fnmatch_lines([
                "*tmpdir*--*tmpdir.py*",
                "*temporary directory*",
            ]
        )
    def test_show_fixtures_testmodule(self, testdir):
        """Module-defined fixtures are listed; underscore-prefixed ones are
        hidden from the default listing."""
        p = testdir.makepyfile('''
            import pytest
            @pytest.fixture
            def _arg0():
                """ hidden """
            @pytest.fixture
            def arg1():
                """ hello world """
        ''')
        result = testdir.runpytest("--fixtures", p)
        result.stdout.fnmatch_lines("""
            *tmpdir
            *fixtures defined from*
            *arg1*
            *hello world*
        """)
        assert "arg0" not in result.stdout.str()
    @pytest.mark.parametrize("testmod", [True, False])
    def test_show_fixtures_conftest(self, testdir, testmod):
        """conftest fixtures show up whether or not a test module exists."""
        testdir.makeconftest('''
            import pytest
            @pytest.fixture
            def arg1():
                """ hello world """
        ''')
        if testmod:
            testdir.makepyfile("""
                def test_hello():
                    pass
            """)
        result = testdir.runpytest("--fixtures")
        result.stdout.fnmatch_lines("""
            *tmpdir*
            *fixtures defined from*conftest*
            *arg1*
            *hello world*
        """)
    def test_show_fixtures_trimmed_doc(self, testdir):
        """Leading/trailing blank docstring lines are trimmed in the listing."""
        p = testdir.makepyfile('''
            import pytest
            @pytest.fixture
            def arg1():
                """
                line1
                line2
                """
            @pytest.fixture
            def arg2():
                """
                line1
                line2
                """
        ''')
        result = testdir.runpytest("--fixtures", p)
        result.stdout.fnmatch_lines("""
            * fixtures defined from test_show_fixtures_trimmed_doc *
            arg2
                line1
                line2
            arg1
                line1
                line2
        """)
    def test_show_fixtures_different_files(self, testdir):
        """
        #833: --fixtures only shows fixtures from first file
        """
        testdir.makepyfile(test_a='''
            import pytest
            @pytest.fixture
            def fix_a():
                """Fixture A"""
                pass
            def test_a(fix_a):
                pass
        ''')
        testdir.makepyfile(test_b='''
            import pytest
            @pytest.fixture
            def fix_b():
                """Fixture B"""
                pass
            def test_b(fix_b):
                pass
        ''')
        result = testdir.runpytest("--fixtures")
        result.stdout.fnmatch_lines("""
            * fixtures defined from test_a *
            fix_a
                Fixture A
            * fixtures defined from test_b *
            fix_b
                Fixture B
        """)
class TestContextManagerFixtureFuncs:
    """yield_fixture semantics: code before the yield is setup, code after it
    is teardown, and misuse (multiple yields, no yield, yield in a plain
    fixture) is reported as an error."""
    def test_simple(self, testdir):
        """Function-scoped yield_fixture runs setup/teardown around each test,
        even when the test itself fails."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture
            def arg1():
                print ("setup")
                yield 1
                print ("teardown")
            def test_1(arg1):
                print ("test1 %s" % arg1)
            def test_2(arg1):
                print ("test2 %s" % arg1)
                assert 0
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *setup*
            *test1 1*
            *teardown*
            *setup*
            *test2 1*
            *teardown*
        """)
    def test_scoped(self, testdir):
        """Module-scoped yield_fixture sets up once and tears down after the
        last test in the module."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture(scope="module")
            def arg1():
                print ("setup")
                yield 1
                print ("teardown")
            def test_1(arg1):
                print ("test1 %s" % arg1)
            def test_2(arg1):
                print ("test2 %s" % arg1)
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *setup*
            *test1 1*
            *test2 1*
            *teardown*
        """)
    def test_setup_exception(self, testdir):
        """A failure before the yield is reported as a setup error."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture(scope="module")
            def arg1():
                pytest.fail("setup")
                yield 1
            def test_1(arg1):
                pass
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *pytest.fail*setup*
            *1 error*
        """)
    def test_teardown_exception(self, testdir):
        """A failure after the yield is reported as a teardown error; the
        test itself still passes."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture(scope="module")
            def arg1():
                yield 1
                pytest.fail("teardown")
            def test_1(arg1):
                pass
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *pytest.fail*teardown*
            *1 passed*1 error*
        """)
    def test_yields_more_than_one(self, testdir):
        """A yield_fixture yielding more than once is rejected."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture(scope="module")
            def arg1():
                yield 1
                yield 2
            def test_1(arg1):
                pass
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *fixture function*
            *test_yields*:2*
        """)
    def test_no_yield(self, testdir):
        """A yield_fixture that merely returns is rejected with a clear hint."""
        testdir.makepyfile("""
            import pytest
            @pytest.yield_fixture(scope="module")
            def arg1():
                return 1
            def test_1(arg1):
                pass
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *yield_fixture*requires*yield*
            *yield_fixture*
            *def arg1*
        """)
    def test_yield_not_allowed_in_non_yield(self, testdir):
        """Using yield inside a plain @pytest.fixture is rejected."""
        testdir.makepyfile("""
            import pytest
            @pytest.fixture(scope="module")
            def arg1():
                yield 1
            def test_1(arg1):
                pass
        """)
        result = testdir.runpytest("-s")
        result.stdout.fnmatch_lines("""
            *fixture*cannot use*yield*
            *def arg1*
        """)
|
marqueedev/django | refs/heads/master | tests/inspectdb/models.py | 89 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class People(models.Model):
    """Model with a self-referential foreign key, for inspectdb tests."""
    name = models.CharField(max_length=255)
    parent = models.ForeignKey('self')  # self-referential relation
class Message(models.Model):
    """Model whose FK field name differs from its database column name."""
    # 'from' is a Python keyword, so the attribute cannot match the column.
    from_field = models.ForeignKey(People, db_column='from_id')
class PeopleData(models.Model):
    """Model using a ForeignKey as its primary key."""
    people_pk = models.ForeignKey(People, primary_key=True)
    ssn = models.CharField(max_length=11)
class PeopleMoreData(models.Model):
    """Model with a unique ForeignKey (one-to-one-like) and a field whose
    name ('license') shadows a common keyword in other languages."""
    people_unique = models.ForeignKey(People, unique=True)
    license = models.CharField(max_length=255)
class DigitsInColumnName(models.Model):
    """Columns whose names are all digits or start with digits; inspectdb
    must generate valid Python identifiers for them."""
    all_digits = models.CharField(max_length=11, db_column='123')
    leading_digit = models.CharField(max_length=11, db_column='4extra')
    leading_digits = models.CharField(max_length=11, db_column='45extra')
class SpecialName(models.Model):
    """Columns and a table name containing characters that are awkward to
    map back to Python identifiers (underscore runs, punctuation, non-ASCII)."""
    field = models.IntegerField(db_column='field')
    # Underscores
    field_field_0 = models.IntegerField(db_column='Field_')
    field_field_1 = models.IntegerField(db_column='Field__')
    field_field_2 = models.IntegerField(db_column='__field')
    # Other chars
    prc_x = models.IntegerField(db_column='prc(%) x')
    non_ascii = models.IntegerField(db_column='tamaño')
    class Meta:
        # Table name contains both a dot and a space on purpose.
        db_table = "inspectdb_special.table name"
class ColumnTypes(models.Model):
    """One field of each basic Django column type, so inspectdb's reverse
    type mapping can be checked field by field."""
    id = models.AutoField(primary_key=True)
    big_int_field = models.BigIntegerField()
    bool_field = models.BooleanField(default=False)
    null_bool_field = models.NullBooleanField()
    char_field = models.CharField(max_length=10)
    null_char_field = models.CharField(max_length=10, blank=True, null=True)
    comma_separated_int_field = models.CommaSeparatedIntegerField(max_length=99)
    date_field = models.DateField()
    date_time_field = models.DateTimeField()
    decimal_field = models.DecimalField(max_digits=6, decimal_places=1)
    email_field = models.EmailField()
    file_field = models.FileField(upload_to="unused")
    file_path_field = models.FilePathField()
    float_field = models.FloatField()
    int_field = models.IntegerField()
    gen_ip_adress_field = models.GenericIPAddressField(protocol="ipv4")
    pos_int_field = models.PositiveIntegerField()
    pos_small_int_field = models.PositiveSmallIntegerField()
    slug_field = models.SlugField()
    small_int_field = models.SmallIntegerField()
    text_field = models.TextField()
    time_field = models.TimeField()
    url_field = models.URLField()
class UniqueTogether(models.Model):
    """Model with a composite unique constraint, for inspectdb's
    unique_together introspection."""
    field1 = models.IntegerField()
    field2 = models.CharField(max_length=10)
    class Meta:
        unique_together = ('field1', 'field2')
|
sharhar/USB-Thing | refs/heads/master | UpdaterFiles/Lib/python-3.5.1.amd64/Lib/site-packages/setuptools/site-patch.py | 720 | def __boot():
    """Locate and load the real stdlib 'site' module (this file shadows it),
    then re-apply PYTHONPATH entries via addsitedir and rearrange sys.path so
    PYTHONPATH-added entries precede the standard 'system' paths."""
    import sys
    import os
    PYTHONPATH = os.environ.get('PYTHONPATH')
    # On Windows an empty PYTHONPATH is treated the same as unset.
    if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
        PYTHONPATH = []
    else:
        PYTHONPATH = PYTHONPATH.split(os.pathsep)
    pic = getattr(sys,'path_importer_cache',{})
    # Entries after the PYTHONPATH prefix are the 'standard' search paths.
    stdpath = sys.path[len(PYTHONPATH):]
    mydir = os.path.dirname(__file__)
    #print "searching",stdpath,sys.path
    # Search the standard paths for the genuine site module and load it,
    # replacing this stub's contents in the 'site' module slot.
    for item in stdpath:
        if item==mydir or not item:
            continue # skip if current dir. on Windows, or my own directory
        importer = pic.get(item)
        if importer is not None:
            loader = importer.find_module('site')
            if loader is not None:
                # This should actually reload the current module
                loader.load_module('site')
                break
        else:
            try:
                import imp # Avoid import loop in Python >= 3.3
                stream, path, descr = imp.find_module('site',[item])
            except ImportError:
                continue
            if stream is None:
                continue
            try:
                # This should actually reload the current module
                imp.load_module('site',stream,path,descr)
            finally:
                stream.close()
            break
    else:
        raise ImportError("Couldn't find the real 'site' module")
    #print "loaded", __file__
    # NOTE(review): makepath/addsitedir below appear to come from the real
    # site module loaded above into this module's namespace — confirm.
    known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
    oldpos = getattr(sys,'__egginsert',0) # save old insertion position
    sys.__egginsert = 0 # and reset the current one
    # Re-run site processing (e.g. .pth files) for each PYTHONPATH entry.
    for item in PYTHONPATH:
        addsitedir(item)
    sys.__egginsert += oldpos # restore effective old position
    d, nd = makepath(stdpath[0])
    insert_at = None
    new_path = []
    # Rebuild sys.path: keep known entries in place, and back-insert any new
    # entries (added by addsitedir) just before the first 'system' entry.
    for item in sys.path:
        p, np = makepath(item)
        if np==nd and insert_at is None:
            # We've hit the first 'system' path entry, so added entries go here
            insert_at = len(new_path)
        if np in known_paths or insert_at is None:
            new_path.append(item)
        else:
            # new path after the insert point, back-insert it
            new_path.insert(insert_at, item)
            insert_at += 1
    sys.path[:] = new_path
# Run the bootstrap only when this file is imported in place of the real
# 'site' module, then drop the helper from the namespace.
if __name__=='site':
    __boot()
    del __boot
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.