repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
timsnyder/bokeh | refs/heads/master | examples/plotting/file/interactive_legend.py | 12 | import pandas as pd
from bokeh.palettes import Spectral4
from bokeh.plotting import figure, output_file, show
from bokeh.sampledata.stocks import AAPL, IBM, MSFT, GOOG

# Interactive-legend demo: clicking a legend entry hides its line.
p = figure(plot_width=800, plot_height=250, x_axis_type='datetime')
p.title.text = 'Click on legend entries to hide lines'

# One line glyph per ticker, coloured from the Spectral4 palette.
for data, name, color in zip([AAPL, IBM, MSFT, GOOG], ["AAPL", "IBM", "MSFT", "GOOG"], Spectral4):
    df = pd.DataFrame(data)
    # Sample data ships dates as strings; parse them for the datetime axis.
    df['date'] = pd.to_datetime(df['date'])
    # NOTE(review): `legend=` is the pre-1.x bokeh spelling; newer bokeh
    # uses `legend_label=` -- confirm the targeted bokeh version first.
    p.line(df['date'], df['close'], line_width=2, color=color, alpha=0.8, legend=name)

p.legend.location = 'top_left'
# 'hide' toggles a glyph's visibility when its legend entry is clicked.
p.legend.click_policy = 'hide'

output_file('interactive_legend.html', title='interactive_legend.py example')
show(p)
|
davidmueller13/android_kernel_samsung_lt03lte | refs/heads/android-5.1 | tools/perf/util/setup.py | 4998 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    """build_ext that redirects compilation output into the kernel build tree.

    Overrides the distutils defaults with the PYTHON_EXTBUILD_LIB/TMP
    directories (read into the module-level build_lib/build_tmp below).
    """
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    """install_lib that installs straight from the out-of-tree build directory."""
    def finalize_options(self):
        _install_lib.finalize_options(self)
        # Install from the same directory build_ext wrote to.
        self.build_dir = build_lib
# Compiler flags; CFLAGS exported by the kernel build environment are appended.
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

# Out-of-tree build locations exported by the perf Makefile.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

# The source list lives in util/python-ext-sources; skip blank lines and
# '#' comments.  NOTE: file() is the Python 2 builtin (see the python2
# shebang above); it would need open() under Python 3.
ext_sources = [f.strip() for f in file('util/python-ext-sources')
               if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                 )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
|
drrk/micropython | refs/heads/master | tests/basics/builtin_bin.py | 72 | # test builtin bin function
# Exercise bin() across positive, negative, binary-literal and
# arbitrary-precision integer inputs; output order matches the original.
for case in (1, -1, 15, -15, 12345, 0b10101,
             12345678901234567890, 0b10101010101010101010):
    print(bin(case))
|
KULinacs/python-asymmetric-key-toolkit | refs/heads/master | python/asn1key.py | 2 | import intcodecs
class ASN1_Object(object):
    """Generic ASN.1 DER object (base of all ASN1_* classes).

    Stores a value together with its DER-encoded content octets.
    Subclasses provide their own (name-mangled) ``__set_value`` and
    override ``identity`` with their ASN.1 tag number.
    """

    def __init__(self, value):
        self.__set_value(value)

    def __len__(self):
        # Length of the encoded content octets (tag and length bytes excluded).
        return len(self.__encoded_value)

    def __str__(self):
        header = 'OBJECT ' + hex(len(self)) + ' ('
        return header + hex(self.__value) + ')'

    def __set_value(self, value):
        # BUG FIX: this method did not exist, so the base class (tag 1 in
        # the ``objects`` dispatch table) raised AttributeError on
        # construction.  For the generic object the raw value is its own
        # encoding.
        self.__value = value
        self.__encoded_value = value

    @property
    def identity(self):
        # BUG FIX: was a plain method, so encode()'s chr(self.identity)
        # received a bound method and raised TypeError.  A property keeps
        # it consistent with every subclass.
        return 0

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        # BUG FIX: was ``self.__value = self.__set_value(value)``, which
        # overwrote the freshly-stored value with None.
        self.__set_value(value)

    def encode(self):
        """Return the full DER encoding: tag, length, then content octets."""
        byte_values = bytearray(chr(self.identity).encode('utf-8'))
        byte_values.extend(intcodecs.defencode(len(self)))
        byte_values.extend(self.__encoded_value)
        return byte_values
class ASN1_Integer(ASN1_Object):
    '''An ASN1 Encoded Integer'''

    def __init__(self, value):
        self.__set_value(value)

    def __len__(self):
        # Number of DER content octets for the integer.
        return len(self.__encoded_value)

    def __str__(self):
        header = 'INTEGER ' + hex(len(self)) + ' ('
        return header + hex(self.__value) + ')'

    def __set_value(self, value):
        '''Accepts integer or byte values'''
        if isinstance(value, int):
            # Python int in: derive the DER content octets from it.
            self.__value = value
            self.__encoded_value = intcodecs.derencode(value)
        else:
            try:
                # Raw bytes in: decode them to recover the integer value.
                self.__value = intcodecs.derdecode(value)
                self.__encoded_value = value
            # NOTE(review): bare except also converts unrelated failures
            # (e.g. bugs inside intcodecs) into this TypeError.
            except:
                raise TypeError('Value must be an integer or a list of bytes')

    @property
    def identity(self):
        # ASN.1 universal tag number for INTEGER.
        return 2

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        self.__set_value(value)

    def encode(self):
        """Return the full DER encoding: tag, length, then content octets."""
        byte_values = bytearray(chr(self.identity).encode('utf-8'))
        byte_values.extend(intcodecs.defencode(len(self)))
        byte_values.extend(self.__encoded_value)
        return byte_values
class ASN1_Bit_String(ASN1_Object):
    '''An ASN1 encoded Bit_String. Currently does not support unused bits'''

    def __init__(self, value):
        self.__set_value(value)

    def __len__(self):
        # +1 for the leading "unused bits" octet that encode() emits.
        return len(self.__encoded_value) + 1

    def __str__(self):
        header = 'BIT_STRING ' + hex(len(self)) + ' {\n\t'
        return header + '\n\t'.join(str(item).replace('\t', '\t\t')
                                    for item in self.__value) + '\n\t}'

    def __set_value(self, value):
        """Initialise from encoded bytes; value[0] (unused-bit count) is skipped."""
        self.__value = []
        self.__encoded_value = bytearray()
        self.extend(value[1:])

    def __getitem__(self, i):
        return self.__value[i]

    def __setitem__(self, i, value):
        if isinstance(value, ASN1_Object):
            self.__value[i] = value
        else:
            raise TypeError('Only Object values may be set')

    def append(self, value):
        """Append one ASN1 object, or extend with raw encoded bytes."""
        if isinstance(value, ASN1_Object):
            self.extend([value])
        else:
            self.extend(value)

    def extend(self, value):
        """Add members from a list of ASN1 objects or from encoded bytes."""
        if isinstance(value, list):
            try:
                self.__value.extend(value)
                # BUG FIX: was bytearray([item.encode() for item in value]),
                # which always raised (a bytearray cannot be built from a
                # list of bytearrays) and so rejected valid ASN1 objects.
                for item in value:
                    self.__encoded_value.extend(item.encode())
            except AttributeError:
                raise TypeError('List must contain ASN1 objects')
        else:
            self.__encoded_value.extend(value)
            try:
                # Walk the TLV stream, decoding one child object at a time.
                i = 0
                while i < len(value):
                    ident = value[i]
                    ident_len = value[i + 1]
                    if ident_len < 0x80:
                        # Short form: the length octet is the content length.
                        data_len = ident_len
                        data_start = i + 2
                    else:
                        # Long form: low 7 bits give the count of length octets.
                        len_start = i + 1
                        # BUG FIX: the offset was computed with subtraction
                        # (len_start - ident_len - 0x80 + 1), producing a
                        # negative index; use the same arithmetic as
                        # ASN1_Sequence.extend.
                        data_start = len_start + (ident_len - 0x80) + 1
                        data_len = intcodecs.defdecode(value[len_start:
                                                             data_start])
                    data_stop = data_start + data_len
                    self.__value.append(make_object(ident, value[data_start:
                                                                 data_stop]))
                    i = data_stop
            except Exception:
                # Unparseable payload: keep it as opaque bytes (note that
                # encode() cannot re-serialise this fallback state).
                self.__value = self.__encoded_value

    @property
    def identity(self):
        # ASN.1 universal tag number for BIT STRING.
        return 3

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        # BUG FIX: was ``self.__value = self.__set_value(value)``, which
        # overwrote the freshly-set value with None (__set_value returns None).
        self.__set_value(value)

    def encode(self):
        """Re-encode members; emits tag, length, unused-bits octet, content."""
        self.__encoded_value = bytearray()
        for item in self.__value:
            self.__encoded_value.extend(item.encode())
        byte_values = bytearray(chr(self.identity).encode('utf-8'))
        byte_values.extend(intcodecs.defencode(len(self)))
        # Leading 0x00: unused bits in the final octet (always 0 here).
        byte_values.extend(bytearray(chr(0x00).encode('utf-8')))
        byte_values.extend(self.__encoded_value)
        return byte_values
class ASN1_Null(ASN1_Object):
    '''An ASN1 Null Object'''

    def __init__(self, value):
        self.__set_value(value)

    def __len__(self):
        # NULL has no content octets, so this is always 0.
        return len(self.__encoded_value)

    def __str__(self):
        header = 'NULL ' + hex(len(self)) + ' ('
        return header + ')'

    def __set_value(self, value):
        """Accept only "empty" values: None, 0 or a zero-length sequence.

        BUG FIX: previously used ``value == None`` and unconditionally
        called ``len(value)``, so a non-sized value such as 5 crashed with
        a generic TypeError instead of the intended error message.
        """
        if value is None or value == 0:
            empty = True
        else:
            try:
                empty = len(value) == 0
            except TypeError:
                empty = False
        if empty:
            self.__value = None
            self.__encoded_value = bytearray()
        else:
            raise TypeError('ASN1 Null Objects can have no value')

    @property
    def identity(self):
        # ASN.1 universal tag number for NULL.
        return 5

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        self.__set_value(value)

    def encode(self):
        """Return tag + length (content is empty) as a bytearray."""
        byte_values = bytearray(chr(self.identity).encode('utf-8'))
        byte_values.extend(intcodecs.defencode(len(self)))
        byte_values.extend(self.__encoded_value)
        return byte_values
class ASN1_ObjectID(ASN1_Object):
    '''An ASN1 Object ID'''

    def __init__(self, value):
        self.__set_value(value)

    def __len__(self):
        return len(self.__encoded_value)

    def __str__(self):
        header = 'OBJECTID ' + hex(len(self)) + ' ('
        return header + str(self.__value) + ')'

    def __set_value(self, value):
        # Accepts a dotted-decimal string or the raw encoded bytes.
        if isinstance(value, str):
            self.__value = value
            # Check for correctness
            self.__encoded_value = intcodecs.multivlqencode(value.split('.'))
        else:
            try:
                # The first octet packs the top two OID arcs as arc1*40 + arc2.
                self.__value = str(value[0] // 40) + '.'
                self.__value += str(value[0] % 40) + '.'
                self.__value += '.'.join(str(octet) for octet in
                                         intcodecs.multivlqdecode(value[1:]))
                self.__encoded_value = value
            # NOTE(review): bare except converts any decode failure into
            # this TypeError, hiding the underlying cause.
            except:
                raise TypeError('Input must a "." separated string or a list of bytes')

    @property
    def identity(self):
        # ASN.1 universal tag number for OBJECT IDENTIFIER.
        return 6

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        self.__set_value(value)

    def encode(self):
        """Return the full DER encoding: tag, length, then content octets."""
        byte_values = bytearray(chr(self.identity).encode('utf-8'))
        byte_values.extend(intcodecs.defencode(len(self)))
        byte_values.extend(self.__encoded_value)
        return byte_values
class ASN1_Sequence(ASN1_Object):
    '''An ASN1 encoded Sequence of other ASN1 objects.'''
    # (docstring fixed: it was copy-pasted from ASN1_Bit_String)

    def __init__(self, value):
        self.__set_value(value)

    def __len__(self):
        return len(self.__encoded_value)

    def __str__(self):
        header = 'SEQUENCE ' + hex(len(self)) + ' {\n\t'
        return header + '\n\t'.join(str(item).replace('\t', '\t\t')
                                    for item in self.__value) + '\n\t}'

    def __set_value(self, value):
        """Initialise from raw encoded bytes or a list of ASN1 objects."""
        self.__value = []
        self.__encoded_value = bytearray()
        self.extend(value)

    def __getitem__(self, i):
        return self.__value[i]

    def __setitem__(self, i, value):
        if isinstance(value, ASN1_Object):
            self.__value[i] = value
        else:
            raise TypeError('Only Object values may be set')

    def append(self, value):
        """Append one ASN1 object, or extend with raw encoded bytes."""
        if isinstance(value, ASN1_Object):
            self.extend([value])
        else:
            self.extend(value)

    def extend(self, value):
        """Add members from a list of ASN1 objects or from encoded bytes."""
        if isinstance(value, list):
            try:
                self.__value.extend(value)
                # BUG FIX: was bytearray([item.encode() for item in value]),
                # which always raised (a bytearray cannot be built from a
                # list of bytearrays), so valid ASN1 objects were wrongly
                # rejected with the TypeError below.
                for item in value:
                    self.__encoded_value.extend(item.encode())
            except AttributeError:
                raise TypeError('List must contain ASN1 objects')
        else:
            self.__encoded_value.extend(value)
            try:
                # Walk the TLV stream, decoding one child object at a time.
                i = 0
                while i < len(value):
                    ident = value[i]
                    ident_len = value[i + 1]
                    if ident_len < 0x80:
                        # Short form: the length octet is the content length.
                        data_len = ident_len
                        data_start = i + 2
                    else:
                        # Long form: low 7 bits give the count of length octets.
                        len_start = i + 1
                        data_start = len_start + (ident_len - 0x80) + 1
                        data_len = intcodecs.defdecode(value[len_start:
                                                             data_start])
                    data_stop = data_start + data_len
                    self.__value.append(make_object(ident, value[data_start:
                                                                 data_stop]))
                    i = data_stop
            except Exception:
                raise TypeError('Invalid bytes passed')

    @property
    def identity(self):
        # ASN.1 tag 0x30: SEQUENCE with the "constructed" bit set.
        return 48

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        self.__set_value(value)

    def encode(self):
        """Re-encode all members and return tag + length + content."""
        self.__encoded_value = bytearray()
        for item in self.__value:
            self.__encoded_value.extend(item.encode())
        byte_values = bytearray(chr(self.identity).encode('utf-8'))
        byte_values.extend(intcodecs.defencode(len(self)))
        byte_values.extend(self.__encoded_value)
        return byte_values
class ASN1_Error(Exception):
    # Raised by make_object for unknown ASN.1 tag numbers.
    pass
# Dispatch table: ASN.1 tag number -> wrapper class.
objects = {1 : ASN1_Object, 2 : ASN1_Integer, 3 : ASN1_Bit_String,
           5 : ASN1_Null, 6 : ASN1_ObjectID, 48 : ASN1_Sequence}

def make_object(identity, value):
    """Instantiate the ASN1_* class registered for *identity* with *value*.

    Raises ASN1_Error for an unknown tag (the intent of the previously
    commented-out handler) while catching only KeyError, so genuine
    constructor errors (e.g. TypeError for bad bytes) propagate unchanged
    instead of being masked by the old bare except.
    """
    try:
        cls = objects[identity]
    except KeyError:
        raise ASN1_Error('Unknown ASN1 Type specified: ' + str(identity))
    return cls(value)
|
paterson/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/html5lib/html5lib/__init__.py | 426 | """
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
__version__ = "0.9999-dev"
|
andrejb/cloudant_bigcouch | refs/heads/master | couchjs/scons/scons-local-2.0.1/SCons/Variables/PackageVariable.py | 61 | """engine.SCons.Variables.PackageVariable
This file defines the option type for SCons implementing 'package
activation'.
To be used whenever a 'package' may be enabled/disabled and the
package path may be specified.
Usage example:
Examples:
x11=no (disables X11 support)
x11=yes (will search for the package installation dir)
x11=/usr/local/X11 (will check this path for existance)
To replace autoconf's --with-xxx=yyy
opts = Variables()
opts.Add(PackageVariable('x11',
'use X11 installed here (yes = search some places',
'yes'))
...
if env['x11'] == True:
dir = ... search X11 in some standard places ...
env['x11'] = dir
if env['x11']:
... build with x11 ...
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PackageVariable.py 5134 2010/08/16 23:02:40 bdeegan"
__all__ = ['PackageVariable',]
import SCons.Errors
# Keyword spellings recognised as boolean enable/disable values
# ('search' also enables, triggering the path search in _validator).
__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search')
__disable_strings = ('0', 'no', 'false', 'off', 'disable')

def _converter(val):
    """Map an enable/disable keyword to True/False; pass paths through.

    A string that is neither keyword (e.g. an explicit installation path)
    is returned unchanged rather than rejected.
    """
    lowered = val.lower()
    for keywords, result in ((__enable_strings, True),
                             (__disable_strings, False)):
        if lowered in keywords:
            return result
    # Not a boolean keyword: treat it as a user-supplied path.
    return val
def _validator(key, val, env, searchfunc):
    # NB: searchfunc is currently undocumented and unsupported
    """Validate a package option: search when enabled, else check the path.

    When the option is exactly True, *searchfunc* (if given) is asked to
    locate the installation and its result replaces env[key].  Any other
    truthy value must name an existing filesystem path.
    """
    import os
    current = env[key]
    if current is True:
        if searchfunc:
            env[key] = searchfunc(key, val)
        return
    if not current:
        # Package disabled: nothing to validate.
        return
    if not os.path.exists(val):
        raise SCons.Errors.UserError(
            'Path does not exist for option %s: %s' % (key, val))
def PackageVariable(key, help, default, searchfunc=None):
    # NB: searchfunc is currently undocumented and unsupported
    """Return a (key, help, default, validator, converter) tuple.

    Describes a 'package' option that accepts yes/no style booleans or an
    explicit installation path; the result is suitable for opts.Add().
    """
    extended_help = '\n    '.join(
        (help, '( yes | no | /path/to/%s )' % key))
    validator = lambda k, v, e: _validator(k, v, e, searchfunc)
    return (key, extended_help, default, validator, _converter)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
jackTheRipper/iotrussia | refs/heads/master | web_server/lib/werkzeug-master/examples/plnt/database.py | 45 | # -*- coding: utf-8 -*-
"""
plnt.database
~~~~~~~~~~~~~
The database definitions for the planet.
:copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
from sqlalchemy import MetaData, Table, Column, ForeignKey, Boolean, \
Integer, String, DateTime
from sqlalchemy.orm import dynamic_loader, scoped_session, create_session, \
mapper
from plnt.utils import application, local_manager
def new_db_session():
    """Create a fresh SQLAlchemy session bound to the application's engine."""
    return create_session(application.database_engine, autoflush=True,
                          autocommit=False)
metadata = MetaData()
# Scoped (per-worker) session registry keyed by the WSGI worker identity.
session = scoped_session(new_db_session, local_manager.get_ident)

# A feed (blog) that the planet aggregates.
blog_table = Table('blogs', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String(120)),
    Column('description', String),
    Column('url', String(200)),
    Column('feed_url', String(250))
)

# A single post pulled from one of the blogs; guid deduplicates entries.
entry_table = Table('entries', metadata,
    Column('id', Integer, primary_key=True),
    Column('blog_id', Integer, ForeignKey('blogs.id')),
    Column('guid', String(200), unique=True),
    Column('title', String(140)),
    Column('url', String(200)),
    Column('text', String),
    Column('pub_date', DateTime),
    Column('last_update', DateTime)
)
class Blog(object):
    """ORM model for an aggregated blog/feed (mapped onto blog_table)."""
    query = session.query_property()

    def __init__(self, name, url, feed_url, description=u''):
        self.name = name
        self.url = url
        self.feed_url = feed_url
        self.description = description

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.url)
class Entry(object):
    """ORM model for a single feed entry (mapped onto entry_table).

    Instances are populated attribute-by-attribute by the feed sync code,
    hence no explicit __init__.
    """
    query = session.query_property()

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.guid)
# Wire the classes to their tables; Blog.entries is a lazy dynamic query
# and each Entry gains a 'blog' backref.
mapper(Entry, entry_table)
mapper(Blog, blog_table, properties=dict(
    entries=dynamic_loader(Entry, backref='blog')
))
|
akbargumbira/django-user-map | refs/heads/develop | user_map/forms/user.py | 1 | # coding=utf-8
"""Django forms for User related routines."""
from django.contrib.gis import forms
from leaflet.forms.widgets import LeafletWidget
from user_map.models import UserMap
from user_map.app_settings import LEAFLET_TILES
from user_map.forms.custom_widget import CustomClearableFileInput
class UserMapForm(forms.ModelForm):
    """Form for user model."""
    class Meta:
        """Association between models and this form."""
        model = UserMap
        # The owning user comes from the request, never from form input.
        exclude = ['user']
        widgets = {
            # Leaflet map picker using the project's configured tile layers.
            'location': LeafletWidget(attrs={
                'settings_overrides': {
                    'TILES': LEAFLET_TILES
                }}),
            'roles': forms.CheckboxSelectMultiple(),
            'image': CustomClearableFileInput(),
            'website': forms.URLInput(
                attrs={'placeholder': 'http://john.doe.com'})
        }
|
ahmed-mahran/hue | refs/heads/master | apps/impala/src/impala/forms.py | 1198 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
elahejalalpour/ELRyu | refs/heads/master | ryu/tests/unit/packet/test_bmp.py | 25 | # Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from nose.tools import eq_
from nose.tools import ok_
from time import time
from ryu.lib.packet import bmp
from ryu.lib.packet import bgp
from ryu.lib.packet import afi
from ryu.lib.packet import safi
class Test_bmp(unittest.TestCase):
    """ Test case for ryu.lib.packet.bmp

    Each test builds a BMP message, serializes it, re-parses the bytes and
    checks the round trip is lossless and consumes the whole buffer.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def _time(self):
        # time() can give sub-microsecond precision, which results
        # in an assertion failure
        return round(time(), 6)

    def test_route_monitoring(self):
        # Route Monitoring message wrapping an (empty) BGP UPDATE.
        update = bgp.BGPUpdate()
        msg = bmp.BMPRouteMonitoring(bgp_update=update,
                                     peer_type=bmp.BMP_PEER_TYPE_GLOBAL,
                                     is_post_policy=True,
                                     peer_distinguisher=0,
                                     peer_address='192.0.2.1',
                                     peer_as=30000,
                                     peer_bgp_id='192.0.2.1',
                                     timestamp=self._time())
        binmsg = msg.serialize()
        msg2, rest = bmp.BMPMessage.parser(binmsg)
        eq_(msg.to_jsondict(), msg2.to_jsondict())
        eq_(rest, b'')

    def test_statistics_report(self):
        # One entry per supported statistics counter type.
        stats = [{'type': bmp.BMP_STAT_TYPE_REJECTED, 'value': 100},
                 {'type': bmp.BMP_STAT_TYPE_DUPLICATE_PREFIX, 'value': 200},
                 {'type': bmp.BMP_STAT_TYPE_DUPLICATE_WITHDRAW, 'value': 300},
                 {'type': bmp.BMP_STAT_TYPE_ADJ_RIB_IN, 'value': 100000},
                 {'type': bmp.BMP_STAT_TYPE_LOC_RIB, 'value': 500000}]
        msg = bmp.BMPStatisticsReport(stats=stats,
                                      peer_type=bmp.BMP_PEER_TYPE_GLOBAL,
                                      is_post_policy=True,
                                      peer_distinguisher=0,
                                      peer_address='192.0.2.1',
                                      peer_as=30000,
                                      peer_bgp_id='192.0.2.1',
                                      timestamp=self._time())
        binmsg = msg.serialize()
        msg2, rest = bmp.BMPMessage.parser(binmsg)
        eq_(msg.to_jsondict(), msg2.to_jsondict())
        eq_(rest, b'')

    def test_peer_down_notification(self):
        reason = bmp.BMP_PEER_DOWN_REASON_LOCAL_BGP_NOTIFICATION
        # The payload is a BGP NOTIFICATION whose own data is b'hoge'.
        data = b'hoge'
        data = bgp.BGPNotification(error_code=1, error_subcode=2, data=data)
        msg = bmp.BMPPeerDownNotification(reason=reason, data=data,
                                          peer_type=bmp.BMP_PEER_TYPE_GLOBAL,
                                          is_post_policy=True,
                                          peer_distinguisher=0,
                                          peer_address='192.0.2.1',
                                          peer_as=30000,
                                          peer_bgp_id='192.0.2.1',
                                          timestamp=self._time())
        binmsg = msg.serialize()
        msg2, rest = bmp.BMPMessage.parser(binmsg)
        eq_(msg.to_jsondict(), msg2.to_jsondict())
        eq_(rest, b'')

    def test_peer_up_notification(self):
        # The same OPEN (with capabilities) is used as both sent and received.
        opt_param = [bgp.BGPOptParamCapabilityUnknown(cap_code=200,
                                                      cap_value=b'hoge'),
                     bgp.BGPOptParamCapabilityRouteRefresh(),
                     bgp.BGPOptParamCapabilityMultiprotocol(
                         afi=afi.IP, safi=safi.MPLS_VPN)]
        open_message = bgp.BGPOpen(my_as=40000, bgp_identifier='192.0.2.2',
                                   opt_param=opt_param)
        msg = bmp.BMPPeerUpNotification(local_address='192.0.2.2',
                                        local_port=179,
                                        remote_port=11089,
                                        sent_open_message=open_message,
                                        received_open_message=open_message,
                                        peer_type=bmp.BMP_PEER_TYPE_GLOBAL,
                                        is_post_policy=True,
                                        peer_distinguisher=0,
                                        peer_address='192.0.2.1',
                                        peer_as=30000,
                                        peer_bgp_id='192.0.2.1',
                                        timestamp=self._time())
        binmsg = msg.serialize()
        msg2, rest = bmp.BMPMessage.parser(binmsg)
        eq_(msg.to_jsondict(), msg2.to_jsondict())
        eq_(rest, b'')

    def test_initiation(self):
        initiation_info = [{'type': bmp.BMP_INIT_TYPE_STRING,
                            'value': u'This is Ryu BGP BMP message'}]
        msg = bmp.BMPInitiation(info=initiation_info)
        binmsg = msg.serialize()
        msg2, rest = bmp.BMPMessage.parser(binmsg)
        # Identity mapping keeps the unicode info values comparable as-is.
        eq_(msg.to_jsondict(lambda v: v), msg2.to_jsondict(lambda v: v))
        eq_(rest, b'')

    def test_termination(self):
        termination_info = [{'type': bmp.BMP_TERM_TYPE_STRING,
                             'value': u'Session administatively closed'},
                            {'type': bmp.BMP_TERM_TYPE_REASON,
                             'value': bmp.BMP_TERM_REASON_ADMIN}]
        msg = bmp.BMPTermination(info=termination_info)
        binmsg = msg.serialize()
        msg2, rest = bmp.BMPMessage.parser(binmsg)
        eq_(msg.to_jsondict(lambda v: v), msg2.to_jsondict(lambda v: v))
        eq_(rest, b'')
|
ofbennett/bad-boids | refs/heads/master | setup.py | 1 | from setuptools import setup, find_packages
# Package metadata for the improved Boids simulation.
setup(
    name = 'Improved_Boids',
    version = '1.0.0',
    author = 'Oscar Bennett',
    # BUG FIX: was the misspelling 'licence', which only works through a
    # legacy distutils compatibility alias; 'license' is the documented
    # setup() metadata keyword.
    license = 'The MIT License',
    description = 'An improved refactored version of the BadBoids code',
    packages = find_packages(exclude = ['*test']),
    scripts = ['scripts/runBoids'],
    install_requires = ['numpy', 'matplotlib', 'pyyaml']
)
|
jwren/intellij-community | refs/heads/master | python/testData/types/AnnotatedClsReturnOverloadedClassMethod/mytime.py | 12 | from typing import Type, TypeVar
T = TypeVar("T")

class mytime:
    # NOTE(review): IDE type-inference fixture -- `sys`, `Optional` and
    # `overload` are referenced without imports; this appears deliberate,
    # since only classmethod return-type inference (Type[T] -> T) is under
    # test, not runtime execution. Confirm before "fixing" the imports.
    if sys.version_info >= (3, 8):
        @classmethod
        def now(cls: Type[T], tz: Optional[int] = ...) -> T: ...
    else:
        @overload
        @classmethod
        def now(cls: Type[T], tz: int = ...) -> T: ...
AkshayHarshe/developer-support | refs/heads/master | python/arcpy-python/remove-all-basemaps-batch/remove-all-basemaps-from-mxd-batch.py | 15 | #-------------------------------------------------------------------------------
# Title: Remove all basemap layers
# Purpose: Remove all basemaps from MXDs. This script takes an input
# directory and walks through it, finding all MXDs and searching
# for basemap layerss
#
# Author: Luke Danzinger
#
# Created: 16/12/2013
#-------------------------------------------------------------------------------
"""This script is designed to open an MXD, check to see if
a basemap exists, and if it does, removes it from the map"""
#import modules
import arcpy
import os
def remove_basemaps(path):
    """Walk *path* and strip every layer whose name matches *Basemap* from
    each .mxd map document found, saving each modified document once.

    MXDs that cannot be saved (e.g. open elsewhere or read-only) are
    reported and skipped rather than aborting the whole batch.
    """
    for root, dirs, files in os.walk(path):
        for filename in files:
            # Case-insensitive match: ArcGIS also produces '.MXD' files,
            # which the old case-sensitive endswith() check missed.
            if not filename.lower().endswith(".mxd"):
                continue
            mxd = arcpy.mapping.MapDocument(os.path.join(root, filename))
            try:
                for df in arcpy.mapping.ListDataFrames(mxd):
                    # Scope the wildcard search to this data frame; the
                    # original listed layers across the whole document and
                    # could hand RemoveLayer a layer from another frame.
                    for layer in arcpy.mapping.ListLayers(mxd, "*Basemap*", df):
                        print("Basemap Layer found in " + filename)
                        arcpy.mapping.RemoveLayer(df, layer)
                        print(str(layer.name) + " removed from " + filename)
                # Save once per document instead of once per removed layer.
                mxd.save()
                print("changes applied to " + filename)
            except Exception:
                # Narrowed from a bare except; keeps the best-effort batch
                # behaviour without swallowing KeyboardInterrupt/SystemExit.
                print(filename + " cannot be saved")
            finally:
                del mxd
    print("script complete")

if __name__ == '__main__':
    # Raw string avoids accidental escape sequences in the Windows path.
    path = r"C:\TestData"
    remove_basemaps(path)
jadelord/TomoKTH | refs/heads/master | tomokth/preprocess/io.py | 1 | """Handles I/O of images to tools(:mod:`fluidimage.pre_proc.io`)
==================================================================================
Provides decorators to handle input and output of images for tools/functions in
`fluidimage.pre_proc.toolbox`
.. currentmodule:: fluidimage.pre_proc.io
Provides:
:members:
"""
import numpy as np
from decorator import decorator
def _get_img_arg(*args, **kwargs):
    """Return the image argument: keyword 'img' if present, else args[0]."""
    return kwargs['img'] if 'img' in kwargs else args[0]
def _replace_img_arg(new_img, *args, **kwargs):
    """Substitute *new_img* for the image argument; return (args, kwargs).

    Positional arguments come back as a mutable list so callers can keep
    rewriting them on subsequent iterations.
    """
    args = list(args)
    if 'img' in kwargs:
        kwargs['img'] = new_img
    else:
        args[0] = new_img
    return args, kwargs
def _get_array_like_type(array_like, convert_to_ndarray=False):
    """Return the constructor matching *array_like*'s type (np.array for
    ndarrays), optionally alongside an ndarray copy of the input."""
    if isinstance(array_like, np.ndarray):
        ArrayLike = np.array
    else:
        ArrayLike = array_like.__class__
    if convert_to_ndarray:
        return np.array(array_like), ArrayLike
    return ArrayLike
@decorator
def iterate_multiple_imgs(tool, *args, **kwargs):
    """
    Feeds one image at a time to the function `tool`,
    typically a spatial filter, or a brightness/contrast adjustment tool.
    """
    img_array_in = _get_img_arg(*args, **kwargs)
    if isinstance(img_array_in, np.ndarray):
        if img_array_in.ndim == 2:
            # A single 2-D image: call the tool directly.
            return tool(*args, **kwargs)  # Function call!
    # A stack/sequence of images: apply the tool image by image, in place.
    for i, img in enumerate(img_array_in):
        args, kwargs = _replace_img_arg(img, *args, **kwargs)
        img_array_in[i] = tool(*args, **kwargs)  # Function call!
    return img_array_in
@decorator
def multiple_imgs_as_ndarray(tool, *args, **kwargs):
    """
    Images are processed as follows, esp. for temporal filters:
    .. array-like (input) --> nd-array ---> [`tool`] --> array_like (output)
    """
    img_array_in = _get_img_arg(*args, **kwargs)
    if isinstance(img_array_in, np.ndarray) and img_array_in.ndim == 3:
        # Already a 3-D ndarray stack: nothing to convert.
        return tool(*args, **kwargs)  # Function call!
    # Convert to ndarray for the tool, then cast back to the caller's type.
    img_ndarray_in, ImgArrayLike = _get_array_like_type(img_array_in, True)
    args, kwargs = _replace_img_arg(img_ndarray_in, *args, **kwargs)
    img_array_out = tool(*args, **kwargs)  # Function call!
    return ImgArrayLike(img_array_out)
|
hhsprings/cython | refs/heads/master | Cython/Compiler/Buffer.py | 7 | from __future__ import absolute_import
from .Visitor import CythonTransform
from .ModuleNode import ModuleNode
from .Errors import CompileError
from .UtilityCode import CythonUtilityCode
from .Code import UtilityCode, TempitaUtilityCode
from . import Options
from . import Interpreter
from . import PyrexTypes
from . import Naming
from . import Symtab
def dedent(text, reindent=0):
    """Strip common leading whitespace from *text*, then optionally prefix
    every line (including empty ones) with *reindent* spaces."""
    from textwrap import dedent as _dedent
    stripped = _dedent(text)
    if reindent <= 0:
        return stripped
    prefix = " " * reindent
    return '\n'.join(prefix + line for line in stripped.split('\n'))
class IntroduceBufferAuxiliaryVars(CythonTransform):
    """Declares the hidden helper variables (Py_buffer structs and the
    per-dimension shape/stride/suboffset caches) that buffer-typed entries
    need, in every scope that uses buffers or memoryview slices."""
    #
    # Entry point
    #
    buffers_exists = False
    using_memoryview = False

    def __call__(self, node):
        assert isinstance(node, ModuleNode)
        self.max_ndim = 0
        result = super(IntroduceBufferAuxiliaryVars, self).__call__(node)
        if self.buffers_exists:
            # Only emit the buffer runtime support code when actually needed.
            use_bufstruct_declare_code(node.scope)
            use_py2_buffer_functions(node.scope)
            node.scope.use_utility_code(empty_bufstruct_utility)
        return result

    #
    # Basic operations for transforms
    #
    def handle_scope(self, node, scope):
        # For all buffers, insert extra variables in the scope.
        # The variables are also accessible from the buffer_info
        # on the buffer entry
        scope_items = scope.entries.items()
        bufvars = [entry for name, entry in scope_items if entry.type.is_buffer]
        if len(bufvars) > 0:
            # Deterministic ordering of the generated auxiliary variables.
            bufvars.sort(key=lambda entry: entry.name)
            self.buffers_exists = True

        memviewslicevars = [entry for name, entry in scope_items if entry.type.is_memoryviewslice]
        if len(memviewslicevars) > 0:
            self.buffers_exists = True

        for (name, entry) in scope_items:
            if name == 'memoryview' and isinstance(entry.utility_code_definition, CythonUtilityCode):
                self.using_memoryview = True
                break
        del scope_items

        if isinstance(node, ModuleNode) and len(bufvars) > 0:
            # for now...note that pos is wrong
            raise CompileError(node.pos, "Buffer vars not allowed in module scope")
        for entry in bufvars:
            if entry.type.dtype.is_ptr:
                raise CompileError(node.pos, "Buffers with pointer types not yet supported.")

            name = entry.name
            buftype = entry.type
            if buftype.ndim > Options.buffer_max_dims:
                raise CompileError(node.pos,
                        "Buffer ndims exceeds Options.buffer_max_dims = %d" % Options.buffer_max_dims)
            if buftype.ndim > self.max_ndim:
                self.max_ndim = buftype.ndim

            # Declare auxiliary vars
            def decvar(type, prefix):
                # Mangled, unnamed helper variable bound to this entry.
                cname = scope.mangle(prefix, name)
                aux_var = scope.declare_var(name=None, cname=cname,
                                            type=type, pos=node.pos)
                if entry.is_arg:
                    aux_var.used = True  # otherwise, NameNode will mark whether it is used
                return aux_var

            auxvars = ((PyrexTypes.c_pyx_buffer_nd_type, Naming.pybuffernd_prefix),
                       (PyrexTypes.c_pyx_buffer_type, Naming.pybufferstruct_prefix))
            pybuffernd, rcbuffer = [decvar(type, prefix) for (type, prefix) in auxvars]

            entry.buffer_aux = Symtab.BufferAux(pybuffernd, rcbuffer)

        scope.buffer_entries = bufvars
        self.scope = scope

    def visit_ModuleNode(self, node):
        self.handle_scope(node, node.scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        self.handle_scope(node, node.local_scope)
        self.visitchildren(node)
        return node
#
# Analysis
#
# Recognised buffer directive options, in positional-argument order.
buffer_options = ("dtype", "ndim", "mode", "negative_indices", "cast")  # ordered!
buffer_defaults = {"ndim": 1, "mode": "full", "negative_indices": True, "cast": False}
buffer_positional_options_count = 1  # anything beyond this needs keyword argument

# Error message templates for invalid buffer declarations.
ERR_BUF_OPTION_UNKNOWN = '"%s" is not a buffer option'
ERR_BUF_TOO_MANY = 'Too many buffer options'
ERR_BUF_DUP = '"%s" buffer option already supplied'
ERR_BUF_MISSING = '"%s" missing'
ERR_BUF_MODE = 'Only allowed buffer modes are: "c", "fortran", "full", "strided" (as a compile-time string)'
ERR_BUF_NDIM = 'ndim must be a non-negative integer'
ERR_BUF_DTYPE = 'dtype must be "object", numeric type or a struct'
ERR_BUF_BOOL = '"%s" must be a boolean'
def analyse_buffer_options(globalpos, env, posargs, dictargs, defaults=None, need_complete=True):
    """
    Must be called during type analysis, as analyse is called
    on the dtype argument.

    posargs and dictargs should consist of a list and a dict
    of tuples (value, pos). Defaults should be a dict of values.

    Returns a dict containing all the options a buffer can have and
    its value (with the positions stripped).
    """
    if defaults is None:
        defaults = buffer_defaults

    posargs, dictargs = Interpreter.interpret_compiletime_options(
        posargs, dictargs, type_env=env, type_args=(0, 'dtype'))

    if len(posargs) > buffer_positional_options_count:
        raise CompileError(posargs[-1][1], ERR_BUF_TOO_MANY)

    options = {}
    # Keyword arguments must name a known buffer option.
    for name, (value, pos) in dictargs.items():
        if not name in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        options[name] = value

    # Positional arguments are matched against buffer_options in order.
    for name, (value, pos) in zip(buffer_options, posargs):
        # NOTE(review): this membership test can never fail since `name` is
        # drawn from buffer_options itself; kept for byte-compatibility.
        if not name in buffer_options:
            raise CompileError(pos, ERR_BUF_OPTION_UNKNOWN % name)
        if name in options:
            raise CompileError(pos, ERR_BUF_DUP % name)
        options[name] = value

    # Check that they are all there and copy defaults
    for name in buffer_options:
        if not name in options:
            try:
                options[name] = defaults[name]
            except KeyError:
                if need_complete:
                    raise CompileError(globalpos, ERR_BUF_MISSING % name)

    dtype = options.get("dtype")
    if dtype and dtype.is_extension_type:
        raise CompileError(globalpos, ERR_BUF_DTYPE)

    ndim = options.get("ndim")
    if ndim and (not isinstance(ndim, int) or ndim < 0):
        raise CompileError(globalpos, ERR_BUF_NDIM)

    mode = options.get("mode")
    if mode and not (mode in ('full', 'strided', 'c', 'fortran')):
        raise CompileError(globalpos, ERR_BUF_MODE)

    def assert_bool(name):
        # Shared validator for the boolean-valued options below.
        x = options.get(name)
        if not isinstance(x, bool):
            raise CompileError(globalpos, ERR_BUF_BOOL % name)

    assert_bool('negative_indices')
    assert_bool('cast')

    return options
#
# Code generation
#
class BufferEntry(object):
    """Code-generation helper wrapping a buffer-typed symtab entry.

    Exposes the C names/expressions of the per-dimension shape, stride and
    suboffset fields kept in the entry's local buffer bookkeeping struct.
    """
    def __init__(self, entry):
        self.entry = entry
        self.type = entry.type
        # C name of the local per-variable buffer bookkeeping struct.
        self.cname = entry.buffer_aux.buflocal_nd_var.cname
        # C expression for the raw Py_buffer data pointer.
        self.buf_ptr = "%s.rcbuffer->pybuffer.buf" % self.cname
        self.buf_ptr_type = entry.type.buffer_ptr_type
        self.init_attributes()
    def init_attributes(self):
        # Cache the per-dimension C expressions once.
        self.shape = self.get_buf_shapevars()
        self.strides = self.get_buf_stridevars()
        self.suboffsets = self.get_buf_suboffsetvars()
    def get_buf_suboffsetvars(self):
        # One "<struct>.diminfo[i].suboffsets" expression per dimension.
        return self._for_all_ndim("%s.diminfo[%d].suboffsets")
    def get_buf_stridevars(self):
        # One "<struct>.diminfo[i].strides" expression per dimension.
        return self._for_all_ndim("%s.diminfo[%d].strides")
    def get_buf_shapevars(self):
        # One "<struct>.diminfo[i].shape" expression per dimension.
        return self._for_all_ndim("%s.diminfo[%d].shape")
    def _for_all_ndim(self, s):
        # Instantiate template *s* once for each buffer dimension.
        return [s % (self.cname, i) for i in range(self.type.ndim)]
    def generate_buffer_lookup_code(self, code, index_cnames):
        """Return a C expression computing the element pointer for the given
        index C names, emitting the mode-specific lookup helper on first use."""
        # Create buffer lookup and return it
        # This is done via utility macros/inline functions, which vary
        # according to the access mode used.
        params = []
        nd = self.type.ndim
        mode = self.type.mode
        if mode == 'full':
            # 'full' (indirect) mode also needs the suboffsets.
            for i, s, o in zip(index_cnames,
                               self.get_buf_stridevars(),
                               self.get_buf_suboffsetvars()):
                params.append(i)
                params.append(s)
                params.append(o)
            funcname = "__Pyx_BufPtrFull%dd" % nd
            funcgen = buf_lookup_full_code
        else:
            if mode == 'strided':
                funcname = "__Pyx_BufPtrStrided%dd" % nd
                funcgen = buf_lookup_strided_code
            elif mode == 'c':
                funcname = "__Pyx_BufPtrCContig%dd" % nd
                funcgen = buf_lookup_c_code
            elif mode == 'fortran':
                funcname = "__Pyx_BufPtrFortranContig%dd" % nd
                funcgen = buf_lookup_fortran_code
            else:
                assert False
            for i, s in zip(index_cnames, self.get_buf_stridevars()):
                params.append(i)
                params.append(s)
        # Make sure the utility code is available (emit it at most once
        # per function name / dimensionality).
        if funcname not in code.globalstate.utility_codes:
            code.globalstate.utility_codes.add(funcname)
            protocode = code.globalstate['utility_code_proto']
            defcode = code.globalstate['utility_code_def']
            funcgen(protocode, defcode, name=funcname, nd=nd)
        buf_ptr_type_code = self.buf_ptr_type.empty_declaration_code()
        ptrcode = "%s(%s, %s, %s)" % (funcname, buf_ptr_type_code, self.buf_ptr,
                                      ", ".join(params))
        return ptrcode
def get_flags(buffer_aux, buffer_type):
    """Return the C expression of PyBUF_* flags used to request the buffer,
    derived from the buffer's access mode and writability."""
    mode = buffer_type.mode
    if mode == 'full':
        mode_flag = 'PyBUF_INDIRECT'
    elif mode == 'strided':
        mode_flag = 'PyBUF_STRIDES'
    elif mode == 'c':
        mode_flag = 'PyBUF_C_CONTIGUOUS'
    elif mode == 'fortran':
        mode_flag = 'PyBUF_F_CONTIGUOUS'
    else:
        assert False
    parts = ['PyBUF_FORMAT', mode_flag]
    if buffer_aux.writable_needed:
        parts.append('PyBUF_WRITABLE')
    return '| '.join(parts)
def used_buffer_aux_vars(entry):
    """Mark both auxiliary buffer variables of *entry* as used so that
    they are actually declared in the generated code."""
    aux = entry.buffer_aux
    for helper_var in (aux.buflocal_nd_var, aux.rcbuf_var):
        helper_var.used = True
def put_unpack_buffer_aux_into_scope(buf_entry, code):
    """Emit C code copying the per-dimension Py_buffer info (strides, shape,
    and — in 'full' mode — suboffsets) into the local diminfo array."""
    aux = buf_entry.buffer_aux
    struct_cname = aux.buflocal_nd_var.cname
    fields = ['strides', 'shape']
    if buf_entry.type.mode == 'full':
        # Indirect buffers additionally need the suboffsets.
        fields.append('suboffsets')
    assignments = [
        "%s.diminfo[%d].%s = %s.rcbuffer->pybuffer.%s[%d];" % (
            struct_cname, dim, field, struct_cname, field, dim)
        for dim in range(buf_entry.type.ndim)
        for field in fields]
    # All assignments go on one generated line.
    code.putln(' '.join(assignments))
def put_init_vars(entry, code):
    """Emit C statements initialising the two buffer bookkeeping structs of
    *entry* to an empty (NULL-buffer, zero-refcount) state."""
    aux = entry.buffer_aux
    nd_struct = aux.buflocal_nd_var.cname
    rc_struct = aux.rcbuf_var.cname
    # Reset the refcounted Py_buffer holder.
    code.putln("%s.pybuffer.buf = NULL;" % rc_struct)
    code.putln("%s.refcount = 0;" % rc_struct)
    # Reset the local nd struct and point it at the holder.
    code.putln("%s.data = NULL;" % nd_struct)
    code.putln("%s.rcbuffer = &%s;" % (nd_struct, rc_struct))
def put_acquire_arg_buffer(entry, code, pos):
    """Emit code acquiring the buffer of a buffer-typed function argument
    and unpacking its dimension info into local scope."""
    code.globalstate.use_utility_code(acquire_utility_code)
    getbuffer_call = get_getbuffer_call(code, entry.cname, entry.buffer_aux, entry.type)
    # Open a C scope so the format-checking stack is local to the acquisition.
    code.putln("{")
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % entry.type.dtype.struct_nesting_depth())
    code.putln(code.error_goto_if("%s == -1" % getbuffer_call, pos))
    code.putln("}")
    # An exception raised in arg parsing cannot be caught, so there is no
    # need to care about releasing the buffer on that path.
    put_unpack_buffer_aux_into_scope(entry, code)
def put_release_buffer_code(code, entry):
    """Emit a __Pyx_SafeReleaseBuffer() call for *entry*'s buffer."""
    code.globalstate.use_utility_code(acquire_utility_code)
    struct_cname = entry.buffer_aux.buflocal_nd_var.cname
    code.putln("__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);" % struct_cname)
def get_getbuffer_call(code, obj_cname, buffer_aux, buffer_type):
    """Return the C expression calling __Pyx_GetBufferAndValidate() for the
    given object; the expression expects a ``__pyx_stack`` array in scope."""
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
    dtype_typeinfo = get_type_information_cname(code, buffer_type.dtype)
    return ("__Pyx_GetBufferAndValidate(&%s.rcbuffer->pybuffer, "
            "(PyObject*)%s, &%s, %s, %d, %d, __pyx_stack)" % (
                pybuffernd_struct, obj_cname, dtype_typeinfo,
                get_flags(buffer_aux, buffer_type),
                buffer_type.ndim, int(buffer_type.cast)))
def put_assign_to_buffer(lhs_cname, rhs_cname, buf_entry,
                         is_initialized, pos, code):
    """
    Generate code for reassigning a buffer variable. This only deals with getting
    the buffer auxiliary structure and variables set up correctly, the assignment
    itself and refcounting is the responsibility of the caller.
    However, the assignment operation may throw an exception so that the reassignment
    never happens.
    Depending on the circumstances there are two possible outcomes:
    - Old buffer released, new acquired, rhs assigned to lhs
    - Old buffer released, new acquired which fails, reacquire old lhs buffer
      (which may or may not succeed).
    """
    buffer_aux, buffer_type = buf_entry.buffer_aux, buf_entry.type
    code.globalstate.use_utility_code(acquire_utility_code)
    pybuffernd_struct = buffer_aux.buflocal_nd_var.cname
    flags = get_flags(buffer_aux, buffer_type)
    code.putln("{")  # Set up necessary stack for getbuffer
    code.putln("__Pyx_BufFmt_StackElem __pyx_stack[%d];" % buffer_type.dtype.struct_nesting_depth())
    getbuffer = get_getbuffer_call(code, "%s", buffer_aux, buffer_type) # fill in object below
    if is_initialized:
        # Release any existing buffer
        code.putln('__Pyx_SafeReleaseBuffer(&%s.rcbuffer->pybuffer);' % pybuffernd_struct)
        # Acquire the new buffer; keep the return code in a temp so the
        # error check can happen after the dimension info is unpacked.
        retcode_cname = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = %s;" % (retcode_cname, getbuffer % rhs_cname))
        code.putln('if (%s) {' % (code.unlikely("%s < 0" % retcode_cname)))
        # If acquisition failed, attempt to reacquire the old buffer
        # before raising the exception. A failure of reacquisition
        # will cause the reacquisition exception to be reported, one
        # can consider working around this later.
        type, value, tb = [code.funcstate.allocate_temp(PyrexTypes.py_object_type, manage_ref=False)
                           for i in range(3)]
        # Preserve the pending exception across the reacquisition attempt.
        code.putln('PyErr_Fetch(&%s, &%s, &%s);' % (type, value, tb))
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % lhs_cname)))
        code.putln('Py_XDECREF(%s); Py_XDECREF(%s); Py_XDECREF(%s);' % (type, value, tb)) # Do not refnanny these!
        code.globalstate.use_utility_code(raise_buffer_fallback_code)
        code.putln('__Pyx_RaiseBufferFallbackError();')
        code.putln('} else {')
        code.putln('PyErr_Restore(%s, %s, %s);' % (type, value, tb))
        for t in (type, value, tb):
            code.funcstate.release_temp(t)
        code.putln('}')
        code.putln('}')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln(code.error_goto_if_neg(retcode_cname, pos))
        code.funcstate.release_temp(retcode_cname)
    else:
        # Our entry had no previous value, so set to None when acquisition fails.
        # In this case, auxiliary vars should be set up right in initialization to a zero-buffer,
        # so it suffices to set the buf field to NULL.
        code.putln('if (%s) {' % code.unlikely("%s == -1" % (getbuffer % rhs_cname)))
        code.putln('%s = %s; __Pyx_INCREF(Py_None); %s.rcbuffer->pybuffer.buf = NULL;' %
                   (lhs_cname,
                    PyrexTypes.typecast(buffer_type, PyrexTypes.py_object_type, "Py_None"),
                    pybuffernd_struct))
        code.putln(code.error_goto(pos))
        code.put('} else {')
        # Unpack indices
        put_unpack_buffer_aux_into_scope(buf_entry, code)
        code.putln('}')
    code.putln("}") # Release stack
def put_buffer_lookup_code(entry, index_signeds, index_cnames, directives,
                           pos, code, negative_indices, in_nogil_context):
    """
    Generates code to process indices and calculate an offset into
    a buffer. Returns a C string which gives a pointer which can be
    read from or written to at will (it is an expression so caller should
    store it in a temporary if it is used more than once).

    As the bounds checking can have any number of combinations of unsigned
    arguments, smart optimizations etc. we insert it directly in the function
    body. The lookup however is delegated to a inline function that is instantiated
    once per ndim (lookup with suboffsets tend to get quite complicated).

    entry is a BufferEntry
    """
    # Wraparound is only applied when the 'wraparound' directive allows it.
    negative_indices = directives['wraparound'] and negative_indices
    if directives['boundscheck']:
        # Check bounds and fix negative indices.
        # We allocate a temporary which is initialized to -1, meaning OK (!).
        # If an error occurs, the temp is set to the index dimension the
        # error is occurring at.
        failed_dim_temp = code.funcstate.allocate_temp(PyrexTypes.c_int_type, manage_ref=False)
        code.putln("%s = -1;" % failed_dim_temp)
        for dim, (signed, cname, shape) in enumerate(zip(index_signeds, index_cnames, entry.get_buf_shapevars())):
            if signed != 0:
                # not unsigned, deal with negative index
                code.putln("if (%s < 0) {" % cname)
                if negative_indices:
                    # Wrap around, then re-check it became non-negative.
                    code.putln("%s += %s;" % (cname, shape))
                    code.putln("if (%s) %s = %d;" % (
                        code.unlikely("%s < 0" % cname),
                        failed_dim_temp, dim))
                else:
                    code.putln("%s = %d;" % (failed_dim_temp, dim))
                code.put("} else ")
            # check bounds in positive direction
            if signed != 0:
                cast = ""
            else:
                # Compare unsigned indices without a signedness warning.
                cast = "(size_t)"
            code.putln("if (%s) %s = %d;" % (
                code.unlikely("%s >= %s%s" % (cname, cast, shape)),
                failed_dim_temp, dim))
        # Pick the error-raising helper suitable for the GIL state.
        if in_nogil_context:
            code.globalstate.use_utility_code(raise_indexerror_nogil)
            func = '__Pyx_RaiseBufferIndexErrorNogil'
        else:
            code.globalstate.use_utility_code(raise_indexerror_code)
            func = '__Pyx_RaiseBufferIndexError'
        code.putln("if (%s) {" % code.unlikely("%s != -1" % failed_dim_temp))
        code.putln('%s(%s);' % (func, failed_dim_temp))
        code.putln(code.error_goto(pos))
        code.putln('}')
        code.funcstate.release_temp(failed_dim_temp)
    elif negative_indices:
        # Only fix negative indices.
        for signed, cname, shape in zip(index_signeds, index_cnames, entry.get_buf_shapevars()):
            if signed != 0:
                code.putln("if (%s < 0) %s += %s;" % (cname, cname, shape))
    return entry.generate_buffer_lookup_code(code, index_cnames)
def use_bufstruct_declare_code(env):
    """Ensure the buffer bookkeeping struct declarations are emitted for *env*."""
    env.use_utility_code(buffer_struct_declare_code)
def get_empty_bufstruct_code(max_ndim):
    """Return a UtilityCode declaring the shared __Pyx_zeros/__Pyx_minusones
    arrays, each *max_ndim* entries long."""
    zeros = ", ".join(["0"] * max_ndim)
    minus_ones = ", ".join(["-1"] * max_ndim)
    code = dedent("""
    static Py_ssize_t __Pyx_zeros[] = {%s};
    static Py_ssize_t __Pyx_minusones[] = {%s};
    """) % (zeros, minus_ones)
    return UtilityCode(proto=code)
# Shared instance sized for the configured maximum buffer dimensionality.
empty_bufstruct_utility = get_empty_bufstruct_code(Options.buffer_max_dims)
def buf_lookup_full_code(proto, defin, name, nd):
    """
    Generates a buffer lookup function for the right number
    of dimensions. The function gives back a void* at the right location.
    Used for 'full' (indirect) mode: each dimension may carry a suboffset
    that requires a pointer dereference.
    """
    # _i_ndex, _s_tride, sub_o_ffset
    macroargs = ", ".join(["i%d, s%d, o%d" % (i, i, i) for i in range(nd)])
    # The macro casts the result of the inline implementation function.
    proto.putln("#define %s(type, buf, %s) (type)(%s_imp(buf, %s))" % (name, macroargs, name, macroargs))
    funcargs = ", ".join(["Py_ssize_t i%d, Py_ssize_t s%d, Py_ssize_t o%d" % (i, i, i) for i in range(nd)])
    proto.putln("static CYTHON_INLINE void* %s_imp(void* buf, %s);" % (name, funcargs))
    # Per dimension: advance by stride, then follow the indirection if the
    # suboffset is non-negative.
    defin.putln(dedent("""
    static CYTHON_INLINE void* %s_imp(void* buf, %s) {
    char* ptr = (char*)buf;
    """) % (name, funcargs) + "".join([dedent("""\
    ptr += s%d * i%d;
    if (o%d >= 0) ptr = *((char**)ptr) + o%d;
    """) % (i, i, i, i) for i in range(nd)]
    ) + "\nreturn ptr;\n}")
def buf_lookup_strided_code(proto, defin, name, nd):
    """Write the lookup macro for a strided buffer: the element address is the
    base pointer plus the sum of index*stride over all dimensions."""
    # Macro arguments: i<d> (index) and s<d> (stride) per dimension.
    index_args = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(nd))
    proto.putln("#define %s(type, buf, %s) (type)((char*)buf + %s)" % (name, index_args, byte_offset))
def buf_lookup_c_code(proto, defin, name, nd):
    """Write the lookup macro for a C-contiguous buffer.

    Like the strided lookup, but the last dimension is known to be
    contiguous, so its index is added directly to the typed pointer with
    no stride multiplication. The signature keeps the unused stride
    arguments so all lookup macros look alike.
    """
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
        return
    index_args = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(nd - 1))
    proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i%d)" % (
        name, index_args, byte_offset, nd - 1))
def buf_lookup_fortran_code(proto, defin, name, nd):
    """Write the lookup macro for a Fortran-contiguous buffer: same as the
    C-contiguous lookup, but the *first* dimension is the contiguous one."""
    if nd == 1:
        proto.putln("#define %s(type, buf, i0, s0) ((type)buf + i0)" % name)
        return
    index_args = ", ".join("i%d, s%d" % (dim, dim) for dim in range(nd))
    byte_offset = " + ".join("i%d * s%d" % (dim, dim) for dim in range(1, nd))
    proto.putln("#define %s(type, buf, %s) ((type)((char*)buf + %s) + i0)" % (
        name, index_args, byte_offset))
def use_py2_buffer_functions(env):
    """Register the Python 2 PyObject_GetBuffer/PyBuffer_Release emulation."""
    env.use_utility_code(GetAndReleaseBufferUtilityCode())
class GetAndReleaseBufferUtilityCode(object):
    # Emulation of PyObject_GetBuffer and PyBuffer_Release for Python 2.
    # For >= 2.6 we do double mode -- use the new buffer interface on objects
    # which has the right tp_flags set, but emulation otherwise.
    requires = None
    is_cython_utility = False
    def __init__(self):
        pass
    # All instances are interchangeable, so they compare equal and share a
    # fixed (arbitrary) hash; this deduplicates the utility code.
    def __eq__(self, other):
        return isinstance(other, GetAndReleaseBufferUtilityCode)
    def __hash__(self):
        return 24342342
    def get_tree(self): pass
    def put_code(self, output):
        """Emit the GetAndReleaseBuffer utility code, specialised with every
        extension type in scope that overloads __getbuffer__."""
        code = output['utility_code_def']
        proto_code = output['utility_code_proto']
        env = output.module_node.scope
        cython_scope = env.context.cython_scope
        # Search all types for __getbuffer__ overloads
        types = []
        visited_scopes = set()
        def find_buffer_types(scope):
            # Recursively walk this scope and all cimported modules once.
            if scope in visited_scopes:
                return
            visited_scopes.add(scope)
            for m in scope.cimported_modules:
                find_buffer_types(m)
            for e in scope.type_entries:
                if isinstance(e.utility_code_definition, CythonUtilityCode):
                    continue
                t = e.type
                if t.is_extension_type:
                    if scope is cython_scope and not e.used:
                        continue
                    release = get = None
                    for x in t.scope.pyfunc_entries:
                        if x.name == u"__getbuffer__": get = x.func_cname
                        elif x.name == u"__releasebuffer__": release = x.func_cname
                    if get:
                        types.append((t.typeptr_cname, get, release))
        find_buffer_types(env)
        util_code = TempitaUtilityCode.load(
            "GetAndReleaseBuffer", from_file="Buffer.c",
            context=dict(types=types))
        proto = util_code.format_code(util_code.proto)
        impl = util_code.format_code(
            util_code.inject_string_constants(util_code.impl, output)[1])
        proto_code.putln(proto)
        code.putln(impl)
def mangle_dtype_name(dtype):
    """Return a C-identifier-safe name for *dtype*.

    Prefixes separate user-defined types from builtins
    (consider "typedef float unsigned_int").
    """
    if dtype.is_pyobject:
        return "object"
    if dtype.is_ptr:
        return "ptr"
    prefix = "nn_" if (dtype.is_typedef or dtype.is_struct_or_union) else ""
    # Replace characters that cannot appear in a C identifier.
    mangled = dtype.empty_declaration_code().replace(" ", "_")
    return prefix + mangled.replace("[", "_").replace("]", "_")
def get_type_information_cname(code, dtype, maxdepth=None):
    """
    Output the run-time type information (__Pyx_TypeInfo) for given dtype,
    and return the name of the type info struct.

    Structs with two floats of the same size are encoded as complex numbers.
    One can separate between complex numbers declared as struct or with native
    encoding by inspecting to see if the fields field of the type is
    filled in.
    """
    namesuffix = mangle_dtype_name(dtype)
    name = "__Pyx_TypeInfo_%s" % namesuffix
    structinfo_name = "__Pyx_StructFields_%s" % namesuffix
    if dtype.is_error: return "<error>"
    # It's critical that walking the type info doesn't use more stack
    # depth than dtype.struct_nesting_depth() returns, so use an assertion for this
    if maxdepth is None: maxdepth = dtype.struct_nesting_depth()
    if maxdepth <= 0:
        assert False
    # Emit the typeinfo at most once per mangled name.
    if name not in code.globalstate.utility_codes:
        code.globalstate.utility_codes.add(name)
        typecode = code.globalstate['typeinfo']
        arraysizes = []
        if dtype.is_array:
            # Peel off all array dimensions, recording their sizes.
            while dtype.is_array:
                arraysizes.append(dtype.size)
                dtype = dtype.base_type
        complex_possible = dtype.is_struct_or_union and dtype.can_be_complex()
        declcode = dtype.empty_declaration_code()
        if dtype.is_simple_buffer_dtype():
            structinfo_name = "NULL"
        elif dtype.is_struct:
            fields = dtype.scope.var_entries
            # Must pre-call all used types in order not to recurse utility code
            # writing.
            assert len(fields) > 0
            types = [get_type_information_cname(code, f.type, maxdepth - 1)
                     for f in fields]
            typecode.putln("static __Pyx_StructField %s[] = {" % structinfo_name, safe=True)
            for f, typeinfo in zip(fields, types):
                typecode.putln(' {&%s, "%s", offsetof(%s, %s)},' %
                               (typeinfo, f.name, dtype.empty_declaration_code(), f.cname), safe=True)
            typecode.putln(' {NULL, NULL, 0}', safe=True)
            typecode.putln("};", safe=True)
        else:
            assert False
        rep = str(dtype)
        flags = "0"
        is_unsigned = "0"
        # Classify the dtype into the single-character type group used by
        # the run-time format checking code.
        if dtype is PyrexTypes.c_char_type:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "'H'"
        elif dtype.is_int:
            is_unsigned = "IS_UNSIGNED(%s)" % declcode
            typegroup = "%s ? 'U' : 'I'" % is_unsigned
        elif complex_possible or dtype.is_complex:
            typegroup = "'C'"
        elif dtype.is_float:
            typegroup = "'R'"
        elif dtype.is_struct:
            typegroup = "'S'"
            if dtype.packed:
                flags = "__PYX_BUF_FLAGS_PACKED_STRUCT"
        elif dtype.is_pyobject:
            typegroup = "'O'"
        else:
            assert False, dtype
        typeinfo = ('static __Pyx_TypeInfo %s = '
                    '{ "%s", %s, sizeof(%s), { %s }, %s, %s, %s, %s };')
        tup = (name, rep, structinfo_name, declcode,
               ', '.join([str(x) for x in arraysizes]) or '0', len(arraysizes),
               typegroup, is_unsigned, flags)
        typecode.putln(typeinfo % tup, safe=True)
    return name
def load_buffer_utility(util_code_name, context=None, **kwargs):
    """Load a utility-code snippet from Buffer.c; use the Tempita templated
    loader when a template *context* is supplied."""
    if context is None:
        return UtilityCode.load(util_code_name, "Buffer.c", **kwargs)
    return TempitaUtilityCode.load(util_code_name, "Buffer.c", context=context, **kwargs)
# Module-level utility-code snippets, loaded once at import time.
context = dict(max_dims=str(Options.buffer_max_dims))
buffer_struct_declare_code = load_buffer_utility("BufferStructDeclare",
                                                 context=context)
# Utility function to set the right exception
# The caller should immediately goto_error
raise_indexerror_code = load_buffer_utility("BufferIndexError")
raise_indexerror_nogil = load_buffer_utility("BufferIndexErrorNogil")
raise_buffer_fallback_code = load_buffer_utility("BufferFallbackError")
# Struct definitions shared by the format-checking helpers below.
buffer_structs_code = load_buffer_utility(
    "BufferFormatStructs", proto_block='utility_code_proto_before_types')
acquire_utility_code = load_buffer_utility("BufferFormatCheck",
                                           context=context,
                                           requires=[buffer_structs_code])
# See utility code BufferFormatFromTypeInfo
_typeinfo_to_format_code = load_buffer_utility("TypeInfoToFormat", context={},
                                               requires=[buffer_structs_code])
typeinfo_compare_code = load_buffer_utility("TypeInfoCompare", context={},
                                            requires=[buffer_structs_code])
|
daviddrysdale/python-phonenumbers | refs/heads/dev | python/phonenumbers/data/alt_format_64.py | 1 | """Auto-generated file, do not edit by hand. 64 metadata"""
from ..phonemetadata import NumberFormat
PHONE_ALT_FORMAT_64 = [NumberFormat(pattern='(\\d)(\\d{4})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['[3467]|9[2-9]']), NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{2})', format='\\1 \\2 \\3', leading_digits_pattern=['[89]0'])]
|
MetrodataTeam/incubator-airflow | refs/heads/master | airflow/api/client/local_client.py | 25 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.api.client import api_client
from airflow.api.common.experimental import pool
from airflow.api.common.experimental import trigger_dag
class Client(api_client.Client):
    """Local API client implementation.

    Dispatches each call directly to the in-process airflow APIs instead
    of going through an HTTP endpoint.
    """

    @staticmethod
    def _describe_pool(record):
        # Normalise a pool model object into the (name, slots, description)
        # tuple this client returns everywhere.
        return record.pool, record.slots, record.description

    def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None):
        dag_run = trigger_dag.trigger_dag(dag_id=dag_id,
                                          run_id=run_id,
                                          conf=conf,
                                          execution_date=execution_date)
        return "Created {}".format(dag_run)

    def get_pool(self, name):
        return self._describe_pool(pool.get_pool(name=name))

    def get_pools(self):
        return [self._describe_pool(record) for record in pool.get_pools()]

    def create_pool(self, name, slots, description):
        record = pool.create_pool(name=name, slots=slots, description=description)
        return self._describe_pool(record)

    def delete_pool(self, name):
        return self._describe_pool(pool.delete_pool(name=name))
|
mitchelljkotler/django | refs/heads/master | tests/auth_tests/test_context_processors.py | 269 | import datetime
from django.contrib.auth import authenticate
from django.contrib.auth.context_processors import PermLookupDict, PermWrapper
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.test import SimpleTestCase, TestCase, override_settings
from .settings import AUTH_MIDDLEWARE_CLASSES, AUTH_TEMPLATES
class MockUser(object):
    """Minimal user stand-in: grants only the 'mockapp' module and the
    'mockapp.someperm' permission."""

    def has_module_perms(self, module):
        return module == 'mockapp'

    def has_perm(self, perm):
        return perm == 'mockapp.someperm'
class PermWrapperTests(SimpleTestCase):
    """
    Test some details of the PermWrapper implementation.
    """
    class EQLimiterObject(object):
        """
        Object whose __eq__ returns False exactly once and True afterwards,
        making sure __eq__ will not be called endlessly.
        """
        def __init__(self):
            self.eq_calls = 0

        def __eq__(self, other):
            if self.eq_calls > 0:
                return True
            self.eq_calls += 1
            return False

    def test_permwrapper_in(self):
        """
        Test that 'something' in PermWrapper works as expected.
        """
        wrapper = PermWrapper(MockUser())
        # Both module-level and fully qualified permissions work.
        for present in ('mockapp', 'mockapp.someperm'):
            self.assertIn(present, wrapper)
        for absent in ('nonexisting', 'mockapp.nonexisting'):
            self.assertNotIn(absent, wrapper)

    def test_permlookupdict_in(self):
        """
        No endless loops if accessed with 'in' - refs #18979.
        """
        lookup = PermLookupDict(MockUser(), 'mockapp')
        with self.assertRaises(TypeError):
            self.EQLimiterObject() in lookup
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF='auth_tests.urls',
    TEMPLATES=AUTH_TEMPLATES,
    USE_TZ=False,  # required for loading the fixture
)
class AuthContextProcessorTests(TestCase):
    """
    Tests for the ``django.contrib.auth.context_processors.auth`` processor
    """
    @classmethod
    def setUpTestData(cls):
        # password = "secret"
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
    @override_settings(MIDDLEWARE_CLASSES=AUTH_MIDDLEWARE_CLASSES)
    def test_session_not_accessed(self):
        """
        Tests that the session is not accessed simply by including
        the auth context processor
        """
        response = self.client.get('/auth_processor_no_attr_access/')
        self.assertContains(response, "Session not accessed")
    @override_settings(MIDDLEWARE_CLASSES=AUTH_MIDDLEWARE_CLASSES)
    def test_session_is_accessed(self):
        """
        Tests that the session is accessed if the auth context processor
        is used and relevant attributes accessed.
        """
        response = self.client.get('/auth_processor_attr_access/')
        self.assertContains(response, "Session accessed")
    def test_perms_attrs(self):
        """
        The 'perms' context variable resolves module and full permissions
        for a user granted a single permission.
        """
        u = User.objects.create_user(username='normal', password='secret')
        u.user_permissions.add(
            Permission.objects.get(
                content_type=ContentType.objects.get_for_model(Permission),
                codename='add_permission'))
        self.client.login(username='normal', password='secret')
        response = self.client.get('/auth_processor_perms/')
        self.assertContains(response, "Has auth permissions")
        self.assertContains(response, "Has auth.add_permission permissions")
        self.assertNotContains(response, "nonexisting")
    def test_perm_in_perms_attrs(self):
        """
        The {% if perm in perms %} template syntax works against the
        'perms' context variable.
        """
        u = User.objects.create_user(username='normal', password='secret')
        u.user_permissions.add(
            Permission.objects.get(
                content_type=ContentType.objects.get_for_model(Permission),
                codename='add_permission'))
        self.client.login(username='normal', password='secret')
        response = self.client.get('/auth_processor_perm_in_perms/')
        self.assertContains(response, "Has auth permissions")
        self.assertContains(response, "Has auth.add_permission permissions")
        self.assertNotContains(response, "nonexisting")
    def test_message_attrs(self):
        """Messages set on the request are exposed to the template."""
        self.client.login(username='super', password='secret')
        response = self.client.get('/auth_processor_messages/')
        self.assertContains(response, "Message 1")
    def test_user_attrs(self):
        """
        Test that the lazy objects returned behave just like the wrapped objects.
        """
        # These are 'functional' level tests for common use cases.  Direct
        # testing of the implementation (SimpleLazyObject) is in the 'utils'
        # tests.
        self.client.login(username='super', password='secret')
        user = authenticate(username='super', password='secret')
        response = self.client.get('/auth_processor_user/')
        self.assertContains(response, "unicode: super")
        self.assertContains(response, "id: 100")
        self.assertContains(response, "username: super")
        # bug #12037 is tested by the {% url %} in the template:
        self.assertContains(response, "url: /userpage/super/")
        # See if this object can be used for queries where a Q() comparing
        # a user can be used with another Q() (in an AND or OR fashion).
        # This simulates what a template tag might do with the user from the
        # context. Note that we don't need to execute a query, just build it.
        #
        # The failure case (bug #12049) on Python 2.4 with a LazyObject-wrapped
        # User is a fatal TypeError: "function() takes at least 2 arguments
        # (0 given)" deep inside deepcopy().
        #
        # Python 2.5 and 2.6 succeeded, but logged internally caught exception
        # spew:
        #
        #    Exception RuntimeError: 'maximum recursion depth exceeded while
        #    calling a Python object' in <type 'exceptions.AttributeError'>
        #    ignored"
        Q(user=response.context['user']) & Q(someflag=True)
        # Tests for user equality.  This is hard because User defines
        # equality in a non-duck-typing way
        # See bug #12060
        self.assertEqual(response.context['user'], user)
        self.assertEqual(user, response.context['user'])
|
reflexsc/reflex | refs/heads/master | src/rfx/client.py | 1 | #$#HEADER-START
# vim:set expandtab ts=4 sw=4 ai ft=python:
#
# Reflex Configuration Event Engine
#
# Copyright (C) 2016 Brandon Gillespie
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#$#HEADER-END
"""
Reflex Engine Client
Wrapper handling Reflex Apikey
Future: add SSL client certificates
"""
import urllib
import time
import base64
import requests
import nacl.utils
#import uuid
import jwt
import dictlib
import rfx
from rfx import threadlock, json2data, json4human, json4store
################################################################################
class Unauthorized(Exception):
    """Raised when Reflex Engine rejects the apikey or session credentials."""
    pass
################################################################################
class ClientError(Exception):
    """Raised for API call failures: server errors, missing endpoints/objects,
    or responses that are not valid JSON content."""
    pass
################################################################################
# pylint: disable=too-many-instance-attributes
class Session(rfx.Base):
    """Session object, which logs into Reflex Engine and manages the session"""
    session_jti = None     # JWT id ("jti") returned with the current session
    session_sid = None     # server-issued session id, sent back as 'sid' cookie
    session_secret = None  # decoded per-session secret used to sign call tokens
    session_expires = 0    # epoch time after which the session must be renewed
    apikey_name = None     # name half of REFLEX_APIKEY (set on first login)
    apikey_secret = None   # decoded secret half of REFLEX_APIKEY
    _cache = None          # per-instance dict; not used within this chunk
    headers = None         # extra HTTP headers applied to every request
def __init__(self, **kwargs):
super(Session, self).__init__(**kwargs)
base = kwargs.get('base')
if base:
rfx.Base.__inherit__(self, base)
self._cache = dict()
self.headers = dict()
############################################################################
    def _login(self, force=False):
        """Use the Apikey to get a session key.

        No-op while the current session is still valid, unless *force* is set.
        Raises Unauthorized if the token endpoint does not return HTTP 200.
        """
        if not force and self.session_expires > time.time():
            return
        if not self.apikey_name:
            # REFLEX_APIKEY is "<name>.<base64 secret>"; appending "." makes the
            # split always yield two parts even if the secret is missing.
            name, b64_secret = (self.cfg['REFLEX_APIKEY'] + ".").split(".")[0:2]
            self.apikey_name = name
            self.apikey_secret = base64.b64decode(b64_secret)
        # Short-lived (5 minute) JWT signed with the apikey secret; the random
        # seed makes each token unique.
        key_jwt = jwt.encode(dict(
            jti=self.apikey_name,
            #seed=str(uuid.uuid4()), #base64.b64encode(nacl.utils.random(256)).decode(),
            seed=base64.b64encode(nacl.utils.random(256)).decode(),
            exp=time.time() + 300
        ), self.apikey_secret)
        headers = self.headers.copy()
        headers["X-Apikey"] = key_jwt
        headers["Content-Type"] = "application/json"
        result = requests.get(self.cfg['REFLEX_URL'] + "/token", headers=headers)
        if result.status_code == 200:
            self.DEBUG("Authorized")
            data = json2data(result.content.decode())
            # Store the session credentials for subsequent _call_sub() calls.
            self.session_sid = data['session']
            self.session_jti = data['jti']
            self.session_secret = base64.b64decode(data['secret'])
            self.session_expires = data['expires_at']
        else:
            if self.do_DEBUG():
                self.DEBUG("Failed to authorize:\n\tHTTP {}\n{}\n\n{}\n".format(
                    result.status_code,
                    json4human(dict(result.headers)),
                    json2data(result.content.decode())))
            raise Unauthorized("Unable to authorize session")
############################################################################
# pylint: disable=too-many-branches
    def _call(self, func, target, *args, **kwargs):
        """Call Reflex Engine, wrapped with authentication and session management.

        *func* is a requests function (get/post/put/...), *target* the
        endpoint path. Returns the decoded JSON body ({} for HTTP 204);
        raises ClientError on failure, and aborts on login/connect problems.
        """
        try:
            self._login()
        except Unauthorized as err:
            self.ABORT("Unauthorized: " + str(err))
        except requests.exceptions.ConnectionError:
            self.ABORT("Unable to connect to REFLEX_URL ({})".format(self.cfg['REFLEX_URL']))
        # enrich the arguments: merge session headers with any caller headers
        headers = self.headers.copy()
        if kwargs.get('headers'):
            headers = dictlib.union(headers, kwargs['headers'])
        if not kwargs.get('cookies'):
            kwargs['cookies'] = {}
        if not headers.get('Content-Type'):
            headers['Content-Type'] = "application/json"
        kwargs['headers'] = headers
        query = self.cfg['REFLEX_URL'] + "/" + target
        # Debug mode: ask the server to log its ABAC decisions for this call.
        if self.debug.get('remote-abac'):
            if "?" in query:
                query += "&"
            else:
                query += "?"
            query += "abac=log"
        # make the call
        result = self._call_sub(func, query, *args, **kwargs)
        # unlikely, as self._login() should take care of this, unless our timing
        # is off from the server's, but just in case...
        if result.status_code == 401:
            self.DEBUG("Unauthorized received, Retrying Login")
            self._login(force=True)
            result = self._call_sub(func, query, *args, **kwargs)
        if result.status_code == 500:
            raise ClientError("Server side error")
        if result.status_code == 404:
            raise ClientError("Endpoint or object not found (" + query + ")")
        if "application/json" not in result.headers.get('Content-Type', ''):
            self.DEBUG("error", result.content.decode())
            raise ClientError("Result is not valid content type")
        if result.status_code == 204:
            # No content: report success with an empty dict.
            return {}
        if result.status_code in (200, 201, 202):
            return result.json()
        # Any other status: surface the server-provided message.
        raise ClientError(result.json()['message'])
############################################################################
    def _call_sub(self, func, *args, **kwargs):
        """
        Subcall of call: perform one HTTP request with a fresh, short-lived
        (60s) JWT derived from the current session secret, plus the session
        cookie.  Returns the raw requests Response.
        """
        auth_jwt = jwt.encode(dict(
            jti=self.session_jti,
            exp=time.time() + 60
        ), self.session_secret)
        dbg = self.do_DEBUG()
        kwargs['cookies']['sid'] = self.session_sid
        if self.headers:
            # session-level headers override per-call ones here
            kwargs['headers'].update(self.headers)
        kwargs['headers']['X-ApiToken'] = auth_jwt
        if dbg:
            self.DEBUG("auth", jwt=auth_jwt)
            self.DEBUG("call", func=func, args=args, kwargs=kwargs)
        return func(*args, **kwargs)
############################################################################
def get(self, obj_type, obj_target, archive=False):
"""session GET"""
args = []
if archive:
args.append("archive=" + str(archive['start']))
return self._call(requests.get,
obj_type + "/" + str(obj_target) + "?" + "&".join(args))
############################################################################
# pylint: disable=too-many-arguments
    def list(self, obj_type, match=None, cols=None, raise_error=True, archive=False):
        """
        session LIST. Match is a glob pattern (optional), cols is a list
        of column names.  `archive` is a mapping with 'start'/'end' keys
        selecting an archived time range.  With raise_error=False any
        failure yields an empty list instead of an exception.
        """
        args = []
        if match:
            try: # stupid python2
                args.append("match=" + urllib.parse.quote(match))
            except: # pylint: disable=bare-except, no-member
                # python2 fallback: urllib.parse does not exist there
                args.append("match=" + urllib.pathname2url(match))
        if archive:
            args.append("archive=" +
                        str(archive['start']) + "~" +
                        str(archive['end']))
        if cols:
            args.append("cols=" + ",".join(cols))
        querystr = obj_type + "/"
        if args:
            querystr += "?" + "&".join(args)
        if raise_error:
            return self._call(requests.get, querystr)
        else:
            try:
                return self._call(requests.get, querystr)
            except: # pylint: disable=bare-except
                # note: `list` here resolves to the builtin, not this method
                return list()
############################################################################
def create(self, obj_type, obj_data):
"""session CREATE"""
return self._call(requests.post, obj_type, data=json4store(obj_data))
############################################################################
def update(self, obj_type, obj_target, obj_data):
"""session UPDATE"""
path = obj_type + "/" + str(obj_target)
return self._call(requests.put, path, data=json4store(obj_data))
############################################################################
def patch(self, obj_type, obj_target, obj_data):
"""session PATCH"""
path = obj_type + "/" + str(obj_target) + "?merge=true"
return self._call(requests.put, path, data=json4store(obj_data))
############################################################################
def instance_ping(self, obj_target, obj_data):
"""special instance update ping"""
path = "instance-ping/" + str(obj_target)
return self._call(requests.put, path, data=json4store(obj_data))
############################################################################
def delete(self, obj_type, obj_target):
"""session DELETE"""
return self._call(requests.delete, obj_type + "/" + str(obj_target))
############################################################################
    @threadlock
    def cache_get(self, obj_type, obj_target, **kwargs):
        """
        Cache wrapper around .get().

        On a ClientError the miss is negatively cached as None and the
        error re-raised; a later hit for the same key then returns None
        rather than raising again.
        NOTE(review): confirm callers expect that None-on-second-lookup
        behavior — it differs from the first call's exception.
        """
        if obj_type in self._cache:
            if obj_target in self._cache[obj_type]:
                return self._cache[obj_type][obj_target]
        else:
            self._cache[obj_type] = dict()
        try:
            obj = self.get(obj_type, obj_target, **kwargs)
        except ClientError:
            # negative-cache the failed lookup, then propagate
            self._cache[obj_type][obj_target] = None
            raise
        self._cache[obj_type][obj_target] = obj
        return obj
############################################################################
    @threadlock
    def cache_update(self, obj_type, obj_target, payload, **kwargs):
        """
        Cache wrapper around .update(): on success, invalidate any cached
        copy of the object so the next get re-fetches it.

        NOTE(review): .update() as defined in this file takes no extra
        keyword arguments, so passing any **kwargs here would raise
        TypeError — confirm whether kwargs support is intended.
        """
        value = self.update(obj_type, obj_target, payload, **kwargs)
        if value:
            if obj_type in self._cache:
                if obj_target in self._cache[obj_type]:
                    del self._cache[obj_type][obj_target]
        return value
###########################################################################
@threadlock
def cache_list(self, obj_type, **kwargs):
"""
Cache wrapper around .list()
"""
if not obj_type in self._cache:
self._cache[obj_type] = {}
objs = self.list(obj_type, **kwargs)
for obj in objs:
oname = obj.get('name')
if oname:
self._cache[obj_type][oname] = obj
return objs
###########################################################################
@threadlock
def cache_reset(self):
"""Clear the cache"""
self._cache = dict()
###########################################################################
def cache_drop(self, obj_type, obj_target):
"""Drop an object from the cache, if it exists"""
if obj_type in self._cache and obj_target in self._cache[obj_type]:
del self._cache[obj_type][obj_target]
|
Aaron0927/xen-4.2.1 | refs/heads/master | tools/xm-test/tests/network-attach/03_network_attach_detach_multiple_pos.py | 42 | #!/usr/bin/python
# Copyright (C) International Business Machines Corp., 2005
# Author: Murillo F. Bernardes <mfb@br.ibm.com>
import sys
import re
import time
from XmTestLib import *
from XmTestLib.network_utils import *
# Python 2 test script: repeatedly attach and detach a virtual NIC on a
# running domain, failing fast on any error.
if ENABLE_HVM_SUPPORT:
    SKIP("Network-attach not supported for HVM domains")
# Create a domain (default XmTestDomain, with our ramdisk)
domain = XmTestDomain()
try:
    console = domain.start()
except DomainError, e:
    if verbose:
        print "Failed to create test domain because:"
        print e.extra
    FAIL(str(e))
# echo console traffic for easier failure diagnosis
console.debugMe = True
try:
    # Run 'ls' as a sanity check that the console is responsive
    run = console.runCmd("ls")
except ConsoleError, e:
    saveLog(console.getHistory())
    FAIL(str(e))
# attach then immediately detach ten NICs in sequence
for i in range(10):
    print "Attaching %d device" % i
    status, msg = network_attach(domain.getName(), console)
    if status:
        FAIL(msg)
    print "Detaching %d device" % i
    status, msg = network_detach(domain.getName(), console, i)
    if status:
        FAIL(msg)
# Close the console
domain.closeConsole()
# Stop the domain (nice shutdown)
domain.stop()
|
authurlan/amdfin | refs/heads/develop | server/main.py | 1 | # -*- coding: utf-8 -*-
import logging
import traceback
from bottle import Bottle, run
from beaker.middleware import SessionMiddleware
import const
from app import UserApp, AnimtApp, TmpAnimtApp
def wcgi_app():
    """Build the Bottle application tree and wrap it in Beaker session
    middleware.  Returns the WSGI callable to hand to the server."""
    app = Bottle()
    #app.mount("/server", ServerApp())
    app.mount("/api/user", UserApp())
    app.mount("/api/animation", AnimtApp())
    app.mount("/api/tmpanimt", TmpAnimtApp())
    #app.mount("/movie", Movie())
    #app.mount("/drama", Drama())
    # file-backed sessions under /tmp, auto-saved on each request
    session_opts = {
        "session.type": "file",
        "session.data_dir": "/tmp/amdfin/data",
        "session.lock_dir": "/tmp/amdfin/lock",
        "session.auto": True
    }
    return SessionMiddleware(app, session_opts)
def main():
    """Run the API under the Twisted server; log (not raise) startup errors."""
    try:
        app = wcgi_app()
        run(app, host=const.SERVER_HOST, port=const.SERVER_PORT,
            server="twisted")
    except Exception as e:
        logging.error("Failed to start server at %s:%s, %s",
                      const.SERVER_HOST, const.SERVER_PORT, e)
        logging.error(traceback.format_exc())
# WSGI entry point for hosting servers (e.g. uWSGI/mod_wsgi) that import
# this module instead of running main().
application = wcgi_app()
#if __name__ == "__main__":
# try:
# app = wcgi_app()
# run(app, host="127.0.0.1", port=9090)
# except Exception as e:
# logging.error(traceback.format_exc())
#else:
# try:
# application = wcgi_app()
# except Exception as e:
# with open("log") as fp:
# fp.write(str(e))
# logging.error(traceback.format_exc())
# logging.error(e)
# vim: set ts=4 sw=4 sts=4 et:
|
tradel/AppDynamicsREST | refs/heads/master | appd/model/license_usage.py | 1 | """
Model classes for AppDynamics REST API
"""
from . import JsonObject, JsonList
from appd.time import from_ts
class LicenseUsage(JsonObject):
    """A single license-usage record for one account and agent type."""

    # Maps python attribute names to the JSON field names used by the API.
    FIELDS = {
        'id': '',
        'account_id': 'accountId',
        'units_used': 'unitsUsed',
        'units_allowed': 'unitsAllowed',
        'units_provisioned': 'unitsProvisioned',
        'license_module': 'agentType',
        'created_on_ms': 'createdOn',
    }

    def __init__(self, id=0, account_id=0, license_module=None, units_used=0,
                 units_allowed=0, units_provisioned=None, created_on_ms=0):
        self.id = id
        self.account_id = account_id
        self.license_module = license_module
        self.units_used = units_used
        self.units_allowed = units_allowed
        self.units_provisioned = units_provisioned
        self.created_on_ms = created_on_ms

    @property
    def created_on(self):
        """Record creation time, converted from epoch milliseconds.

        :rtype: datetime.datetime
        """
        return from_ts(self.created_on_ms)
class LicenseUsageList(JsonList):
    """A list of LicenseUsage records with simple filtering helpers."""

    def __init__(self, initial_list=None):
        super(LicenseUsageList, self).__init__(LicenseUsage, initial_list)

    def __getitem__(self, i):
        """
        :rtype: LicenseUsage
        """
        return self.data[i]

    def by_account_id(self, account_id):
        """Records belonging to the given account.

        :rtype: LicenseUsageList
        """
        matches = [rec for rec in self.data if rec.account_id == account_id]
        return LicenseUsageList(matches)

    def by_license_module(self, license_module):
        """Records for the given license module (agent type).

        :rtype: LicenseUsageList
        """
        matches = [rec for rec in self.data
                   if rec.license_module == license_module]
        return LicenseUsageList(matches)
class LicenseUsages(JsonObject):
    # No scalar fields of its own; the payload is the nested 'usages' list.
    FIELDS = {}

    def __init__(self):
        self.usages = LicenseUsageList()

    @classmethod
    def from_json(cls, json_dict):
        """Build the wrapper and hydrate its 'usages' list from the payload."""
        obj = super(LicenseUsages, cls).from_json(json_dict)
        obj.usages = LicenseUsageList.from_json(json_dict['usages'])
        return obj
|
AdrianHuang/linux-3.8.13 | refs/heads/master | tools/perf/scripts/python/netdev-times.py | 11271 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Module state shared by the tracepoint callbacks and handle_* functions.
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
		# and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
		       # skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
		       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME=	0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU=	2
EINFO_IDX_TIME=	3
EINFO_IDX_PID=	4
EINFO_IDX_COMM=	5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    """Return the interval from src (nsec) to dst (nsec) in milliseconds."""
    nsec_per_msec = 1000000.0
    return (dst - src) / nsec_per_msec
# Display a process of transmitting a packet
# Display a process of transmitting a packet
def print_transmit(hunk):
    # honor the dev= filter: skip packets from other devices
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    # columns: device, length, enqueue time, queue->xmit delay, xmit->free delay
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
    """Render one receive hunk: the hardirqs that raised NET_RX, the softirq
    entry, and every packet/napi event inside it, as a time-offset tree."""
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed (dev= filter against irq names)
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    # all offsets below are relative to the first irq entry (base_t)
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            # 'comm' means the skb was consumed via skb_copy_datagram_iovec
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                            event['comm_t'])
            print PF_JOINT
def trace_begin():
    """Parse the script's command-line options (tx, rx, dev=<name>, debug).
    With neither tx nor rx given, both charts are shown."""
    global show_tx
    global show_rx
    global dev
    global debug
    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg =='rx':
            show_rx = 1
        elif arg.find('dev=',0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    if show_tx == 0  and show_rx == 0:
        show_tx = 1
        show_rx = 1
def trace_end():
    """Dispatch the collected tracepoint events in time order, then print the
    receive/transmit reports (and buffer diagnostics in debug mode)."""
    # order all events in time
    all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
                        b[EINFO_IDX_TIME]))
    # process all events: route each record to its handle_* function
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)
    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])
    # display transmit hunks
    if show_tx:
        print "   dev    len      Qdisc        " \
            "       netdevice             free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])
    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a correspoinding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    """Tracepoint callback: queue softirq-entry events, NET_RX only."""
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    """Tracepoint callback: queue softirq-exit events, NET_RX only."""
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    """Tracepoint callback: queue softirq-raise events, NET_RX only."""
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
            irq, irq_name):
    """Tracepoint callback: queue hardirq handler entry events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            irq, irq_name)
    all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    """Tracepoint callback: queue hardirq handler exit events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    """Tracepoint callback: queue NAPI poll events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            napi, dev_name)
    all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
            skblen, dev_name):
    """Tracepoint callback: queue netif_receive_skb events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
            skblen, dev_name):
    """Tracepoint callback: queue netif_rx events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
            skbaddr, skblen, dev_name):
    """Tracepoint callback: queue dev_queue_xmit (Qdisc enqueue) events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
            skbaddr, skblen, rc, dev_name):
    """Tracepoint callback: queue driver transmit (net_dev_xmit) events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
            skbaddr, protocol, location):
    """Tracepoint callback: queue kfree_skb (skb dropped/freed) events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr, protocol, location)
    all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    """Tracepoint callback: queue consume_skb (skb consumed) events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr)
    all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
            skbaddr, skblen):
    """Tracepoint callback: queue copy-to-userspace (datagram iovec) events."""
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
            skbaddr, skblen)
    all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
    """Open a per-cpu hardirq record when an interrupt handler is entered."""
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic:
        irq_dic[cpu] = []
    record = {'irq': irq, 'name': irq_name, 'cpu': cpu, 'irq_ent_t': time}
    irq_dic[cpu].append(record)
def handle_irq_handler_exit(event_info):
    """Close the matching per-cpu hardirq record; keep it only when the irq
    raised a NET_RX softirq (i.e. accumulated an event_list)."""
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
    """Record, inside the current hardirq's event_list, that it raised the
    NET_RX softirq."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
        or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
    """Start a fresh per-cpu NET_RX record with an empty event list."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
    """On NET_RX softirq exit, fold the pending per-cpu hardirq and softirq
    state into one 'receive hunk' for later display."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    # both sides must exist, otherwise the hunk is incomplete
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information realted to a NET_RX softirq
    receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
    """Append a napi_poll event to the cpu's in-flight NET_RX record."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)
def handle_netif_rx(event_info):
    """Append a netif_rx event to the current hardirq record for this cpu."""
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
        or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
    """Record a received skb in the NET_RX record and in rx_skb_list (a
    bounded buffer used later to match copy/free events by skb address)."""
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        # enforce the buffer budget; count evictions for the debug report
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
    """Record a packet entering the Qdisc (dev_queue_xmit) in a bounded list."""
    global of_count_tx_queue_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    # enforce the buffer budget; count evictions for the debug report
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
    """On a successful driver transmit, move the matching packet from the
    Qdisc list to the bounded xmit list and stamp its xmit time."""
    global of_count_tx_xmit_list
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                # enforce the buffer budget; count evictions for debug
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return
def handle_kfree_skb(event_info):
    """Match a freed skb against the tx/rx tracking lists (first match wins):
    drop it from the Qdisc list, complete a transmit, or annotate a receive."""
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    # freed while still queued: the packet never reached the driver
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    # freed after driver xmit: transmit is complete
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    # freed on the receive side: mark how the packet was disposed of
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                    'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
def handle_consume_skb(event_info):
    """A consumed skb completes a transmit: stamp its free time and move it
    from the xmit list to the finished (free) list."""
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
def handle_skb_copy_datagram_iovec(event_info):
    """A copy-to-userspace completes a receive: record the consuming task
    and remove the skb from the pending receive list."""
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                    'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
|
gujiawen/flask_web | refs/heads/master | venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.py | 374 | import calendar
import time
from email.utils import formatdate, parsedate, parsedate_tz
from datetime import datetime, timedelta
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
def expire_after(delta, date=None):
    """Return *date* advanced by the timedelta *delta*.

    When *date* is not supplied, the current local time is used.
    """
    if not date:
        date = datetime.now()
    return date + delta
def datetime_to_header(dt):
    """Format a datetime as an RFC 2822 date string suitable for HTTP headers."""
    epoch_seconds = calendar.timegm(dt.timetuple())
    return formatdate(epoch_seconds)
class BaseHeuristic(object):
    """Base class for cache freshness heuristics applied to responses."""

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments, or None to omit the Warning header entirely.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """
        Return a dict of headers to merge into the response.

        NOTE: This SHOULD always include some Warning header to
        signify that the response was cached by the client, not
        by way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Merge the heuristic headers and Warning (if any) into *response*."""
        # compute the warning before the headers are adjusted
        warning_value = self.warning(response)
        extra_headers = self.update_headers(response)
        response.headers.update(extra_headers)
        if warning_value is not None:
            response.headers.update({'Warning': warning_value})
        return response
class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """
    def update_headers(self, response):
        # Derive Expires = Date + 1 day only when the server sent none.
        headers = {}
        if 'expires' not in response.headers:
            # NOTE(review): assumes a parseable 'date' header is present;
            # a missing/unparseable Date would raise here — confirm callers
            # guarantee it.
            date = parsedate(response.headers['date'])
            expires = expire_after(timedelta(days=1),
                                   date=datetime(*date[:6]))
            headers['expires'] = datetime_to_header(expires)
            headers['cache-control'] = 'public'
        return headers
class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # keyword arguments are handed straight to datetime.timedelta
        # (days=..., hours=..., minutes=..., etc.)
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        """Pin an Expires header *delta* in the future and mark it public."""
        expiry = expire_after(self.delta)
        headers = {}
        headers['expires'] = datetime_to_header(expiry)
        headers['cache-control'] = 'public'
        return headers

    def warning(self, response):
        template = '110 - Automatically cached for %s. Response might be stale'
        return template % self.delta
class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """
    # status codes the RFC allows to be cached without explicit freshness info
    cacheable_by_default_statuses = set([
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    ])

    def update_headers(self, resp):
        """Return {'expires': ...} when the heuristic applies, else {}."""
        headers = resp.headers

        # bail out whenever explicit freshness info exists or caching is
        # not allowed: each guard below returns an empty adjustment
        if 'expires' in headers:
            return {}

        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if 'date' not in headers or 'last-modified' not in headers:
            return {}

        date = calendar.timegm(parsedate_tz(headers['date']))
        last_modified = parsedate(headers['last-modified'])
        if date is None or last_modified is None:
            return {}

        # freshness = min(10% of (Date - Last-Modified), 24h), per RFC 7234
        now = time.time()
        current_age = max(0, now - date)
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        # returning None suppresses the Warning header for this heuristic
        return None
|
darkleons/odoo | refs/heads/master | addons/account/__openerp__.py | 41 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'eInvoicing',
'version' : '1.1',
'author' : 'OpenERP SA',
'category' : 'Accounting & Finance',
'description' : """
Accounting and Financial Management.
====================================
Financial and accounting module that covers:
--------------------------------------------
* General Accounting
* Cost/Analytic accounting
* Third party accounting
* Taxes management
* Budgets
* Customer and Supplier Invoices
* Bank statements
* Reconciliation process by partner
Creates a dashboard for accountants that includes:
--------------------------------------------------
* List of Customer Invoices to Approve
* Company Analysis
* Graph of Treasury
Processes like maintaining general ledgers are done through the defined Financial Journals (entry move line or grouping is maintained through a journal)
for a particular financial year and for preparation of vouchers there is a module named account_voucher.
""",
'website': 'https://www.odoo.com/page/billing',
'images' : ['images/accounts.jpeg','images/bank_statement.jpeg','images/cash_register.jpeg','images/chart_of_accounts.jpeg','images/customer_invoice.jpeg','images/journal_entries.jpeg'],
'depends' : ['base_setup', 'product', 'analytic', 'board', 'edi', 'report'],
'data': [
'security/account_security.xml',
'security/ir.model.access.csv',
'account_menuitem.xml',
'report/account_invoice_report_view.xml',
'report/account_entries_report_view.xml',
'report/account_treasury_report_view.xml',
'report/account_report_view.xml',
'report/account_analytic_entries_report_view.xml',
'wizard/account_move_bank_reconcile_view.xml',
'wizard/account_use_model_view.xml',
'account_installer.xml',
'wizard/account_period_close_view.xml',
'wizard/account_reconcile_view.xml',
'wizard/account_unreconcile_view.xml',
'wizard/account_statement_from_invoice_view.xml',
'account_view.xml',
'account_report.xml',
'account_financial_report_data.xml',
'wizard/account_report_common_view.xml',
'wizard/account_invoice_refund_view.xml',
'wizard/account_fiscalyear_close_state.xml',
'wizard/account_chart_view.xml',
'wizard/account_tax_chart_view.xml',
'wizard/account_move_line_reconcile_select_view.xml',
'wizard/account_open_closed_fiscalyear_view.xml',
'wizard/account_move_line_unreconcile_select_view.xml',
'wizard/account_vat_view.xml',
'wizard/account_report_print_journal_view.xml',
'wizard/account_report_general_journal_view.xml',
'wizard/account_report_central_journal_view.xml',
'wizard/account_subscription_generate_view.xml',
'wizard/account_fiscalyear_close_view.xml',
'wizard/account_state_open_view.xml',
'wizard/account_journal_select_view.xml',
'wizard/account_change_currency_view.xml',
'wizard/account_validate_move_view.xml',
'wizard/account_report_general_ledger_view.xml',
'wizard/account_invoice_state_view.xml',
'wizard/account_report_partner_balance_view.xml',
'wizard/account_report_account_balance_view.xml',
'wizard/account_report_aged_partner_balance_view.xml',
'wizard/account_report_partner_ledger_view.xml',
'wizard/account_reconcile_partner_process_view.xml',
'wizard/account_automatic_reconcile_view.xml',
'wizard/account_financial_report_view.xml',
'wizard/pos_box.xml',
'project/wizard/project_account_analytic_line_view.xml',
'account_end_fy.xml',
'account_invoice_view.xml',
'data/account_data.xml',
'data/data_account_type.xml',
'data/configurable_account_chart.xml',
'account_invoice_workflow.xml',
'project/project_view.xml',
'project/project_report.xml',
'project/wizard/account_analytic_balance_report_view.xml',
'project/wizard/account_analytic_cost_ledger_view.xml',
'project/wizard/account_analytic_inverted_balance_report.xml',
'project/wizard/account_analytic_journal_report_view.xml',
'project/wizard/account_analytic_cost_ledger_for_journal_report_view.xml',
'project/wizard/account_analytic_chart_view.xml',
'partner_view.xml',
'product_view.xml',
'account_assert_test.xml',
'ir_sequence_view.xml',
'company_view.xml',
'edi/invoice_action_data.xml',
'account_bank_view.xml',
'res_config_view.xml',
'account_pre_install.yml',
'views/report_vat.xml',
'views/report_invoice.xml',
'views/report_trialbalance.xml',
'views/report_centraljournal.xml',
'views/report_overdue.xml',
'views/report_generaljournal.xml',
'views/report_journal.xml',
'views/report_salepurchasejournal.xml',
'views/report_partnerbalance.xml',
'views/report_agedpartnerbalance.xml',
'views/report_partnerledger.xml',
'views/report_partnerledgerother.xml',
'views/report_financial.xml',
'views/report_generalledger.xml',
'project/views/report_analyticbalance.xml',
'project/views/report_analyticjournal.xml',
'project/views/report_analyticcostledgerquantity.xml',
'project/views/report_analyticcostledger.xml',
'project/views/report_invertedanalyticbalance.xml',
'views/account.xml',
],
'qweb' : [
"static/src/xml/account_move_reconciliation.xml",
"static/src/xml/account_move_line_quickadd.xml",
"static/src/xml/account_bank_statement_reconciliation.xml",
],
'demo': [
'demo/account_demo.xml',
'project/project_demo.xml',
'project/analytic_account_demo.xml',
'demo/account_minimal.xml',
'demo/account_invoice_demo.xml',
'demo/account_bank_statement.xml',
'account_unit_test.xml',
],
'test': [
'test/account_test_users.yml',
'test/account_customer_invoice.yml',
'test/account_supplier_invoice.yml',
'test/account_change_currency.yml',
'test/chart_of_account.yml',
'test/account_period_close.yml',
'test/account_use_model.yml',
'test/account_validate_account_move.yml',
'test/test_edi_invoice.yml',
'test/account_report.yml',
'test/account_fiscalyear_close.yml', #last test, as it will definitively close the demo fiscalyear
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
haltode/haltode.fr-core | refs/heads/master | content/articles/algorithme/ia/apprentissage_artificiel/regression_lin_poly/algo_gradient/sortie_graphique.py | 5 | import matplotlib.pyplot as plt
# Collect x, y, and our approximation of y into plain Python lists.
# NOTE(review): this snippet relies on `np` (numpy) and `ia` (the training
# module providing x, y and theta) being defined elsewhere — neither is
# imported here; confirm the surrounding material supplies them.
x = np.array(ia.x[:,1]).tolist()
x = [float(i[0]) for i in x]
y = np.array(ia.y).tolist()
y = [float(i[0]) for i in y]
y_approx = np.array(ia.x * ia.theta).tolist()
y_approx = [float(i[0]) for i in y_approx]

# Plot the input points as '+' markers and overlay the linear model in red.
plt.plot(x, y, '+')
plt.plot(x, y_approx, 'r-')

plt.show()
|
yukoba/sympy | refs/heads/master | sympy/combinatorics/tests/test_prufer.py | 102 | from sympy.combinatorics.prufer import Prufer
from sympy.utilities.pytest import raises
def test_prufer():
    """Exercise Prufer construction from edge lists, rank/size properties,
    the tree <-> Prufer-sequence round trip, and input validation."""
    # number of nodes is optional
    assert Prufer([[0, 1], [0, 2], [0, 3], [0, 4]], 5).nodes == 5
    assert Prufer([[0, 1], [0, 2], [0, 3], [0, 4]]).nodes == 5
    # a star on 5 nodes centered at 0 has rank 0 and Prufer sequence [0, 0, 0]
    a = Prufer([[0, 1], [0, 2], [0, 3], [0, 4]])
    assert a.rank == 0
    assert a.nodes == 5
    assert a.prufer_repr == [0, 0, 0]
    a = Prufer([[2, 4], [1, 4], [1, 3], [0, 5], [0, 4]])
    assert a.rank == 924
    assert a.nodes == 6
    assert a.tree_repr == [[2, 4], [1, 4], [1, 3], [0, 5], [0, 4]]
    assert a.prufer_repr == [4, 1, 4, 0]
    # edges() accepts multiple paths and returns (sorted edge list, node count)
    assert Prufer.edges([0, 1, 2, 3], [1, 4, 5], [1, 4, 6]) == \
        ([[0, 1], [1, 2], [1, 4], [2, 3], [4, 5], [4, 6]], 7)
    # size is the number of labeled trees on n nodes: 6**(6-2) == 1296
    assert Prufer([0]*4).size == Prufer([6]*4).size == 1296
    # accept iterables but convert to list of lists
    tree = [(0, 1), (1, 5), (0, 3), (0, 2), (2, 6), (4, 7), (2, 4)]
    tree_lists = [list(t) for t in tree]
    assert Prufer(tree).tree_repr == tree_lists
    assert sorted(Prufer(set(tree)).tree_repr) == sorted(tree_lists)
    raises(ValueError, lambda: Prufer([[1, 2], [3, 4]]))  # 0 is missing
    assert Prufer(*Prufer.edges([1, 2], [3, 4])).prufer_repr == [1, 3]
    raises(ValueError, lambda: Prufer.edges(
        [1, 3], [3, 4]))  # a broken tree but edges doesn't care
    raises(ValueError, lambda: Prufer.edges([1, 2], [5, 6]))
def test_round_trip():
    """Round-trip every small tree through edges -> Prufer -> tree and
    through unrank, checking all representations agree."""
    def doit(t, b):
        # t: list of 1-based paths describing a tree; b: its expected
        # 1-based Prufer sequence.
        e, n = Prufer.edges(*t)
        t = Prufer(e, n)
        a = sorted(t.tree_repr)
        b = [i - 1 for i in b]  # convert expectation to 0-based labels
        assert t.prufer_repr == b
        assert sorted(Prufer(b).tree_repr) == a
        assert Prufer.unrank(t.rank, n).prufer_repr == b
    # exhaustive-ish coverage of trees on 2..4 nodes, plus one larger case
    doit([[1, 2]], [])
    doit([[2, 1, 3]], [1])
    doit([[1, 3, 2]], [3])
    doit([[1, 2, 3]], [2])
    doit([[2, 1, 4], [1, 3]], [1, 1])
    doit([[3, 2, 1, 4]], [2, 1])
    doit([[3, 2, 1], [2, 4]], [2, 2])
    doit([[1, 3, 2, 4]], [3, 2])
    doit([[1, 4, 2, 3]], [4, 2])
    doit([[3, 1, 4, 2]], [4, 1])
    doit([[4, 2, 1, 3]], [1, 2])
    doit([[1, 2, 4, 3]], [2, 4])
    doit([[1, 3, 4, 2]], [3, 4])
    doit([[2, 4, 1], [4, 3]], [4, 4])
    doit([[1, 2, 3, 4]], [2, 3])
    doit([[2, 3, 1], [3, 4]], [3, 3])
    doit([[1, 4, 3, 2]], [4, 3])
    doit([[2, 1, 4, 3]], [1, 4])
    doit([[2, 1, 3, 4]], [1, 3])
    doit([[6, 2, 1, 4], [1, 3, 5, 8], [3, 7]], [1, 2, 1, 3, 3, 5])
|
minhphung171093/GreenERP_V9 | refs/heads/master | openerp/addons/web_editor/models/test_models.py | 47 | # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class test_converter(orm.Model):
    # Fixture model used by the web_editor field-converter tests: one column
    # per field type the converters must handle.  The French labels/values
    # below are part of the test data -- do not translate them.
    _name = 'web_editor.converter.test'
    # disable translation export for those brilliant field labels and values
    _translate = False
    _columns = {
        'char': fields.char(),
        'integer': fields.integer(),
        'float': fields.float(),
        'numeric': fields.float(digits=(16, 2)),
        'many2one': fields.many2one('web_editor.converter.test.sub'),
        'binary': fields.binary(),
        'date': fields.date(),
        'datetime': fields.datetime(),
        'selection': fields.selection([
            (1, "réponse A"),
            (2, "réponse B"),
            (3, "réponse C"),
            (4, "réponse D"),
        ]),
        'selection_str': fields.selection([
            ('A', "Qu'il n'est pas arrivé à Toronto"),
            ('B', "Qu'il était supposé arriver à Toronto"),
            ('C', "Qu'est-ce qu'il fout ce maudit pancake, tabernacle ?"),
            ('D', "La réponse D"),
        ], string=u"Lorsqu'un pancake prend l'avion à destination de Toronto et "
                  u"qu'il fait une escale technique à St Claude, on dit:"),
        'html': fields.html(),
        'text': fields.text(),
    }
class test_converter_sub(orm.Model):
    # Target model for the many2one column of web_editor.converter.test.
    _name = 'web_editor.converter.test.sub'
    _columns = {
        'name': fields.char(),
    }
|
marado/youtube-dl | refs/heads/master | youtube_dl/extractor/history.py | 121 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import smuggle_url
class HistoryIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?history\.com/(?:[^/]+/)+(?P<id>[^/]+?)(?:$|[?#])'
    _TESTS = [{
        'url': 'http://www.history.com/topics/valentines-day/history-of-valentines-day/videos/bet-you-didnt-know-valentines-day?m=528e394da93ae&s=undefined&f=1&free=false',
        'md5': '6fe632d033c92aa10b8d4a9be047a7c5',
        'info_dict': {
            'id': 'bLx5Dv5Aka1G',
            'ext': 'mp4',
            'title': "Bet You Didn't Know: Valentine's Day",
            'description': 'md5:7b57ea4829b391995b405fa60bd7b5f7',
        },
        'add_ie': ['ThePlatform'],
    }]
    def _real_extract(self, url):
        """Locate the ThePlatform release URL embedded in the history.com
        page and delegate actual extraction to the ThePlatform extractor."""
        vid = self._match_id(url)
        page = self._download_webpage(url, vid)
        release_url = self._search_regex(
            r'data-href="[^"]*/%s"[^>]+data-release-url="([^"]+)"' % vid,
            page, 'video url')
        signed_url = smuggle_url(
            release_url, {'sig': {'key': 'crazyjava', 'secret': 's3cr3t'}})
        return self.url_result(signed_url)
|
nicolargo/intellij-community | refs/heads/master | python/testData/resolve/multiFile/importSubmodule/ImportSubmodule.py | 83 | import p1.m1
p1.m1
# <ref> |
alexmorozov/django | refs/heads/master | tests/template_tests/templatetags/custom.py | 161 | import operator
import warnings
from django import template
from django.template.defaultfilters import stringfilter
from django.utils import six
from django.utils.html import escape, format_html
register = template.Library()
@register.filter
@stringfilter
def trim(value, num):
    """Return the first *num* characters of *value* (coerced to text by
    the stringfilter decorator)."""
    return value[:num]
@register.filter
def noop(value, param=None):
"""A noop filter that always return its first argument and does nothing with
its second (optional) one.
Useful for testing out whitespace in filter arguments (see #19882)."""
return value
@register.simple_tag(takes_context=True)
def context_stack_length(context):
return len(context.dicts)
@register.simple_tag
def no_params():
"""Expected no_params __doc__"""
return "no_params - Expected result"
no_params.anything = "Expected no_params __dict__"
@register.simple_tag
def one_param(arg):
"""Expected one_param __doc__"""
return "one_param - Expected result: %s" % arg
one_param.anything = "Expected one_param __dict__"
@register.simple_tag(takes_context=False)
def explicit_no_context(arg):
"""Expected explicit_no_context __doc__"""
return "explicit_no_context - Expected result: %s" % arg
explicit_no_context.anything = "Expected explicit_no_context __dict__"
@register.simple_tag(takes_context=True)
def no_params_with_context(context):
"""Expected no_params_with_context __doc__"""
return "no_params_with_context - Expected result (context value: %s)" % context['value']
no_params_with_context.anything = "Expected no_params_with_context __dict__"
@register.simple_tag(takes_context=True)
def params_and_context(context, arg):
"""Expected params_and_context __doc__"""
return "params_and_context - Expected result (context value: %s): %s" % (context['value'], arg)
params_and_context.anything = "Expected params_and_context __dict__"
@register.simple_tag
def simple_two_params(one, two):
"""Expected simple_two_params __doc__"""
return "simple_two_params - Expected result: %s, %s" % (one, two)
simple_two_params.anything = "Expected simple_two_params __dict__"
@register.simple_tag
def simple_one_default(one, two='hi'):
"""Expected simple_one_default __doc__"""
return "simple_one_default - Expected result: %s, %s" % (one, two)
simple_one_default.anything = "Expected simple_one_default __dict__"
@register.simple_tag
def simple_unlimited_args(one, two='hi', *args):
"""Expected simple_unlimited_args __doc__"""
return "simple_unlimited_args - Expected result: %s" % (
', '.join(six.text_type(arg) for arg in [one, two] + list(args))
)
simple_unlimited_args.anything = "Expected simple_unlimited_args __dict__"
@register.simple_tag
def simple_only_unlimited_args(*args):
"""Expected simple_only_unlimited_args __doc__"""
return "simple_only_unlimited_args - Expected result: %s" % ', '.join(six.text_type(arg) for arg in args)
simple_only_unlimited_args.anything = "Expected simple_only_unlimited_args __dict__"
@register.simple_tag
def simple_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
    """Expected simple_unlimited_args_kwargs __doc__"""
    # NOTE(review): the literal __doc__ above, the returned string, and the
    # .anything attribute below look like fixtures asserted verbatim by the
    # template-tag tests -- do not reword them.
    # Sort the dictionary by key to guarantee the order for testing.
    sorted_kwarg = sorted(six.iteritems(kwargs), key=operator.itemgetter(0))
    return "simple_unlimited_args_kwargs - Expected result: %s / %s" % (
        ', '.join(six.text_type(arg) for arg in [one, two] + list(args)),
        ', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
    )
simple_unlimited_args_kwargs.anything = "Expected simple_unlimited_args_kwargs __dict__"
@register.simple_tag(takes_context=True)
def simple_tag_without_context_parameter(arg):
"""Expected simple_tag_without_context_parameter __doc__"""
return "Expected result"
simple_tag_without_context_parameter.anything = "Expected simple_tag_without_context_parameter __dict__"
@register.simple_tag(takes_context=True)
def escape_naive(context):
    """A tag that doesn't even think about escaping issues"""
    # Intentionally interpolates context['name'] without escaping: this is a
    # fixture for the autoescape/simple_tag safety tests, not a pattern to copy.
    return "Hello {0}!".format(context['name'])
@register.simple_tag(takes_context=True)
def escape_explicit(context):
"""A tag that uses escape explicitly"""
return escape("Hello {0}!".format(context['name']))
@register.simple_tag(takes_context=True)
def escape_format_html(context):
"""A tag that uses format_html"""
return format_html("Hello {0}!", context['name'])
@register.simple_tag(takes_context=True)
def current_app(context):
return "%s" % context.current_app
@register.simple_tag(takes_context=True)
def use_l10n(context):
return "%s" % context.use_l10n
@register.simple_tag(name='minustwo')
def minustwo_overridden_name(value):
return value - 2
register.simple_tag(lambda x: x - 1, name='minusone')
with warnings.catch_warnings():
warnings.simplefilter('ignore')
@register.assignment_tag
def assignment_no_params():
"""Expected assignment_no_params __doc__"""
return "assignment_no_params - Expected result"
assignment_no_params.anything = "Expected assignment_no_params __dict__"
@register.assignment_tag(takes_context=True)
def assignment_tag_without_context_parameter(arg):
"""Expected assignment_tag_without_context_parameter __doc__"""
return "Expected result"
assignment_tag_without_context_parameter.anything = "Expected assignment_tag_without_context_parameter __dict__"
|
aivarsk/scrapy | refs/heads/master | scrapy/utils/ossignal.py | 204 |
from __future__ import absolute_import
from twisted.internet import reactor
import signal
signal_names = {}
for signame in dir(signal):
if signame.startswith("SIG"):
signum = getattr(signal, signame)
if isinstance(signum, int):
signal_names[signum] = signame
def install_shutdown_handlers(function, override_sigint=True):
    """Install the given function as a signal handler for all common shutdown
    signals (such as SIGINT, SIGTERM, etc). If override_sigint is ``False`` the
    SIGINT handler won't be installed if there is already a non-default handler
    in place (e.g. Pdb).
    """
    # NOTE(review): reactor._handleSignals is a private Twisted API (installs
    # Twisted's own default signal handlers before we override them) -- verify
    # it still exists when upgrading Twisted.
    reactor._handleSignals()
    signal.signal(signal.SIGTERM, function)
    if signal.getsignal(signal.SIGINT) == signal.default_int_handler or \
            override_sigint:
        signal.signal(signal.SIGINT, function)
    # Catch Ctrl-Break in windows
    if hasattr(signal, "SIGBREAK"):
        signal.signal(signal.SIGBREAK, function)
|
Nu3001/external_chromium_org | refs/heads/master | tools/telemetry/telemetry/core/chrome/desktop_browser_backend.py | 23 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import subprocess as subprocess
import shutil
import sys
import tempfile
from telemetry.core import util
from telemetry.core.backends import browser_backend
from telemetry.core.backends.chrome import chrome_browser_backend
class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
  """The backend for controlling a locally-executed browser instance, on Linux,
  Mac or Windows.

  Fix over previous revision: identity comparisons with None (``is``/
  ``is not`` instead of ``==``/``!=``, PEP 8 E711) and truthiness instead of
  ``len(...) > 0``.  No behavior change for callers.
  """
  def __init__(self, options, executable, flash_path, is_content_shell,
               browser_directory, delete_profile_dir_after_run=True):
    super(DesktopBrowserBackend, self).__init__(
        is_content_shell=is_content_shell,
        supports_extensions=not is_content_shell,
        options=options)
    # Initialize fields so that an explosion during init doesn't break in Close.
    self._proc = None
    self._tmpdir = None
    self._tmp_output_file = None
    self._executable = executable
    if not self._executable:
      raise Exception('Cannot create browser, no executable found!')
    self._flash_path = flash_path
    if self._flash_path and not os.path.exists(self._flash_path):
      logging.warning(('Could not find flash at %s. Running without flash.\n\n'
                       'To fix this see http://go/read-src-internal') %
                      self._flash_path)
      self._flash_path = None
    # Truthiness check instead of len(...) > 0 (PEP 8).
    if options.extensions_to_load and is_content_shell:
      raise browser_backend.ExtensionsNotSupportedException(
          'Content shell does not support extensions.')
    self._browser_directory = browser_directory
    self._port = util.GetAvailableLocalPort()
    self._profile_dir = None
    self._supports_net_benchmarking = True
    self._delete_profile_dir_after_run = delete_profile_dir_after_run
    self._SetupProfile()
  def _SetupProfile(self):
    """Create the temporary profile directory, seeding it from a source
    profile when one was requested."""
    if not self.options.dont_override_profile:
      self._tmpdir = tempfile.mkdtemp()
      profile_dir = self._profile_dir or self.options.profile_dir
      if profile_dir:
        if self.is_content_shell:
          logging.critical('Profiles cannot be used with content shell')
          sys.exit(1)
        # copytree requires that the destination not exist yet.
        shutil.rmtree(self._tmpdir)
        shutil.copytree(profile_dir, self._tmpdir)
  def _LaunchBrowser(self):
    """Start the browser process (optionally capturing stdout/stderr to a
    temp file) and wait until DevTools is reachable; on any failure the
    half-started browser is closed before re-raising."""
    args = [self._executable]
    args.extend(self.GetBrowserStartupArgs())
    if not self.options.show_stdout:
      self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
      self._proc = subprocess.Popen(
          args, stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
    else:
      self._proc = subprocess.Popen(args)
    try:
      self._WaitForBrowserToComeUp()
      self._PostBrowserStartupInitialization()
    except:
      # Intentionally bare: clean up the child process on *any* failure
      # (including KeyboardInterrupt), then re-raise.
      self.Close()
      raise
  def GetBrowserStartupArgs(self):
    """Return the command-line flags for launching this browser."""
    args = super(DesktopBrowserBackend, self).GetBrowserStartupArgs()
    args.append('--remote-debugging-port=%i' % self._port)
    if not self.is_content_shell:
      args.append('--window-size=1280,1024')
      if self._flash_path:
        args.append('--ppapi-flash-path=%s' % self._flash_path)
      if self._supports_net_benchmarking:
        args.append('--enable-net-benchmarking')
      else:
        args.append('--enable-benchmarking')
      if not self.options.dont_override_profile:
        args.append('--user-data-dir=%s' % self._tmpdir)
    return args
  def SetProfileDirectory(self, profile_dir):
    # Make sure _profile_dir hasn't already been set.
    assert self._profile_dir is None
    if self.is_content_shell:
      logging.critical('Profile creation cannot be used with content shell')
      sys.exit(1)
    self._profile_dir = profile_dir
  def Start(self):
    self._LaunchBrowser()
    # For old chrome versions, might have to relaunch to have the
    # correct net_benchmarking switch.
    if self._chrome_branch_number < 1418:
      self.Close()
      self._supports_net_benchmarking = False
      self._LaunchBrowser()
  @property
  def pid(self):
    """Process id of the browser process, or None if not launched."""
    if self._proc:
      return self._proc.pid
    return None
  @property
  def browser_directory(self):
    return self._browser_directory
  @property
  def profile_directory(self):
    return self._tmpdir
  def IsBrowserRunning(self):
    # poll() returns None while the child is still alive; use identity
    # comparison rather than '== None' (PEP 8 E711).
    return self._proc.poll() is None
  def GetStandardOutput(self):
    """Return everything the browser has written to stdout/stderr so far."""
    assert self._tmp_output_file, "Can't get standard output with show_stdout"
    self._tmp_output_file.flush()
    try:
      with open(self._tmp_output_file.name) as f:
        return f.read()
    except IOError:
      return ''
  def GetStackTrace(self):
    # crbug.com/223572, symbolize stack trace for desktop browsers.
    logging.warning('Stack traces not supported on desktop browsers, '
                    'returning stdout')
    return self.GetStandardOutput()
  def __del__(self):
    self.Close()
  def Close(self):
    """Shut the browser down (terminate politely, then kill) and clean up
    the temporary profile directory and captured-output file."""
    super(DesktopBrowserBackend, self).Close()
    if self._proc:
      def IsClosed():
        if not self._proc:
          return True
        return self._proc.poll() is not None  # was '!= None' (PEP 8 E711)
      # Try to politely shutdown, first.
      self._proc.terminate()
      try:
        util.WaitFor(IsClosed, timeout=1)
        self._proc = None
      except util.TimeoutException:
        pass
      # Kill it.
      if not IsClosed():
        self._proc.kill()
        try:
          util.WaitFor(IsClosed, timeout=5)
          self._proc = None
        except util.TimeoutException:
          self._proc = None
          raise Exception('Could not shutdown the browser.')
    if self._delete_profile_dir_after_run and \
        self._tmpdir and os.path.exists(self._tmpdir):
      shutil.rmtree(self._tmpdir, ignore_errors=True)
      self._tmpdir = None
    if self._tmp_output_file:
      self._tmp_output_file.close()
      self._tmp_output_file = None
  def CreateForwarder(self, *port_pairs):
    return browser_backend.DoNothingForwarder(*port_pairs)
|
etalab/udata | refs/heads/master | udata/tests/forms/test_current_user_field.py | 2 | import datetime
from bson import ObjectId
from werkzeug.datastructures import MultiDict
from udata.auth import login_user
from udata.auth.forms import ExtendedLoginForm, ExtendedResetPasswordForm
from udata.core.user.factories import UserFactory, AdminFactory
from udata.forms import ModelForm, fields
from udata.models import db, User
from udata.tests import TestCase
class CurrentUserFieldTest(TestCase):
def factory(self, *args, **kwargs):
class Ownable(db.Document):
owner = db.ReferenceField(User)
class OwnableForm(ModelForm):
model_class = Ownable
owner = fields.CurrentUserField(*args, **kwargs)
return Ownable, OwnableForm
def test_empty_values(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm()
self.assertEqual(form.owner.data, user)
ownable = Ownable()
form.populate_obj(ownable)
self.assertEqual(ownable.owner, user)
def test_initial_value(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
ownable = Ownable(owner=user)
form = OwnableForm(None, obj=ownable)
self.assertEqual(form.owner.data, user)
def test_with_valid_user_self(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm(MultiDict({
'owner': str(user.id)
}))
self.assertEqual(form.owner.data, user)
form.validate()
self.assertEqual(form.errors, {})
ownable = Ownable()
form.populate_obj(ownable)
self.assertEqual(ownable.owner, user)
def test_with_other_user(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
other = UserFactory()
login_user(user)
form = OwnableForm(MultiDict({
'owner': str(other.id)
}))
self.assertEqual(form.owner.data, other)
form.validate()
self.assertIn('owner', form.errors)
self.assertEqual(len(form.errors['owner']), 1)
def test_with_other_user_admin(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
admin = AdminFactory()
login_user(admin)
form = OwnableForm(MultiDict({
'owner': str(user.id)
}))
self.assertEqual(form.owner.data, user)
form.validate()
self.assertEqual(form.errors, {})
ownable = Ownable()
form.populate_obj(ownable)
self.assertEqual(ownable.owner, user)
def test_with_valid_user_self_json(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm.from_json({
'owner': str(user.id)
})
self.assertEqual(form.owner.data, user)
form.validate()
self.assertEqual(form.errors, {})
ownable = Ownable()
form.populate_obj(ownable)
self.assertEqual(ownable.owner, user)
def test_with_user_null_json(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm.from_json({
'owner': None
})
self.assertEqual(form.owner.data, user)
form.validate()
self.assertEqual(form.errors, {})
ownable = Ownable()
form.populate_obj(ownable)
self.assertEqual(ownable.owner, user)
def test_with_user_object_self_from_json(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm.from_json({
'owner': {'id': str(user.id)}
})
self.assertEqual(form.owner.data, user)
form.validate()
self.assertEqual(form.errors, {})
ownable = Ownable()
form.populate_obj(ownable)
self.assertEqual(ownable.owner, user)
def test_with_invalid_data(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm(MultiDict({
'owner': str('wrongwith12c')
}))
form.validate()
self.assertIn('owner', form.errors)
self.assertEqual(len(form.errors['owner']), 1)
def test_with_user_not_found(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
login_user(user)
form = OwnableForm(MultiDict({
'owner': str(ObjectId())
}))
form.validate()
self.assertIn('owner', form.errors)
self.assertEqual(len(form.errors['owner']), 1)
def test_with_user_not_logged_found(self):
Ownable, OwnableForm = self.factory()
user = UserFactory()
form = OwnableForm(MultiDict({
'owner': str(user.id)
}))
form.validate()
self.assertIn('owner', form.errors)
self.assertEqual(len(form.errors['owner']), 1)
def test_password_rotation(self):
today = datetime.datetime.now()
user = UserFactory(password='password', password_rotation_demanded=today, confirmed_at=today)
form = ExtendedLoginForm.from_json({
'email': user.email,
'password': 'password'
})
form.validate()
self.assertIn('Password must be changed for security reasons', form.errors['password'])
|
hobarrera/django | refs/heads/master | tests/template_tests/filter_tests/test_upper.py | 388 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template.defaultfilters import upper
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class UpperTests(SimpleTestCase):
    """
    The "upper" filter messes up entities (which are case-sensitive),
    so it's not safe for non-escaping purposes.
    """
    # NOTE(review): upstream Django uses the literal entity 'a &amp; b' in the
    # mark_safe() inputs below; the entities appear to have been decoded in
    # this copy (note the surviving '&AMP;' in test_upper02) -- verify against
    # upstream before relying on these expected strings.
    @setup({'upper01': '{% autoescape off %}{{ a|upper }} {{ b|upper }}{% endautoescape %}'})
    def test_upper01(self):
        output = self.engine.render_to_string('upper01', {'a': 'a & b', 'b': mark_safe('a & b')})
        self.assertEqual(output, 'A & B A & B')
    @setup({'upper02': '{{ a|upper }} {{ b|upper }}'})
    def test_upper02(self):
        output = self.engine.render_to_string('upper02', {'a': 'a & b', 'b': mark_safe('a & b')})
        self.assertEqual(output, 'A & B A &AMP; B')
class FunctionTests(SimpleTestCase):
    """Direct unit tests for the upper() filter function."""
    def test_upper(self):
        self.assertEqual(upper('Mixed case input'), 'MIXED CASE INPUT')
    def test_unicode(self):
        # lowercase e umlaut
        self.assertEqual(upper('\xeb'), '\xcb')
    def test_non_string_input(self):
        # non-strings are coerced to str before uppercasing
        self.assertEqual(upper(123), '123')
|
yashsharan/sympy | refs/heads/master | sympy/strategies/tests/test_tools.py | 129 | from sympy.strategies.tools import subs, typed
from sympy.strategies.rl import rm_id
from sympy import Basic
def test_subs():
    """subs() applies the mapping simultaneously: a->d and d->a swap
    rather than chaining, and whole sub-expressions can be replaced."""
    from sympy import symbols
    a,b,c,d,e,f = symbols('a,b,c,d,e,f')
    mapping = {a: d, d: a, Basic(e): Basic(f)}
    expr = Basic(a, Basic(b, c), Basic(d, Basic(e)))
    result = Basic(d, Basic(b, c), Basic(a, Basic(f)))
    assert subs(mapping)(expr) == result
def test_subs_empty():
    """An empty mapping leaves the expression untouched."""
    assert subs({})(Basic(1, 2)) == Basic(1, 2)
def test_typed():
    """typed() dispatches a rule on the expression's class: zeros are
    removed from A instances, ones from B instances."""
    class A(Basic):
        pass
    class B(Basic):
        pass
    rmzeros = rm_id(lambda x: x == 0)
    rmones = rm_id(lambda x: x == 1)
    remove_something = typed({A: rmzeros, B: rmones})
    assert remove_something(A(0, 1)) == A(1)
    assert remove_something(B(0, 1)) == B(0)
|
derekjchow/models | refs/heads/master | official/keras_application_models/dataset.py | 1 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Prepare dataset for keras model benchmark."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from official.utils.misc import model_helpers # pylint: disable=g-bad-import-order
# Default values for dataset.
_NUM_CHANNELS = 3
_NUM_CLASSES = 1000
def _get_default_image_size(model):
"""Provide default image size for each model."""
image_size = (224, 224)
if model in ["inceptionv3", "xception", "inceptionresnetv2"]:
image_size = (299, 299)
elif model in ["nasnetlarge"]:
image_size = (331, 331)
return image_size
def generate_synthetic_input_dataset(model, batch_size):
  """Generate synthetic dataset.

  Args:
    model: string, model name used to pick the default input resolution.
    batch_size: int, number of examples per batch.

  Returns:
    The dataset built by model_helpers.generate_synthetic_data with image
    shape (batch_size, H, W, 3) and one-hot label shape
    (batch_size, 1000); presumably random-valued tensors -- confirm in
    model_helpers.
  """
  image_size = _get_default_image_size(model)
  image_shape = (batch_size,) + image_size + (_NUM_CHANNELS,)
  label_shape = (batch_size, _NUM_CLASSES)
  dataset = model_helpers.generate_synthetic_data(
      input_shape=tf.TensorShape(image_shape),
      label_shape=tf.TensorShape(label_shape),
  )
  return dataset
class Cifar10Dataset(object):
  """CIFAR10 dataset, including train and test set.
  Each sample consists of a 32x32 color image, and label is from 10 classes.
  """
  def __init__(self, batch_size):
    """Initializes train/test datasets.
    Args:
      batch_size: int, the number of batch size.
    """
    self.input_shape = (32, 32, 3)
    self.num_classes = 10
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
    # Scale pixel values from [0, 255] into [0, 1].
    x_train, x_test = x_train / 255.0, x_test / 255.0
    y_train, y_test = y_train.astype(np.int64), y_test.astype(np.int64)
    # One-hot encode the integer class labels.
    y_train = tf.keras.utils.to_categorical(y_train, self.num_classes)
    y_test = tf.keras.utils.to_categorical(y_test, self.num_classes)
    # Both pipelines shuffle (buffer of 2000), batch, and repeat forever.
    self.train_dataset = tf.data.Dataset.from_tensor_slices(
        (x_train, y_train)).shuffle(2000).batch(batch_size).repeat()
    self.test_dataset = tf.data.Dataset.from_tensor_slices(
        (x_test, y_test)).shuffle(2000).batch(batch_size).repeat()
|
gsehub/edx-platform | refs/heads/gsehub-release | lms/djangoapps/discussion_api/views.py | 9 | """
Discussion API views
"""
from django.core.exceptions import ValidationError
from django.contrib.auth import get_user_model
from edx_rest_framework_extensions.authentication import JwtAuthentication
from opaque_keys.edx.keys import CourseKey
from rest_framework import permissions
from rest_framework import status
from rest_framework.exceptions import UnsupportedMediaType
from rest_framework.parsers import JSONParser
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ViewSet
from six import text_type
from lms.lib import comment_client
from discussion_api.api import (
create_comment,
create_thread,
delete_comment,
delete_thread,
get_comment_list,
get_course,
get_course_topics,
get_response_comments,
get_thread,
get_thread_list,
update_comment,
update_thread
)
from discussion_api.forms import CommentGetForm, CommentListGetForm, ThreadListGetForm
from openedx.core.lib.api.parsers import MergePatchParser
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
from openedx.core.djangoapps.user_api.accounts.permissions import CanRetireUser
from openedx.core.djangoapps.user_api.models import UserRetirementStatus
from xmodule.modulestore.django import modulestore
@view_auth_classes()
class CourseView(DeveloperErrorViewMixin, APIView):
    """
    **Use Cases**
        Retrieve general discussion metadata for a course.
    **Example Requests**:
        GET /api/discussion/v1/courses/course-v1:ExampleX+Subject101+2015
    **Response Values**:
        * id: The identifier of the course
        * blackouts: A list of objects representing blackout periods (during
          which discussions are read-only except for privileged users). Each
          item in the list includes:
            * start: The ISO 8601 timestamp for the start of the blackout period
            * end: The ISO 8601 timestamp for the end of the blackout period
        * thread_list_url: The URL of the list of all threads in the course.
        * topics_url: The URL of the topic listing for the course.
    """
    def get(self, request, course_id):
        """Implements the GET method as described in the class docstring."""
        # Parse the course id from the URL into an opaque CourseKey.
        course_key = CourseKey.from_string(course_id)  # TODO: which class is right?
        return Response(get_course(request, course_key))
@view_auth_classes()
class CourseTopicsView(DeveloperErrorViewMixin, APIView):
    """
    **Use Cases**
        Retrieve the topic listing for a course. Only topics accessible to the
        authenticated user are included.
    **Example Requests**:
        GET /api/discussion/v1/course_topics/course-v1:ExampleX+Subject101+2015
            ?topic_id={topic_id_1, topid_id_2}
    **Response Values**:
        * courseware_topics: The list of topic trees for courseware-linked
            topics. Each item in the list includes:
            * id: The id of the discussion topic (null for a topic that only
              has children but cannot contain threads itself).
            * name: The display name of the topic.
            * children: A list of child subtrees of the same format.
        * non_courseware_topics: The list of topic trees that are not linked to
              courseware. Items are of the same format as in courseware_topics.
    """
    def get(self, request, course_id):
        """
        Implements the GET method as described in the class docstring.
        """
        course_key = CourseKey.from_string(course_id)
        # Optional comma-separated topic id filter from the query string.
        topic_ids = self.request.GET.get('topic_id')
        # bulk_operations batches modulestore reads for the whole course.
        with modulestore().bulk_operations(course_key):
            response = get_course_topics(
                request,
                course_key,
                set(topic_ids.strip(',').split(',')) if topic_ids else None,
            )
        return Response(response)
@view_auth_classes()
class ThreadViewSet(DeveloperErrorViewMixin, ViewSet):
"""
**Use Cases**
Retrieve the list of threads for a course, retrieve thread details,
post a new thread, or modify or delete an existing thread.
**Example Requests**:
GET /api/discussion/v1/threads/?course_id=ExampleX/Demo/2015
GET /api/discussion/v1/threads/{thread_id}
POST /api/discussion/v1/threads
{
"course_id": "foo/bar/baz",
"topic_id": "quux",
"type": "discussion",
"title": "Title text",
"raw_body": "Body text"
}
PATCH /api/discussion/v1/threads/thread_id
{"raw_body": "Edited text"}
Content Type: "application/merge-patch+json"
DELETE /api/discussion/v1/threads/thread_id
**GET Thread List Parameters**:
* course_id (required): The course to retrieve threads for
* page: The (1-indexed) page to retrieve (default is 1)
* page_size: The number of items per page (default is 10, max is 100)
* topic_id: The id of the topic to retrieve the threads. There can be
multiple topic_id queries to retrieve threads from multiple topics
at once.
* text_search: A search string to match. Any thread whose content
(including the bodies of comments in the thread) matches the search
string will be returned.
* order_by: Must be "last_activity_at", "comment_count", or
"vote_count". The key to sort the threads by. The default is
"last_activity_at".
* order_direction: Must be "desc". The direction in which to sort the
threads by. The default and only value is "desc". This will be
removed in a future major version.
* following: If true, retrieve only threads the requesting user is
following
* view: "unread" for threads the requesting user has not read, or
"unanswered" for question threads with no marked answer. Only one
can be selected.
* requested_fields: (list) Indicates which additional fields to return
for each thread. (supports 'profile_image')
The topic_id, text_search, and following parameters are mutually
exclusive (i.e. only one may be specified in a request)
**GET Thread Parameters**:
* thread_id (required): The id of the thread
* requested_fields (optional parameter): (list) Indicates which additional
fields to return for each thread. (supports 'profile_image')
**POST Parameters**:
* course_id (required): The course to create the thread in
* topic_id (required): The topic to create the thread in
* type (required): The thread's type (either "question" or "discussion")
* title (required): The thread's title
* raw_body (required): The thread's raw body text
* following (optional): A boolean indicating whether the user should
follow the thread upon its creation; defaults to false
**PATCH Parameters**:
* abuse_flagged (optional): A boolean to mark thread as abusive
* voted (optional): A boolean to vote for thread
* read (optional): A boolean to mark thread as read
* topic_id, type, title, and raw_body are accepted with the same meaning
as in a POST request
If "application/merge-patch+json" is not the specified content type,
a 415 error is returned.
**GET Thread List Response Values**:
* results: The list of threads; each item in the list has the same
fields as the POST/PATCH response below
* next: The URL of the next page (or null if first page)
* previous: The URL of the previous page (or null if last page)
* text_search_rewrite: The search string to which the text_search
parameter was rewritten in order to match threads (e.g. for spelling
correction)
**GET Thread Details Response Values**:
Same response fields as the POST/PATCH response below
**POST/PATCH response values**:
* id: The id of the thread
* course_id: The id of the thread's course
* topic_id: The id of the thread's topic
* created_at: The ISO 8601 timestamp for the creation of the thread
* updated_at: The ISO 8601 timestamp for the last modification of
the thread, which may not have been an update of the title/body
* type: The thread's type (either "question" or "discussion")
* title: The thread's title
* raw_body: The thread's raw body text without any rendering applied
* pinned: Boolean indicating whether the thread has been pinned
* closed: Boolean indicating whether the thread has been closed
* comment_count: The number of comments within the thread
* unread_comment_count: The number of comments within the thread
that were created or updated since the last time the user read
the thread
* editable_fields: The fields that the requesting user is allowed to
modify with a PATCH request
* read: Boolean indicating whether the user has read this thread
* has_endorsed: Boolean indicating whether this thread has been answered
* response_count: The number of direct responses for a thread
**DELETE response values:
No content is returned for a DELETE request
"""
lookup_field = "thread_id"
parser_classes = (JSONParser, MergePatchParser,)
def list(self, request):
"""
Implements the GET method for the list endpoint as described in the
class docstring.
"""
form = ThreadListGetForm(request.GET)
if not form.is_valid():
raise ValidationError(form.errors)
return get_thread_list(
request,
form.cleaned_data["course_id"],
form.cleaned_data["page"],
form.cleaned_data["page_size"],
form.cleaned_data["topic_id"],
form.cleaned_data["text_search"],
form.cleaned_data["following"],
form.cleaned_data["view"],
form.cleaned_data["order_by"],
form.cleaned_data["order_direction"],
form.cleaned_data["requested_fields"]
)
def retrieve(self, request, thread_id=None):
"""
Implements the GET method for thread ID
"""
requested_fields = request.GET.get('requested_fields')
return Response(get_thread(request, thread_id, requested_fields))
def create(self, request):
"""
Implements the POST method for the list endpoint as described in the
class docstring.
"""
return Response(create_thread(request, request.data))
def partial_update(self, request, thread_id):
"""
Implements the PATCH method for the instance endpoint as described in
the class docstring.
"""
if request.content_type != MergePatchParser.media_type:
raise UnsupportedMediaType(request.content_type)
return Response(update_thread(request, thread_id, request.data))
def destroy(self, request, thread_id):
"""
Implements the DELETE method for the instance endpoint as described in
the class docstring
"""
delete_thread(request, thread_id)
return Response(status=204)
@view_auth_classes()
class CommentViewSet(DeveloperErrorViewMixin, ViewSet):
    """
    **Use Cases**
        Retrieve the list of comments in a thread, retrieve the list of
        child comments for a response comment, create a comment, or modify
        or delete an existing comment.
    **Example Requests**:
        GET /api/discussion/v1/comments/?thread_id=0123456789abcdef01234567
        GET /api/discussion/v1/comments/2123456789abcdef01234555
        POST /api/discussion/v1/comments/
        {
            "thread_id": "0123456789abcdef01234567",
            "raw_body": "Body text"
        }
        PATCH /api/discussion/v1/comments/comment_id
        {"raw_body": "Edited text"}
        Content Type: "application/merge-patch+json"
        DELETE /api/discussion/v1/comments/comment_id
    **GET Comment List Parameters**:
        * thread_id (required): The thread to retrieve comments for
        * endorsed: If specified, only retrieve the endorsed or non-endorsed
          comments accordingly. Required for a question thread, must be absent
          for a discussion thread.
        * page: The (1-indexed) page to retrieve (default is 1)
        * page_size: The number of items per page (default is 10, max is 100)
        * requested_fields: (list) Indicates which additional fields to return
          for each thread. (supports 'profile_image')
    **GET Child Comment List Parameters**:
        * comment_id (required): The comment to retrieve child comments for
        * page: The (1-indexed) page to retrieve (default is 1)
        * page_size: The number of items per page (default is 10, max is 100)
        * requested_fields: (list) Indicates which additional fields to return
          for each thread. (supports 'profile_image')
    **POST Parameters**:
        * thread_id (required): The thread to post the comment in
        * parent_id: The parent comment of the new comment. Can be null or
          omitted for a comment that should be directly under the thread
        * raw_body: The comment's raw body text
    **PATCH Parameters**:
        raw_body is accepted with the same meaning as in a POST request
        If "application/merge-patch+json" is not the specified content type,
        a 415 error is returned.
    **GET Response Values**:
        * results: The list of comments; each item in the list has the same
          fields as the POST response below
        * next: The URL of the next page (or null if first page)
        * previous: The URL of the previous page (or null if last page)
    **POST/PATCH Response Values**:
        * id: The id of the comment
        * thread_id: The id of the comment's thread
        * parent_id: The id of the comment's parent
        * author: The username of the comment's author, or None if the
          comment is anonymous
        * author_label: A label indicating whether the author has a special
          role in the course, either "Staff" for moderators and
          administrators or "Community TA" for community TAs
        * created_at: The ISO 8601 timestamp for the creation of the comment
        * updated_at: The ISO 8601 timestamp for the last modification of
          the comment, which may not have been an update of the body
        * raw_body: The comment's raw body text without any rendering applied
        * endorsed: Boolean indicating whether the comment has been endorsed
          (by a privileged user or, for a question thread, the thread
          author)
        * endorsed_by: The username of the endorsing user, if available
        * endorsed_by_label: A label indicating whether the endorsing user
          has a special role in the course (see author_label)
        * endorsed_at: The ISO 8601 timestamp for the endorsement, if
          available
        * abuse_flagged: Boolean indicating whether the requesting user has
          flagged the comment for abuse
        * voted: Boolean indicating whether the requesting user has voted
          for the comment
        * vote_count: The number of votes for the comment
        * children: The list of child comments (with the same format)
        * editable_fields: The fields that the requesting user is allowed to
          modify with a PATCH request
    **DELETE Response Value**
        No content is returned for a DELETE request
    """
    lookup_field = "comment_id"
    parser_classes = (JSONParser, MergePatchParser,)
    def list(self, request):
        """
        Implements the GET method for the list endpoint as described in the
        class docstring.
        """
        form = CommentListGetForm(request.GET)
        if not form.is_valid():
            raise ValidationError(form.errors)
        return get_comment_list(
            request,
            form.cleaned_data["thread_id"],
            form.cleaned_data["endorsed"],
            form.cleaned_data["page"],
            form.cleaned_data["page_size"],
            form.cleaned_data["requested_fields"],
        )
    def retrieve(self, request, comment_id=None):
        """
        Implements the GET method for comments against response ID
        """
        form = CommentGetForm(request.GET)
        if not form.is_valid():
            raise ValidationError(form.errors)
        return get_response_comments(
            request,
            comment_id,
            form.cleaned_data["page"],
            form.cleaned_data["page_size"],
            form.cleaned_data["requested_fields"],
        )
    def create(self, request):
        """
        Implements the POST method for the list endpoint as described in the
        class docstring.
        """
        return Response(create_comment(request, request.data))
    def destroy(self, request, comment_id):
        """
        Implements the DELETE method for the instance endpoint as described in
        the class docstring
        """
        delete_comment(request, comment_id)
        # Named constant instead of a bare 204, for consistency with the
        # status.HTTP_* constants used elsewhere in this module.
        return Response(status=status.HTTP_204_NO_CONTENT)
    def partial_update(self, request, comment_id):
        """
        Implements the PATCH method for the instance endpoint as described in
        the class docstring.
        """
        if request.content_type != MergePatchParser.media_type:
            raise UnsupportedMediaType(request.content_type)
        return Response(update_comment(request, comment_id, request.data))
class RetireUserView(APIView):
    """
    **Use Cases**
        A superuser or the user with the settings.RETIREMENT_SERVICE_WORKER_USERNAME
        can "retire" the user's data from the comments service, which will remove
        personal information and blank all posts / comments the user has made.
    **Example Requests**:
        POST /api/discussion/v1/retire_user/
        {
            "username": "an_original_user_name"
        }
    **Example Response**:
        Empty string
    """
    # Only JWT-authenticated callers that pass the CanRetireUser permission
    # check may hit this endpoint.
    authentication_classes = (JwtAuthentication,)
    permission_classes = (permissions.IsAuthenticated, CanRetireUser)
    def post(self, request):
        """
        Implements the retirement endpoint.
        """
        username = request.data['username']
        try:
            retirement = UserRetirementStatus.get_retirement_for_retirement_action(username)
            cc_user = comment_client.User.from_django_user(retirement.user)
            # Send the retired username to the forums service, as the service cannot generate
            # the retired username itself. Forums users are referenced by Django auth_user id.
            cc_user.retire(retirement.retired_username)
        except UserRetirementStatus.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
        except comment_client.CommentClientRequestError as exc:
            # 404s from client service for users that don't exist there are expected
            # we can just pass those up.
            if exc.status_code == 404:
                return Response(status=status.HTTP_404_NOT_FOUND)
            raise
        # NOTE(review): the broad catch appears deliberate (pylint-disabled) so
        # unexpected retirement errors surface as a 500 response body instead
        # of an unhandled traceback.
        except Exception as exc:  # pylint: disable=broad-except
            return Response(text_type(exc), status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return Response(status=status.HTTP_204_NO_CONTENT)
|
fearlessspider/python-social-auth | refs/heads/master | examples/tornado_example/app.py | 58 | import sys
sys.path.append('../..')
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
from social.apps.tornado_app.models import init_social
from social.apps.tornado_app.routes import SOCIAL_AUTH_ROUTES
import settings
# SQLAlchemy setup: file-backed SQLite database with a thread-local session
# and a declarative base shared by the app models and python-social-auth.
engine = create_engine('sqlite:///test.db', echo=False)
session = scoped_session(sessionmaker(bind=engine))
Base = declarative_base()
class MainHandler(tornado.web.RequestHandler):
    """Serve the landing page (templates/home.html)."""
    def get(self):
        self.render('templates/home.html')
class DoneHandler(tornado.web.RequestHandler):
    """Show the logged-in user's details after a completed login."""
    def get(self, *args, **kwargs):
        from models import User
        # 'user_id' cookie is presumably set during the social-auth login
        # flow — TODO confirm against the auth pipeline configuration.
        user_id = self.get_secure_cookie('user_id')
        user = session.query(User).get(int(user_id))
        self.render('templates/done.html', user=user)
class LogoutHandler(tornado.web.RequestHandler):
    """Send the visitor back to the landing page."""
    def get(self):
        # Bug fix: redirect() is a method of RequestHandler, not of
        # self.request (tornado's HTTPServerRequest has no such attribute),
        # so the original self.request.redirect('/') raised AttributeError.
        self.redirect('/')
tornado.options.parse_command_line()
# Collect every non-dunder attribute of the settings module into the dict
# passed to both tornado and python-social-auth.
tornado_settings = dict((k, getattr(settings, k)) for k in dir(settings)
                        if not k.startswith('__'))
# Social-auth routes are mounted alongside the three app handlers.
application = tornado.web.Application(SOCIAL_AUTH_ROUTES + [
    (r'/', MainHandler),
    (r'/done/', DoneHandler),
    (r'/logout/', LogoutHandler),
], cookie_secret='adb528da-20bb-4386-8eaf-09f041b569e0',
   **tornado_settings)
def main():
    """Wire up the social-auth models and serve the app on port 8000."""
    init_social(Base, session, tornado_settings)
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(8000)
    tornado.ioloop.IOLoop.instance().start()
def syncdb():
    """Create all database tables (app models plus social-auth models)."""
    from models import user_syncdb
    init_social(Base, session, tornado_settings)
    Base.metadata.create_all(engine)
    user_syncdb()
if __name__ == '__main__':
    # "python app.py syncdb" initializes the database; any other invocation
    # starts the HTTP server.
    if len(sys.argv) > 1 and sys.argv[1] == 'syncdb':
        syncdb()
    else:
        main()
|
yorung/XLE | refs/heads/master | Foreign/FreeType/src/tools/docmaker/docbeauty.py | 877 | #!/usr/bin/env python
#
# DocBeauty (c) 2003, 2004, 2008 David Turner <david@freetype.org>
#
# This program is used to beautify the documentation comments used
# in the FreeType 2 public headers.
#
from sources import *
from content import *
from utils import *
import utils
import sys, os, time, string, getopt
# Shared processor that parses the markup inside documentation comments.
content_processor = ContentProcessor()
def beautify_block( block ):
    """Replace one documentation block's lines with a freshly formatted
    C-style comment border around its beautified markup."""
    if block.content:
        content_processor.reset()
        markups = content_processor.process_content( block.content )
        text = []
        first = 1
        for markup in markups:
            text.extend( markup.beautify( first ) )
            first = 0
    # now beautify the documentation "borders" themselves
    # NOTE(review): if block.content is empty, "text" is never bound and the
    # loop below raises NameError — this relies on every block having content.
    lines = [" /*************************************************************************"]
    for l in text:
        lines.append( " *" + l )
    lines.append( " */" )
    block.lines = lines
def usage():
    """Print the command-line usage summary for docbeauty."""
    # Parenthesized single-argument print works identically under both
    # Python 2 (where it is a parenthesized expression) and Python 3,
    # unlike the old print-statement form used previously.
    print( "\nDocBeauty 0.1 Usage information\n" )
    print( "  docbeauty [options] file1 [file2 ...]\n" )
    print( "using the following options:\n" )
    print( "  -h : print this page" )
    print( "  -b : backup original files with the 'orig' extension" )
    print( "" )
    print( "  --backup : same as -b" )
def main( argv ):
    """main program loop

    Parses command-line options, then beautifies the documentation blocks
    of every input file, writing the result next to the original as
    "<name>.new" (the original file is left untouched)."""
    global output_dir
    try:
        opts, args = getopt.getopt( sys.argv[1:], \
                                    "hb", \
                                    ["help", "backup"] )
    except getopt.GetoptError:
        usage()
        sys.exit( 2 )
    if args == []:
        usage()
        sys.exit( 1 )
    # process options
    #
    output_dir = None
    do_backup  = None
    for opt in opts:
        if opt[0] in ( "-h", "--help" ):
            usage()
            sys.exit( 0 )
        if opt[0] in ( "-b", "--backup" ):
            # NOTE(review): do_backup is set but never consulted below — the
            # documented backup behaviour is not actually implemented.
            do_backup = 1
    # create context and processor
    source_processor = SourceProcessor()
    # retrieve the list of files to process
    file_list = make_file_list( args )
    for filename in file_list:
        source_processor.parse_file( filename )
        for block in source_processor.blocks:
            beautify_block( block )
        new_name = filename + ".new"
        ok       = None
        try:
            file = open( new_name, "wt" )
            for block in source_processor.blocks:
                for line in block.lines:
                    file.write( line )
                    file.write( "\n" )
            file.close()
        except:
            # NOTE(review): bare except silently swallows write errors, and
            # the "ok" flag is assigned but never checked afterwards.
            ok = 0
# if called from the command line
#
if __name__ == '__main__':
main( sys.argv )
# eof
|
dllsf/odootest | refs/heads/master | addons/web_graph/__openerp__.py | 376 | {
'name': 'Graph Views',
'category': 'Hidden',
'description': """
Graph Views for Web Client.
===========================
* Parse a <graph> view but allows changing dynamically the presentation
* Graph Types: pie, lines, areas, bars, radar
* Stacked/Not Stacked for areas and bars
* Legends: top, inside (top/left), hidden
* Features: download as PNG or CSV, browse data grid, switch orientation
* Unlimited "Group By" levels (not stacked), two cross level analysis (stacked)
""",
'version': '3.0',
'depends': ['web'],
'data' : [
'views/web_graph.xml',
],
'qweb' : [
'static/src/xml/*.xml',
],
'auto_install': True
}
|
smcantab/pele | refs/heads/master | pele/utils/events.py | 5 | """
adapted from http://code.activestate.com/recipes/577980-improved-signalsslots-implementation-in-python/
A signal/slot implementation
File: signal.py
Author: Thiago Marcos P. Santos
Author: Christopher S. Case
Author: David H. Bronke
Created: August 28, 2008
Updated: December 12, 2011
License: MIT
"""
import inspect
from weakref import WeakSet, WeakKeyDictionary
__all__ = ["Signal"]
class Signal(object):
    """Minimal signal/slot dispatcher (observer pattern).

    Example
    -------
    A simple callback registration looks like:

    >>> event = Signal()
    >>> event.connect(myfunc)
    >>> event("hello")          # raise the signal
    >>> event.disconnect(myfunc)

    Slots are stored through weak references, so connecting a bound method
    of a temporary object (``event.connect(MyClass().myfunc)``) does not keep
    that object alive: the instance is collected right after the call and
    the slot silently disappears.  Keep a reference to the receiver yourself.
    """

    def __init__(self):
        self._functions = WeakSet()
        self._methods = WeakKeyDictionary()

    def __call__(self, *args, **kargs):
        """Raise the signal, invoking every connected slot."""
        # Plain functions first ...
        for func in self._functions:
            func(*args, **kargs)
        # ... then bound methods, re-binding each stored function to its owner.
        for obj, funcs in self._methods.items():
            for func in funcs:
                func(obj, *args, **kargs)

    def connect(self, slot):
        """Register a plain function or a bound method as a slot."""
        if inspect.ismethod(slot):
            owner = slot.__self__
            if owner not in self._methods:
                self._methods[owner] = set()
            self._methods[owner].add(slot.__func__)
        else:
            self._functions.add(slot)

    def disconnect(self, slot):
        """Unregister a previously connected slot."""
        if inspect.ismethod(slot):
            owner = slot.__self__
            if owner in self._methods:
                self._methods[owner].remove(slot.__func__)
        else:
            self._functions.discard(slot)

    def clear(self):
        """Drop every registered slot."""
        self._functions.clear()
        self._methods.clear()
|
JavaRabbit/CS496_capstone | refs/heads/master | appengine/flexible/hello_world_django/helloworld/views.py | 9 | #!/usr/bin/env python
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.http import HttpResponse
def index(request):
    """Return a plain-text greeting; the request object is not inspected."""
    greeting = 'Hello, World. This is Django running on Google App Engine'
    return HttpResponse(greeting)
|
kevinmel2000/brython | refs/heads/master | www/tests/test_null.py | 18 | from javascript import JSObject
# Brython-specific checks: the JavaScript ``null`` value wrapped by JSObject
# must compare equal to (and be identical with) Python's None.
assert(JSObject(null) == None)
assert(JSObject(null) is None)
assert(None is None)
assert(None is JSObject(null))
assert(None == JSObject(null))
assert(JSObject(null) is JSObject(null))
print("All tests passed")
|
boxu0001/practice | refs/heads/master | py3/S123_BestTimeBuySell.py | 1 | # Say you have an array for which the ith element is the price of a given stock on day i.
# Design an algorithm to find the maximum profit. You may complete at most two transactions.
# Note: You may not engage in multiple transactions at the same time (i.e., you must sell the stock before you buy again).
# Example 1:
# Input: [3,3,5,0,0,3,1,4]
# Output: 6
# Explanation: Buy on day 4 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3.
# Then buy on day 7 (price = 1) and sell on day 8 (price = 4), profit = 4-1 = 3.
# Example 2:
# Input: [1,2,3,4,5]
# Output: 4
# Explanation: Buy on day 1 (price = 1) and sell on day 5 (price = 5), profit = 5-1 = 4.
# Note that you cannot buy on day 1, buy on day 2 and sell them later, as you are
# engaging multiple transactions at the same time. You must sell before buying again.
# Example 3:
# Input: [7,6,4,3,1]
# Output: 0
# Explanation: In this case, no transaction is done, i.e. max profit = 0.
class Solution:
    """LeetCode 123 — Best Time to Buy and Sell Stock III.

    Maximum profit from at most two non-overlapping buy/sell transactions.
    Both implementations run in O(n): the optimal answer always splits into
    the best single trade ending at or before some day i plus the best
    single trade starting at or after day i.
    """

    def maxProfit(self, prices: List[int]) -> int:
        """Two-pass DP with explicit left/right best-profit tables."""
        best = 0
        n = len(prices)
        left_best = [0] * n    # left_best[i]: best single trade within prices[:i+1]
        right_best = [0] * n   # right_best[j]: best single trade within prices[j:]
        lowest = prices[0] if n > 0 else None
        highest = prices[-1] if n > 0 else None
        for i in range(1, n):
            # Forward sweep: a falling price can only improve the buy point.
            if prices[i] <= prices[i - 1]:
                left_best[i] = left_best[i - 1]
                lowest = min(lowest, prices[i])
            else:
                left_best[i] = max(left_best[i - 1], prices[i] - lowest)
            # Backward sweep over the mirrored index in the same loop.
            j = n - 1 - i
            if prices[j] >= prices[j + 1]:
                right_best[j] = right_best[j + 1]
                highest = max(highest, prices[j])
            else:
                right_best[j] = max(right_best[j + 1], highest - prices[j])
        for k in range(n):
            best = max(best, left_best[k] + right_best[k])
        return best

    def maxProfit2(self, prices: List[int]) -> int:
        """Variant using a prefix table plus a single backward scan."""
        # prefix_best[i] is the best single-trade profit using prices[:i]
        # (strictly before day i), so prefix_best[0] == prefix_best[1] == 0.
        prefix_best = [0]
        cheapest = prices[0] if prices else None
        forward_gain = 0
        for price in prices:
            if price > cheapest:
                forward_gain = price - cheapest if price - cheapest > forward_gain else forward_gain
            else:
                cheapest = price
            prefix_best += [forward_gain]
        peak = prices[-1] if prices else None
        answer = forward_gain
        backward_gain = 0
        for idx in range(len(prices) - 1, -1, -1):
            if prices[idx] < peak:
                backward_gain = peak - prices[idx] if peak - prices[idx] > backward_gain else backward_gain
            else:
                peak = prices[idx]
            # backward_gain: best trade starting at idx;
            # prefix_best[idx]: best trade strictly before idx.
            if backward_gain + prefix_best[idx] > answer:
                answer = backward_gain + prefix_best[idx]
        return answer
# Summary:
# The analysis compares the best result of one transaction vs. two.
# 1. It is easy to show the optimal answer can always be split into two
#    transactions (w.l.o.g. define price_before = prices[0] and
#    price_after = prices[-1] just outside the array).
# 2. best_result = best_result_before[i] + best_result_after[i], both
#    inclusive of i: find the best trade to the left of i (including i) and
#    the best trade to the right of i (including i); their sum is a
#    candidate for the optimum.
# 3. Dynamic programming caches fLeft and fRight, where fLeft[i] is the best
#    trade left of i and fRight[j] the best trade right of j.
#    Left side: if prices[i] < prices[i-1], then fLeft[i] == fLeft[i-1],
#    because a low point cannot be a better sell point — but it can become a
#    new buy point: leftMin = min(leftMin, prices[i]).
#    Right side: if prices[j] >= prices[j+1], then fRight[j] == fRight[j+1],
#    because a high point cannot be a better buy point — but it can become a
#    new sell point: rightMax = max(rightMax, prices[j]); |
CapOM/ChromiumGStreamerBackend | refs/heads/master | build/win/use_ansi_codes.py | 64 | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints whether the terminal is likely to understand ANSI codes."""
import os
# Add more terminals here as needed.
# Parenthesized print keeps this runnable under both Python 2 and Python 3.
print('ANSICON' in os.environ)
|
DaviKaur/LibreHatti | refs/heads/master | src/librehatti/reports/models.py | 4 | from django.db import models
class SavedRegisters(models.Model):
    """A saved register report: a title plus the fields chosen for it."""
    # Human-readable name of the saved register.
    title = models.CharField(max_length = 200)
    # Serialized list of the selected fields (format defined by the callers
    # that populate this column — TODO confirm).
    selected_fields = models.CharField(max_length = 1000)
def __unicode__(self):
return self.title |
ajdavis/tornado | refs/heads/master | tornado/test/import_test.py | 8 | # flake8: noqa
from __future__ import absolute_import, division, print_function
from tornado.test.util import unittest
class ImportTest(unittest.TestCase):
    """Smoke tests that import every tornado module, catching syntax errors
    even in modules with no other test coverage."""
    def test_import_everything(self):
        # Some of our modules are not otherwise tested. Import them
        # all (unless they have external dependencies) here to at
        # least ensure that there are no syntax errors.
        import tornado.auth
        import tornado.autoreload
        import tornado.concurrent
        import tornado.escape
        import tornado.gen
        import tornado.http1connection
        import tornado.httpclient
        import tornado.httpserver
        import tornado.httputil
        import tornado.ioloop
        import tornado.iostream
        import tornado.locale
        import tornado.log
        import tornado.netutil
        import tornado.options
        import tornado.process
        import tornado.simple_httpclient
        import tornado.stack_context
        import tornado.tcpserver
        import tornado.tcpclient
        import tornado.template
        import tornado.testing
        import tornado.util
        import tornado.web
        import tornado.websocket
        import tornado.wsgi
    # for modules with dependencies, if those dependencies can be loaded,
    # load them too.
    def test_import_pycurl(self):
        try:
            import pycurl  # type: ignore
        except ImportError:
            pass
        else:
            import tornado.curl_httpclient
    def test_import_aliases(self):
        # Ensure we don't delete formerly-documented aliases accidentally.
        import tornado.ioloop, tornado.gen, tornado.util
        self.assertIs(tornado.ioloop.TimeoutError, tornado.util.TimeoutError)
        self.assertIs(tornado.gen.TimeoutError, tornado.util.TimeoutError)
|
mcopik/Elemental | refs/heads/master | examples/interface/PNorm.py | 2 | # A small SOCP formulation of minimizing the p-norm of a vector subject to
# constraints (in this trivial case, maximize the two-norm subject to the
# entire vector being fixed). This example originally caused the ninth SOCP
# IPM KKT system to be excessively ill-conditioned and was reported by
# Steven Diamond via CVXPY. The original script was:
#
# from cvxpy import *
# x = Variable(3)
# prob = Problem(Maximize(pnorm(x,.5)),[x==[1.1,2,.1]])
# prob.solve(verbose=True, solver=ELEMENTAL)
#
# which should have a solution of roughly 7.72423. The problem data used
# in the following script can be found by then running
#
# data = prob.get_problem_data(ELEMENTAL)
# print( data['A'] )
# print( data['G'] )
# print( data['b'] )
# print( data['c'] )
# print( data['h'] )
#
# which has three SOCs of size three.
import El
# Cone metadata: three second-order cones, each of dimension 3.  orders[i]
# holds the size of the cone containing entry i; firstInds[i] holds the index
# of that cone's first entry.
orders = El.DistMultiVec(El.iTag)
firstInds = El.DistMultiVec(El.iTag)
orders.Resize(9,1)
firstInds.Resize(9,1)
for c in xrange(0,3):
  for i in xrange(0,3):
    orders.Set(3*c+i,0,3)
    firstInds.Set(3*c+i,0,3)
# Sparse equality-constraint matrix A (4 x 7) for A x = b.
A = El.DistSparseMatrix(El.dTag)
A.Resize(4,7)
A.Reserve(7)
A.QueueUpdate( 1, 0, 1, passive=True )
A.QueueUpdate( 2, 1, 1, passive=True )
A.QueueUpdate( 3, 2, 1, passive=True )
A.QueueUpdate( 0, 3, -1, passive=True )
A.QueueUpdate( 0, 4, 1, passive=True )
A.QueueUpdate( 0, 5, 1, passive=True )
A.QueueUpdate( 0, 6, 1, passive=True )
A.ProcessLocalQueues()
# Sparse cone-constraint matrix G (9 x 7): G x + s = h with s in the cones.
G = El.DistSparseMatrix(El.dTag)
G.Resize(9,7)
G.Reserve(15)
G.QueueUpdate( 0, 0, -1, passive=True )
G.QueueUpdate( 1, 0, -1, passive=True )
G.QueueUpdate( 3, 1, -1, passive=True )
G.QueueUpdate( 4, 1, -1, passive=True )
G.QueueUpdate( 6, 2, -1, passive=True )
G.QueueUpdate( 7, 2, -1, passive=True )
G.QueueUpdate( 0, 3, -1, passive=True )
G.QueueUpdate( 1, 3, 1, passive=True )
G.QueueUpdate( 3, 3, -1, passive=True )
G.QueueUpdate( 4, 3, 1, passive=True )
G.QueueUpdate( 6, 3, -1, passive=True )
G.QueueUpdate( 7, 3, 1, passive=True )
G.QueueUpdate( 2, 4, -2, passive=True )
G.QueueUpdate( 5, 5, -2, passive=True )
G.QueueUpdate( 8, 6, -2, passive=True )
G.ProcessLocalQueues()
# Right-hand sides and objective vector from the CVXPY problem data.
b = El.DistMultiVec(El.dTag)
b.Resize(4,1)
b.Set(0,0,-0.0)
b.Set(1,0, 1.1)
b.Set(2,0, 2.0)
b.Set(3,0, 0.1)
c = El.DistMultiVec(El.dTag)
c.Resize(7,1)
c.Set(0,0, 0)
c.Set(1,0, 0)
c.Set(2,0, 0)
c.Set(3,0,-1)
c.Set(4,0, 0)
c.Set(5,0, 0)
c.Set(6,0, 0)
h = El.DistMultiVec(El.dTag)
El.Zeros( h, 9, 1 )
# Primal/dual solution vectors, filled in by the IPM solve below.
x = El.DistMultiVec()
y = El.DistMultiVec()
z = El.DistMultiVec()
s = El.DistMultiVec()
ctrl = El.SOCPAffineCtrl_d()
ctrl.mehrotraCtrl.solveCtrl.progress = True
ctrl.mehrotraCtrl.progress = True
ctrl.mehrotraCtrl.outerEquil = True
ctrl.mehrotraCtrl.time = True
El.SOCPAffine(A,G,b,c,h,orders,firstInds,x,y,z,s,ctrl)
El.Finalize()
|
silenci/neutron | refs/heads/master | neutron/agent/l3/namespaces.py | 37 | # Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_log import log as logging
from neutron.agent.linux import ip_lib
from neutron.i18n import _LE
LOG = logging.getLogger(__name__)
NS_PREFIX = 'qrouter-'
INTERNAL_DEV_PREFIX = 'qr-'
EXTERNAL_DEV_PREFIX = 'qg-'
# TODO(Carl) It is odd that this file needs this. It is a dvr detail.
ROUTER_2_FIP_DEV_PREFIX = 'rfp-'
def build_ns_name(prefix, identifier):
    """Builds a namespace name from the given prefix and identifier

    :param prefix: The prefix which must end with '-' for legacy reasons
    :param identifier: The id associated with the namespace
    """
    return "%s%s" % (prefix, identifier)
def get_prefix_from_ns_name(ns_name):
    """Parses prefix from prefix-identifier

    :param ns_name: The name of a namespace
    :returns: The prefix ending with a '-' or None if there is no '-'
    """
    prefix, dash, _identifier = ns_name.partition('-')
    if dash:
        return prefix + dash
def get_id_from_ns_name(ns_name):
    """Parses identifier from prefix-identifier

    :param ns_name: The name of a namespace
    :returns: Identifier or None if there is no - to end the prefix
    """
    _prefix, dash, identifier = ns_name.partition('-')
    if dash:
        return identifier
class Namespace(object):
    """Wrapper around a Linux network namespace managed by the L3 agent."""
    def __init__(self, name, agent_conf, driver, use_ipv6):
        self.name = name
        self.ip_wrapper_root = ip_lib.IPWrapper()
        self.agent_conf = agent_conf
        self.driver = driver
        self.use_ipv6 = use_ipv6
    def create(self):
        """Create the namespace and enable IP forwarding inside it."""
        ip_wrapper = self.ip_wrapper_root.ensure_namespace(self.name)
        cmd = ['sysctl', '-w', 'net.ipv4.ip_forward=1']
        ip_wrapper.netns.execute(cmd)
        if self.use_ipv6:
            cmd = ['sysctl', '-w', 'net.ipv6.conf.all.forwarding=1']
            ip_wrapper.netns.execute(cmd)
    def delete(self):
        """Delete the namespace if the agent is configured to do so.
        Failures are logged rather than raised so remaining cleanup can
        continue.
        """
        if self.agent_conf.router_delete_namespaces:
            try:
                self.ip_wrapper_root.netns.delete(self.name)
            except RuntimeError:
                msg = _LE('Failed trying to delete namespace: %s')
                LOG.exception(msg, self.name)
class RouterNamespace(Namespace):
    """Namespace for a single router, named "qrouter-<router_id>"."""
    def __init__(self, router_id, agent_conf, driver, use_ipv6):
        self.router_id = router_id
        name = self._get_ns_name(router_id)
        super(RouterNamespace, self).__init__(
            name, agent_conf, driver, use_ipv6)
    @classmethod
    def _get_ns_name(cls, router_id):
        # qrouter- prefix plus the router's id.
        return build_ns_name(NS_PREFIX, router_id)
    def delete(self):
        """Unplug every device in the namespace, then delete the namespace.
        Devices are dispatched on their name prefix: qr- (internal),
        rfp- (router-to-fip veth) and qg- (external gateway).
        """
        ns_ip = ip_lib.IPWrapper(namespace=self.name)
        for d in ns_ip.get_devices(exclude_loopback=True):
            if d.name.startswith(INTERNAL_DEV_PREFIX):
                # device is on default bridge
                self.driver.unplug(d.name, namespace=self.name,
                                   prefix=INTERNAL_DEV_PREFIX)
            elif d.name.startswith(ROUTER_2_FIP_DEV_PREFIX):
                ns_ip.del_veth(d.name)
            elif d.name.startswith(EXTERNAL_DEV_PREFIX):
                self.driver.unplug(
                    d.name,
                    bridge=self.agent_conf.external_network_bridge,
                    namespace=self.name,
                    prefix=EXTERNAL_DEV_PREFIX)
        super(RouterNamespace, self).delete()
|
MarcusJones/py_ExergyUtilities | refs/heads/master | ExergyUtilities/util_logger.py | 2 | #===============================================================================
# Title of this Module
# Authors; MJones, Other
# 00 - 2012FEB05 - First commit
# 01 - 2012MAR17 - Update to ...
#===============================================================================
"""This module does A and B.
Etc.
"""
#===============================================================================
# Set up
#===============================================================================
# Standard:
from config import *
import logging.config
import unittest
LOGGING_CONFIG_1 = '%(funcName)-20s %(levelno)-3s: %(message)s'
LOGGING_CONFIG_2 = '%(module)-20s %(funcName)-20s %(levelno)-3s: %(message)s'
#===============================================================================
# Code
#===============================================================================
class LoggerCritical:
    """Context manager that silences the root logger below CRITICAL,
    restoring DEBUG on exit.

    NOTE(review): this class is immediately shadowed by the second
    LoggerCritical definition below, so it is effectively dead code.
    """
    def __enter__(self):
        my_logger = logging.getLogger()
        my_logger.setLevel("CRITICAL")
    def __exit__(self, type, value, traceback):
        my_logger = logging.getLogger()
        my_logger.setLevel("DEBUG")
class LoggerCritical:
    """Context manager that raises the root logger level to CRITICAL for its
    body and restores DEBUG on exit.  Returns self from __enter__ so it can
    be used as ``with LoggerCritical() as lc:``.
    """
    def __enter__(self):
        my_logger = logging.getLogger()
        # Bug fix: the previous body referenced an undefined name "logger"
        # (NameError at runtime) and set DEBUG, contradicting the class name
        # and the first definition above; silence below CRITICAL instead.
        my_logger.setLevel("CRITICAL")
        return self
    def __exit__(self, type, value, traceback):
        my_logger = logging.getLogger()
        my_logger.setLevel("DEBUG")
class NoLog:
    """Context manager that disables all logging output for its body."""
    def __enter__(self):
        # Bug fix: the previous body assigned logging.disabled, which merely
        # created an unused module attribute; logging.disable() is the
        # supported API for muting log output.
        logging.disable(logging.CRITICAL)
    def __exit__(self, type, value, traceback):
        logging.disable(logging.NOTSET)
#import requests
import logging
# NOTE(review): debugging leftovers — this prints every registered logger
# name as a side effect of importing the module; consider removing.
for key in logging.Logger.manager.loggerDict:
    print(key)
# Quiet the chatty PyGithub requester logger.
logging.getLogger("github.Requester").setLevel(logging.WARNING)
class LoggerDebug:
    """Context manager that pins the root logger to DEBUG level.

    Both entry and exit set DEBUG, matching the surrounding helpers'
    convention of restoring DEBUG as the default level.
    """
    def __enter__(self):
        logging.getLogger().setLevel("DEBUG")
    def __exit__(self, type, value, traceback):
        logging.getLogger().setLevel("DEBUG")
|
liorvh/golismero | refs/heads/master | golismero/api/data/vulnerability/information_disclosure/dns_zone_transfer.py | 8 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__license__= """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__all__ = ["DNSZoneTransfer"]
from .. import Vulnerability
from ... import identity
from ...resource.domain import Domain
#------------------------------------------------------------------------------
class DNSZoneTransfer(Vulnerability):
    """
    DNS Zone Transfer Enabled.
    When DNS zone transfers are enabled, the DNS server allows any user to
    download the entire set of domain names defined by that server. This may
    help an adversary to gather information prior to an attack.
    The details on how to disable zone transfers is specific to the DNS server
    being used. Please consult the documentation of your DNS server software
    on how to do this.
    """
    TARGET_CLASS = Domain
    DEFAULTS = Vulnerability.DEFAULTS.copy()
    DEFAULTS["level"] = "high"
    DEFAULTS["capec"] = "CAPEC-291"
    DEFAULTS["cwe"] = ("CWE-276", "CWE-16")
    DEFAULTS["cvss_base"] = "6.0"
    DEFAULTS["references"] = (
        "https://en.wikipedia.org/wiki/DNS_zone_transfer",
        "https://www.owasp.org/index.php/Information_Leakage",
    )
    #--------------------------------------------------------------------------
    def __init__(self, target, ns_server, port=53, **kwargs):
        """
        :param target: Root domain on which the DNS zone transfer attack is
            possible.
        :type target: Domain
        :param ns_server: Nameserver address.
        :type ns_server: str
        :param port: Open port in name server.
        :type port: int
        """
        # Type checks.
        if not isinstance(port, int):
            raise TypeError("Expected int, got '%s'" % type(port))
        if not isinstance(ns_server, basestring):
            raise TypeError("Expected str, got '%s'" % type(ns_server))
        # Value checks.  Bug fix: the message previously claimed "0-65535"
        # even though the check rejects port 0 (valid ports are 1-65535).
        if port < 1 or port > 65535:
            raise ValueError("Port value must be between the range: 1-65535.")
        # Store the properties.
        self.__ns_server = ns_server
        self.__port = port
        # Parent constructor.
        super(DNSZoneTransfer, self).__init__(target, **kwargs)
    __init__.__doc__ += Vulnerability.__init__.__doc__[
        Vulnerability.__init__.__doc__.find("\n    :keyword"):]
    #--------------------------------------------------------------------------
    @identity
    def ns_server(self):
        """
        :return: Nameserver address.
        :rtype: str
        """
        return self.__ns_server
    #--------------------------------------------------------------------------
    @identity
    def port(self):
        """
        :return: Open port in name server.
        :rtype: int
        """
        return self.__port
|
nagyistoce/edx-analytics-data-api-client | refs/heads/master | analyticsclient/module.py | 2 | import analyticsclient.constants.data_format as DF
class Module(object):
    """Accessor for module-level analytics data of a single course module."""

    def __init__(self, client, course_id, module_id):
        """
        Initialize the Module client.

        Arguments:
            client (analyticsclient.client.Client): The client to use to access the API.
            course_id (str): String identifying the course
            module_id (str): String identifying the module
        """
        self.client = client
        self.course_id = unicode(course_id)
        self.module_id = unicode(module_id)

    def _fetch(self, path_template, data_format):
        # Resolve the endpoint for this module and delegate to the API client.
        return self.client.get(path_template.format(self.module_id),
                               data_format=data_format)

    def answer_distribution(self, data_format=DF.JSON):
        """
        Get answer distribution data for a module.

        Arguments:
            data_format (str): Format in which to return data (default is JSON)
        """
        return self._fetch('problems/{0}/answer_distribution/', data_format)

    def grade_distribution(self, data_format=DF.JSON):
        """
        Get grade distribution data for a module.

        Arguments:
            data_format (str): Format in which to return data (default is JSON)
        """
        return self._fetch('problems/{0}/grade_distribution/', data_format)

    def sequential_open_distribution(self, data_format=DF.JSON):
        """
        Get open distribution data for a module.

        Arguments:
            data_format (str): Format in which to return data (default is JSON)
        """
        return self._fetch('problems/{0}/sequential_open_distribution/', data_format)

    def video_timeline(self, data_format=DF.JSON):
        """
        Get video segments/timeline for a module.

        Arguments:
            data_format (str): Format in which to return data (default is JSON)
        """
        return self._fetch('videos/{0}/timeline/', data_format)
|
jsilter/scipy | refs/heads/master | scipy/sparse/linalg/setup.py | 20 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for scipy.sparse.linalg."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('linalg', parent_package, top_path)

    # Sub-packages: iterative solvers, direct solvers, eigenvalue routines.
    for subpackage in ('isolve', 'dsolve', 'eigen'):
        config.add_subpackage(subpackage)

    config.add_data_dir('tests')
    config.add_data_dir('benchmarks')

    return config
if __name__ == '__main__':
    # Allow building this subpackage standalone via numpy.distutils.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
neilLasrado/frappe | refs/heads/develop | frappe/api.py | 4 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals

import base64
import hmac
import json

from six.moves.urllib.parse import urlparse, urlencode

import frappe
import frappe.client
import frappe.handler
from frappe.utils.response import build_response
from frappe import _
def handle():
	"""
	Handler for `/api` methods

	### Examples:

	`/api/method/{methodname}` will call a whitelisted method

	`/api/resource/{doctype}` will query a table
		examples:
		- `?fields=["name", "owner"]`
		- `?filters=[["Task", "name", "like", "%005"]]`
		- `?limit_start=0`
		- `?limit_page_length=20`

	`/api/resource/{doctype}/{name}` will point to a resource
		`GET` will return doclist
		`POST` will insert
		`PUT` will update
		`DELETE` will delete

	`/api/resource/{doctype}/{name}?run_method={method}` will run a whitelisted controller method
	"""
	# Authenticate the request before routing (OAuth bearer token or
	# api_key:api_secret pair in the Authorization header).
	validate_oauth()
	validate_auth_via_api_keys()

	# Split the URL into /api/<call>/<doctype>/<name>; missing parts stay None.
	parts = frappe.request.path[1:].split("/",3)
	call = doctype = name = None

	if len(parts) > 1:
		call = parts[1]
	if len(parts) > 2:
		doctype = parts[2]
	if len(parts) > 3:
		name = parts[3]

	if call=="method":
		# /api/method/<methodname>: dispatch to the whitelisted-method handler.
		frappe.local.form_dict.cmd = doctype
		return frappe.handler.handle()

	elif call=="resource":
		if "run_method" in frappe.local.form_dict:
			# /api/resource/<doctype>/<name>?run_method=<method>:
			# run a whitelisted controller method on the document.
			method = frappe.local.form_dict.pop("run_method")
			doc = frappe.get_doc(doctype, name)
			doc.is_whitelisted(method)

			if frappe.local.request.method=="GET":
				if not doc.has_permission("read"):
					frappe.throw(_("Not permitted"), frappe.PermissionError)
				frappe.local.response.update({"data": doc.run_method(method, **frappe.local.form_dict)})

			if frappe.local.request.method=="POST":
				if not doc.has_permission("write"):
					frappe.throw(_("Not permitted"), frappe.PermissionError)

				frappe.local.response.update({"data": doc.run_method(method, **frappe.local.form_dict)})
				# POST may have written data; persist it.
				frappe.db.commit()

		else:
			if name:
				# Single-document CRUD: GET reads, PUT updates, DELETE removes.
				if frappe.local.request.method=="GET":
					doc = frappe.get_doc(doctype, name)
					if not doc.has_permission("read"):
						raise frappe.PermissionError
					frappe.local.response.update({"data": doc})

				if frappe.local.request.method=="PUT":
					data = json.loads(frappe.local.form_dict.data)
					doc = frappe.get_doc(doctype, name)

					# "flags" is internal state and must not be set by clients.
					if "flags" in data:
						del data["flags"]

					# Not checking permissions here because it's checked in doc.save
					doc.update(data)
					frappe.local.response.update({
						"data": doc.save().as_dict()
					})
					frappe.db.commit()

				if frappe.local.request.method=="DELETE":
					# Not checking permissions here because it's checked in delete_doc
					frappe.delete_doc(doctype, name, ignore_missing=False)
					frappe.local.response.http_status_code = 202
					frappe.local.response.message = "ok"
					frappe.db.commit()

			elif doctype:
				# Collection endpoints: GET lists documents, POST inserts one.
				if frappe.local.request.method=="GET":
					if frappe.local.form_dict.get('fields'):
						frappe.local.form_dict['fields'] = json.loads(frappe.local.form_dict['fields'])
					# Cap unbounded listings to 20 rows unless the client asks otherwise.
					frappe.local.form_dict.setdefault('limit_page_length', 20)
					frappe.local.response.update({
						"data": frappe.call(frappe.client.get_list,
							doctype, **frappe.local.form_dict)})

				if frappe.local.request.method=="POST":
					data = json.loads(frappe.local.form_dict.data)
					data.update({
						"doctype": doctype
					})
					frappe.local.response.update({
						"data": frappe.get_doc(data).insert().as_dict()
					})
					frappe.db.commit()
			else:
				raise frappe.DoesNotExistError

	else:
		raise frappe.DoesNotExistError

	return build_response("json")
def validate_oauth():
	"""If the request carries a ``Bearer`` token, verify it against the
	OAuth provider and switch the session user to the token's owner."""
	from frappe.oauth import get_url_delimiter

	# set_user() resets frappe.local; remember the request payload up front.
	saved_form_dict = frappe.local.form_dict

	header = frappe.get_request_header("Authorization")
	header_parts = header.split(" ") if header else None
	if not (header_parts and header_parts[0].lower() == "bearer"):
		return

	from frappe.integrations.oauth2 import get_oauth_server

	bearer_token = header_parts[1]
	request = frappe.request
	parsed = urlparse(request.url)
	# Rebuild the request URI with the token appended as access_token=...
	rebuilt_uri = parsed.scheme + "://" + parsed.netloc + parsed.path + "?" + urlencode({"access_token": bearer_token})

	required_scopes = frappe.db.get_value(
		"OAuth Bearer Token", bearer_token, "scopes").split(get_url_delimiter())
	valid, oauthlib_request = get_oauth_server().verify_request(
		rebuilt_uri, request.method, request.get_data(), request.headers, required_scopes)

	if valid:
		frappe.set_user(frappe.db.get_value("OAuth Bearer Token", bearer_token, "user"))
		frappe.local.form_dict = saved_form_dict
def validate_auth_via_api_keys():
	"""Authenticate via an api_key:api_secret pair in the Authorization header
	and set the session user accordingly.

	Two header formats are supported:
	- ``Authorization: Basic base64(api_key:api_secret)``
	- ``Authorization: token api_key:api_secret``
	"""
	# NOTE: the previous "try: ... except Exception as e: raise e" wrapper was
	# a no-op (it only re-raised) and has been removed; exceptions propagate
	# unchanged.
	header = frappe.get_request_header("Authorization")
	authorization_header = header.split(" ") if header else None
	if not authorization_header:
		return

	if authorization_header[0] == 'Basic':
		token = frappe.safe_decode(base64.b64decode(authorization_header[1])).split(":")
		validate_api_key_secret(token[0], token[1])
	elif authorization_header[0] == 'token':
		token = authorization_header[1].split(":")
		validate_api_key_secret(token[0], token[1])
def validate_api_key_secret(api_key, api_secret):
	"""Switch the session user if ``api_secret`` matches the secret stored
	for the user owning ``api_key``.

	:param api_key: public API key sent by the client
	:param api_secret: secret paired with the API key
	"""
	user = frappe.db.get_value(
		doctype="User",
		filters={"api_key": api_key},
		fieldname=['name']
	)
	# set_user() resets frappe.local; remember the request payload.
	form_dict = frappe.local.form_dict
	user_secret = frappe.utils.password.get_decrypted_password("User", user, fieldname='api_secret')
	# Constant-time comparison so the secret cannot be recovered
	# byte-by-byte via response-timing differences.
	if user_secret and hmac.compare_digest(api_secret, user_secret):
		frappe.set_user(user)
		frappe.local.form_dict = form_dict
bobwalker99/Pydev | refs/heads/master | plugins/org.python.pydev.jython/Lib/encodings/mac_cyrillic.py | 593 | """ Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless encoder/decoder pair backed by the mac-cyrillic charmap."""

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder for the mac-cyrillic charmap."""

    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder for the mac-cyrillic charmap."""

    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer combining the charmap Codec with codecs.StreamWriter."""
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader combining the charmap Codec with codecs.StreamReader."""
### encodings module API
def getregentry():
    """Return the codec registration entry for 'mac-cyrillic'."""
    # Codec is stateless, so one shared instance serves both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='mac-cyrillic',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\u0410' # 0x80 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0x81 -> CYRILLIC CAPITAL LETTER BE
u'\u0412' # 0x82 -> CYRILLIC CAPITAL LETTER VE
u'\u0413' # 0x83 -> CYRILLIC CAPITAL LETTER GHE
u'\u0414' # 0x84 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0x85 -> CYRILLIC CAPITAL LETTER IE
u'\u0416' # 0x86 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0417' # 0x87 -> CYRILLIC CAPITAL LETTER ZE
u'\u0418' # 0x88 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0x89 -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0x8A -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0x8B -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0x8C -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0x8D -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0x8E -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0x8F -> CYRILLIC CAPITAL LETTER PE
u'\u0420' # 0x90 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0x91 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0x92 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0x93 -> CYRILLIC CAPITAL LETTER U
u'\u0424' # 0x94 -> CYRILLIC CAPITAL LETTER EF
u'\u0425' # 0x95 -> CYRILLIC CAPITAL LETTER HA
u'\u0426' # 0x96 -> CYRILLIC CAPITAL LETTER TSE
u'\u0427' # 0x97 -> CYRILLIC CAPITAL LETTER CHE
u'\u0428' # 0x98 -> CYRILLIC CAPITAL LETTER SHA
u'\u0429' # 0x99 -> CYRILLIC CAPITAL LETTER SHCHA
u'\u042a' # 0x9A -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u042b' # 0x9B -> CYRILLIC CAPITAL LETTER YERU
u'\u042c' # 0x9C -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042d' # 0x9D -> CYRILLIC CAPITAL LETTER E
u'\u042e' # 0x9E -> CYRILLIC CAPITAL LETTER YU
u'\u042f' # 0x9F -> CYRILLIC CAPITAL LETTER YA
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\u0490' # 0xA2 -> CYRILLIC CAPITAL LETTER GHE WITH UPTURN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\u0406' # 0xA7 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\u0402' # 0xAB -> CYRILLIC CAPITAL LETTER DJE
u'\u0452' # 0xAC -> CYRILLIC SMALL LETTER DJE
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u0403' # 0xAE -> CYRILLIC CAPITAL LETTER GJE
u'\u0453' # 0xAF -> CYRILLIC SMALL LETTER GJE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\u0456' # 0xB4 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u0491' # 0xB6 -> CYRILLIC SMALL LETTER GHE WITH UPTURN
u'\u0408' # 0xB7 -> CYRILLIC CAPITAL LETTER JE
u'\u0404' # 0xB8 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0454' # 0xB9 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0407' # 0xBA -> CYRILLIC CAPITAL LETTER YI
u'\u0457' # 0xBB -> CYRILLIC SMALL LETTER YI
u'\u0409' # 0xBC -> CYRILLIC CAPITAL LETTER LJE
u'\u0459' # 0xBD -> CYRILLIC SMALL LETTER LJE
u'\u040a' # 0xBE -> CYRILLIC CAPITAL LETTER NJE
u'\u045a' # 0xBF -> CYRILLIC SMALL LETTER NJE
u'\u0458' # 0xC0 -> CYRILLIC SMALL LETTER JE
u'\u0405' # 0xC1 -> CYRILLIC CAPITAL LETTER DZE
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\u040b' # 0xCB -> CYRILLIC CAPITAL LETTER TSHE
u'\u045b' # 0xCC -> CYRILLIC SMALL LETTER TSHE
u'\u040c' # 0xCD -> CYRILLIC CAPITAL LETTER KJE
u'\u045c' # 0xCE -> CYRILLIC SMALL LETTER KJE
u'\u0455' # 0xCF -> CYRILLIC SMALL LETTER DZE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u201e' # 0xD7 -> DOUBLE LOW-9 QUOTATION MARK
u'\u040e' # 0xD8 -> CYRILLIC CAPITAL LETTER SHORT U
u'\u045e' # 0xD9 -> CYRILLIC SMALL LETTER SHORT U
u'\u040f' # 0xDA -> CYRILLIC CAPITAL LETTER DZHE
u'\u045f' # 0xDB -> CYRILLIC SMALL LETTER DZHE
u'\u2116' # 0xDC -> NUMERO SIGN
u'\u0401' # 0xDD -> CYRILLIC CAPITAL LETTER IO
u'\u0451' # 0xDE -> CYRILLIC SMALL LETTER IO
u'\u044f' # 0xDF -> CYRILLIC SMALL LETTER YA
u'\u0430' # 0xE0 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xE1 -> CYRILLIC SMALL LETTER BE
u'\u0432' # 0xE2 -> CYRILLIC SMALL LETTER VE
u'\u0433' # 0xE3 -> CYRILLIC SMALL LETTER GHE
u'\u0434' # 0xE4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xE5 -> CYRILLIC SMALL LETTER IE
u'\u0436' # 0xE6 -> CYRILLIC SMALL LETTER ZHE
u'\u0437' # 0xE7 -> CYRILLIC SMALL LETTER ZE
u'\u0438' # 0xE8 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xE9 -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xEA -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xEB -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xEC -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xED -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xEE -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xEF -> CYRILLIC SMALL LETTER PE
u'\u0440' # 0xF0 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xF1 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xF2 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xF3 -> CYRILLIC SMALL LETTER U
u'\u0444' # 0xF4 -> CYRILLIC SMALL LETTER EF
u'\u0445' # 0xF5 -> CYRILLIC SMALL LETTER HA
u'\u0446' # 0xF6 -> CYRILLIC SMALL LETTER TSE
u'\u0447' # 0xF7 -> CYRILLIC SMALL LETTER CHE
u'\u0448' # 0xF8 -> CYRILLIC SMALL LETTER SHA
u'\u0449' # 0xF9 -> CYRILLIC SMALL LETTER SHCHA
u'\u044a' # 0xFA -> CYRILLIC SMALL LETTER HARD SIGN
u'\u044b' # 0xFB -> CYRILLIC SMALL LETTER YERU
u'\u044c' # 0xFC -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044d' # 0xFD -> CYRILLIC SMALL LETTER E
u'\u044e' # 0xFE -> CYRILLIC SMALL LETTER YU
u'\u20ac' # 0xFF -> EURO SIGN
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
kajigga/canvas-contrib | refs/heads/master | SIS_Integration/create_ssha_passwords/ssha_password.py | 5 | #!/usr/bin/env python
import base64
import hashlib
"""
This method generates a SSHA password. This generated password can be included
in a Canvas users.csv file. If you do this, you would use a column called
ssha_password rather than simply password
"""
def gen_ssha_password(password,salt):
final_hashed_pw = "{SSHA}%s" % base64.b64encode(gen_digested_password(password,salt)+salt)
# '{SSHA}NjJmOTIzY2RlODEwOWI2MWEzMjRmMDY3N2Q3YzBjYWZkYjllNjQ4MDEyMzU='
print 'final_hashed_pw',final_hashed_pw
return final_hashed_pw
"""
This method generates the sha1 hex of the password+salt.
"""
def gen_digested_password(pw,salt):
return hashlib.sha1('%s%s'%(pw,salt)).hexdigest()
"""
This method decodes a ssha-encoded password string, returning the
digest and salt
"""
def decode_ssha_password(ssha_password):
decoded = base64.b64decode(ssha_password.replace("{SSHA}",""))
digest = decoded[0:40]
salt = decoded[40:]
return digest,salt
"""
This method simply compares a given password to a ssha-encoded string.
"""
def compare_passwords(plaintext_password,ssha_password):
if not plaintext_password or not ssha_password:
return False
decoded = base64.b64decode(ssha_password.replace("{SSHA}",""))
digest = decoded[0:40]
salt = decoded[40:]
if not all((digest,salt)):
return False
digested_password = gen_digested_password(plaintext_password,salt)
print digest,digested_password
return digest == digested_password
if __name__ == '__main__':
    # Smoke test: encode a password and verify it round-trips.
    ps = gen_ssha_password('password','asdf')
    #decode_ssha_password(ps)
    print compare_passwords('password',ps)
|
eugena/django | refs/heads/master | django/http/__init__.py | 341 | from django.http.cookie import SimpleCookie, parse_cookie
from django.http.request import (
HttpRequest, QueryDict, RawPostDataException, UnreadablePostError,
)
from django.http.response import (
BadHeaderError, FileResponse, Http404, HttpResponse,
HttpResponseBadRequest, HttpResponseForbidden, HttpResponseGone,
HttpResponseNotAllowed, HttpResponseNotFound, HttpResponseNotModified,
HttpResponsePermanentRedirect, HttpResponseRedirect,
HttpResponseServerError, JsonResponse, StreamingHttpResponse,
)
from django.http.utils import conditional_content_removal
__all__ = [
'SimpleCookie', 'parse_cookie', 'HttpRequest', 'QueryDict',
'RawPostDataException', 'UnreadablePostError',
'HttpResponse', 'StreamingHttpResponse', 'HttpResponseRedirect',
'HttpResponsePermanentRedirect', 'HttpResponseNotModified',
'HttpResponseBadRequest', 'HttpResponseForbidden', 'HttpResponseNotFound',
'HttpResponseNotAllowed', 'HttpResponseGone', 'HttpResponseServerError',
'Http404', 'BadHeaderError', 'JsonResponse', 'FileResponse',
'conditional_content_removal',
]
|
julianprabhakar/eden_car | refs/heads/master | modules/unit_tests/s3/s3widgets.py | 6 | # S3 Widgets Unit Tests
#
# To run this script use:
# python web2py.py -S eden -M -R applications/eden/modules/unit_tests/s3/s3widgets.py
import unittest
from gluon import *
from gluon.storage import Storage
from s3.s3widgets import S3OptionsMatrixWidget
from gluon.contrib.simplejson.ordered_dict import OrderedDict
# =============================================================================
class TestS3OptionsMatrixWidget(unittest.TestCase):
    """ Test the S3OptionsMatrixWidget widget for correct output """
    # @todo: deprecate?

    def setUp(self):

        self.field = Storage(name='roles')

        self.rows = (
            ("Staff", "1", "2", "3", "4", "5"),
            ("Volunteers", "6", "7", "8", "9", "10"),
            ("Members", "11", "12", "13", "14", "15"),
            ("Warehouse", "16", "17", "18", "19", "20"),
            ("Assets", "21", "22", "23", "24", "25"),
            ("Projects", "26", "27", "28", "29", "30"),
            ("Assessments", "31", "32", "33", "34", "35"),
            ("Incidents", "36", "37", "38", "39", "40"),
        )
        self.columns = ("", "None", "Reader", "Data Entry", "Editor", "Super Editor")
        self.value = ("3", "24", "29", "39", "40")
        self.widget = S3OptionsMatrixWidget(self.rows, self.columns)

    # -------------------------------------------------------------------------
    def _expected_table(self, checked=()):
        """
        Build the TABLE the widget is expected to render: a header row
        from self.columns, then one body row per entry in self.rows with
        a checkbox cell per value. Checkboxes whose value appears in
        `checked` are rendered pre-selected.

        (Replaces ~380 lines of duplicated hand-written TABLE literals.)
        """
        header = TR(*[TH(label, _scope="col") for label in self.columns])
        body_rows = []
        for row in self.rows:
            cells = [TH(row[0], _scope="row")]
            for value in row[1:]:
                if value in checked:
                    cells.append(TD(INPUT(_type="checkbox",
                                          _name="roles",
                                          _value=value,
                                          value=True)))
                else:
                    cells.append(TD(INPUT(_type="checkbox",
                                          _name="roles",
                                          _value=value)))
            body_rows.append(TR(*cells))
        return TABLE(THEAD(header), TBODY(*body_rows))

    # -------------------------------------------------------------------------
    def test_widget(self):
        # Test with just the required parameters
        expected_result = self._expected_table()
        # failUnlessEqual is a deprecated alias; use assertEqual
        # (consistent with test_values below)
        self.assertEqual(str(self.widget(self.field, [])),
                         str(expected_result))

    # -------------------------------------------------------------------------
    def test_values(self):
        # Test the widget with values
        expected_result = self._expected_table(checked=self.value)
        self.assertEqual(str(self.widget(self.field, self.value)),
                         str(expected_result))
# =============================================================================
def run_suite(*test_classes):
""" Run the test suite """
loader = unittest.TestLoader()
suite = unittest.TestSuite()
for test_class in test_classes:
tests = loader.loadTestsFromTestCase(test_class)
suite.addTests(tests)
if suite is not None:
unittest.TextTestRunner(verbosity=2).run(suite)
return
if __name__ == "__main__":
run_suite(
TestS3OptionsMatrixWidget,
)
# END ========================================================================
|
alecalve/bitcoin | refs/heads/master | contrib/seeds/generate-seeds.py | 30 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from base64 import b32decode
from binascii import a2b_hex
import sys
import os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])

def name_to_ipv6(addr):
    """Convert an address string to its 16-byte IPv6-mapped representation.

    Supported forms:
      <onion>.onion  -> OnionCat prefix + base32-decoded payload
      IPv4 dotted    -> ::ffff: mapped address
      IPv6           -> parsed, with at most one '::' zero run
      0xDDCCBBAA     -> IPv4 in little-endian hex (old pnSeeds format)

    Raises ValueError for unparseable input.
    """
    if len(addr)>6 and addr.endswith('.onion'):
        vchAddr = b32decode(addr[0:-6], True)
        if len(vchAddr) != 16-len(pchOnionCat):
            # Fixed: report the offending input, not the decoded bytes.
            raise ValueError('Invalid onion %s' % addr)
        return pchOnionCat + vchAddr
    elif '.' in addr: # IPv4
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr: # IPv6
        sub = [[], []] # prefix, suffix
        x = 0
        addr = addr.split(':')
        for i,comp in enumerate(addr):
            if comp == '':
                if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
                    continue
                x += 1 # :: skips to suffix
                assert(x < 2)
            else: # two bytes per component
                val = int(comp, 16)
                sub[x].append(val >> 8)
                sub[x].append(val & 0xff)
        nullbytes = 16 - len(sub[0]) - len(sub[1])
        assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
        return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'): # IPv4-in-little-endian
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
    else:
        raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    '''Split a "<host>[:port]" specification (bracketed IPv6 supported)
    into (ipv6_bytearray, port); defaultport is used when no port is
    present.'''
    bracketed = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if bracketed:  # [ipv6] with an optional :port suffix
        host, port = bracketed.group(1), bracketed.group(2)
    elif s.count(':') > 1:  # bare ipv6 — all colons belong to the address
        host, port = s, ''
    else:
        host, _, port = s.partition(':')
    port = defaultport if not port else int(port)
    return (name_to_ipv6(host), port)
def process_nodes(g, f, structname, defaultport):
    '''Render every address line of file object f as a SeedSpec6
    initializer entry in the C array named structname, written to g.'''
    g.write('static SeedSpec6 %s[] = {\n' % structname)
    need_comma = False
    for raw in f:
        # Drop any trailing '#' comment and surrounding whitespace.
        entry = raw.split('#', 1)[0].strip()
        if not entry:
            continue
        if need_comma:
            g.write(',\n')
        need_comma = True
        host, port = parse_spec(entry, defaultport)
        hoststr = ','.join(('0x%02x' % b) for b in host)
        g.write(' {{%s}, %i}' % (hoststr, port))
    g.write('\n};\n')
def main():
    '''Entry point: read nodes_main.txt / nodes_test.txt from the
    directory given as argv[1] and emit chainparamsseeds.h on stdout.'''
    if len(sys.argv) < 2:
        print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
        sys.exit(1)
    out = sys.stdout
    indir = sys.argv[1]
    # Header guard plus an "autogenerated" banner for the C header.
    out.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n'
              '#define BITCOIN_CHAINPARAMSSEEDS_H\n'
              '/**\n'
              ' * List of fixed seed nodes for the bitcoin network\n'
              ' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n'
              ' *\n'
              ' * Each line contains a 16-byte IPv6 address and a port.\n'
              ' * IPv4 as well as onion addresses are wrapped inside an IPv6 address accordingly.\n'
              ' */\n')
    with open(os.path.join(indir, 'nodes_main.txt'), 'r', encoding="utf8") as f:
        process_nodes(out, f, 'pnSeed6_main', 8333)
    out.write('\n')
    with open(os.path.join(indir, 'nodes_test.txt'), 'r', encoding="utf8") as f:
        process_nodes(out, f, 'pnSeed6_test', 18333)
    out.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
|
ntkrnl/yacoin-p2pool | refs/heads/master | nattraverso/ipdiscover.py | 288 | """
Generic methods to retreive the IP address of the local machine.
TODO: Example
@author: Raphael Slinckx
@copyright: Copyright 2005
@license: LGPL
@contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>}
@version: 0.1.0
"""
__revision__ = "$id"
import random, socket, logging, itertools
from twisted.internet import defer, reactor
from twisted.internet.protocol import DatagramProtocol
from twisted.internet.error import CannotListenError
from nattraverso.utils import is_rfc1918_ip, is_bogus_ip
@defer.inlineCallbacks
def get_local_ip():
    """
    Returns a deferred which will be called with a
    2-uple (lan_flag, ip_address) :
        - lan_flag:
            - True if it's a local network (RFC1918)
            - False if it's a WAN address
        - ip_address is the actual ip address

    @return: A deferred called with the above defined tuple
    @rtype: L{twisted.internet.defer.Deferred}
    """
    # first we try a connected udp socket, then via multicast
    logging.debug("Resolving dns to get udp ip")
    try:
        ipaddr = yield reactor.resolve('A.ROOT-SERVERS.NET')
    except:
        # DNS resolution failed (e.g. offline); fall through to the
        # multicast probe below.
        pass
    else:
        # "Connecting" a UDP socket sends no traffic but makes the OS
        # pick the outgoing interface, whose address we then read.
        udpprot = DatagramProtocol()
        port = reactor.listenUDP(0, udpprot)
        udpprot.transport.connect(ipaddr, 7)
        localip = udpprot.transport.getHost().host
        port.stopListening()
        if is_bogus_ip(localip):
            # NOTE: Python 2 raise syntax — this module is Python 2 only.
            raise RuntimeError, "Invalid IP address returned"
        else:
            defer.returnValue((is_rfc1918_ip(localip), localip))
    logging.debug("Multicast ping to retrieve local IP")
    ipaddr = yield _discover_multicast()
    defer.returnValue((is_rfc1918_ip(ipaddr), ipaddr))
@defer.inlineCallbacks
def get_external_ip():
    """
    Returns a deferred which will be called with a
    2-uple (wan_flag, ip_address):
        - wan_flag:
            - True if it's a WAN address
            - False if it's a LAN address
            - None if it's a localhost (127.0.0.1) address
        - ip_address: the most accessible ip address of this machine

    @return: A deferred called with the above defined tuple
    @rtype: L{twisted.internet.defer.Deferred}
    """
    try:
        local, ipaddr = yield get_local_ip()
    except:
        # Could not determine any local address; report loopback.
        defer.returnValue((None, "127.0.0.1"))
    if not local:
        # The local address is already routable, so it is the WAN address.
        defer.returnValue((True, ipaddr))
    logging.debug("Got local ip, trying to use upnp to get WAN ip")
    import nattraverso.pynupnp
    try:
        ipaddr2 = yield nattraverso.pynupnp.get_external_ip()
    except:
        # UPnP lookup failed; best effort is the RFC1918 local address.
        defer.returnValue((False, ipaddr))
    else:
        defer.returnValue((True, ipaddr2))
class _LocalNetworkMulticast(DatagramProtocol):
    """Multicast listener that fires an event with the sender address of
    any packet echoing back our random nonce (i.e. our own ping)."""
    def __init__(self, nonce):
        from p2pool.util import variable
        # Payload we expect to see echoed back; anything else is ignored.
        self.nonce = nonce
        self.address_received = variable.Event()
    def datagramReceived(self, dgram, addr):
        """Datagram received, we callback the IP address."""
        logging.debug("Received multicast pong: %s; addr:%r", dgram, addr)
        if dgram != self.nonce:
            return
        self.address_received.happened(addr[0])
@defer.inlineCallbacks
def _discover_multicast():
    """
    Local IP discovery protocol via multicast:
        - Broadcast 3 ping multicast packet with "ping" in it
        - Wait for an answer
        - Retrieve the ip address from the returning packet, which is ours
    """
    nonce = str(random.randrange(2**64))
    p = _LocalNetworkMulticast(nonce)
    # Try random high ports until one is free, giving up after 10 failures.
    for attempt in itertools.count():
        port = 11000 + random.randint(0, 5000)
        try:
            mcast = reactor.listenMulticast(port, p)
        except CannotListenError:
            if attempt >= 10:
                raise
            continue
        else:
            break
    try:
        # 239.255.255.250 is the SSDP site-local multicast group.
        yield mcast.joinGroup('239.255.255.250', socket.INADDR_ANY)
        logging.debug("Sending multicast ping")
        # Send a few pings in case of packet loss; we answer ourselves,
        # and the source address of the echo is our local IP.
        for i in xrange(3):
            p.transport.write(nonce, ('239.255.255.250', port))
        # Wait up to 5 seconds for the echo.
        address, = yield p.address_received.get_deferred(5)
    finally:
        mcast.stopListening()
    defer.returnValue(address)
|
fnouama/intellij-community | refs/heads/master | python/testData/quickFixes/PyMoveAttributeToInitQuickFixTest/removePass.py | 83 | __author__ = 'ktisha'
class A:
def __init__(self):
pass
def foo(self):
self.<caret>b = 1 |
lavakyan/mstm-spectrum | refs/heads/master | mstm_studio/test/test_mstm_spectrum.py | 1 | # -*- coding: utf-8 -*-
#
# ----------------------------------------------------- #
# #
# This code is a part of T-matrix fitting project #
# Contributors: #
# L. Avakyan <laavakyan@sfedu.ru> #
# #
# ----------------------------------------------------- #
from __future__ import print_function
from __future__ import division
import pytest
from mstm_studio import mstm_spectrum
import numpy as np
import os
# ~ import numpy as np
# ~ from numpy.random import lognormal
# ~ from scipy import interpolate
# ~ import subprocess
# ~ import os # to delete files after calc.
# ~ import sys # to check whether running on Linux or Windows
# ~ import datetime
# ~ import time
# ~ import tempfile # to run mstm in temporary directory
# ~ try:
# ~ import matplotlib.pyplot as plt
# ~ except ImportError:
# ~ pass
# ~ # use input in both python2 and python3
# ~ try:
# ~ input = raw_input
# ~ except NameError:
# ~ pass
# ~ # use xrange in both python2 and python3
# ~ try:
# ~ xrange
# ~ except NameError:
# ~ xrange = range
def test_spheres():
    """Exercise Spheres.check_overlap() on disjoint, overlapping and
    nested sphere configurations."""
    print('Test Spheres')
    print('Overlap tests')
    spheres = mstm_spectrum.Spheres()
    print(' Test not overlapped... ')
    spheres.x, spheres.y, spheres.z = [-5, 5], [0, 0], [0, 0]
    spheres.a = [4, 4]
    assert not spheres.check_overlap()
    print(' Test overlapped... ')
    spheres.a = [5, 5]
    assert spheres.check_overlap()
    print(' Test nested... ')
    # Concentric spheres are nested, not overlapping, in either order.
    spheres.x = [0, 0]
    spheres.a = [2, 5]
    assert not spheres.check_overlap()
    spheres.a = [5, 3]
    assert not spheres.check_overlap()
def test_materials():
    """Construct Material objects from data files, constants, complex
    strings and explicit nk/eps arrays, then spot-check n and k values."""
    print('Test Materials')
    gold = mstm_spectrum.Material(os.path.join('..', 'nk', 'etaGold.txt'))
    silver = mstm_spectrum.Material(os.path.join('..', 'nk', 'etaSilver.txt'))
    glass = mstm_spectrum.Material('glass')
    const_n = mstm_spectrum.Material(1.5)
    complex_n = mstm_spectrum.Material('2.0+0.5j')
    from_nk = mstm_spectrum.Material(
        'mat7', wls=np.linspace(300, 800, 100),
        nk=np.linspace(-10, 5, 100) + 1j * np.linspace(0, 10, 100))
    from_eps = mstm_spectrum.Material(
        'mat7', wls=np.linspace(300, 800, 100),
        eps=np.linspace(-10, 5, 100) + 1j * np.linspace(0, 10, 100))
    assert np.isclose(gold.get_n(800), 0.15436829401)    # etaGold
    assert np.isclose(silver.get_n(800), 0.03604950826)  # etaSilver
    assert np.isclose(glass.get_n(800), 1.66)            # glass (constant)
    assert np.isclose(glass.get_k(800), 0.00)
    assert np.isclose(const_n.get_n(550), 1.5)           # n=1.5 material
    assert np.isclose(complex_n.get_n(550), 2.0)         # n=2.0+0.5j material
    assert np.isclose(complex_n.get_k(550), 0.5)
    assert np.isclose(from_nk.get_n(550), -2.5)          # nk material
    assert np.isclose(from_nk.get_k(550), 5.0)
    assert np.isclose(from_eps.get_n(550), 1.243014470)  # eps material
    assert np.isclose(from_eps.get_k(550), 2.011239667)
def test_SPR():
    # Integration test: SPR.simulate() runs the external MSTM binary,
    # so it needs the executable and the ../nk data files on disk.
    wls = np.linspace(300, 800, 100)
    # create SPR object
    spr = mstm_spectrum.SPR(wls)
    spr.environment_material = 'glass'
    # spr.set_spheres(SingleSphere(0.0, 0.0, 0.0, 25.0, 'etaGold.txt'))
    # Two spheres at the origin: gold r=10 nested inside silver r=12.
    spheres = mstm_spectrum.ExplicitSpheres(2, [0, 0, 0, 10, 0, 0, 0, 12],
                                            mat_filename=['../nk/etaGold.txt',
                                                          '../nk/etaSilver.txt'])
    # spheres = ExplicitSpheres(2, [0,0,0,20,0,0,0,21],
    #                           mat_filename='etaGold.txt')
    spr.set_spheres(spheres)
    # spr.set_spheres(LogNormalSpheres(27, 0.020, 0.9, 0.050 ))
    # calculate!
    # spr.command = ''
    _, exts = spr.simulate()
    # Spot-check extinction at three wavelengths against reference values.
    assert(np.allclose(
        (exts[20], exts[50], exts[-20]), (3.3279, 1.9906, 0.056079)))
|
devendermishrajio/nova | refs/heads/master | nova/tests/unit/api/openstack/compute/test_security_groups.py | 22 | # Copyright 2011 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from mox3 import mox
from oslo_config import cfg
from oslo_serialization import jsonutils
import webob
from nova.api.openstack.compute.legacy_v2.contrib import security_groups as \
secgroups_v2
from nova.api.openstack.compute import security_groups as \
secgroups_v21
from nova import compute
from nova.compute import power_state
from nova import context as context_maker
import nova.db
from nova import exception
from nova import objects
from nova import quota
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
CONF = cfg.CONF
FAKE_UUID1 = 'a47ae74e-ab08-447f-8eee-ffd43fc46c16'
FAKE_UUID2 = 'c6e6430a-6563-4efa-9542-5e93c9e97d18'
class AttrDict(dict):
    """Dict whose keys are also readable as attributes (d.k == d['k']).

    Used by the fixtures below to mimic database row objects that the
    API code accesses via attributes.
    """
    def __getattr__(self, k):
        # dict lookup raises KeyError, but the attribute protocol
        # requires AttributeError so that hasattr(), getattr(obj, k,
        # default) and copy/pickle protocol probing behave correctly.
        try:
            return self[k]
        except KeyError:
            raise AttributeError(k)
def security_group_request_template(**kwargs):
    """Build an API request body for a security group, filling in
    default name/description for keys the caller did not supply."""
    sg = dict(kwargs)
    for key, value in (('name', 'test'),
                       ('description', 'test-description')):
        sg.setdefault(key, value)
    return sg
def security_group_template(**kwargs):
    """Security-group fixture dict with tenant_id/name/description
    defaults; caller-supplied keys win."""
    sg = dict(kwargs)
    for key, value in (('tenant_id', '123'),
                       ('name', 'test'),
                       ('description', 'test-description')):
        sg.setdefault(key, value)
    return sg
def security_group_db(security_group, id=None):
    """Shape a security-group template like a DB row: rename tenant_id
    to project_id, optionally force an id, and guarantee the rules and
    instances lists exist. Returns an AttrDict."""
    attrs = dict(security_group)
    if 'tenant_id' in attrs:
        attrs['project_id'] = attrs.pop('tenant_id')
    if id is not None:
        attrs['id'] = id
    for list_key in ('rules', 'instances'):
        attrs.setdefault(list_key, [])
    return AttrDict(attrs)
def security_group_rule_template(**kwargs):
    """Security-group-rule fixture with tcp/22->22/parent-group-2
    defaults; caller-supplied keys win."""
    rule = dict(kwargs)
    for key, value in (('ip_protocol', 'tcp'),
                       ('from_port', 22),
                       ('to_port', 22),
                       ('parent_group_id', 2)):
        rule.setdefault(key, value)
    return rule
def security_group_rule_db(rule, id=None):
    """Shape a rule template like a DB row (AttrDict); the API-side
    ip_protocol key is stored as protocol. The id argument is accepted
    for symmetry with security_group_db but is not used."""
    attrs = dict(rule)
    if 'ip_protocol' in attrs:
        attrs['protocol'] = attrs.pop('ip_protocol')
    return AttrDict(attrs)
def return_server(context, server_id,
                  columns_to_join=None, use_slave=False):
    """Fake DB instance-get: a RUNNING server with the requested id."""
    fields = {'id': int(server_id),
              'power_state': 0x01,
              'host': "localhost",
              'uuid': FAKE_UUID1,
              'name': 'asdf'}
    return fake_instance.fake_db_instance(**fields)
def return_server_by_uuid(context, server_uuid,
                          columns_to_join=None,
                          use_slave=False):
    """Fake DB instance-get-by-uuid: a RUNNING server, id fixed to 1."""
    fields = {'id': 1,
              'power_state': 0x01,
              'host': "localhost",
              'uuid': server_uuid,
              'name': 'asdf'}
    return fake_instance.fake_db_instance(**fields)
def return_non_running_server(context, server_id, columns_to_join=None):
    """Fake DB instance-get: a server in SHUTDOWN power state."""
    fields = {'id': server_id,
              'power_state': power_state.SHUTDOWN,
              'uuid': FAKE_UUID1,
              'host': "localhost",
              'name': 'asdf'}
    return fake_instance.fake_db_instance(**fields)
def return_security_group_by_name(context, project_id, group_name):
    """Fake DB lookup: group id 1 with one instance attached."""
    attached = {'id': 1, 'uuid': FAKE_UUID1}
    return {'id': 1, 'name': group_name, "instances": [attached]}
def return_security_group_without_instances(context, project_id, group_name):
    """Fake DB lookup for a group with no instances attached."""
    return {'name': group_name, 'id': 1}
def return_server_nonexistent(context, server_id, columns_to_join=None):
    # Fake DB instance-get that always reports the server as missing.
    raise exception.InstanceNotFound(instance_id=server_id)
class TestSecurityGroupsV21(test.TestCase):
secgrp_ctl_cls = secgroups_v21.SecurityGroupController
server_secgrp_ctl_cls = secgroups_v21.ServerSecurityGroupController
secgrp_act_ctl_cls = secgroups_v21.SecurityGroupActionController
def setUp(self):
super(TestSecurityGroupsV21, self).setUp()
self.controller = self.secgrp_ctl_cls()
self.server_controller = self.server_secgrp_ctl_cls()
self.manager = self.secgrp_act_ctl_cls()
# This needs to be done here to set fake_id because the derived
# class needs to be called first if it wants to set
# 'security_group_api' and this setUp method needs to be called.
if self.controller.security_group_api.id_is_uuid:
self.fake_id = '11111111-1111-1111-1111-111111111111'
else:
self.fake_id = '11111111'
self.req = fakes.HTTPRequest.blank('')
self.admin_req = fakes.HTTPRequest.blank('', use_admin_context=True)
def _assert_no_security_groups_reserved(self, context):
"""Check that no reservations are leaked during tests."""
result = quota.QUOTAS.get_project_quotas(context, context.project_id)
self.assertEqual(result['security_groups']['reserved'], 0)
def _assert_security_groups_in_use(self, project_id, user_id, in_use):
context = context_maker.get_admin_context()
result = quota.QUOTAS.get_user_quotas(context, project_id, user_id)
self.assertEqual(result['security_groups']['in_use'], in_use)
def test_create_security_group(self):
sg = security_group_request_template()
res_dict = self.controller.create(self.req, {'security_group': sg})
self.assertEqual(res_dict['security_group']['name'], 'test')
self.assertEqual(res_dict['security_group']['description'],
'test-description')
def test_create_security_group_with_no_name(self):
sg = security_group_request_template()
del sg['name']
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req,
{'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_no_description(self):
sg = security_group_request_template()
del sg['description']
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_empty_description(self):
sg = security_group_request_template()
sg['description'] = ""
try:
self.controller.create(self.req, {'security_group': sg})
self.fail('Should have raised BadRequest exception')
except webob.exc.HTTPBadRequest as exc:
self.assertEqual('description has a minimum character requirement'
' of 1.', exc.explanation)
except exception.InvalidInput:
self.fail('Should have raised BadRequest exception instead of')
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_blank_name(self):
sg = security_group_request_template(name='')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_whitespace_name(self):
sg = security_group_request_template(name=' ')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_blank_description(self):
sg = security_group_request_template(description='')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_whitespace_description(self):
sg = security_group_request_template(description=' ')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_duplicate_name(self):
sg = security_group_request_template()
# FIXME: Stub out _get instead of creating twice
self.controller.create(self.req, {'security_group': sg})
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_no_body(self):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, None)
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_with_no_security_group(self):
body = {'no-securityGroup': None}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body)
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_above_255_characters_name(self):
sg = security_group_request_template(name='1234567890' * 26)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_above_255_characters_description(self):
sg = security_group_request_template(description='1234567890' * 26)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_non_string_name(self):
sg = security_group_request_template(name=12)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_non_string_description(self):
sg = security_group_request_template(description=12)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, {'security_group': sg})
self._assert_no_security_groups_reserved(
self.req.environ['nova.context'])
def test_create_security_group_quota_limit(self):
for num in range(1, CONF.quota_security_groups):
name = 'test%s' % num
sg = security_group_request_template(name=name)
res_dict = self.controller.create(self.req, {'security_group': sg})
self.assertEqual(res_dict['security_group']['name'], name)
sg = security_group_request_template()
self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
self.req, {'security_group': sg})
def test_get_security_group_list(self):
groups = []
for i, name in enumerate(['default', 'test']):
sg = security_group_template(id=i + 1,
name=name,
description=name + '-desc',
rules=[])
groups.append(sg)
expected = {'security_groups': groups}
def return_security_groups(context, project_id):
return [security_group_db(sg) for sg in groups]
self.stubs.Set(nova.db, 'security_group_get_by_project',
return_security_groups)
res_dict = self.controller.index(self.req)
self.assertEqual(res_dict, expected)
def test_get_security_group_list_missing_group_id_rule(self):
groups = []
rule1 = security_group_rule_template(cidr='10.2.3.124/24',
parent_group_id=1,
group_id={}, id=88,
protocol='TCP')
rule2 = security_group_rule_template(cidr='10.2.3.125/24',
parent_group_id=1,
id=99, protocol=88,
group_id='HAS_BEEN_DELETED')
sg = security_group_template(id=1,
name='test',
description='test-desc',
rules=[rule1, rule2])
groups.append(sg)
# An expected rule here needs to be created as the api returns
# different attributes on the rule for a response than what was
# passed in. For example:
# "cidr": "0.0.0.0/0" ->"ip_range": {"cidr": "0.0.0.0/0"}
expected_rule = security_group_rule_template(
ip_range={'cidr': '10.2.3.124/24'}, parent_group_id=1,
group={}, id=88, ip_protocol='TCP')
expected = security_group_template(id=1,
name='test',
description='test-desc',
rules=[expected_rule])
expected = {'security_groups': [expected]}
def return_security_groups(context, project, search_opts):
return [security_group_db(sg) for sg in groups]
self.stubs.Set(self.controller.security_group_api, 'list',
return_security_groups)
res_dict = self.controller.index(self.req)
self.assertEqual(res_dict, expected)
def test_get_security_group_list_all_tenants(self):
all_groups = []
tenant_groups = []
for i, name in enumerate(['default', 'test']):
sg = security_group_template(id=i + 1,
name=name,
description=name + '-desc',
rules=[])
all_groups.append(sg)
if name == 'default':
tenant_groups.append(sg)
all = {'security_groups': all_groups}
tenant_specific = {'security_groups': tenant_groups}
def return_all_security_groups(context):
return [security_group_db(sg) for sg in all_groups]
self.stubs.Set(nova.db, 'security_group_get_all',
return_all_security_groups)
def return_tenant_security_groups(context, project_id):
return [security_group_db(sg) for sg in tenant_groups]
self.stubs.Set(nova.db, 'security_group_get_by_project',
return_tenant_security_groups)
path = '/v2/fake/os-security-groups'
req = fakes.HTTPRequest.blank(path, use_admin_context=True)
res_dict = self.controller.index(req)
self.assertEqual(res_dict, tenant_specific)
req = fakes.HTTPRequest.blank('%s?all_tenants=1' % path,
use_admin_context=True)
res_dict = self.controller.index(req)
self.assertEqual(res_dict, all)
def test_get_security_group_by_instance(self):
groups = []
for i, name in enumerate(['default', 'test']):
sg = security_group_template(id=i + 1,
name=name,
description=name + '-desc',
rules=[])
groups.append(sg)
expected = {'security_groups': groups}
def return_instance(context, server_id,
columns_to_join=None, use_slave=False):
self.assertEqual(server_id, FAKE_UUID1)
return return_server_by_uuid(context, server_id)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_instance)
def return_security_groups(context, instance_uuid):
self.assertEqual(instance_uuid, FAKE_UUID1)
return [security_group_db(sg) for sg in groups]
self.stubs.Set(nova.db, 'security_group_get_by_instance',
return_security_groups)
res_dict = self.server_controller.index(self.req, FAKE_UUID1)
self.assertEqual(res_dict, expected)
@mock.patch('nova.db.instance_get_by_uuid')
@mock.patch('nova.db.security_group_get_by_instance', return_value=[])
def test_get_security_group_empty_for_instance(self, mock_sec_group,
mock_db_get_ins):
expected = {'security_groups': []}
def return_instance(context, server_id,
columns_to_join=None, use_slave=False):
self.assertEqual(server_id, FAKE_UUID1)
return return_server_by_uuid(context, server_id)
mock_db_get_ins.side_effect = return_instance
res_dict = self.server_controller.index(self.req, FAKE_UUID1)
self.assertEqual(expected, res_dict)
mock_sec_group.assert_called_once_with(
self.req.environ['nova.context'], FAKE_UUID1)
def test_get_security_group_by_instance_non_existing(self):
self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_nonexistent)
self.assertRaises(webob.exc.HTTPNotFound,
self.server_controller.index, self.req, '1')
def test_get_security_group_by_instance_invalid_id(self):
self.assertRaises(webob.exc.HTTPNotFound,
self.server_controller.index, self.req, 'invalid')
def test_get_security_group_by_id(self):
sg = security_group_template(id=2, rules=[])
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
res_dict = self.controller.show(self.req, '2')
expected = {'security_group': sg}
self.assertEqual(res_dict, expected)
def test_get_security_group_by_invalid_id(self):
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
self.req, 'invalid')
def test_get_security_group_by_non_existing_id(self):
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
self.req, self.fake_id)
def test_update_security_group(self):
sg = security_group_template(id=2, rules=[])
sg_update = security_group_template(id=2, rules=[],
name='update_name', description='update_desc')
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
def return_update_security_group(context, group_id, values,
columns_to_join=None):
self.assertEqual(sg_update['id'], group_id)
self.assertEqual(sg_update['name'], values['name'])
self.assertEqual(sg_update['description'], values['description'])
return security_group_db(sg_update)
self.stubs.Set(nova.db, 'security_group_update',
return_update_security_group)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
res_dict = self.controller.update(self.req, '2',
{'security_group': sg_update})
expected = {'security_group': sg_update}
self.assertEqual(res_dict, expected)
def test_update_security_group_name_to_default(self):
sg = security_group_template(id=2, rules=[], name='default')
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, '2', {'security_group': sg})
def test_update_default_security_group_fail(self):
sg = security_group_template()
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req, '1', {'security_group': sg})
def test_delete_security_group_by_id(self):
sg = security_group_template(id=1, project_id='fake_project',
user_id='fake_user', rules=[])
self.called = False
def security_group_destroy(context, id):
self.called = True
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_destroy',
security_group_destroy)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
self.controller.delete(self.req, '1')
self.assertTrue(self.called)
def test_delete_security_group_by_admin(self):
sg = security_group_request_template()
self.controller.create(self.req, {'security_group': sg})
context = self.req.environ['nova.context']
# Ensure quota usage for security group is correct.
self._assert_security_groups_in_use(context.project_id,
context.user_id, 2)
# Delete the security group by admin.
self.controller.delete(self.admin_req, '2')
# Ensure quota for security group in use is released.
self._assert_security_groups_in_use(context.project_id,
context.user_id, 1)
def test_delete_security_group_by_invalid_id(self):
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
self.req, 'invalid')
def test_delete_security_group_by_non_existing_id(self):
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
self.req, self.fake_id)
def test_delete_security_group_in_use(self):
sg = security_group_template(id=1, rules=[])
def security_group_in_use(context, id):
return True
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_in_use',
security_group_in_use)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
self.req, '1')
def test_associate_by_non_existing_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.assertEqual(return_server(None, '1'),
nova.db.instance_get(None, '1'))
body = dict(addSecurityGroup=dict(name='non-existing'))
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._addSecurityGroup, self.req, '1', body)
def test_associate_by_invalid_server_id(self):
body = dict(addSecurityGroup=dict(name='test'))
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._addSecurityGroup, self.req,
'invalid', body)
def test_associate_without_body(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(addSecurityGroup=None)
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, self.req, '1', body)
def test_associate_no_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(addSecurityGroup=dict())
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, self.req, '1', body)
def test_associate_security_group_name_with_whitespaces(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(addSecurityGroup=dict(name=" "))
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, self.req, '1', body)
def test_associate_non_existing_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_nonexistent)
body = dict(addSecurityGroup=dict(name="test"))
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._addSecurityGroup, self.req, '1', body)
def test_associate_non_running_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_non_running_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_non_running_server)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_without_instances)
body = dict(addSecurityGroup=dict(name="test"))
self.manager._addSecurityGroup(self.req, '1', body)
def test_associate_already_associated_security_group_to_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_by_uuid)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(addSecurityGroup=dict(name="test"))
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, self.req,
'1', body)
def test_associate(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_by_uuid)
self.mox.StubOutWithMock(nova.db, 'instance_add_security_group')
nova.db.instance_add_security_group(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_without_instances)
self.mox.ReplayAll()
body = dict(addSecurityGroup=dict(name="test"))
self.manager._addSecurityGroup(self.req, '1', body)
def test_disassociate_by_non_existing_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.assertEqual(return_server(None, '1'),
nova.db.instance_get(None, '1'))
body = dict(removeSecurityGroup=dict(name='non-existing'))
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup, self.req,
'1', body)
def test_disassociate_by_invalid_server_id(self):
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(removeSecurityGroup=dict(name='test'))
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup, self.req,
'invalid', body)
def test_disassociate_without_body(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(removeSecurityGroup=None)
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, self.req,
'1', body)
def test_disassociate_no_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(removeSecurityGroup=dict())
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, self.req,
'1', body)
def test_disassociate_security_group_name_with_whitespaces(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(removeSecurityGroup=dict(name=" "))
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, self.req,
'1', body)
def test_disassociate_non_existing_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(removeSecurityGroup=dict(name="test"))
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup,
self.req, '1', body)
    def test_disassociate_non_running_instance(self):
        """Removal is allowed even when the instance is not running."""
        self.stubs.Set(nova.db, 'instance_get', return_non_running_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_non_running_server)
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        body = dict(removeSecurityGroup=dict(name="test"))
        # No exception expected: the call must simply succeed.
        self.manager._removeSecurityGroup(self.req, '1', body)
    def test_disassociate_already_associated_security_group_to_instance(self):
        """Removing a group the instance is not in yields 400."""
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_by_uuid)
        # Group lookup returns a group with no instances attached, so the
        # disassociate request cannot apply.
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_without_instances)
        body = dict(removeSecurityGroup=dict(name="test"))
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.manager._removeSecurityGroup, self.req,
                          '1', body)
    def test_disassociate(self):
        """Happy path: the group is removed via instance_remove_security_group.

        NOTE: the mox expectation must be recorded before ReplayAll(); the
        verification is order-sensitive.
        """
        self.stubs.Set(nova.db, 'instance_get', return_server)
        self.stubs.Set(nova.db, 'instance_get_by_uuid',
                       return_server_by_uuid)
        self.mox.StubOutWithMock(nova.db, 'instance_remove_security_group')
        nova.db.instance_remove_security_group(mox.IgnoreArg(),
                                               mox.IgnoreArg(),
                                               mox.IgnoreArg())
        self.stubs.Set(nova.db, 'security_group_get_by_name',
                       return_security_group_by_name)
        self.mox.ReplayAll()
        body = dict(removeSecurityGroup=dict(name="test"))
        self.manager._removeSecurityGroup(self.req, '1', body)
class TestSecurityGroupsV2(TestSecurityGroupsV21):
    """Re-run the v2.1 security-group tests against the legacy v2 controllers."""
    secgrp_ctl_cls = secgroups_v2.SecurityGroupController
    server_secgrp_ctl_cls = secgroups_v2.ServerSecurityGroupController
    secgrp_act_ctl_cls = secgroups_v2.SecurityGroupActionController
class TestSecurityGroupRulesV21(test.TestCase):
    """Tests for the v2.1 security-group *rules* controller.

    setUp builds two security groups (sg1, sg2) whose id type depends on
    whether the backing API uses UUID ids, stubs nova.db.security_group_get
    to serve them, and designates sg2 as the parent group for rule creation.
    """
    secgrp_ctl_cls = secgroups_v21.SecurityGroupRulesController
    def setUp(self):
        super(TestSecurityGroupRulesV21, self).setUp()
        self.controller = self.secgrp_ctl_cls()
        # Neutron-backed API uses UUID group ids; nova-network uses ints.
        if self.controller.security_group_api.id_is_uuid:
            id1 = '11111111-1111-1111-1111-111111111111'
            id2 = '22222222-2222-2222-2222-222222222222'
            self.invalid_id = '33333333-3333-3333-3333-333333333333'
        else:
            id1 = 1
            id2 = 2
            self.invalid_id = '33333333'
        self.sg1 = security_group_template(id=id1)
        self.sg2 = security_group_template(
            id=id2, name='authorize_revoke',
            description='authorize-revoke testing')
        db1 = security_group_db(self.sg1)
        db2 = security_group_db(self.sg2)
        # Serve exactly the two canned groups; anything else is NotFound.
        def return_security_group(context, group_id, columns_to_join=None):
            if group_id == db1['id']:
                return db1
            if group_id == db2['id']:
                return db2
            raise exception.SecurityGroupNotFound(security_group_id=group_id)
        self.stubs.Set(nova.db, 'security_group_get',
                       return_security_group)
        self.parent_security_group = db2
        self.req = fakes.HTTPRequest.blank('')
    def test_create_by_cidr(self):
        rule = security_group_rule_template(cidr='10.2.3.124/24',
                                            parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg2['id'])
        self.assertEqual(security_group_rule['ip_range']['cidr'],
                         "10.2.3.124/24")
    def test_create_by_group_id(self):
        rule = security_group_rule_template(group_id=self.sg1['id'],
                                            parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg2['id'])
    def test_create_by_same_group_id(self):
        rule1 = security_group_rule_template(group_id=self.sg1['id'],
                                             from_port=80, to_port=80,
                                             parent_group_id=self.sg2['id'])
        self.parent_security_group['rules'] = [security_group_rule_db(rule1)]
        rule2 = security_group_rule_template(group_id=self.sg1['id'],
                                             from_port=81, to_port=81,
                                             parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule2})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg2['id'])
        self.assertEqual(security_group_rule['from_port'], 81)
        self.assertEqual(security_group_rule['to_port'], 81)
    def test_create_none_value_from_to_port(self):
        rule = {'parent_group_id': self.sg1['id'],
                'group_id': self.sg1['id']}
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertIsNone(security_group_rule['from_port'])
        self.assertIsNone(security_group_rule['to_port'])
        self.assertEqual(security_group_rule['group']['name'], 'test')
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg1['id'])
    def test_create_none_value_from_to_port_icmp(self):
        # With no ports given, icmp defaults to the wildcard -1/-1 range.
        rule = {'parent_group_id': self.sg1['id'],
                'group_id': self.sg1['id'],
                'ip_protocol': 'ICMP'}
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertEqual(security_group_rule['ip_protocol'], 'ICMP')
        self.assertEqual(security_group_rule['from_port'], -1)
        self.assertEqual(security_group_rule['to_port'], -1)
        self.assertEqual(security_group_rule['group']['name'], 'test')
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg1['id'])
    def test_create_none_value_from_to_port_tcp(self):
        # With no ports given, tcp defaults to the full 1-65535 range.
        rule = {'parent_group_id': self.sg1['id'],
                'group_id': self.sg1['id'],
                'ip_protocol': 'TCP'}
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertEqual(security_group_rule['ip_protocol'], 'TCP')
        self.assertEqual(security_group_rule['from_port'], 1)
        self.assertEqual(security_group_rule['to_port'], 65535)
        self.assertEqual(security_group_rule['group']['name'], 'test')
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg1['id'])
    def test_create_by_invalid_cidr_json(self):
        rule = security_group_rule_template(
                ip_protocol="tcp",
                from_port=22,
                to_port=22,
                parent_group_id=self.sg2['id'],
                cidr="10.2.3.124/2433")
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_by_invalid_tcp_port_json(self):
        rule = security_group_rule_template(
                ip_protocol="tcp",
                from_port=75534,
                to_port=22,
                parent_group_id=self.sg2['id'],
                cidr="10.2.3.124/24")
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_by_invalid_icmp_port_json(self):
        rule = security_group_rule_template(
                ip_protocol="icmp",
                from_port=1,
                to_port=256,
                parent_group_id=self.sg2['id'],
                cidr="10.2.3.124/24")
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_add_existing_rules_by_cidr(self):
        rule = security_group_rule_template(cidr='10.0.0.0/24',
                                            parent_group_id=self.sg2['id'])
        self.parent_security_group['rules'] = [security_group_rule_db(rule)]
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_add_existing_rules_by_group_id(self):
        rule = security_group_rule_template(group_id=1)
        self.parent_security_group['rules'] = [security_group_rule_db(rule)]
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_no_body(self):
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, None)
    def test_create_with_no_security_group_rule_in_body(self):
        rules = {'test': 'test'}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, rules)
    def test_create_with_invalid_parent_group_id(self):
        rule = security_group_rule_template(parent_group_id='invalid')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_non_existing_parent_group_id(self):
        rule = security_group_rule_template(group_id=None,
                                            parent_group_id=self.invalid_id)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_non_existing_group_id(self):
        rule = security_group_rule_template(group_id='invalid',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_invalid_protocol(self):
        rule = security_group_rule_template(ip_protocol='invalid-protocol',
                                            cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_no_protocol(self):
        rule = security_group_rule_template(cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        del rule['ip_protocol']
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_invalid_from_port(self):
        rule = security_group_rule_template(from_port='666666',
                                            cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_invalid_to_port(self):
        rule = security_group_rule_template(to_port='666666',
                                            cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_non_numerical_from_port(self):
        rule = security_group_rule_template(from_port='invalid',
                                            cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_non_numerical_to_port(self):
        rule = security_group_rule_template(to_port='invalid',
                                            cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_no_from_port(self):
        rule = security_group_rule_template(cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        del rule['from_port']
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_no_to_port(self):
        rule = security_group_rule_template(cidr='10.2.2.0/24',
                                            parent_group_id=self.sg2['id'])
        del rule['to_port']
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_invalid_cidr(self):
        rule = security_group_rule_template(cidr='10.2.2222.0/24',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_no_cidr_group(self):
        # Neither cidr nor group: the rule defaults to the 0.0.0.0/0 range.
        rule = security_group_rule_template(parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.parent_security_group['id'])
        self.assertEqual(security_group_rule['ip_range']['cidr'],
                         "0.0.0.0/0")
    def test_create_with_invalid_group_id(self):
        rule = security_group_rule_template(group_id='invalid',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_empty_group_id(self):
        rule = security_group_rule_template(group_id='',
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_nonexist_group_id(self):
        rule = security_group_rule_template(group_id=self.invalid_id,
                                            parent_group_id=self.sg2['id'])
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_with_same_group_parent_id_and_group_id(self):
        rule = security_group_rule_template(group_id=self.sg1['id'],
                                            parent_group_id=self.sg1['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.sg1['id'])
        self.assertEqual(security_group_rule['group']['name'],
                         self.sg1['name'])
    def _test_create_with_no_ports_and_no_group(self, proto):
        # Without a source group, omitting the ports is invalid.
        rule = {'ip_protocol': proto, 'parent_group_id': self.sg2['id']}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def _test_create_with_no_ports(self, proto):
        rule = {'ip_protocol': proto, 'parent_group_id': self.sg2['id'],
                'group_id': self.sg1['id']}
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        expected_rule = {
            'from_port': 1, 'group': {'tenant_id': '123', 'name': 'test'},
            'ip_protocol': proto, 'to_port': 65535, 'parent_group_id':
             self.sg2['id'], 'ip_range': {}, 'id': security_group_rule['id']
        }
        if proto == 'icmp':
            expected_rule['to_port'] = -1
            expected_rule['from_port'] = -1
        self.assertEqual(expected_rule, security_group_rule)
    def test_create_with_no_ports_icmp(self):
        self._test_create_with_no_ports_and_no_group('icmp')
        self._test_create_with_no_ports('icmp')
    def test_create_with_no_ports_tcp(self):
        self._test_create_with_no_ports_and_no_group('tcp')
        self._test_create_with_no_ports('tcp')
    def test_create_with_no_ports_udp(self):
        self._test_create_with_no_ports_and_no_group('udp')
        self._test_create_with_no_ports('udp')
    def _test_create_with_ports(self, proto, from_port, to_port):
        rule = {
            'ip_protocol': proto, 'from_port': from_port, 'to_port': to_port,
            'parent_group_id': self.sg2['id'], 'group_id': self.sg1['id']
        }
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        expected_rule = {
            'from_port': from_port,
            'group': {'tenant_id': '123', 'name': 'test'},
            'ip_protocol': proto, 'to_port': to_port, 'parent_group_id':
             self.sg2['id'], 'ip_range': {}, 'id': security_group_rule['id']
        }
        self.assertEqual(proto, security_group_rule['ip_protocol'])
        self.assertEqual(from_port, security_group_rule['from_port'])
        self.assertEqual(to_port, security_group_rule['to_port'])
        self.assertEqual(expected_rule, security_group_rule)
    def test_create_with_ports_icmp(self):
        self._test_create_with_ports('icmp', 0, 1)
        self._test_create_with_ports('icmp', 0, 0)
        self._test_create_with_ports('icmp', 1, 0)
    def test_create_with_ports_tcp(self):
        self._test_create_with_ports('tcp', 1, 1)
        self._test_create_with_ports('tcp', 1, 65535)
        self._test_create_with_ports('tcp', 65535, 65535)
    def test_create_with_ports_udp(self):
        self._test_create_with_ports('udp', 1, 1)
        self._test_create_with_ports('udp', 1, 65535)
        self._test_create_with_ports('udp', 65535, 65535)
    def test_delete(self):
        rule = security_group_rule_template(id=self.sg2['id'],
                                            parent_group_id=self.sg2['id'])
        def security_group_rule_get(context, id):
            return security_group_rule_db(rule)
        def security_group_rule_destroy(context, id):
            pass
        self.stubs.Set(nova.db, 'security_group_rule_get',
                       security_group_rule_get)
        self.stubs.Set(nova.db, 'security_group_rule_destroy',
                       security_group_rule_destroy)
        self.controller.delete(self.req, self.sg2['id'])
    def test_delete_invalid_rule_id(self):
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
                          self.req, 'invalid')
    def test_delete_non_existing_rule_id(self):
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          self.req, self.invalid_id)
    def test_create_rule_quota_limit(self):
        # Fill the quota with distinct single-port rules, then the next
        # create must be rejected with 403.
        for num in range(100, 100 + CONF.quota_security_group_rules):
            rule = {
                'ip_protocol': 'tcp', 'from_port': num,
                'to_port': num, 'parent_group_id': self.sg2['id'],
                'group_id': self.sg1['id']
            }
            self.controller.create(self.req, {'security_group_rule': rule})
        rule = {
            'ip_protocol': 'tcp', 'from_port': '121', 'to_port': '121',
            'parent_group_id': self.sg2['id'], 'group_id': self.sg1['id']
        }
        self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
                          self.req, {'security_group_rule': rule})
    def test_create_rule_cidr_allow_all(self):
        rule = security_group_rule_template(cidr='0.0.0.0/0',
                                            parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.parent_security_group['id'])
        self.assertEqual(security_group_rule['ip_range']['cidr'],
                         "0.0.0.0/0")
    def test_create_rule_cidr_ipv6_allow_all(self):
        rule = security_group_rule_template(cidr='::/0',
                                            parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.parent_security_group['id'])
        self.assertEqual(security_group_rule['ip_range']['cidr'],
                         "::/0")
    def test_create_rule_cidr_allow_some(self):
        rule = security_group_rule_template(cidr='15.0.0.0/8',
                                            parent_group_id=self.sg2['id'])
        res_dict = self.controller.create(self.req,
                                          {'security_group_rule': rule})
        security_group_rule = res_dict['security_group_rule']
        self.assertNotEqual(security_group_rule['id'], 0)
        self.assertEqual(security_group_rule['parent_group_id'],
                         self.parent_security_group['id'])
        self.assertEqual(security_group_rule['ip_range']['cidr'],
                         "15.0.0.0/8")
    def test_create_rule_cidr_bad_netmask(self):
        rule = security_group_rule_template(cidr='15.0.0.0/0')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, {'security_group_rule': rule})
class TestSecurityGroupRulesV2(TestSecurityGroupRulesV21):
    """Re-run the v2.1 rules tests against the legacy v2 controller."""
    secgrp_ctl_cls = secgroups_v2.SecurityGroupRulesController
# Canned instance UUIDs shared by the fake compute stubs below.
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
UUID3 = '00000000-0000-0000-0000-000000000003'
def fake_compute_get_all(*args, **kwargs):
    """Stand-in for compute API get_all(): two instances, two groups each."""
    base = {'id': 1, 'description': 'foo', 'user_id': 'bar',
            'project_id': 'baz', 'deleted': False, 'deleted_at': None,
            'updated_at': None, 'created_at': None}
    inst_list = [
        fakes.stub_instance_obj(
            None, 1, uuid=UUID1,
            security_groups=[dict(base, **{'name': 'fake-0-0'}),
                             dict(base, **{'name': 'fake-0-1'})]),
        fakes.stub_instance_obj(
            None, 2, uuid=UUID2,
            security_groups=[dict(base, **{'name': 'fake-1-0'}),
                             dict(base, **{'name': 'fake-1-1'})])
    ]
    return objects.InstanceList(objects=inst_list)
def fake_compute_get(*args, **kwargs):
    """Stand-in for compute API get(): one instance (UUID3) with two groups."""
    secgroups = objects.SecurityGroupList()
    secgroups.objects = [
        objects.SecurityGroup(name='fake-2-0'),
        objects.SecurityGroup(name='fake-2-1'),
    ]
    inst = fakes.stub_instance_obj(None, 1, uuid=UUID3)
    inst.security_groups = secgroups
    return inst
def fake_compute_create(*args, **kwargs):
    # Mimics compute API create(): (list of instances, reservation id).
    return ([fake_compute_get(*args, **kwargs)], '')
def fake_get_instances_security_groups_bindings(inst, context, servers):
    """Map each requested server id to its canned security-group bindings."""
    bindings = {UUID1: [{'name': 'fake-0-0'}, {'name': 'fake-0-1'}],
                UUID2: [{'name': 'fake-1-0'}, {'name': 'fake-1-1'}],
                UUID3: [{'name': 'fake-2-0'}, {'name': 'fake-2-1'}]}
    # Unknown server ids map to None, exactly like dict.get.
    return {server['id']: bindings.get(server['id']) for server in servers}
class SecurityGroupsOutputTestV21(test.TestCase):
    """Verify server responses include the security_groups attribute."""
    base_url = '/v2/fake/servers'
    content_type = 'application/json'
    def setUp(self):
        super(SecurityGroupsOutputTestV21, self).setUp()
        fakes.stub_out_nw_api(self.stubs)
        # Route all compute API calls to the canned fakes above.
        self.stubs.Set(compute.api.API, 'get', fake_compute_get)
        self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all)
        self.stubs.Set(compute.api.API, 'create', fake_compute_create)
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Security_groups'])
        self.app = self._setup_app()
    def _setup_app(self):
        # Overridden by the v2 subclass to build the legacy WSGI app.
        return fakes.wsgi_app_v21(init_only=('os-security-groups', 'servers'))
    def _make_request(self, url, body=None):
        """Issue a GET (or POST when body is given) and return the response."""
        req = webob.Request.blank(url)
        if body:
            req.method = 'POST'
            req.body = self._encode_body(body)
        req.content_type = self.content_type
        req.headers['Accept'] = self.content_type
        res = req.get_response(self.app)
        return res
    def _encode_body(self, body):
        return jsonutils.dumps(body)
    def _get_server(self, body):
        return jsonutils.loads(body).get('server')
    def _get_servers(self, body):
        return jsonutils.loads(body).get('servers')
    def _get_groups(self, server):
        return server.get('security_groups')
    def test_create(self):
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
        res = self._make_request(self.base_url, {'server': server})
        self.assertEqual(res.status_int, 202)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)
    def test_show(self):
        url = self.base_url + '/' + UUID3
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        server = self._get_server(res.body)
        for i, group in enumerate(self._get_groups(server)):
            name = 'fake-2-%s' % i
            self.assertEqual(group.get('name'), name)
    def test_detail(self):
        url = self.base_url + '/detail'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        for i, server in enumerate(self._get_servers(res.body)):
            for j, group in enumerate(self._get_groups(server)):
                name = 'fake-%s-%s' % (i, j)
                self.assertEqual(group.get('name'), name)
    def test_no_instance_passthrough_404(self):
        # InstanceNotFound from the compute layer must surface as HTTP 404.
        def fake_compute_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        self.stubs.Set(compute.api.API, 'get', fake_compute_get)
        url = self.base_url + '/70f6db34-de8d-4fbd-aafb-4065bdfa6115'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 404)
class SecurityGroupsOutputTestV2(SecurityGroupsOutputTestV21):
    """Same output checks against the legacy v2 WSGI application."""
    def _setup_app(self):
        return fakes.wsgi_app(init_only=('servers',))
class SecurityGroupsOutputPolicyEnforcementV21(test.NoDBTestCase):
    """Policy checks for the output controller.

    The policy rule is set so the request context never matches; the output
    extension is expected to tolerate that, so these calls must simply not
    raise.
    """
    def setUp(self):
        super(SecurityGroupsOutputPolicyEnforcementV21, self).setUp()
        self.controller = secgroups_v21.SecurityGroupsOutputController()
        self.req = fakes.HTTPRequest.blank('')
        self.rule_name = "os_compute_api:os-security-groups"
        self.rule = {self.rule_name: "project:non_fake"}
        self.policy.set_rules(self.rule)
    def test_show_policy_failed(self):
        self.controller.show(self.req, None, FAKE_UUID1)
    def test_create_policy_failed(self):
        self.controller.create(self.req, None, {})
    def test_detail_policy_failed(self):
        self.controller.detail(self.req, None)
class PolicyEnforcementV21(test.NoDBTestCase):
    """Shared base for policy-rejection tests of the v2.1 controllers."""
    def setUp(self):
        super(PolicyEnforcementV21, self).setUp()
        self.req = fakes.HTTPRequest.blank('')
        self.rule_name = "os_compute_api:os-security-groups"
        # A rule the fake request context can never satisfy.
        self.rule = {self.rule_name: "project:non_fake"}
    def _common_policy_check(self, func, *arg, **kwarg):
        """Assert that calling func raises PolicyNotAuthorized for our rule."""
        self.policy.set_rules(self.rule)
        exc = self.assertRaises(
            exception.PolicyNotAuthorized, func, *arg, **kwarg)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % self.rule_name,
            exc.format_message())
class SecurityGroupPolicyEnforcementV21(PolicyEnforcementV21):
    """Every SecurityGroupController action must honor the policy rule."""
    def setUp(self):
        super(SecurityGroupPolicyEnforcementV21, self).setUp()
        self.controller = secgroups_v21.SecurityGroupController()
    def test_create_policy_failed(self):
        self._common_policy_check(self.controller.create, self.req, {})
    def test_show_policy_failed(self):
        self._common_policy_check(self.controller.show, self.req, FAKE_UUID1)
    def test_delete_policy_failed(self):
        self._common_policy_check(self.controller.delete, self.req, FAKE_UUID1)
    def test_index_policy_failed(self):
        self._common_policy_check(self.controller.index, self.req)
    def test_update_policy_failed(self):
        self._common_policy_check(
            self.controller.update, self.req, FAKE_UUID1, {})
class ServerSecurityGroupPolicyEnforcementV21(PolicyEnforcementV21):
    """ServerSecurityGroupController.index must honor the policy rule."""
    def setUp(self):
        super(ServerSecurityGroupPolicyEnforcementV21, self).setUp()
        self.controller = secgroups_v21.ServerSecurityGroupController()
    def test_index_policy_failed(self):
        self._common_policy_check(self.controller.index, self.req, FAKE_UUID1)
class SecurityGroupRulesPolicyEnforcementV21(PolicyEnforcementV21):
    """SecurityGroupRulesController actions must honor the policy rule."""
    def setUp(self):
        super(SecurityGroupRulesPolicyEnforcementV21, self).setUp()
        self.controller = secgroups_v21.SecurityGroupRulesController()
    def test_create_policy_failed(self):
        self._common_policy_check(self.controller.create, self.req, {})
    def test_delete_policy_failed(self):
        self._common_policy_check(self.controller.delete, self.req, FAKE_UUID1)
class SecurityGroupActionPolicyEnforcementV21(PolicyEnforcementV21):
    """Add/remove server actions must honor the policy rule."""
    def setUp(self):
        super(SecurityGroupActionPolicyEnforcementV21, self).setUp()
        self.controller = secgroups_v21.SecurityGroupActionController()
    def test_add_security_group_policy_failed(self):
        self._common_policy_check(
            self.controller._addSecurityGroup, self.req, FAKE_UUID1, {})
    def test_remove_security_group_policy_failed(self):
        self._common_policy_check(
            self.controller._removeSecurityGroup, self.req, FAKE_UUID1, {})
|
firebitsbr/infernal-twin | refs/heads/master | build/reportlab/build/lib.linux-i686-2.7/reportlab/pdfbase/pdfdoc.py | 24 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfbase/pdfdoc.py
__version__=''' $Id$ '''
__doc__="""
The module pdfdoc.py handles the 'outer structure' of PDF documents, ensuring that
all objects are properly cross-referenced and indexed to the nearest byte. The
'inner structure' - the page descriptions - are presumed to be generated before
each page is saved.
pdfgen.py calls this and provides a 'canvas' object to handle page marking operators.
piddlePDF calls pdfgen and offers a high-level interface.
The classes within this generally mirror structures in the PDF file
and are not part of any public interface. Instead, canvas and font
classes are made available elsewhere for users to manipulate.
"""
import types, binascii, codecs
from collections import OrderedDict
from reportlab.pdfbase import pdfutils
from reportlab import rl_config
from reportlab.lib.utils import import_zlib, open_for_read, makeFileName, isSeq, isBytes, isUnicode, _digester, isStr, bytestr, isPy3, annotateException
from reportlab.lib.rl_accel import escapePDF, fp_str, asciiBase85Encode, asciiBase85Decode
from reportlab.pdfbase import pdfmetrics
from hashlib import md5
from sys import platform
from sys import version_info
from sys import stderr
if platform[:4] == 'java' and version_info[:2] == (2, 1):
    # workaround for list()-bug in Jython 2.1 (should be fixed in 2.2)
    # BUG FIX: the previous body was ``return list(map(f, sequence))`` --
    # but inside this definition the name ``list`` is this very function,
    # so any call recursed forever.  Build the list with a comprehension,
    # which does not reference the rebound name at all.
    def list(sequence):
        """Return a new list containing the items of *sequence*."""
        return [item for item in sequence]
class PDFError(Exception):
    """Generic error raised by the pdfdoc machinery."""
    pass
# Marker attribute names used by the formatting machinery below.
# __InternalName__ is a special attribute that can only be set by the Document arbitrator
__InternalName__ = "__InternalName__"
# __RefOnly__ marks reference only elements that must be formatted on top level
__RefOnly__ = "__RefOnly__"
# __Comment__ provides a (one line) comment to inline with an object ref, if present
# if it is more than one line then percentize it...
__Comment__ = "__Comment__"
# name for standard font dictionary
BasicFonts = "BasicFonts"
# name for the pages object
Pages = "Pages"
# Default PDF specification version emitted, as a (major, minor) tuple.
PDF_VERSION_DEFAULT = (1, 3)
PDF_SUPPORT_VERSION = dict( #map keyword to min version that supports it
    transparency = (1, 4),
    )
if isPy3:
    def pdfdocEnc(x):
        # Python 3: encode str through the 'extpdfdoc' codec; bytes pass through.
        return x.encode('extpdfdoc') if isinstance(x,str) else x
else:
    def pdfdocEnc(x):
        # Python 2: only unicode needs encoding; byte strings pass through.
        return x.encode('extpdfdoc') if isinstance(x,unicode) else x
# NOTE: intentionally shadows the builtin format() within this module.
def format(element, document, toplevel=0):
    """Indirection step for formatting.
    Ensures that document parameters alter behaviour
    of formatting for all elements.
    """
    if isinstance(element,PDFObject):
        if not toplevel and hasattr(element, __RefOnly__):
            # the object cannot be a component at non top level.
            # make a reference to it and return it's format
            return document.Reference(element).format(document)
        else:
            f = element.format(document)
            # optionally prefix the serialized object with its one-line comment
            if not rl_config.invariant and rl_config.pdfComments and hasattr(element, __Comment__):
                f = pdfdocEnc("%% %s\r\n" % element.__Comment__)+f
            return f
    elif type(element) in (float, int):
        #use a controlled number formatting routine
        #instead of str, so Jython/Python etc do not differ
        return pdfdocEnc(fp_str(element))
    elif isBytes(element):
        return element
    elif isUnicode(element):
        return pdfdocEnc(element)
    else:
        return pdfdocEnc(str(element))
def xObjectName(externalname):
    """Build the internal dictionary name for a Form XObject."""
    return "FormXob.{0}".format(externalname)

# backwards compatibility
formName = xObjectName
# no encryption
class NoEncryption:
    """Null encryption policy: passes data through untouched."""
    def encode(self, t):
        "encode a string, stream, text"
        return t
    def prepare(self, document):
        # get ready to do encryption
        pass
    def register(self, objnum, version):
        # enter a new direct object
        pass
    def info(self):
        # the representation of self in file if any (should be None or PDFDict)
        return None
class PDFObject(object):
    """Marker base class: format() dispatches on isinstance of this type."""
    pass
class DummyDoc(PDFObject):
    "used to bypass encryption when required"
    # class-level default: no-op encryption
    encrypt = NoEncryption()
### the global document structure manager
class PDFDocument(PDFObject):
# set this to define filters
defaultStreamFilters = None
encrypt = NoEncryption() # default no encryption
    def __init__(self,
                 dummyoutline=0,
                 compression=rl_config.pageCompression,
                 invariant=rl_config.invariant,
                 filename=None,
                 pdfVersion=PDF_VERSION_DEFAULT,
                 ):
        """Set up counters, the document signature, the catalog/pages/outline
        skeleton and the standard fonts dictionary."""
        self._ID = None
        self.objectcounter = 0
        self.shadingCounter = 0
        self.inObject = None
        self.pageCounter = 1
        # allow None value to be passed in to mean 'give system defaults'
        if invariant is None:
            self.invariant = rl_config.invariant
        else:
            self.invariant = invariant
        self.setCompression(compression)
        self._pdfVersion = pdfVersion
        # signature for creating PDF ID
        sig = self.signature = md5()
        sig.update(b"a reportlab document")
        if not self.invariant:
            cat = _getTimeStamp()
        else:
            # fixed timestamp (2000-01-01) so invariant documents are reproducible
            cat = 946684800.0
        cat = ascii(cat)
        sig.update(bytestr(cat)) # initialize with timestamp digest
        # mapping of internal identifier ("Page001") to PDF objectnumber and generation number (34, 0)
        self.idToObjectNumberAndVersion = {}
        # mapping of internal identifier ("Page001") to PDF object (PDFPage instance)
        self.idToObject = {}
        # internal id to file location
        self.idToOffset = {}
        # number to id
        self.numberToId = {}
        cat = self.Catalog = self._catalog = PDFCatalog()
        pages = self.Pages = PDFPages()
        cat.Pages = pages
        if dummyoutline:
            outlines = PDFOutlines0()
        else:
            outlines = PDFOutlines()
        self.Outlines = self.outline = outlines
        cat.Outlines = outlines
        self.info = PDFInfo()
        self.info.invariant = self.invariant
        #self.Reference(self.Catalog)
        #self.Reference(self.Info)
        self.fontMapping = {}
        #make an empty font dictionary
        DD = PDFDictionary({})
        DD.__Comment__ = "The standard fonts dictionary"
        self.Reference(DD, BasicFonts)
        self.delayedFonts = []
    def setCompression(self, onoff):
        """Enable/disable page stream compression for this document."""
        # XXX: maybe this should also set self.defaultStreamFilters?
        self.compression = onoff
    def ensureMinPdfVersion(self, *keys):
        "Ensure that the pdf version is greater than or equal to that specified by the keys"
        # each key names a feature; PDF_SUPPORT_VERSION maps it to the
        # minimum (major, minor) version that supports it
        for k in keys:
            self._pdfVersion = max(self._pdfVersion, PDF_SUPPORT_VERSION[k])
    def updateSignature(self, thing):
        "add information to the signature"
        if self._ID: return # but not if its used already!
        self.signature.update(bytestr(thing))
    def ID(self):
        "A unique fingerprint for the file (unless in invariant mode)"
        # cached after first computation; updateSignature() refuses to run
        # once this has been generated
        if self._ID:
            return self._ID
        digest = self.signature.digest()
        doc = DummyDoc()
        IDs = PDFString(digest,enc='raw').format(doc)
        self._ID = (b'\r\n % ReportLab generated PDF document -- digest (http://www.reportlab.com)\r\n ['
                +IDs+b' '+IDs+b']\r\n')
        return self._ID
    def SaveToFile(self, filename, canvas):
        """Render the document and write it to *filename*.

        *filename* may be a path or any object with a callable ``write``
        method; only files we opened ourselves are closed afterwards.
        """
        if hasattr(getattr(filename, "write",None),'__call__'):
            myfile = 0
            f = filename
            filename = makeFileName(getattr(filename,'name',''))
        else :
            myfile = 1
            filename = makeFileName(filename)
            f = open(filename, "wb")
        data = self.GetPDFData(canvas)
        if isUnicode(data):
            data = data.encode('latin1')
        f.write(data)
        if myfile:
            f.close()
            import os
            if os.name=='mac':
                from reportlab.lib.utils import markfilename
                markfilename(filename) # do platform specific file junk
        if getattr(canvas,'_verbosity',None): print('saved %s' % (filename,))
    def GetPDFData(self, canvas):
        """Finalize fonts, info and outline, then format the whole PDF file."""
        # realize delayed fonts
        for fnt in self.delayedFonts:
            fnt.addObjects(self)
        # add info stuff to signature
        self.info.invariant = self.invariant
        self.info.digest(self.signature)
        ### later: maybe add more info to sig?
        # prepare outline
        self.Reference(self.Catalog)
        self.Reference(self.info)
        outline = self.outline
        outline.prepare(self, canvas)
        return self.format()
def inPage(self):
"""specify the current object as a page (enables reference binding and other page features)"""
if self.inObject is not None:
if self.inObject=="page": return
raise ValueError("can't go in page already in object %s" % self.inObject)
self.inObject = "page"
def inForm(self):
"""specify that we are in a form xobject (disable page features, etc)"""
# don't need this check anymore since going in a form pushes old context at canvas level.
#if self.inObject not in ["form", None]:
# raise ValueError("can't go in form already in object %s" % self.inObject)
self.inObject = "form"
# don't need to do anything else, I think...
    def getInternalFontName(self, psfontname):
        """Map a postscript font name to its internal name, registering the
        font's PDF objects on first use.

        Raises PDFError for fonts pdfmetrics does not know, and for dynamic
        (subsetted) fonts which cannot be addressed by one internal name.
        """
        fm = self.fontMapping
        if psfontname in fm:
            return fm[psfontname]
        else:
            try:
                # does pdfmetrics know about it? if so, add
                fontObj = pdfmetrics.getFont(psfontname)
                if fontObj._dynamicFont:
                    raise PDFError("getInternalFontName(%s) called for a dynamic font" % repr(psfontname))
                # addObjects is expected to populate self.fontMapping[psfontname]
                fontObj.addObjects(self)
                #self.addFont(fontObj)
                return fm[psfontname]
            except KeyError:
                raise PDFError("Font %s not known!" % repr(psfontname))
def thisPageName(self):
return "Page"+repr(self.pageCounter)
def thisPageRef(self):
return PDFObjectReference(self.thisPageName())
def addPage(self, page):
name = self.thisPageName()
self.Reference(page, name)
self.Pages.addPage(page)
self.pageCounter += 1
self.inObject = None
def addForm(self, name, form):
"""add a Form XObject."""
# XXX should check that name is a legal PDF name
if self.inObject != "form":
self.inForm()
self.Reference(form, xObjectName(name))
self.inObject = None
def annotationName(self, externalname):
return "Annot.%s"%externalname
def addAnnotation(self, name, annotation):
self.Reference(annotation, self.annotationName(name))
def refAnnotation(self, name):
internalname = self.annotationName(name)
return PDFObjectReference(internalname)
def addShading(self, shading):
name = "Sh%d" % self.shadingCounter
self.Reference(shading, name)
self.shadingCounter += 1
return name
    def addColor(self,cmyk):
        """Register a CMYK spot colour as a /Separation colorspace.

        Pure C/M/Y/K primaries get implicit spot names; any other colour
        must carry an explicit spotName.  Returns (internalName, spotName).
        """
        sname = cmyk.spotName
        if not sname:
            # derive a name for the four pure primaries
            if cmyk.cyan==0 and cmyk.magenta==0 and cmyk.yellow==0:
                sname = 'BLACK'
            elif cmyk.black==0 and cmyk.magenta==0 and cmyk.yellow==0:
                sname = 'CYAN'
            elif cmyk.cyan==0 and cmyk.black==0 and cmyk.yellow==0:
                sname = 'MAGENTA'
            elif cmyk.cyan==0 and cmyk.magenta==0 and cmyk.black==0:
                sname = 'YELLOW'
            if not sname:
                raise ValueError("CMYK colour %r used without a spotName" % cmyk)
            else:
                cmyk = cmyk.clone(spotName = sname)
        name = PDFName(sname)[1:]  # strip the leading '/'
        if name not in self.idToObject:
            sep = PDFSeparationCMYKColor(cmyk).value()  #PDFArray([/Separation /name /DeviceCMYK tint_tf])
            self.Reference(sep,name)
        return name,sname
def setTitle(self, title):
"embeds in PDF file"
if title is None:
self.info.title = '(anonymous)'
else:
self.info.title = title
def setAuthor(self, author):
"embedded in PDF file"
#allow resetting to clear it
if author is None:
self.info.author = '(anonymous)'
else:
self.info.author = author
def setSubject(self, subject):
"embeds in PDF file"
#allow resetting to clear it
if subject is None:
self.info.subject = '(unspecified)'
else:
self.info.subject = subject
def setCreator(self, creator):
"embeds in PDF file"
#allow resetting to clear it
if creator is None:
self.info.creator = '(unspecified)'
else:
self.info.creator = creator
def setKeywords(self, keywords):
"embeds a string containing keywords in PDF file"
#allow resetting to clear it but ensure it's a string
if keywords is None:
self.info.keywords = ''
else:
self.info.keywords = keywords
def setDateFormatter(self, dateFormatter):
self.info._dateFormatter = dateFormatter
def getAvailableFonts(self):
fontnames = list(self.fontMapping.keys())
# the standard 14 are also always available! (even if not initialized yet)
from reportlab.pdfbase import _fontdata
for name in _fontdata.standardFonts:
if name not in fontnames:
fontnames.append(name)
fontnames.sort()
return fontnames
    def format(self):
        """Assemble and return the complete PDF file contents.

        Registers the Catalog/Info (and the encryption dictionary, if any),
        then formats registered objects one by one in object-number order
        until exhausted -- new objects may be registered DURING formatting,
        so the loop re-probes numberToId each pass (possible infinite loop if
        a bug continually makes new objects/refs).  Finally appends the xref
        table and trailer.
        """
        # Prepare encryption
        self.encrypt.prepare(self)
        cat = self.Catalog
        info = self.info
        self.Reference(self.Catalog)
        self.Reference(self.info)
        # register the encryption dictionary if present
        encryptref = None
        encryptinfo = self.encrypt.info()
        if encryptinfo:
            encryptref = self.Reference(encryptinfo)
        # make std fonts (this could be made optional
        counter = 0 # start at first object (object 1 after preincrement)
        ids = [] # the collection of object ids in object number order
        numbertoid = self.numberToId
        idToNV = self.idToObjectNumberAndVersion
        idToOb = self.idToObject
        idToOf = self.idToOffset
        ### note that new entries may be "appended" DURING FORMATTING
        done = None
        # __accum__ allows objects to know where they are in the file etc etc
        self.__accum__ = File = PDFFile(self._pdfVersion) # output collector
        while done is None:
            counter += 1 # do next object...
            if counter in numbertoid:
                id = numbertoid[counter]
                #printidToOb
                obj = idToOb[id]
                IO = PDFIndirectObject(id, obj)
                # register object number and version
                #encrypt.register(id,
                IOf = IO.format(self)
                # add a comment to the PDF output
                if not rl_config.invariant and rl_config.pdfComments:
                    try:
                        classname = obj.__class__.__name__
                    except:
                        classname = ascii(obj)
                    File.add("%% %s: class %s \r\n" % (ascii(id), classname[:50]))
                offset = File.add(IOf)
                idToOf[id] = offset
                ids.append(id)
            else:
                done = 1
        del self.__accum__
        # sanity checks (must happen AFTER formatting)
        lno = len(numbertoid)
        if counter-1!=lno:
            raise ValueError("counter %s doesn't match number to id dictionary %s" %(counter, lno))
        # now add the xref
        xref = PDFCrossReferenceTable()
        xref.addsection(0, ids)
        xreff = xref.format(self)
        xrefoffset = File.add(xreff)
        # now add the trailer
        trailer = PDFTrailer(
            startxref = xrefoffset,
            Size = lno+1,
            Root = self.Reference(cat),
            Info = self.Reference(info),
            Encrypt = encryptref,
            ID = self.ID(),
            )
        trailerf = trailer.format(self)
        File.add(trailerf)
        for ds in getattr(self,'_digiSigs',[]):
            ds.sign(File)
        # return string format for pdf file
        return File.format(self)
def hasForm(self, name):
"""test for existence of named form"""
internalname = xObjectName(name)
return internalname in self.idToObject
    def getFormBBox(self, name, boxType="MediaBox"):
        """get the declared bounding box of the form as a list.
        If you specify a different PDF box definition (e.g. the
        ArtBox) and it has one, that's what you'll get.
        Returns None (falls through) when the form is unknown."""
        internalname = xObjectName(name)
        if internalname in self.idToObject:
            theform = self.idToObject[internalname]
            if hasattr(theform,'_extra_pageCatcher_info'):
                # forms captured externally carry their boxes separately
                return theform._extra_pageCatcher_info[boxType]
            if isinstance(theform, PDFFormXObject):
                # internally defined form
                return theform.BBoxList()
            elif isinstance(theform, PDFStream):
                # externally defined form
                return list(theform.dictionary.dict[boxType].sequence)
            else:
                raise ValueError("I don't understand the form instance %s" % repr(name))
def getXObjectName(self, name):
"""Lets canvas find out what form is called internally.
Never mind whether it is defined yet or not."""
return xObjectName(name)
def xobjDict(self, formnames):
"""construct an xobject dict (for inclusion in a resource dict, usually)
from a list of form names (images not yet supported)"""
D = {}
for name in formnames:
internalname = xObjectName(name)
reference = PDFObjectReference(internalname)
D[internalname] = reference
#print "xobjDict D", D
return PDFDictionary(D)
    def Reference(self, obj, name=None):
        """Register `obj` (if needed) and return a PDFObjectReference to it.

        Non-PDFObject values and existing references pass through unchanged
        unless an explicit `name` forces registration.  Re-registering a
        named object under a different name is an error.
        """
        ### note references may "grow" during the final formatting pass: don't use d.keys()!
        # don't make references to other references, or non instances, unless they are named!
        iob = isinstance(obj,PDFObject)
        idToObject = self.idToObject
        if name is None and (not iob or obj.__class__ is PDFObjectReference):
            return obj
        # NOTE(review): __InternalName__ is presumably a module-level constant
        # holding the string "__InternalName__", defined earlier in this file
        # -- confirm; a bare name here would otherwise be a NameError.
        if hasattr(obj, __InternalName__):
            # already registered
            intname = obj.__InternalName__
            if name is not None and name!=intname:
                raise ValueError("attempt to reregister object %s with new name %s" % (
                    repr(intname), repr(name)))
            if intname not in idToObject:
                raise ValueError("object of type %s named as %s, but not registered" % (type(obj),ascii(intname)))
            return PDFObjectReference(intname)
        # otherwise register the new object
        objectcounter = self.objectcounter = self.objectcounter+1
        if name is None:
            name = "R"+repr(objectcounter)
        if name in idToObject:
            other = idToObject[name]
            if other!=obj:
                raise ValueError("redefining named object: "+repr(name))
            return PDFObjectReference(name)
        if iob:
            obj.__InternalName__ = name
        #print "name", name, "counter", objectcounter
        self.idToObjectNumberAndVersion[name] = (objectcounter, 0)
        self.numberToId[objectcounter] = name
        idToObject[name] = obj
        return PDFObjectReference(name)
### chapter 4 Objects
# The three PDF keyword singletons, emitted literally into the output.
PDFtrue = "true"
PDFfalse = "false"
PDFnull = "null"
class PDFText(PDFObject):
    """A PDF text string rendered in hexadecimal form, e.g. <48656c6c6f>."""
    def __init__(self, t):
        self.t = t
    def format(self, document):
        t = self.t
        if isUnicode(t):
            t = t.encode('utf-8')
        result = binascii.hexlify(document.encrypt.encode(t))
        return b"<" + result + b">"
    def __str__(self):
        # NOTE(review): format() returns bytes, so under Python 3 calling
        # str() on a PDFText would raise TypeError; appears debug-only.
        dummydoc = DummyDoc()
        return self.format(dummydoc)
def PDFnumber(n):
    """Identity: Python numbers are already valid PDF number tokens."""
    return n
import re
_re_cleanparens=re.compile('[^()]')
del re
def _isbalanced(s):
'''test whether a string is balanced in parens'''
s = _re_cleanparens.sub('',s)
n = 0
for c in s:
if c=='(': n+=1
else:
n -= 1
if n<0: return 0
return not n and 1 or 0
def _checkPdfdoc(utext):
    '''Return 1 if utext has no errors under the pdfdoc encoding, else 0.'''
    try:
        utext.encode('pdfdoc')
        return 1
    except UnicodeEncodeError:
        # the unused `as e` binding was dropped; only encodability matters
        return 0
class PDFString(PDFObject):
    """A PDF (...) literal string object.

    s can be unicode/utf8 bytes or another PDFString.
    escape is a bitmask: bit 0 escapes via escapePDF, bit 1 turns \\012 back
    into real newlines, bit 2 unescapes balanced parentheses.
    enc 'auto' adapts to utf_16_be when the text is not representable in the
    pdfdoc encoding; 'raw' leaves byte strings untouched.
    """
    def __init__(self, s, escape=1, enc='auto'):
        '''s can be unicode/utf8 or a PDFString
        if escape is true then the output will be passed through escape
        if enc is raw then the string will be left alone
        if enc is auto we'll try and automatically adapt to utf_16_be if the
        effective string is not entirely in pdfdoc
        '''
        if isinstance(s,PDFString):
            self.s = s.s
            self.escape = s.escape
            self.enc = s.enc
        else:
            self.s = s
            self.escape = escape
            self.enc = enc
    def format(self, document):
        s = self.s
        enc = getattr(self,'enc','auto')
        if isBytes(s):
            # FIX: was `enc is 'auto'` -- identity comparison against a str
            # literal is implementation-dependent; use equality.
            if enc == 'auto':
                try:
                    u = s.decode(s.startswith(codecs.BOM_UTF16_BE) and 'utf16' or 'utf8')
                    if _checkPdfdoc(u):
                        s = u.encode('pdfdoc')
                    else:
                        s = codecs.BOM_UTF16_BE+u.encode('utf_16_be')
                except:
                    try:
                        s.decode('pdfdoc')
                    except:
                        stderr.write('Error in %s' % (repr(s),))
                        raise
        elif isUnicode(s):
            # FIX: same `is` -> `==` correction as above
            if enc == 'auto':
                if _checkPdfdoc(s):
                    s = s.encode('pdfdoc')
                else:
                    s = codecs.BOM_UTF16_BE+s.encode('utf_16_be')
            else:
                s = codecs.BOM_UTF16_BE+s.encode('utf_16_be')
        else:
            raise ValueError('PDFString argument must be str/unicode not %s' % type(s))
        escape = getattr(self,'escape',1)
        if not isinstance(document.encrypt,NoEncryption):
            s = document.encrypt.encode(s)
            escape = 1  # encrypted bytes must always be escaped
        if escape:
            try:
                es = "(%s)" % escapePDF(s)
            except:
                raise ValueError("cannot escape %s %s" % (s, repr(s)))
            if escape&2:
                es = es.replace('\\012','\n')
            if escape&4 and _isbalanced(es):
                es = es.replace('\\(','(').replace('\\)',')')
            return pdfdocEnc(es)
        else:
            return b'(' + s + b')'
    def __str__(self):
        return "(%s)" % escapePDF(self.s)
def PDFName(data,lo=chr(0x21),hi=chr(0x7e)):
    """Convert a string to a PDF name token: '/' plus data with unsafe chars hex-escaped.

    Characters outside the printable range [lo, hi] and PDF delimiters are
    written as #xx escapes.
    NOTE: RESULT MUST ALWAYS SUPPORT MEANINGFUL COMPARISONS (EQUALITY) AND HASH
    (might need to change this to a class for encryption).
    """
    out = []
    for ch in data:
        if ch < lo or ch > hi or ch in "%()<>{}[]#":
            out.append("#" + hex(ord(ch))[2:])  # forget the 0x thing...
        else:
            out.append(ch)
    return "/" + "".join(out)
class PDFDictionary(PDFObject):
    """A PDF << ... >> dictionary.

    Keys are plain name strings, e.g. "a": 122 -- NOT pdfnames, NOT "/a": 122.
    """
    multiline = True
    def __init__(self, dict=None):
        self.dict = {} if dict is None else dict.copy()
    def __setitem__(self, name, value):
        self.dict[name] = value
    def __getitem__(self, a):
        return self.dict[a]
    def __contains__(self, a):
        return a in self.dict
    def Reference(self, name, document):
        # replace the entry with an indirect reference to it
        self.dict[name] = document.Reference(self.dict[name])
    def format(self, document, IND=b'\r\n '):
        data = self.dict
        try:
            keys = list(data.keys())
        except:
            print(ascii(data))
            raise
        if not isinstance(data, OrderedDict):
            keys.sort()  # deterministic output for plain dicts
        entries = [format(PDFName(k), document) + b" " + format(data[k], document)
                   for k in keys]
        if self.multiline and rl_config.pdfMultiLine:
            body = IND.join(entries)
        else:
            # single line, but break after every 6 entries anyway
            for i in reversed(range(6, len(entries), 6)):
                entries.insert(i, b'\r\n ')
            body = b" ".join(entries)
        return b'<< ' + body + b' >>'
    def copy(self):
        return PDFDictionary(self.dict)
    def normalize(self):
        # rewrite any '/Name' keys to the RL-standard 'Name' form
        for k in [k for k in self.dict.keys() if k.startswith('/')]:
            self.dict[k[1:]] = self.dict.pop(k)
class checkPDFNames:
    """Validator: accept only values from a fixed set of PDF names.

    Returns the normalized '/Name' form on success, None (falsy) otherwise.
    """
    def __init__(self, *names):
        self.names = [PDFName(n) for n in names]
    def __call__(self, value):
        if not value.startswith('/'):
            value = PDFName(value)
        if value in self.names:
            return value
def checkPDFBoolean(value):
    """Validator: accept the PDF boolean keywords unchanged, else return None."""
    if value in ('true', 'false'):
        return value
class CheckedPDFDictionary(PDFDictionary):
    """PDFDictionary whose keys/values are vetted through a `validate` mapping."""
    validate = {}
    def __init__(self, dict=None, validate=None):
        PDFDictionary.__init__(self, dict)
        if validate:
            self.validate = validate
    def __setitem__(self, name, value):
        try:
            checker = self.validate[name]
        except KeyError:
            raise ValueError('invalid key, %r' % name)
        cvalue = checker(value)
        if cvalue is None:
            raise ValueError('Bad value %r for key %r' % (value, name))
        PDFDictionary.__setitem__(self, name, cvalue)
class ViewerPreferencesPDFDictionary(CheckedPDFDictionary):
    """/ViewerPreferences catalog entry: validated viewer display options."""
    validate=dict(
                HideToolbar=checkPDFBoolean,
                HideMenubar=checkPDFBoolean,
                HideWindowUI=checkPDFBoolean,
                FitWindow=checkPDFBoolean,
                CenterWindow=checkPDFBoolean,
                DisplayDocTitle=checkPDFBoolean,    #contributed by mark Erbaugh
                NonFullScreenPageMode=checkPDFNames(*'UseNone UseOutlines UseThumbs UseOC'.split()),
                Direction=checkPDFNames(*'L2R R2L'.split()),
                ViewArea=checkPDFNames(*'MediaBox CropBox BleedBox TrimBox ArtBox'.split()),
                ViewClip=checkPDFNames(*'MediaBox CropBox BleedBox TrimBox ArtBox'.split()),
                PrintArea=checkPDFNames(*'MediaBox CropBox BleedBox TrimBox ArtBox'.split()),
                PrintClip=checkPDFNames(*'MediaBox CropBox BleedBox TrimBox ArtBox'.split()),
                PrintScaling=checkPDFNames(*'None AppDefault'.split()),
                )
# stream filters are objects to support round trip and
# possibly in the future also support parameters
class PDFStreamFilterZCompress:
    """/FlateDecode stream filter: zlib compression."""
    pdfname = "FlateDecode"
    def encode(self, text):
        from reportlab.lib.utils import import_zlib
        zlib = import_zlib()
        if not zlib: raise ImportError("cannot z-compress zlib unavailable")
        if isUnicode(text):
            text = text.encode('utf8')
        return zlib.compress(text)
    def decode(self, encoded):
        from reportlab.lib.utils import import_zlib
        zlib = import_zlib()
        if not zlib: raise ImportError("cannot z-decompress zlib unavailable")
        return zlib.decompress(encoded)
# need only one of these, unless we implement parameters later
PDFZCompress = PDFStreamFilterZCompress()
class PDFStreamFilterBase85Encode:
    """/ASCII85Decode stream filter: base-85 ASCII armouring."""
    pdfname = "ASCII85Decode"
    def encode(self, text):
        from reportlab.pdfbase.pdfutils import _wrap
        text = asciiBase85Encode(text)
        if rl_config.wrapA85:
            text = _wrap(text)  # wrap output to a fixed line length
        return text
    def decode(self, text):
        return asciiBase85Decode(text)
# need only one of these too
PDFBase85Encode = PDFStreamFilterBase85Encode()
class PDFStream(PDFObject):
    '''A PDF stream object: a dictionary plus raw content.
    set dictionary elements explicitly stream.dictionary[name]=value'''
    ### compression stuff not implemented yet
    __RefOnly__ = 1 # must be at top level
    def __init__(self, dictionary=None, content=None, filters=None):
        if dictionary is None:
            dictionary = PDFDictionary()
        self.dictionary = dictionary
        self.content = content
        self.filters = filters
    def format(self, document):
        """Encode content through the filter chain (and encryption), then emit
        dict + "stream ... endstream"."""
        dictionary = self.dictionary
        # copy it for modification
        dictionary = PDFDictionary(dictionary.dict.copy())
        content = self.content
        filters = self.filters
        if self.content is None:
            raise ValueError("stream content not set")
        if filters is None:
            filters = document.defaultStreamFilters
        # only apply filters if they haven't been applied elsewhere
        if filters is not None and "Filter" not in dictionary.dict:
            # apply filters in reverse order listed
            rf = list(filters)
            rf.reverse()
            fnames = []
            for f in rf:
                content = f.encode(content)
                fnames.insert(0, PDFName(f.pdfname))
            dictionary["Filter"] = PDFArray(fnames)
        # "stream encoding is done after all filters have been applied"
        content = document.encrypt.encode(content)
        fc = format(content, document)
        # /Length is the length of the encoded (filtered+encrypted) content
        dictionary["Length"] = len(content)
        fd = format(dictionary, document)
        return fd+b'\r\nstream\r\n'+fc+b'endstream\r\n'
def teststream(content=None):
    """Build a compressed PDFStream around `content` (default: test content)."""
    #content = "" # test
    if content is None:
        content = teststreamcontent
    content = content.strip()
    content = content.replace("\n", '\n\r') + '\n\r'
    S = PDFStream(content = content,
                  filters=rl_config.useA85 and [PDFBase85Encode,PDFZCompress] or [PDFZCompress])
    # nothing else needed...
    S.__Comment__ = "test stream"
    return S
# sample page marking operators used by teststream()/testpage()
teststreamcontent = """
1 0 0 1 0 0 cm BT /F9 12 Tf 14.4 TL ET
1.00 0.00 1.00 rg
n 72.00 72.00 432.00 648.00 re B*
"""
class PDFArray(PDFObject):
    """A PDF [ ... ] array of objects."""
    multiline = True
    def __init__(self, sequence):
        self.sequence = list(sequence)
    def References(self, document):
        """make all objects in sequence references"""
        self.sequence = [document.Reference(e) for e in self.sequence]
    def format(self, document, IND=b'\r\n '):
        parts = [format(e, document) for e in self.sequence]
        if self.multiline and rl_config.pdfMultiLine:
            body = IND.join(parts)
        else:
            if len(parts) > 10:
                # single line, but break after every 10 elements anyway
                for i in reversed(range(10, len(parts), 10)):
                    parts.insert(i, b'\r\n ')
            body = b' '.join(parts)
        return b'[ ' + body + b' ]'
class PDFArrayCompact(PDFArray):
    # always render on a single line (no per-element line breaks)
    multiline=False
class PDFIndirectObject(PDFObject):
    """Wrapper that emits "n v obj ... endobj" for a registered object."""
    __RefOnly__ = 1
    def __init__(self, name, content):
        self.name = name
        self.content = content
    def format(self, document):
        name = self.name
        n, v = document.idToObjectNumberAndVersion[name]
        # set encryption parameters
        document.encrypt.register(n, v)
        fcontent = format(self.content, document, toplevel=1) # yes this is at top level
        return (pdfdocEnc("%s %s obj\r\n"%(n,v))
                +fcontent+ (b'' if fcontent.endswith(b'\r\n') else b'\r\n')
                +b'endobj\r\n')
class PDFObjectReference(PDFObject):
    """An indirect reference ("n v R") to a registered object, by internal name."""
    def __init__(self, name):
        self.name = name
    def format(self, document):
        # The name must have been assigned an object number by the time the
        # final formatting pass reaches this reference.
        try:
            n, v = document.idToObjectNumberAndVersion[self.name]
        except KeyError:
            # FIX: was a bare `except:` around the whole body, which also
            # masked unrelated errors (e.g. from pdfdocEnc) and remapped them
            # to KeyError; only a failed lookup means a dangling reference.
            raise KeyError("forward reference to %s not resolved upon final formatting" % repr(self.name))
        return pdfdocEnc("%s %s R" % (n, v))
class PDFFile(PDFObject):
    """Output collector: accumulates byte strings and tracks the file offset."""
    ### just accumulates strings: keeps track of current offset
    def __init__(self,pdfVersion=PDF_VERSION_DEFAULT):
        # pdfVersion is a (major, minor) pair used in the %PDF-x.y header
        self.strings = []
        self.write = self.strings.append
        self.offset = 0
        ### chapter 5
        # Following Ken Lunde's advice and the PDF spec, this includes
        # some high-order bytes. I chose the characters for Tokyo
        # in Shift-JIS encoding, as these cannot be mistaken for
        # any other encoding, and we'll be able to tell if something
        # has run our PDF files through a dodgy Unicode conversion.
        self.add((pdfdocEnc("%%PDF-%s.%s" % pdfVersion) +
                b'\r\n%\223\214\213\236 ReportLab Generated PDF document http://www.reportlab.com\r\n'
                ))
    def closeOrReset(self):
        pass
    def add(self, s):
        """should be constructed as late as possible, return position where placed"""
        s = pdfdocEnc(s)
        result = self.offset
        self.offset = result+len(s)
        self.write(s)
        return result
    def format(self, document):
        return b''.join(self.strings)
XREFFMT = '%0.10d %0.5d n'  # offset / generation format for in-use xref entries
class PDFCrossReferenceSubsection(PDFObject):
    """One contiguous subsection of the cross-reference table."""
    def __init__(self, firstentrynumber, idsequence):
        self.firstentrynumber = firstentrynumber
        self.idsequence = idsequence
    def format(self, document):
        """id sequence should represent contiguous object nums else error. free numbers not supported (yet)"""
        firstentrynumber = self.firstentrynumber
        idsequence = self.idsequence
        entries = list(idsequence)
        nentries = len(idsequence)
        # special case: object number 0 is always free
        taken = {}  # maps object number -> id occupying it
        if firstentrynumber==0:
            taken[0] = "standard free entry"
            nentries = nentries+1
            entries.insert(0, "0000000000 65535 f")
        idToNV = document.idToObjectNumberAndVersion
        idToOffset = document.idToOffset
        lastentrynumber = firstentrynumber+nentries-1
        for id in idsequence:
            (num, version) = idToNV[id]
            if num in taken:
                # FIX: `taken` is keyed by object number, not id -- taken[id]
                # raised a spurious KeyError instead of reporting the collision
                raise ValueError("object number collision %s %s %s" % (num, repr(id), repr(taken[num])))
            if num>lastentrynumber or num<firstentrynumber:
                raise ValueError("object number %s not in range %s..%s" % (num, firstentrynumber, lastentrynumber))
            # compute position in list
            rnum = num-firstentrynumber
            taken[num] = id
            offset = idToOffset[id]
            # NOTE(review): indexing by num (not rnum) is only correct for
            # firstentrynumber==0, which is the sole case used in this file
            entries[num] = XREFFMT % (offset, version)
        # now add the initial line
        firstline = "%s %s" % (firstentrynumber, nentries)
        entries.insert(0, firstline)
        # make sure it ends with \r\n
        entries.append("")
        return pdfdocEnc('\r\n'.join(entries))
class PDFCrossReferenceTable(PDFObject):
    """The full xref table: the "xref" keyword plus one or more subsections."""
    def __init__(self):
        self.sections = []
    def addsection(self, firstentry, ids):
        self.sections.append(PDFCrossReferenceSubsection(firstentry, ids))
    def format(self, document):
        if not self.sections:
            raise ValueError("no crossref sections")
        parts = [b"xref\r\n"]
        parts.extend(format(s, document) for s in self.sections)
        return pdfdocEnc(b''.join(parts))
class PDFTrailer(PDFObject):
    """The file trailer: trailer dict plus startxref offset and %%EOF marker."""
    def __init__(self, startxref, Size=None, Prev=None, Root=None, Info=None, ID=None, Encrypt=None):
        self.startxref = startxref
        if Size is None or Root is None:
            raise ValueError("Size and Root keys required")
        self.dict = PDFDictionary()
        for key, val in (("Size", Size), ("Prev", Prev), ("Root", Root),
                         ("Info", Info), ("ID", ID), ("Encrypt", Encrypt)):
            if val is not None:
                self.dict[key] = val
    def format(self, document):
        return b''.join([
            b'trailer\r\n',
            format(self.dict, document),
            b'\r\nstartxref\r\n',
            pdfdocEnc(str(self.startxref)),
            b'\r\n%%EOF\r\n',
            ])
#### XXXX skipping incremental update,
#### encryption
#### chapter 6, doc structure
class PDFCatalog(PDFObject):
    """The document root (/Catalog) dictionary.

    Subclasses reuse the same formatting machinery: __Defaults__ provides
    default entries (overridable as attributes), __NoDefault__ lists optional
    attributes included only when set, and __Refs__ lists entries forced to
    indirect references.
    """
    __Comment__ = "Document Root"
    __RefOnly__ = 1
    # to override, set as attributes
    __Defaults__ = {"Type": PDFName("Catalog"),
                "PageMode": PDFName("UseNone"),
                "Lang": None,
                }
    __NoDefault__ = """
        Dests Outlines Pages Threads AcroForm Names OpenAction PageMode URI
        ViewerPreferences PageLabels PageLayout JavaScript StructTreeRoot SpiderInfo""".split()
    __Refs__ = __NoDefault__ # make these all into references, if present
    def format(self, document):
        self.check_format(document)
        defaults = self.__Defaults__
        Refs = self.__Refs__
        D = {}
        # defaults first (attribute values win over class defaults)
        for k,v in defaults.items():
            v = getattr(self,k,v)
            if v is not None:
                D[k] = v
        # then the optional entries, only when set
        for k in self.__NoDefault__:
            v = getattr(self,k,None)
            if v is not None:
                D[k] = v
        # force objects to be references where required
        for k in Refs:
            if k in D:
                #print"k is", k, "value", D[k]
                D[k] = document.Reference(D[k])
        dict = PDFDictionary(D)
        return format(dict, document)
    def showOutline(self):
        self.setPageMode("UseOutlines")
    def showFullScreen(self):
        self.setPageMode("FullScreen")
    def setPageLayout(self,layout):
        if layout:
            self.PageLayout = PDFName(layout)
    def setPageMode(self,mode):
        if mode:
            self.PageMode = PDFName(mode)
    def check_format(self, document):
        """for use in subclasses"""
        pass
class PDFPages(PDFCatalog):
    """PAGES TREE WITH ONE INTERNAL NODE, FOR "BALANCING" CHANGE IMPLEMENTATION"""
    __Comment__ = "page tree"
    __RefOnly__ = 1
    # note: could implement page attribute inheritance...
    __Defaults__ = {"Type": PDFName("Pages"),
                    }
    __NoDefault__ = "Kids Count Parent".split()
    __Refs__ = ["Parent"]
    def __init__(self):
        self.pages = []
    def __getitem__(self, item):
        return self.pages[item]
    def addPage(self, page):
        self.pages.append(page)
    def check_format(self, document):
        # /Kids must be an array of indirect references to the page objects
        kids = PDFArray(self.pages)
        kids.References(document)
        self.Kids = kids
        self.Count = len(self.pages)
class PDFPage(PDFCatalog):
    """A single /Page object: content stream, resources, boxes, annotations."""
    __Comment__ = "Page dictionary"
    # all PDF attributes can be set explicitly
    # if this flag is set, the "usual" behavior will be suppressed
    Override_default_compilation = 0
    __RefOnly__ = 1
    __Defaults__ = {"Type": PDFName("Page"),
                   # "Parent": PDFObjectReference(Pages), # no! use document.Pages
                    }
    __NoDefault__ = """Parent
        MediaBox Resources Contents CropBox Rotate Thumb Annots B Dur Hid Trans AA
        PieceInfo LastModified SeparationInfo ArtBox TrimBox BleedBox ID PZ
        Trans""".split()
    __Refs__ = """Contents Parent ID""".split()
    pagewidth = 595     # default page size in points (595 x 842 == A4)
    pageheight = 842
    stream = None
    hasImages = 0
    compression = 0
    XObjects = None
    _colorsUsed = {}
    _shadingsUsed = {}
    Trans = None
    # transitionstring?
    # xobjects?
    # annotations
    def __init__(self):
        # set all nodefaults to None
        for name in self.__NoDefault__:
            setattr(self, name, None)
    def setCompression(self, onoff):
        # enable/disable compression of this page's content stream
        self.compression = onoff
    def setStream(self, code):
        if self.Override_default_compilation:
            raise ValueError("overridden! must set stream explicitly")
        if isSeq(code):
            code = '\r\n'.join(code)+'\r\n'
        self.stream = code
    def setPageTransition(self, tranDict):
        self.Trans = PDFDictionary(tranDict)
    def check_format(self, document):
        """Fill in MediaBox/Contents/Resources/Parent defaults just before formatting."""
        # set up parameters unless usual behaviour is suppressed
        if self.Override_default_compilation:
            return
        # swap width/height when the page is rotated a quarter turn
        self.MediaBox = self.MediaBox or PDFArray(self.Rotate in (90,270) and [0,0,self.pageheight,self.pagewidth] or [0, 0, self.pagewidth, self.pageheight])
        if not self.Annots:
            self.Annots = None
        else:
            #print self.Annots
            #raise ValueError("annotations not reimplemented yet")
            if not isinstance(self.Annots,PDFObject):
                self.Annots = PDFArray(self.Annots)
        if not self.Contents:
            stream = self.stream
            if not stream:
                self.Contents = teststream()
            else:
                S = PDFStream()
                if self.compression:
                    S.filters = rl_config.useA85 and [PDFBase85Encode, PDFZCompress] or [PDFZCompress]
                S.content = stream
                S.__Comment__ = "page stream"
                self.Contents = S
        if not self.Resources:
            resources = PDFResourceDictionary()
            # fonts!
            resources.basicFonts()
            if self.hasImages:
                resources.allProcs()
            else:
                resources.basicProcs()
            if self.XObjects:
                #print "XObjects", self.XObjects.dict
                resources.XObject = self.XObjects
            # NOTE(review): ExtGState is not in __NoDefault__, so __init__
            # never initializes it; presumably always set by the canvas --
            # confirm, otherwise this is an AttributeError waiting to happen.
            if self.ExtGState:
                resources.ExtGState = self.ExtGState
            # NOTE(review): reads _shadingUsed although the class default
            # above is spelled _shadingsUsed; the instance attribute is
            # presumably set by the canvas -- confirm before "fixing".
            resources.setShading(self._shadingUsed)
            resources.setColorSpace(self._colorsUsed)
            self.Resources = resources
        if not self.Parent:
            pages = document.Pages
            self.Parent = document.Reference(pages)
#this code contributed by Christian Jacobs <cljacobsen@gmail.com>
class DuplicatePageLabelPage(Exception):
    """Raised (via PDFPageLabel.__lt__) when two labels target the same page."""
    pass
class PDFPageLabels(PDFCatalog):
    """The catalog's /PageLabels number tree: page index -> PDFPageLabel."""
    __comment__ = None
    __RefOnly__ = 0
    __Defaults__ = {}
    __NoDefault__ = ["Nums"]
    __Refs__ = []
    def __init__(self):
        self.labels = []
    def addPageLabel(self, page, label):
        """ Adds a new PDFPageLabel to this catalog.
        The 'page' argument, an integer, is the page number in the PDF document
        with which the 'label' should be associated. Page numbering in the PDF
        starts at zero! Thus, to change the label on the first page, '0' should be
        provided as an argument, and to change the 6th page, '5' should be provided
        as the argument.
        The 'label' argument should be a PDFPageLabel instance, which describes the
        format of the labels starting on page 'page' in the PDF and continuing
        until the next encounter of a PDFPageLabel.
        The order in which labels are added is not important.
        """
        self.labels.append((page, label))
    def format(self, document):
        try:
            # sorting (page, label) tuples compares labels when pages tie;
            # PDFPageLabel.__lt__ then raises DuplicatePageLabelPage if
            # rl_config asks for duplicate pages to be an error
            self.labels.sort()
        except DuplicatePageLabelPage:
            tmp = sorted([x[0] for x in self.labels])
            annotateException('\n\n!!!!! Duplicate PageLabel seen for pages %r' % list(set([x for x in tmp if tmp.count(x)>1])))
        labels = []
        for page, label in self.labels:
            labels.append(page)
            labels.append(label)
        self.Nums = PDFArray(labels)    #PDFArray makes a copy with list()
        return PDFCatalog.format(self, document)
class PDFPageLabel(PDFCatalog):
    __Comment__ = None
    __RefOnly__ = 0
    __Defaults__ = {}
    __NoDefault__ = "Type S P St".split()
    # symbolic style names acceptable to __init__ (mapped to the codes below)
    __convertible__ = 'ARABIC ROMAN_UPPER ROMAN_LOWER LETTERS_UPPER LETTERS_LOWER'
    ARABIC = 'D'
    ROMAN_UPPER = 'R'
    ROMAN_LOWER = 'r'
    LETTERS_UPPER = 'A'
    LETTERS_LOWER = 'a'
    def __init__(self, style=None, start=None, prefix=None):
        """
        A PDFPageLabel changes the style of page numbering as displayed in a PDF
        viewer. PDF page labels have nothing to do with 'physical' page numbers
        printed on a canvas, but instead influence the 'logical' page numbers
        displayed by PDF viewers. However, when using roman numerals (i, ii,
        iii...) or page prefixes for appendecies (A.1, A.2...) on the physical
        pages PDF page labels are necessary to change the logical page numbers
        displayed by the PDF viewer to match up with the physical numbers. A
        PDFPageLabel changes the properties of numbering at the page on which it
        appears (see the class 'PDFPageLabels' for specifying where a PDFPageLabel
        is associated) and all subsequent pages, until a new PDFPageLabel is
        encountered.
        The arguments to this initialiser determine the properties of all
        subsequent page labels. 'style' determines the numberings style, arabic,
        roman, letters; 'start' specifies the starting number; and 'prefix' any
        prefix to be applied to the page numbers. All these arguments can be left
        out or set to None.
        * style:
            - None:                       No numbering, can be used to display the prefix only.
            - PDFPageLabel.ARABIC:        Use arabic numbers: 1, 2, 3, 4...
            - PDFPageLabel.ROMAN_UPPER:   Use upper case roman numerals: I, II, III...
            - PDFPageLabel.ROMAN_LOWER:   Use lower case roman numerals: i, ii, iii...
            - PDFPageLabel.LETTERS_UPPER: Use upper case letters: A, B, C, D...
            - PDFPageLabel.LETTERS_LOWER: Use lower case letters: a, b, c, d...
        * start:
            - An integer specifying the starting number for this PDFPageLabel. This
            can be used when numbering style changes to reset the page number back
            to one, ie from roman to arabic, or from arabic to appendecies. Can be
            any positive integer or None. I'm not sure what the effect of
            specifying None is, probably that page numbering continues with the
            current sequence, I'd have to check the spec to clarify though.
        * prefix:
            - A string which is prefixed to the page numbers. Can be used to display
            appendecies in the format: A.1, A.2, ..., B.1, B.2, ... where a
            PDFPageLabel is used to set the properties for the first page of each
            appendix to restart the page numbering at one and set the prefix to the
            appropriate letter for current appendix. The prefix can also be used to
            display text only, if the 'style' is set to None. This can be used to
            display strings such as 'Front', 'Back', or 'Cover' for the covers on
            books.
        """
        if style:
            # accept symbolic names like 'ARABIC' as well as the raw style codes
            if style.upper() in self.__convertible__: style = getattr(self,style.upper())
            self.S = PDFName(style)
        if start: self.St = PDFnumber(start)
        if prefix: self.P = PDFString(prefix)
    def __lt__(self,oth):
        # only ever invoked when two labels tie on page number during
        # PDFPageLabels.format's sort; optionally treat that as an error
        if rl_config.errorOnDuplicatePageLabelPage:
            raise DuplicatePageLabelPage()
        return False
#ends code contributed by Christian Jacobs <cljacobsen@gmail.com>
def testpage(document):
    """Append a simple test page (test stream, A4 media box) to `document`."""
    page = PDFPage()
    page.Contents = teststream()
    pages = document.Pages
    page.Parent = document.Reference(pages)
    page.MediaBox = PDFArray([0, 0, 595, 841])
    resources = PDFResourceDictionary()
    resources.allProcs() # enable all procsets
    resources.basicFonts()
    page.Resources = resources
    pages.addPage(page)
#### DUMMY OUTLINES IMPLEMENTATION FOR testing
DUMMYOUTLINE = """
<<
/Count
0
/Type
/Outlines
>>"""
class PDFOutlines0(PDFObject):
    """Placeholder /Outlines object with zero entries (testing only)."""
    __Comment__ = "TEST OUTLINE!"
    text = DUMMYOUTLINE.replace("\n", '\r\n')
    __RefOnly__ = 1
    def format(self, document):
        return pdfdocEnc(self.text)
class OutlineEntryObject(PDFObject):
    "an entry in an outline"
    Title = Dest = Parent = Prev = Next = First = Last = Count = None
    def format(self, document):
        # Title/Parent/Dest are always emitted; the rest only when set
        entries = {"Title": PDFString(self.Title),
                   "Parent": self.Parent,
                   "Dest": self.Dest}
        for attr in ("Prev", "Next", "First", "Last", "Count"):
            val = getattr(self, attr)
            if val is not None:
                entries[attr] = val
        return PDFDictionary(entries).format(document)
class PDFOutlines(PDFObject):
    """Document outline (bookmark) tree.

    Takes a recursive list of outline destinations like::

        out = PDFOutline1()
        out.setNames(canvas, # requires canvas for name resolution
            "chapter1dest",
            ("chapter2dest",
             ["chapter2section1dest",
              "chapter2section2dest",
              "chapter2conclusiondest"]
             ), # end of chapter2 description
            "chapter3dest",
            ("chapter4dest", ["c4s1", "c4s2"])
            )

    Higher layers may build this structure incrementally. KISS at base level.
    """
    # first attempt, many possible features missing.
    mydestinations = ready = None
    counter = 0
    currentlevel = -1  # ie, no levels yet

    def __init__(self):
        self.destinationnamestotitles = {}
        self.destinationstotitles = {}
        self.levelstack = []
        self.buildtree = []
        self.closedict = {}  # dictionary of "closed" destinations in the outline

    def addOutlineEntry(self, destinationname, level=0, title=None, closed=None):
        """Add one entry; destinationname of None means "close the tree".

        *closed* marks the entry as collapsed in the viewer."""
        if destinationname is None and level != 0:
            raise ValueError("close tree must have level of 0")
        if not isinstance(level, int):
            raise ValueError("level must be integer, got %s" % type(level))
        if level < 0:
            raise ValueError("negative levels not allowed")
        if title is None:
            title = destinationname
        currentlevel = self.currentlevel
        stack = self.levelstack
        # adjust currentlevel and stack to match level
        if level > currentlevel:
            if level > currentlevel + 1:
                raise ValueError("can't jump from outline level %s to level %s, need intermediates (destinationname=%r, title=%r)" % (currentlevel, level, destinationname, title))
            level = currentlevel = currentlevel + 1
            stack.append([])
        while level < currentlevel:
            # pop off levels to match: fold the finished level into its parent entry
            current = stack[-1]
            del stack[-1]
            previous = stack[-1]
            lastinprevious = previous[-1]
            if isinstance(lastinprevious, tuple):
                (name, sectionlist) = lastinprevious
                raise ValueError("cannot reset existing sections: " + repr(lastinprevious))
            else:
                name = lastinprevious
                sectionlist = current
                previous[-1] = (name, sectionlist)
            currentlevel = currentlevel - 1
        if destinationname is None:
            return
        stack[-1].append(destinationname)
        self.destinationnamestotitles[destinationname] = title
        if closed:
            self.closedict[destinationname] = 1
        self.currentlevel = level

    def setDestinations(self, destinationtree):
        self.mydestinations = destinationtree

    def format(self, document):
        """Emit the root /Outlines dictionary; prepare() must have run first."""
        D = {}
        D["Type"] = PDFName("Outlines")
        c = self.count
        D["Count"] = c
        if c != 0:
            D["First"] = self.first
            D["Last"] = self.last
        PD = PDFDictionary(D)
        return PD.format(document)

    def setNames(self, canvas, *nametree):
        """Set destinations from a varargs name tree (requires canvas)."""
        desttree = self.translateNames(canvas, nametree)
        self.setDestinations(desttree)

    def setNameList(self, canvas, nametree):
        "Explicit list so I don't need to do in the caller"
        desttree = self.translateNames(canvas, nametree)
        self.setDestinations(desttree)

    def translateNames(self, canvas, object):
        "recursively translate tree of names into tree of destinations"
        destinationnamestotitles = self.destinationnamestotitles
        destinationstotitles = self.destinationstotitles
        closedict = self.closedict
        if isStr(object):
            if not isUnicode(object):
                object = object.decode('utf8')
            destination = canvas._bookmarkReference(object)
            title = object
            if object in destinationnamestotitles:
                title = destinationnamestotitles[object]
            else:
                destinationnamestotitles[title] = title
            destinationstotitles[destination] = title
            if object in closedict:
                closedict[destination] = 1  # mark destination closed
            return {object: canvas._bookmarkReference(object)}  # name-->ref
        if isSeq(object):
            L = []
            for o in object:
                L.append(self.translateNames(canvas, o))
            if isinstance(object, tuple):
                return tuple(L)
            return L
        # bug contributed by Benjamin Dumke <reportlab@benjamin-dumke.de>
        raise TypeError("in outline, destination name must be string: got a %s" % type(object))

    def prepare(self, document, canvas):
        """prepare all data structures required for save operation (create related objects)"""
        if self.mydestinations is None:
            if self.levelstack:
                self.addOutlineEntry(None)  # close the tree
                destnames = self.levelstack[0]
                self.mydestinations = self.translateNames(canvas, destnames)
            else:
                # no outline entries at all
                self.first = self.last = None
                self.count = 0
                self.ready = 1
                return
        # XXXX this needs to be generalized for closed entries!
        self.count = count(self.mydestinations, self.closedict)
        (self.first, self.last) = self.maketree(document, self.mydestinations, toplevel=1)
        self.ready = 1

    def maketree(self, document, destinationtree, Parent=None, toplevel=0):
        """Recursively build OutlineEntryObjects; returns (firstref, lastref)."""
        if toplevel:
            levelname = "Outline"
            Parent = document.Reference(document.Outlines)
        else:
            self.count = self.count + 1
            levelname = "Outline.%s" % self.count
            if Parent is None:
                raise ValueError("non-top level outline elt parent must be specified")
        if not isSeq(destinationtree):
            # BUGFIX: the message previously lacked its % argument
            raise ValueError("destinationtree must be list or tuple, got %s" % type(destinationtree))
        nelts = len(destinationtree)
        lastelt = firstref = lastref = None
        destinationnamestotitles = self.destinationnamestotitles
        closedict = self.closedict
        for index in range(nelts):
            eltobj = OutlineEntryObject()
            eltobj.Parent = Parent
            eltname = "%s.%s" % (levelname, index)
            eltref = document.Reference(eltobj, eltname)
            # doubly link the siblings
            if lastelt is not None:
                lastelt.Next = eltref
                eltobj.Prev = lastref
            if firstref is None:
                firstref = eltref
            lastref = eltref
            lastelt = eltobj  # advance eltobj
            elt = destinationtree[index]
            if isinstance(elt, dict):
                # simple leaf {name: dest}
                leafdict = elt
            elif isinstance(elt, tuple):
                # leaf with subsections: ({name: ref}, subsections) XXXX should clean up (see count(...))
                try:
                    (leafdict, subsections) = elt
                except:
                    raise ValueError("destination tree elt tuple should have two elts, got %s" % len(elt))
                eltobj.Count = count(subsections, closedict)
                (eltobj.First, eltobj.Last) = self.maketree(document, subsections, eltref)
            else:
                raise ValueError("destination tree elt should be dict or tuple, got %s" % type(elt))
            try:
                [(Title, Dest)] = list(leafdict.items())
            except:
                raise ValueError("bad outline leaf dictionary, should have one entry " + bytestr(elt))
            eltobj.Title = destinationnamestotitles[Title]
            eltobj.Dest = Dest
            if isinstance(elt, tuple) and Dest in closedict:
                # closed subsection, count should be negative
                eltobj.Count = -eltobj.Count
        return (firstref, lastref)
def count(tree, closedict=None):
    """utility for outline: recursively count leaves in a tuple/list tree

    A tuple is a leaf-with-subsections ({Title: Dest}, subsections); when its
    destination is in *closedict* it counts as a single (collapsed) element.
    """
    if isinstance(tree, tuple):
        # leaf with subsections XXXX should clean up this structural usage
        (leafdict, subsections) = tree
        [(Title, Dest)] = list(leafdict.items())
        if closedict and Dest in closedict:
            return 1  # closed tree element: children are not counted
    if isSeq(tree):
        # removed dead "from operator import add" (only needed by the old reduce)
        return sum(count(e, closedict) for e in tree)
    return 1
class PDFInfo(PDFObject):
    """PDF documents can have basic information embedded, viewable from
    File | Document Info in Acrobat Reader.  If this is wrong, you get
    Postscript errors while printing, even though it does not print."""
    producer = "ReportLab PDF Library - www.reportlab.com"
    creator = "ReportLab PDF Library - www.reportlab.com"
    title = "untitled"
    author = "anonymous"
    subject = "unspecified"
    keywords = ""
    _dateFormatter = None

    def __init__(self):
        self.invariant = rl_config.invariant

    def digest(self, md5object):
        # add self information to signature
        for x in (self.title, self.author, self.subject, self.keywords):
            md5object.update(bytestr(x))

    def format(self, document):
        """Serialise the /Info dictionary."""
        D = {}
        D["Title"] = PDFString(self.title)
        D["Author"] = PDFString(self.author)
        D["CreationDate"] = PDFDate(invariant=self.invariant, dateFormatter=self._dateFormatter)
        D["Producer"] = PDFString(self.producer)
        D["Creator"] = PDFString(self.creator)
        D["Subject"] = PDFString(self.subject)
        D["Keywords"] = PDFString(self.keywords)
        PD = PDFDictionary(D)
        return PD.format(document)

    def copy(self):
        "shallow copy - useful in pagecatchering"
        # BUGFIX: was self.__klass__() which is not a Python attribute and
        # raised AttributeError; the standard spelling is __class__.
        thing = self.__class__()
        for k, v in self.__dict__.items():
            setattr(thing, k, v)
        return thing
# skipping thumbnails, etc
class Annotation(PDFObject):
    """superclass for all annotations."""
    defaults = [("Type", PDFName("Annot"),)]
    required = ("Type", "Rect", "Contents", "Subtype")
    permitted = required + (
        "Border", "C", "T", "M", "F", "H", "BS", "AA", "AS", "Popup", "P", "AP")

    def cvtdict(self, d, escape=1):
        """transform dict args from python form to pdf string rep as needed"""
        rect = d["Rect"]
        if not isStr(rect):
            d["Rect"] = PDFArray(rect)
        d["Contents"] = PDFString(d["Contents"], escape)
        return d

    def AnnotationDict(self, **kw):
        """Build the annotation dictionary, validating required/permitted keys."""
        escape = kw.pop('escape', 1)
        d = dict(self.defaults)
        d.update(kw)
        for name in self.required:
            if name not in d:
                raise ValueError("keyword argument %s missing" % name)
        d = self.cvtdict(d, escape=escape)
        allowed = self.permitted
        for name in d.keys():
            if name not in allowed:
                raise ValueError("bad annotation dictionary name %s" % name)
        return PDFDictionary(d)

    def Dict(self):
        raise ValueError("DictString undefined for virtual superclass Annotation, must overload")
        # but usually
        # return self.AnnotationDict(self, Rect=(a,b,c,d)) or whatever

    def format(self, document):
        return self.Dict().format(document)
class TextAnnotation(Annotation):
    """A sticky-note style annotation (the "/Text" subtype)."""
    permitted = Annotation.permitted + ("Open", "Name")

    def __init__(self, Rect, Contents, **kw):
        self.Rect = Rect
        self.Contents = Contents
        self.otherkw = kw

    def Dict(self):
        d = dict(self.otherkw)
        d.update(Rect=self.Rect, Contents=self.Contents, Subtype="/Text")
        return self.AnnotationDict(**d)
class FreeTextAnnotation(Annotation):
    """A "/FreeText" annotation: text drawn directly on the page.

    DA is the default-appearance string controlling how the text is drawn."""
    permitted = Annotation.permitted + ("DA",)

    def __init__(self, Rect, Contents, DA, **kw):
        self.Rect = Rect
        self.Contents = Contents
        self.DA = DA
        self.otherkw = kw

    def Dict(self):
        d = dict(self.otherkw)
        d.update(Rect=self.Rect, Contents=self.Contents, DA=self.DA,
                 Subtype="/FreeText")
        return self.AnnotationDict(**d)
class LinkAnnotation(Annotation):
    """A clickable "/Link" annotation pointing at *Destination*."""
    permitted = Annotation.permitted + ("Dest", "A", "PA")

    def __init__(self, Rect, Contents, Destination, Border="[0 0 1]", **kw):
        self.Border = Border
        self.Rect = Rect
        self.Contents = Contents
        self.Destination = Destination
        self.otherkw = kw

    def dummyDictString(self):  # old, testing
        return """
<< /Type /Annot /Subtype /Link /Rect [71 717 190 734] /Border [16 16 1]
/Dest [23 0 R /Fit] >>
"""

    def Dict(self):
        d = dict(self.otherkw)
        d.update(Border=self.Border, Rect=self.Rect, Contents=self.Contents,
                 Subtype="/Link", Dest=self.Destination)
        return self.AnnotationDict(**d)
class HighlightAnnotation(Annotation):
    """
    HighlightAnnotation is an annotation that highlights the selected area.

    Rect is the mouseover area that will show the contents.

    QuadPoints is a list of points to highlight; many groups of four
    QuadPoints allow highlighting many lines.
    """
    permitted = Annotation.permitted + ("QuadPoints", )

    # NOTE(review): the mutable default for Color is shared across calls;
    # harmless as the list is never mutated here, kept for compatibility
    def __init__(self, Rect, Contents, QuadPoints, Color=[0.83, 0.89, 0.95], **kw):
        self.Rect = Rect
        self.Contents = Contents
        self.otherkw = kw
        self.QuadPoints = QuadPoints
        self.Color = Color

    def cvtdict(self, d, escape=1):
        """transform dict args from python form to pdf string rep as needed"""
        # pre-format the array-valued entries; str values pass through unchanged
        for key in ("Rect", "QuadPoints", "C"):
            value = d[key]
            if not isinstance(value, str):
                d[key] = PDFArray(value).format(d, IND=b" ")
        d["Contents"] = PDFString(d["Contents"], escape)
        return d

    def Dict(self):
        d = dict(self.otherkw)
        d.update(Rect=self.Rect, Contents=self.Contents, Subtype="/Highlight",
                 QuadPoints=self.QuadPoints, C=self.Color)
        return self.AnnotationDict(**d)
def rect_to_quad(Rect):
    """
    Utility method to convert a Rect to a QuadPoint
    """
    x1, y1, x2, y2 = Rect[0], Rect[1], Rect[2], Rect[3]
    # quad order: lower-left, lower-right, upper-left, upper-right
    return [x1, y1, x2, y1, x1, y2, x2, y2]
# skipping names tree
# skipping actions
# skipping names trees
# skipping to chapter 7
class PDFRectangle(PDFObject):
    """A PDF rectangle: lower-left and upper-right corner coordinates."""

    def __init__(self, llx, lly, urx, ury):
        # NOTE: the attribute is historically spelled 'ulx' although it
        # holds the upper-right x; kept for compatibility
        self.llx = llx
        self.lly = lly
        self.ulx = urx
        self.ury = ury

    def format(self, document):
        return format(PDFArray([self.llx, self.lly, self.ulx, self.ury]), document)
_NOWT=None
def _getTimeStamp():
global _NOWT
if not _NOWT:
import time
_NOWT = time.time()
return _NOWT
class PDFDate(PDFObject):
    # gmt offset now supported properly
    def __init__(self, invariant=rl_config.invariant, dateFormatter=None):
        # invariant=1 pins the date (and a zero GMT offset) so regenerated
        # documents are byte-identical -- useful for regression testing
        if invariant:
            now = (2000,1,1,0,0,0,0)
            self.dhh = 0
            self.dmm = 0
        else:
            import time
            now = tuple(time.localtime(_getTimeStamp())[:6])
            from time import timezone
            self.dhh = int(timezone / (3600.0))
            # NOTE(review): (timezone % 3600) % 60 looks like it should be
            # (timezone % 3600) // 60 to yield the minutes part of the GMT
            # offset (half-hour zones currently come out as 0) -- verify
            # before changing
            self.dmm = (timezone % 3600) % 60
        self.date = now[:6]
        self.dateFormatter = dateFormatter
    def format(self, doc):
        # default formatter emits the PDF "D:YYYYMMDDHHmmSS+HH'mm'" form
        dfmt = self.dateFormatter or (
                lambda yyyy,mm,dd,hh,m,s:
                    "D:%04d%02d%02d%02d%02d%02d%+03d'%02d'"
                        % (yyyy,mm,dd,hh,m,s,self.dhh,self.dmm))
        return format(PDFString(dfmt(*self.date)), doc)
class Destination(PDFObject):
    """
    Placeholder for a destination that may be referenced before the target
    page object exists (e.g. a link to Appendix A before it is defined).

    One of the fit-style methods below must be called to choose the display
    format, and setPage must bind the page, before the document is
    generated (at present setPageRef is called on generation of the page).
    """
    representation = format = page = None

    def __init__(self, name):
        self.name = name
        self.fmt = self.page = None

    def format(self, document):
        fmt = self.fmt
        if fmt is None:
            raise ValueError("format not resolved, probably missing URL scheme or undefined destination target for '%s'" % self.name)
        page = self.page
        if page is None:
            raise ValueError("Page not bound, probably missing URL scheme or undefined destination target for '%s'" % self.name)
        fmt.page = page
        return fmt.format(document)

    # --- the methods below select the viewer zoom behaviour ---
    # see pdfspec mar 11 99 pp184+
    def xyz(self, left, top, zoom):
        self.fmt = PDFDestinationXYZ(None, left, top, zoom)

    def fit(self):
        self.fmt = PDFDestinationFit(None)

    def fitb(self):
        self.fmt = PDFDestinationFitB(None)

    def fith(self, top):
        self.fmt = PDFDestinationFitH(None, top)

    def fitv(self, left):
        self.fmt = PDFDestinationFitV(None, left)

    def fitbh(self, top):
        self.fmt = PDFDestinationFitBH(None, top)

    def fitbv(self, left):
        self.fmt = PDFDestinationFitBV(None, left)

    def fitr(self, left, bottom, right, top):
        self.fmt = PDFDestinationFitR(None, left, bottom, right, top)

    def setPage(self, page):
        self.page = page
        # self.fmt.page is assigned in format(); fmt may not exist yet here
class PDFDestinationXYZ(PDFObject):
    """Destination showing the page with (left, top) at the window corner
    and the given zoom factor."""
    typename = "XYZ"

    def __init__(self, page, left, top, zoom):
        self.page = page
        self.left = left
        self.top = top
        self.zoom = zoom

    def format(self, document):
        ref = document.Reference(self.page)
        arr = PDFArray([ref, PDFName(self.typename), self.left, self.top, self.zoom])
        return format(arr, document)
class PDFDestinationFit(PDFObject):
    """Destination that fits the whole page in the window."""
    typename = "Fit"

    def __init__(self, page):
        self.page = page

    def format(self, document):
        ref = document.Reference(self.page)
        return format(PDFArray([ref, PDFName(self.typename)]), document)
class PDFDestinationFitB(PDFDestinationFit):
    # like Fit, but fits the bounding box of the page contents
    typename = "FitB"
class PDFDestinationFitH(PDFObject):
    """Destination fitting the page width, with *top* at the window top."""
    typename = "FitH"

    def __init__(self, page, top):
        self.page = page
        self.top = top

    def format(self, document):
        ref = document.Reference(self.page)
        return format(PDFArray([ref, PDFName(self.typename), self.top]), document)
class PDFDestinationFitBH(PDFDestinationFitH):
    # like FitH, but relative to the content bounding box
    typename = "FitBH"
class PDFDestinationFitV(PDFObject):
    """Destination fitting the page height, with *left* at the window edge."""
    typename = "FitV"

    def __init__(self, page, left):
        self.page = page
        self.left = left

    def format(self, document):
        ref = document.Reference(self.page)
        return format(PDFArray([ref, PDFName(self.typename), self.left]), document)
class PDFDestinationFitBV(PDFDestinationFitV):
    # like FitV, but relative to the content bounding box
    typename = "FitBV"
class PDFDestinationFitR(PDFObject):
    """Destination zooming so the rectangle (left, bottom, right, top)
    fills the window."""
    typename = "FitR"

    def __init__(self, page, left, bottom, right, top):
        self.page = page
        self.left = left
        self.bottom = bottom
        self.right = right
        self.top = top

    def format(self, document):
        ref = document.Reference(self.page)
        arr = PDFArray([ref, PDFName(self.typename),
                        self.left, self.bottom, self.right, self.top])
        return format(arr, document)
# named destinations need nothing
# skipping filespecs
class PDFResourceDictionary(PDFObject):
    """each element *could* be reset to a reference if desired"""

    # the standard procset names, in canonical order
    stdprocs = [PDFName(s) for s in "PDF Text ImageB ImageC ImageI".split()]
    # attributes serialised as sub-dictionaries
    dict_attributes = ("ColorSpace", "XObject", "ExtGState", "Font", "Pattern", "Properties", "Shading")

    def __init__(self):
        self.ColorSpace = {}
        self.XObject = {}
        self.ExtGState = {}
        self.Font = {}
        self.Pattern = {}
        self.ProcSet = []
        self.Properties = {}
        self.Shading = {}
        # ?by default define the basicprocs
        self.basicProcs()

    def allProcs(self):
        """enable all standard procsets"""
        self.ProcSet = self.stdprocs

    def basicProcs(self):
        """enable just the PDF and Text procsets"""
        self.ProcSet = self.stdprocs[:2]

    def basicFonts(self):
        self.Font = PDFObjectReference(BasicFonts)

    def setColorSpace(self, colorsUsed):
        for color, name in colorsUsed.items():
            self.ColorSpace[name] = PDFObjectReference(color)

    def setShading(self, shadingUsed):
        for shading, name in shadingUsed.items():
            self.Shading[name] = PDFObjectReference(shading)

    def format(self, document):
        D = {}
        # dict-valued attributes are emitted only when non-empty;
        # anything reset to a reference passes through unchanged
        for dname in self.dict_attributes:
            v = getattr(self, dname)
            if isinstance(v, dict):
                if v:
                    D[dname] = PDFDictionary(v)
            else:
                D[dname] = v
        procs = self.ProcSet
        if isSeq(procs):
            if procs:
                D["ProcSet"] = PDFArray(procs)
        else:
            D["ProcSet"] = procs
        return format(PDFDictionary(D), document)
##############################################################################
#
# Font objects - the PDFDocument.addFont() method knows which of these
# to construct when given a user-facing Font object
#
##############################################################################
class PDFType1Font(PDFObject):
    """no init: set attributes explicitly"""
    __RefOnly__ = 1
    # note! /Name appears to be an undocumented attribute....
    name_attributes = "Type Subtype BaseFont Name".split()
    Type = "Font"
    Subtype = "Type1"
    # these attributes are assumed to already be of the right type
    local_attributes = "FirstChar LastChar Widths Encoding ToUnicode FontDescriptor".split()

    def format(self, document):
        D = {}
        # name-valued attributes get wrapped as PDFName; locals pass through
        for attr in self.name_attributes:
            if hasattr(self, attr):
                D[attr] = PDFName(getattr(self, attr))
        for attr in self.local_attributes:
            if hasattr(self, attr):
                D[attr] = getattr(self, attr)
        return PDFDictionary(D).format(document)
## These attribute listings will be useful in future, even if we
## put them elsewhere
class PDFTrueTypeFont(PDFType1Font):
    # identical to Type1 handling except for the declared subtype
    Subtype = "TrueType"
    #local_attributes = "FirstChar LastChar Widths Encoding ToUnicode FontDescriptor".split() #same
##class PDFMMType1Font(PDFType1Font):
## Subtype = "MMType1"
##
##class PDFType3Font(PDFType1Font):
## Subtype = "Type3"
## local_attributes = "FirstChar LastChar Widths CharProcs FontBBox FontMatrix Resources Encoding".split()
##
##class PDFType0Font(PDFType1Font):
## Subtype = "Type0"
## local_attributes = "DescendantFonts Encoding".split(
##
##class PDFCIDFontType0(PDFType1Font):
## Subtype = "CIDFontType0"
## local_attributes = "CIDSystemInfo FontDescriptor DW W DW2 W2 Registry Ordering Supplement".split()
##
##class PDFCIDFontType0(PDFType1Font):
## Subtype = "CIDFontType2"
## local_attributes = "BaseFont CIDToGIDMap CIDSystemInfo FontDescriptor DW W DW2 W2".split()
##
##class PDFEncoding(PDFType1Font):
## Type = "Encoding"
## name_attributes = "Type BaseEncoding".split()
## # these attributes are assumed to already be of the right type
## local_attributes = ["Differences"]
##
# UGLY ALERT - this needs turning into something O-O, it was hacked
# across from the pdfmetrics.Encoding class to avoid circularity
# skipping CMaps
class PDFFormXObject(PDFObject):
    """A reusable Form XObject: a self-contained content stream with its own
    bounding box, transform matrix and resources, drawable from other
    content streams."""
    # like page requires .info set by some higher level (doc)
    # XXXX any resource used in a form must be propagated up to the page that
    # (recursively) uses the form!! (not implemented yet).
    XObjects = Annots = BBox = Matrix = Contents = stream = Resources = None
    hasImages = 1 # probably should change
    compression = 0
    def __init__(self, lowerx, lowery, upperx, uppery):
        #not done
        self.lowerx = lowerx; self.lowery=lowery; self.upperx=upperx; self.uppery=uppery
    def setStreamList(self, data):
        # accept either a list of content fragments or a single string
        if isSeq(data):
            data = '\r\n'.join(data)
        self.stream = pdfdocEnc(data)
    def BBoxList(self):
        "get the declared bounding box for the form as a list"
        if self.BBox:
            return list(self.BBox.sequence)
        else:
            return [self.lowerx, self.lowery, self.upperx, self.uppery]
    def format(self, document):
        # fill in defaults for any unset parts, then emit as a stream;
        # note this mutates self (BBox/Matrix/Contents/Resources) on first call
        self.BBox = self.BBox or PDFArray([self.lowerx, self.lowery, self.upperx, self.uppery])
        self.Matrix = self.Matrix or PDFArray([1, 0, 0, 1, 0, 0])
        if not self.Annots:
            self.Annots = None
        else:
            #these must be transferred to the page when the form is used
            raise ValueError("annotations don't work in PDFFormXObjects yet")
        if not self.Contents:
            stream = self.stream
            if not stream:
                self.Contents = teststream()
            else:
                S = PDFStream()
                S.content = stream
                # need to add filter stuff (?)
                S.__Comment__ = "xobject form stream"
                self.Contents = S
        if not self.Resources:
            resources = PDFResourceDictionary()
            # fonts!
            resources.basicFonts()
            if self.hasImages:
                resources.allProcs()
            else:
                resources.basicProcs()
            if self.XObjects:
                #print "XObjects", self.XObjects.dict
                resources.XObject = self.XObjects
            self.Resources=resources
        if self.compression:
            self.Contents.filters = rl_config.useA85 and [PDFBase85Encode, PDFZCompress] or [PDFZCompress]
        # stamp the stream dictionary with the Form XObject entries
        sdict = self.Contents.dictionary
        sdict["Type"] = PDFName("XObject")
        sdict["Subtype"] = PDFName("Form")
        sdict["FormType"] = 1
        sdict["BBox"] = self.BBox
        sdict["Matrix"] = self.Matrix
        sdict["Resources"] = self.Resources
        return self.Contents.format(document)
class PDFPostScriptXObject(PDFObject):
    """For embedding raw PostScript (e.g. printer tray commands) in PDF."""

    def __init__(self, content=None):
        self.content = content

    def format(self, document):
        stream = PDFStream()
        stream.content = self.content
        stream.__Comment__ = "xobject postscript stream"
        stream.dictionary["Type"] = PDFName("XObject")
        stream.dictionary["Subtype"] = PDFName("PS")
        return stream.format(document)
# map PIL image mode -> PDF colour space name
_mode2CS={'RGB':'DeviceRGB', 'L':'DeviceGray', 'CMYK':'DeviceCMYK'}
class PDFImageXObject(PDFObject):
    """An Image XObject stream: loads pixel data from a PIL-like image or a
    file and serialises it via PDFStream."""
    # first attempts at a hard-coded one
    # in the file, Image XObjects are stream objects. We already
    # have a PDFStream object with 3 attributes: dictionary, content
    # and filters. So the job of this thing is to construct the
    # right PDFStream instance and ask it to format itself.
    def __init__(self, name, source=None, mask=None):
        self.name = name
        # defaults describe the canned 24x23 1-bit test bitmap below
        self.width = 24
        self.height = 23
        self.bitsPerComponent = 1
        self.colorSpace = 'DeviceGray'
        self._filters = rl_config.useA85 and ('ASCII85Decode',) or ()
        self.streamContent = """
003B00 002700 002480 0E4940 114920 14B220 3CB650
75FE88 17FF8C 175F14 1C07E2 3803C4 703182 F8EDFC
B2BBC2 BB6F84 31BFC2 18EA3C 0E3E00 07FC00 03F800
1E1800 1FF800>
"""
        self.mask = mask
        if source is None:
            pass # use the canned one.
        elif hasattr(source,'jpeg_fh'):
            self.loadImageFromSRC(source) #it is already a PIL Image
        else:
            # it is a filename
            import os
            ext = os.path.splitext(source)[1].lower()
            src = open_for_read(source)
            try:
                # JPEGs embed directly; everything else goes through A85/raw
                if not(ext in ('.jpg', '.jpeg') and self.loadImageFromJPEG(src)):
                    if rl_config.useA85:
                        self.loadImageFromA85(src)
                    else:
                        self.loadImageFromRaw(src)
            finally:
                src.close()
    def loadImageFromA85(self,source):
        # makeA85Image returns header lines + data; geometry is on line 1
        IMG=[]
        imagedata = [s.strip() for s in pdfutils.makeA85Image(source,IMG=IMG)]
        words = imagedata[1].split()
        self.width, self.height = (int(words[1]),int(words[3]))
        self.colorSpace = {'/RGB':'DeviceRGB', '/G':'DeviceGray', '/CMYK':'DeviceCMYK'}[words[7]]
        self.bitsPerComponent = 8
        self._filters = 'ASCII85Decode','FlateDecode' #'A85','Fl'
        if IMG: self._checkTransparency(IMG[0])
        elif self.mask=='auto': self.mask = None
        self.streamContent = ''.join(imagedata[3:-1])
    def loadImageFromJPEG(self,imageFile):
        # returns True on success; False lets the caller fall back to A85/raw
        try:
            try:
                info = pdfutils.readJPEGInfo(imageFile)
            finally:
                imageFile.seek(0) #reset file pointer
        except:
            return False
        self.width, self.height = info[0], info[1]
        self.bitsPerComponent = 8
        if info[2] == 1:
            self.colorSpace = 'DeviceGray'
        elif info[2] == 3:
            self.colorSpace = 'DeviceRGB'
        else: #maybe should generate an error, is this right for CMYK?
            self.colorSpace = 'DeviceCMYK'
            # _dotrans triggers the inverted /Decode array in format()
            self._dotrans = 1
        self.streamContent = imageFile.read()
        if rl_config.useA85:
            self.streamContent = asciiBase85Encode(self.streamContent)
            self._filters = 'ASCII85Decode','DCTDecode' #'A85','DCT'
        else:
            self._filters = 'DCTDecode', #'DCT'
        self.mask = None
        return True
    def loadImageFromRaw(self,source):
        IMG=[]
        imagedata = pdfutils.makeRawImage(source,IMG=IMG)
        words = imagedata[1].split()
        self.width = int(words[1])
        self.height = int(words[3])
        self.colorSpace = {'/RGB':'DeviceRGB', '/G':'DeviceGray', '/CMYK':'DeviceCMYK'}[words[7]]
        self.bitsPerComponent = 8
        self._filters = 'FlateDecode', #'Fl'
        if IMG: self._checkTransparency(IMG[0])
        elif self.mask=='auto': self.mask = None
        self.streamContent = ''.join(imagedata[3:-1])
    def _checkTransparency(self,im):
        # resolve mask='auto' into either a soft mask (alpha channel) or a
        # colour-key mask tuple; explicit colour masks are expanded to ranges
        if self.mask=='auto':
            if im._dataA:
                self.mask = None
                self._smask = PDFImageXObject(_digester(im._dataA.getRGBData()),im._dataA,mask=None)
                self._smask._decode = [0,1]
            else:
                tc = im.getTransparent()
                if tc:
                    self.mask = (tc[0], tc[0], tc[1], tc[1], tc[2], tc[2])
                else:
                    self.mask = None
        elif hasattr(self.mask,'rgb'):
            _ = self.mask.rgb()
            self.mask = _[0],_[0],_[1],_[1],_[2],_[2]
    def loadImageFromSRC(self, im):
        "Extracts the stream, width and height"
        fp = im.jpeg_fh()
        if fp:
            self.loadImageFromJPEG(fp)
        else:
            zlib = import_zlib()
            if not zlib: return
            self.width, self.height = im.getSize()
            raw = im.getRGBData()
            #assert len(raw) == self.width*self.height, "Wrong amount of data for image expected %sx%s=%s got %s" % (self.width,self.height,self.width*self.height,len(raw))
            self.streamContent = zlib.compress(raw)
            if rl_config.useA85:
                self.streamContent = asciiBase85Encode(self.streamContent)
                self._filters = 'ASCII85Decode','FlateDecode' #'A85','Fl'
            else:
                self._filters = 'FlateDecode', #'Fl'
            self.colorSpace= _mode2CS[im.mode]
            self.bitsPerComponent = 8
            self._checkTransparency(im)
    def format(self, document):
        # build the image stream dictionary and delegate to PDFStream
        S = PDFStream(content = self.streamContent)
        dict = S.dictionary
        dict["Type"] = PDFName("XObject")
        dict["Subtype"] = PDFName("Image")
        dict["Width"] = self.width
        dict["Height"] = self.height
        dict["BitsPerComponent"] = self.bitsPerComponent
        dict["ColorSpace"] = PDFName(self.colorSpace)
        if self.colorSpace=='DeviceCMYK' and getattr(self,'_dotrans',0):
            # invert CMYK values from Adobe-style JPEGs (see loadImageFromJPEG)
            dict["Decode"] = PDFArray([1,0,1,0,1,0,1,0])
        elif getattr(self,'_decode',None):
            dict["Decode"] = PDFArray(self._decode)
        dict["Filter"] = PDFArray(map(PDFName,self._filters))
        dict["Length"] = len(self.streamContent)
        if self.mask: dict["Mask"] = PDFArray(self.mask)
        # NOTE(review): _checkTransparency stores the soft mask as
        # self._smask, but this checks 'smask'; 'smask' appears to be set by
        # callers elsewhere -- verify which attribute is intended
        if getattr(self,'smask',None): dict["SMask"] = self.smask
        return S.format(document)
class PDFSeparationCMYKColor:
    """Wraps a CMYKColor carrying a spotName as a PDF /Separation colour
    space with a Type 4 (PostScript calculator) tint transform."""

    def __init__(self, cmyk):
        from reportlab.lib.colors import CMYKColor
        if not isinstance(cmyk, CMYKColor):
            raise ValueError('%s needs a CMYKColor argument' % self.__class__.__name__)
        elif not cmyk.spotName:
            raise ValueError('%s needs a CMYKColor argument with a spotName' % self.__class__.__name__)
        self.cmyk = cmyk

    def _makeFuncPS(self):
        '''create the postscript code for the tint transfer function
        effectively this is tint*c, tint*y, ... tint*k'''
        ops = []
        components = [float(v) for v in self.cmyk.cmyk()]
        for i, v in enumerate(components):
            if i == 3:
                # final (black) component consumes the tint value
                if v == 0.0:
                    ops.append('pop')
                    ops.append('0.0')
                else:
                    ops.append(str(v))
                    ops.append('mul')
            else:
                # other components duplicate the tint so it stays on the stack
                if v == 0:
                    ops.append('0.0')
                else:
                    ops.append('dup')
                    ops.append(str(v))
                    ops.append('mul')
                ops.append('exch')
        return '{%s}' % (' '.join(ops))

    def value(self):
        return PDFArrayCompact((
            PDFName('Separation'),
            PDFName(self.cmyk.spotName),
            PDFName('DeviceCMYK'),
            PDFStream(
                dictionary=PDFDictionary(dict(
                    FunctionType=4,
                    Domain=PDFArrayCompact((0, 1)),
                    Range=PDFArrayCompact((0, 1, 0, 1, 0, 1, 0, 1)),
                )),
                content=self._makeFuncPS(),
                filters=None,  # [PDFBase85Encode, PDFZCompress],
            )
        ))
class PDFFunction(PDFObject):
    """superclass for all function types."""
    defaults = []
    required = ("FunctionType", "Domain")
    permitted = required + ("Range",)

    def FunctionDict(self, **kw):
        """Build the function dictionary, validating required/permitted keys."""
        d = {}
        for (name, val) in self.defaults:
            d[name] = val
        d.update(kw)
        for name in self.required:
            if name not in d:
                raise ValueError("keyword argument %s missing" % name)
        permitted = self.permitted
        for name in d.keys():
            if name not in permitted:
                # BUGFIX: message previously said "annotation" (copy-paste
                # from Annotation.AnnotationDict)
                raise ValueError("bad function dictionary name %s" % name)
        return PDFDictionary(d)

    def Dict(self, document):
        # BUGFIX: message previously named PDFShading (copy-paste error)
        raise ValueError("Dict undefined for virtual superclass PDFFunction, must overload")
        # but usually
        # return self.FunctionDict(self, ...)

    def format(self, document):
        D = self.Dict(document)
        return D.format(document)
class PDFExponentialFunction(PDFFunction):
    """Type 2 (exponential interpolation) function between C0 and C1 with
    exponent N."""
    defaults = PDFFunction.defaults + [("Domain", PDFArrayCompact((0.0, 1.0)))]
    required = PDFFunction.required + ("N",)
    permitted = PDFFunction.permitted + ("C0", "C1", "N")

    def __init__(self, C0, C1, N, **kw):
        self.C0 = C0
        self.C1 = C1
        self.N = N
        self.otherkw = kw

    def Dict(self, document):
        d = dict(self.otherkw)
        d.update(
            FunctionType=2,
            C0=PDFArrayCompact(self.C0),
            C1=PDFArrayCompact(self.C1),
            N=self.N,
        )
        return self.FunctionDict(**d)
class PDFStitchingFunction(PDFFunction):
    """Type 3 (stitching) function combining sub-functions over sub-domains."""
    required = PDFFunction.required + ("Functions", "Bounds", "Encode")
    permitted = PDFFunction.permitted + ("Functions", "Bounds", "Encode")

    def __init__(self, Functions, Bounds, Encode, **kw):
        self.Functions = Functions
        self.Bounds = Bounds
        self.Encode = Encode
        self.otherkw = kw

    def Dict(self, document):
        d = dict(self.otherkw)
        d.update(
            FunctionType=3,
            Functions=PDFArray([document.Reference(f) for f in self.Functions]),
            Bounds=PDFArray(self.Bounds),
            Encode=PDFArray(self.Encode),
        )
        return self.FunctionDict(**d)
class PDFShading(PDFObject):
    """superclass for all shading types."""
    required = ("ShadingType", "ColorSpace")
    permitted = required + ("Background", "BBox", "AntiAlias")

    def ShadingDict(self, **kw):
        """Build the shading dictionary, validating required/permitted keys."""
        d = {}
        d.update(kw)
        for name in self.required:
            if name not in d:
                raise ValueError("keyword argument %s missing" % name)
        permitted = self.permitted
        for name in d.keys():
            if name not in permitted:
                # BUGFIX: message previously said "annotation" (copy-paste
                # from Annotation.AnnotationDict)
                raise ValueError("bad shading dictionary name %s" % name)
        return PDFDictionary(d)

    def Dict(self, document):
        raise ValueError("Dict undefined for virtual superclass PDFShading, must overload")
        # but usually
        # return self.ShadingDict(self, ...)

    def format(self, document):
        D = self.Dict(document)
        return D.format(document)
class PDFFunctionShading(PDFShading):
    """Type 1 (function-based) shading: colour given by a function over the
    domain."""
    required = PDFShading.required + ("Function",)
    permitted = PDFShading.permitted + ("Domain", "Matrix", "Function")

    def __init__(self, Function, ColorSpace, **kw):
        self.Function = Function
        self.ColorSpace = ColorSpace
        self.otherkw = kw

    def Dict(self, document):
        d = dict(self.otherkw)
        d.update(
            ShadingType=1,
            ColorSpace=PDFName(self.ColorSpace),
            Function=document.Reference(self.Function),
        )
        return self.ShadingDict(**d)
class PDFAxialShading(PDFShading):
    """Type 2 (axial) shading along the line (x0, y0) -> (x1, y1)."""
    required = PDFShading.required + ("Coords", "Function")
    permitted = PDFShading.permitted + (
        "Coords", "Domain", "Function", "Extend")

    def __init__(self, x0, y0, x1, y1, Function, ColorSpace, **kw):
        self.Coords = (x0, y0, x1, y1)
        self.Function = Function
        self.ColorSpace = ColorSpace
        self.otherkw = kw

    def Dict(self, document):
        d = dict(self.otherkw)
        d.update(
            ShadingType=2,
            ColorSpace=PDFName(self.ColorSpace),
            Coords=PDFArrayCompact(self.Coords),
            Function=document.Reference(self.Function),
        )
        return self.ShadingDict(**d)
class PDFRadialShading(PDFShading):
    """Type 3 (radial) shading between circles (x0, y0, r0) and (x1, y1, r1)."""
    required = PDFShading.required + ("Coords", "Function")
    permitted = PDFShading.permitted + (
        "Coords", "Domain", "Function", "Extend")

    def __init__(self, x0, y0, r0, x1, y1, r1, Function, ColorSpace, **kw):
        self.Coords = (x0, y0, r0, x1, y1, r1)
        self.Function = Function
        self.ColorSpace = ColorSpace
        self.otherkw = kw

    def Dict(self, document):
        d = dict(self.otherkw)
        d.update(
            ShadingType=3,
            ColorSpace=PDFName(self.ColorSpace),
            Coords=PDFArrayCompact(self.Coords),
            Function=document.Reference(self.Function),
        )
        return self.ShadingDict(**d)
# Library module only: running it directly does nothing useful.
if __name__=="__main__":
    print("There is no script interpretation for pdfdoc.")
|
yongshengwang/hue | refs/heads/master | desktop/core/ext-py/Pygments-1.3.1/build/lib/pygments/style.py | 75 | # -*- coding: utf-8 -*-
"""
pygments.style
~~~~~~~~~~~~~~
Basic style object.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.token import Token, STANDARD_TYPES
class StyleMeta(type):
    """Metaclass that parses a style's ``styles`` mapping at class creation.

    Each style-definition string is expanded into a 9-element list kept in
    ``_styles``:
    [color, bold, italic, underline, bgcolor, border, roman, sans, mono].
    """
    def __new__(mcs, name, bases, dct):
        obj = type.__new__(mcs, name, bases, dct)
        # make sure every standard token type has at least an empty entry
        for token in STANDARD_TYPES:
            if token not in obj.styles:
                obj.styles[token] = ''

        def colorformat(text):
            # normalize '#rrggbb' / '#rgb' to a bare 6-digit hex string
            if text[0:1] == '#':
                col = text[1:]
                if len(col) == 6:
                    return col
                elif len(col) == 3:
                    # NOTE(review): expands '#abc' to 'a0b0c0', not 'aabbcc'
                    # as CSS shorthand would -- kept as historical behavior.
                    return col[0]+'0'+col[1]+'0'+col[2]+'0'
            elif text == '':
                return ''
            assert False, "wrong color format %r" % text

        _styles = obj._styles = {}

        # walk every declared token type and (via ttype.split()) its
        # ancestor chain, inheriting the parent's parsed definition
        for ttype in obj.styles:
            for token in ttype.split():
                if token in _styles:
                    continue
                ndef = _styles.get(token.parent, None)
                styledefs = obj.styles.get(token, '').split()
                if not ndef or token is None:
                    # no parent definition: start from the empty default
                    ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
                elif 'noinherit' in styledefs and token is not Token:
                    # 'noinherit': fall back to the root Token style only
                    ndef = _styles[Token][:]
                else:
                    # copy so mutations below don't alter the parent entry
                    ndef = ndef[:]
                _styles[token] = ndef
                for styledef in obj.styles.get(token, '').split():
                    if styledef == 'noinherit':
                        pass
                    elif styledef == 'bold':
                        ndef[1] = 1
                    elif styledef == 'nobold':
                        ndef[1] = 0
                    elif styledef == 'italic':
                        ndef[2] = 1
                    elif styledef == 'noitalic':
                        ndef[2] = 0
                    elif styledef == 'underline':
                        ndef[3] = 1
                    elif styledef == 'nounderline':
                        ndef[3] = 0
                    elif styledef[:3] == 'bg:':
                        ndef[4] = colorformat(styledef[3:])
                    elif styledef[:7] == 'border:':
                        ndef[5] = colorformat(styledef[7:])
                    elif styledef == 'roman':
                        ndef[6] = 1
                    elif styledef == 'sans':
                        ndef[7] = 1
                    elif styledef == 'mono':
                        ndef[8] = 1
                    else:
                        # anything unrecognized is treated as the fg color
                        ndef[0] = colorformat(styledef)
        return obj

    def style_for_token(cls, token):
        """Return the parsed style for *token* as an attribute dict."""
        t = cls._styles[token]
        return {
            'color': t[0] or None,
            'bold': bool(t[1]),
            'italic': bool(t[2]),
            'underline': bool(t[3]),
            'bgcolor': t[4] or None,
            'border': t[5] or None,
            'roman': bool(t[6]) or None,
            'sans': bool(t[7]) or None,
            'mono': bool(t[8]) or None,
        }

    def list_styles(cls):
        """Return all (token, style-dict) pairs as a list."""
        return list(cls)

    def styles_token(cls, ttype):
        """Return True if the class defines a style for *ttype*."""
        return ttype in cls._styles

    def __iter__(cls):
        for token in cls._styles:
            yield token, cls.style_for_token(token)

    def __len__(cls):
        return len(cls._styles)
class Style(object):
    """Base class for Pygments styles; subclasses override ``styles``."""
    # Python 2 metaclass hook: StyleMeta expands ``styles`` into the
    # internal ``_styles`` table when the subclass is created.
    __metaclass__ = StyleMeta

    #: overall background color (``None`` means transparent)
    background_color = '#ffffff'

    #: highlight background color
    highlight_color = '#ffffcc'

    #: Style definitions for individual token types.
    styles = {}
|
LoHChina/nova | refs/heads/master | nova/hacking/checks.py | 8 | # Copyright (c) 2012, Cloudscaling
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import re
import pep8
"""
Guidelines for writing new hacking checks
- Use only for Nova specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range N3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the N3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to nova/tests/unit/test_hacking.py
"""
# Module-level cache of filenames known to import ``_``; filled in by
# check_explicit_underscore_import() and shared across calls.
UNDERSCORE_IMPORT_FILES = []

# Regular expressions backing the N3xx checks defined below.
session_check = re.compile(r"\w*def [a-zA-Z0-9].*[(].*session.*[)]")
cfg_re = re.compile(r".*\scfg\.")
vi_header_re = re.compile(r"^#\s+vim?:.+")
virt_file_re = re.compile(r"\./nova/(?:tests/)?virt/(\w+)/")
virt_import_re = re.compile(
    r"^\s*(?:import|from) nova\.(?:tests\.)?virt\.(\w+)")
virt_config_re = re.compile(
    r"CONF\.import_opt\('.*?', 'nova\.virt\.(\w+)('|.)")
asse_trueinst_re = re.compile(
    r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
    "(\w|\.|\'|\"|\[|\])+\)\)")
asse_equal_type_re = re.compile(
    r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), "
    "(\w|\.|\'|\"|\[|\])+\)")
asse_equal_in_end_with_true_or_false_re = re.compile(r"assertEqual\("
    r"(\w|[][.'\"])+ in (\w|[][.'\", ])+, (True|False)\)")
asse_equal_in_start_with_true_or_false_re = re.compile(r"assertEqual\("
    r"(True|False), (\w|[][.'\"])+ in (\w|[][.'\", ])+\)")
asse_equal_end_with_none_re = re.compile(
    r"assertEqual\(.*?,\s+None\)$")
asse_equal_start_with_none_re = re.compile(
    r"assertEqual\(None,")
# NOTE(snikitin): Next two regexes weren't united to one for more readability.
#                 asse_true_false_with_in_or_not_in regex checks
#                 assertTrue/False(A in B) cases where B argument has no spaces
#                 asse_true_false_with_in_or_not_in_spaces regex checks cases
#                 where B argument has spaces and starts/ends with [, ', ".
#                 For example: [1, 2, 3], "some string", 'another string'.
#                 We have to separate these regexes to escape a false positives
#                 results. B argument should have spaces only if it starts
#                 with [, ", '. Otherwise checking of string
#                 "assertFalse(A in B and C in D)" will be false positives.
#                 In this case B argument is "B and C in D".
asse_true_false_with_in_or_not_in = re.compile(r"assert(True|False)\("
    r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])+(, .*)?\)")
asse_true_false_with_in_or_not_in_spaces = re.compile(r"assert(True|False)"
    r"\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+"
    r"[\[|'|\"](, .*)?\)")
asse_raises_regexp = re.compile(r"assertRaisesRegexp\(")
conf_attribute_set_re = re.compile(r"CONF\.[a-z0-9_.]+\s*=\s*\w")
# Log-translation markers: which LOG.<level> calls require which _L*() wrapper.
log_translation = re.compile(
    r"(.)*LOG\.(audit|error|critical)\(\s*('|\")")
log_translation_info = re.compile(
    r"(.)*LOG\.(info)\(\s*(_\(|'|\")")
log_translation_exception = re.compile(
    r"(.)*LOG\.(exception)\(\s*(_\(|'|\")")
log_translation_LW = re.compile(
    r"(.)*LOG\.(warning|warn)\(\s*(_\(|'|\")")
translated_log = re.compile(
    r"(.)*LOG\.(audit|error|info|critical|exception)"
    "\(\s*_\(\s*('|\")")
mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
string_translation = re.compile(r"[^_]*_\(\s*('|\")")
underscore_import_check = re.compile(r"(.)*import _(.)*")
import_translation_for_log_or_exception = re.compile(
    r"(.)*(from\snova.i18n\simport)\s_")
# We need this for cases where they have created their own _ function.
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
api_version_re = re.compile(r"@.*api_version")
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
decorator_re = re.compile(r"@.*")
http_not_implemented_re = re.compile(r"raise .*HTTPNotImplemented\(")
class BaseASTChecker(ast.NodeVisitor):
    """Provides a simple framework for writing AST-based checks.

    Subclasses should implement visit_* methods like any other AST visitor
    implementation. When they detect an error for a particular node the
    method should call ``self.add_error(offending_node)``. Details about
    where in the code the error occurred will be pulled from the node
    object.

    Subclasses should also provide a class variable named CHECK_DESC to
    be used for the human readable error message.
    """

    def __init__(self, tree, filename):
        """This object is created automatically by pep8.

        :param tree: an AST tree
        :param filename: name of the file being analyzed
                         (ignored by our checks)
        """
        self._tree = tree
        self._errors = []

    def run(self):
        """Called automatically by pep8; returns the collected errors."""
        self.visit(self._tree)
        return self._errors

    def add_error(self, node, message=None):
        """Add an error caused by a node to the list of errors for pep8."""
        self._errors.append(
            (node.lineno, node.col_offset,
             message or self.CHECK_DESC, self.__class__))

    def _check_call_names(self, call_node, names):
        """Return True iff *call_node* is a direct call to one of *names*."""
        return (isinstance(call_node, ast.Call) and
                isinstance(call_node.func, ast.Name) and
                call_node.func.id in names)
def import_no_db_in_virt(logical_line, filename):
    """Check for db calls from nova/virt

    As of grizzly-2 all the database calls have been removed from
    nova/virt, and we want to keep it that way.

    N307
    """
    in_virt_tree = "nova/virt" in filename and not filename.endswith("fake.py")
    if in_virt_tree and logical_line.startswith("from nova import db"):
        yield (0, "N307: nova.db import not allowed in nova/virt/*")
def no_db_session_in_public_api(logical_line, filename):
    """N309: public db api methods may not accept a session argument."""
    if "db/api.py" not in filename:
        return
    if session_check.match(logical_line):
        yield (0, "N309: public db api methods may not accept session")
def use_timeutils_utcnow(logical_line, filename):
    """N310: require timeutils.utcnow() instead of datetime.now()/utcnow()."""
    # tools are OK to use the standard datetime module
    if "/tools/" in filename:
        return
    msg = "N310: timeutils.utcnow() must be used instead of datetime.%s()"
    for func in ('now', 'utcnow'):
        pos = logical_line.find('datetime.%s' % func)
        if pos != -1:
            yield (pos, msg % func)
def _get_virt_name(regex, data):
m = regex.match(data)
if m is None:
return None
driver = m.group(1)
# Ignore things we mis-detect as virt drivers in the regex
if driver in ["test_virt_drivers", "driver", "firewall",
"disk", "api", "imagecache", "cpu", "hardware",
"image"]:
return None
return driver
def import_no_virt_driver_import_deps(physical_line, filename):
    """Check virt drivers' modules aren't imported by other drivers

    Modules under each virt driver's directory are
    considered private to that virt driver. Other drivers
    in Nova must not access those drivers. Any code that
    is to be shared should be refactored into a common
    module

    N311
    """
    owning_driver = _get_virt_name(virt_file_re, filename)
    imported_driver = _get_virt_name(virt_import_re, physical_line)
    if (owning_driver is not None and
            imported_driver is not None and
            owning_driver != imported_driver):
        return (0, "N311: importing code from other virt drivers forbidden")
def import_no_virt_driver_config_deps(physical_line, filename):
    """Check virt drivers' config vars aren't used by other drivers

    Modules under each virt driver's directory are
    considered private to that virt driver. Other drivers
    in Nova must not use their config vars. Any config vars
    that are to be shared should be moved into a common module

    N312
    """
    owning_driver = _get_virt_name(virt_file_re, filename)
    config_driver = _get_virt_name(virt_config_re, physical_line)
    if (owning_driver is not None and
            config_driver is not None and
            owning_driver != config_driver):
        return (0, "N312: using config vars from other virt drivers forbidden")
def capital_cfg_help(logical_line, tokens):
    """N313: cfg option help strings must start with a capital letter."""
    if not cfg_re.match(logical_line):
        return
    msg = "N313: capitalize help string"
    for idx, tok in enumerate(tokens):
        if tok[1] == "help":
            # token after '=' holds the quoted help text; [1] is the
            # first character inside the opening quote
            txt = tokens[idx + 2][1]
            if len(txt) > 1 and txt[1].islower():
                yield (0, msg)
def no_vi_headers(physical_line, line_number, lines):
    """Check for vi editor configuration in source files.

    By default vi modelines can only appear in the first or
    last 5 lines of a source file.

    N314
    """
    # NOTE(gilliard): line_number is 1-indexed
    near_edge = line_number <= 5 or line_number > len(lines) - 5
    if near_edge and vi_header_re.match(physical_line):
        return 0, "N314: Don't put vi configuration in source files"
def assert_true_instance(logical_line):
    """Check for assertTrue(isinstance(a, b)) sentences

    N316
    """
    if not asse_trueinst_re.match(logical_line):
        return
    yield (0, "N316: assertTrue(isinstance(a, b)) sentences not allowed")
def assert_equal_type(logical_line):
    """Check for assertEqual(type(A), B) sentences

    N317
    """
    if not asse_equal_type_re.match(logical_line):
        return
    yield (0, "N317: assertEqual(type(A), B) sentences not allowed")
def assert_equal_none(logical_line):
    """Check for assertEqual(A, None) or assertEqual(None, A) sentences

    N318
    """
    found = (asse_equal_start_with_none_re.search(logical_line) or
             asse_equal_end_with_none_re.search(logical_line))
    if found:
        yield (0, "N318: assertEqual(A, None) or assertEqual(None, A) "
               "sentences not allowed")
def no_translate_debug_logs(logical_line, filename):
    """Check for 'LOG.debug(_('

    As per our translation policy,
    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    we shouldn't translate debug level logs.

    * This check assumes that 'LOG' is a logger.
    * Use filename so we can start enforcing this in specific folders instead
      of needing to do so all at once.

    N319
    """
    prefix = "LOG.debug(_("
    if logical_line[:len(prefix)] == prefix:
        yield (0, "N319 Don't translate debug level logs")
def no_import_translation_in_tests(logical_line, filename):
    """Check for 'from nova.i18n import _'

    N337
    """
    if 'nova/tests/' not in filename:
        return
    if import_translation_for_log_or_exception.match(logical_line):
        yield (0, "N337 Don't import translation in tests")
def no_setting_conf_directly_in_tests(logical_line, filename):
    """Check for setting CONF.* attributes directly in tests

    The value can leak out of tests affecting how subsequent tests run.
    Using self.flags(option=value) is the preferred method to temporarily
    set config options in tests.

    N320
    """
    if 'nova/tests/' not in filename:
        return
    if conf_attribute_set_re.match(logical_line):
        yield (0, "N320: Setting CONF.* attributes directly in tests is "
               "forbidden. Use self.flags(option=value) instead")
def validate_log_translations(logical_line, physical_line, filename):
    """N321/N328/N329/N330: log messages must be wrapped for translation."""
    # Translations are not required in the test directory
    # and the Xen utilities
    if ("nova/tests" in filename or
            "plugins/xenserver/xenapi/etc/xapi.d" in filename):
        return
    if pep8.noqa(physical_line):
        return
    # (pattern, message) pairs checked in the original order
    checks = (
        (log_translation_info,
         "N328: LOG.info messages require translations `_LI()`!"),
        (log_translation_exception,
         "N329: LOG.exception messages require translations `_LE()`!"),
        (log_translation_LW,
         "N330: LOG.warning, LOG.warn messages require translations `_LW()`!"),
        (log_translation,
         "N321: Log messages require translations!"),
    )
    for pattern, msg in checks:
        if pattern.match(logical_line):
            yield (0, msg)
def no_mutable_default_args(logical_line):
    """N322: default argument values must not be mutable ({} or [])."""
    if mutable_default_args.match(logical_line):
        yield (0, "N322: Method's default argument shouldn't be mutable!")
def check_explicit_underscore_import(logical_line, filename):
    """Check for explicit import of the _ function

    We need to ensure that any files that are using the _() function
    to translate logs are explicitly importing the _ function. We
    can't trust unit test to catch whether the import has been
    added so we need to check for it here.
    """
    # Build a list of the files that have _ imported. No further
    # checking needed once it is found.
    if filename in UNDERSCORE_IMPORT_FILES:
        return
    if (underscore_import_check.match(logical_line) or
            custom_underscore_check.match(logical_line)):
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or
          string_translation.match(logical_line)):
        yield (0, "N323: Found use of _() without explicit import of _ !")
def use_jsonutils(logical_line, filename):
    """N324: use oslo jsonutils rather than the bare json module."""
    # the code below that path is not meant to be executed from neutron
    # tree where jsonutils module is present, so don't enforce its usage
    # for this subdirectory
    if "plugins/xenserver" in filename:
        return
    # tools are OK to use the standard json module
    if "/tools/" in filename:
        return
    if "json." not in logical_line:
        return
    msg = "N324: jsonutils.%(fun)s must be used instead of json.%(fun)s"
    for func in ('dumps', 'dump', 'loads', 'load'):
        pos = logical_line.find('json.%s(' % func)
        if pos != -1:
            yield (pos, msg % {'fun': func})
def check_api_version_decorator(logical_line, previous_logical, blank_before,
                                filename):
    """N332: @api_version must be the first (outermost) decorator."""
    if (blank_before == 0 and re.match(api_version_re, logical_line)
            and re.match(decorator_re, previous_logical)):
        yield (0, "N332: the api_version decorator must be the first decorator"
               " on a method.")
class CheckForStrUnicodeExc(BaseASTChecker):
    """Checks for the use of str() or unicode() on an exception.

    This currently only handles the case where str() or unicode()
    is used in the scope of an exception handler. If the exception
    is passed into a function, returned from an assertRaises, or
    used on an exception created in the same scope, this does not
    catch it.
    """

    CHECK_DESC = ('N325 str() and unicode() cannot be used on an '
                  'exception. Remove or use six.text_type()')

    def __init__(self, tree, filename):
        super(CheckForStrUnicodeExc, self).__init__(tree, filename)
        # stack of names bound by enclosing ``except ... as name`` handlers
        self.name = []
        # call nodes already reported, so each offence is flagged once
        self.already_checked = []

    # NOTE(review): ast.TryExcept only exists on Python < 3.3 (on newer
    # interpreters the node is ast.Try) -- this targets the Python this
    # linter ran under at the time.
    def visit_TryExcept(self, node):
        for handler in node.handlers:
            if handler.name:
                # track the handler's exception variable while visiting,
                # then pop it when leaving the handler's scope
                self.name.append(handler.name.id)
                super(CheckForStrUnicodeExc, self).generic_visit(node)
                self.name = self.name[:-1]
            else:
                super(CheckForStrUnicodeExc, self).generic_visit(node)

    def visit_Call(self, node):
        # flag str(exc)/unicode(exc) where ``exc`` is a live handler name
        if self._check_call_names(node, ['str', 'unicode']):
            if node not in self.already_checked:
                self.already_checked.append(node)
                if isinstance(node.args[0], ast.Name):
                    if node.args[0].id in self.name:
                        self.add_error(node.args[0])
        super(CheckForStrUnicodeExc, self).generic_visit(node)
class CheckForTransAdd(BaseASTChecker):
    """Checks for the use of concatenation on a translated string.

    Translations should not be concatenated with other strings, but
    should instead include the string being added to the translated
    string to give the translators the most information.
    """

    CHECK_DESC = ('N326 Translated messages cannot be concatenated. '
                  'String should be included in translated message.')

    TRANS_FUNC = ['_', '_LI', '_LW', '_LE', '_LC']

    def visit_BinOp(self, node):
        # only '+' is concatenation; flag the first operand that is a
        # translation-function call
        if isinstance(node.op, ast.Add):
            for operand in (node.left, node.right):
                if self._check_call_names(operand, self.TRANS_FUNC):
                    self.add_error(operand)
                    break
        super(CheckForTransAdd, self).generic_visit(node)
def assert_true_or_false_with_in(logical_line):
    """Check for assertTrue/False(A in B), assertTrue/False(A not in B),
    assertTrue/False(A in B, message) or assertTrue/False(A not in B, message)
    sentences.

    N334
    """
    found = (asse_true_false_with_in_or_not_in.search(logical_line) or
             asse_true_false_with_in_or_not_in_spaces.search(logical_line))
    if found:
        yield (0, "N334: Use assertIn/NotIn(A, B) rather than "
               "assertTrue/False(A in/not in B) when checking collection "
               "contents.")
def assert_raises_regexp(logical_line):
    """Check for usage of deprecated assertRaisesRegexp

    N335
    """
    if asse_raises_regexp.search(logical_line):
        yield (0, "N335: assertRaisesRegex must be used instead "
               "of assertRaisesRegexp")
def dict_constructor_with_list_copy(logical_line):
    """N336: prefer a dict comprehension over dict() on key-value pairs."""
    if dict_constructor_with_list_copy_re.match(logical_line):
        yield (0, "N336: Must use a dict comprehension instead of a dict "
               "constructor with a sequence of key-value pairs.")
def assert_equal_in(logical_line):
    """Check for assertEqual(A in B, True), assertEqual(True, A in B),
    assertEqual(A in B, False) or assertEqual(False, A in B) sentences

    N338
    """
    found = (asse_equal_in_start_with_true_or_false_re.search(logical_line) or
             asse_equal_in_end_with_true_or_false_re.search(logical_line))
    if found:
        yield (0, "N338: Use assertIn/NotIn(A, B) rather than "
               "assertEqual(A in B, True/False) when checking collection "
               "contents.")
def check_http_not_implemented(logical_line, physical_line, filename):
    """N339: v3 API plugins must use common raise_feature_not_supported()."""
    if pep8.noqa(physical_line):
        return
    if "nova/api/openstack/compute/plugins/v3" not in filename:
        return
    if http_not_implemented_re.match(logical_line):
        yield (0, "N339: HTTPNotImplemented response must be implemented with"
               " common raise_feature_not_supported().")
def factory(register):
    """Entry point used by the 'hacking' framework to register every check."""
    register(import_no_db_in_virt)
    register(no_db_session_in_public_api)
    register(use_timeutils_utcnow)
    register(import_no_virt_driver_import_deps)
    register(import_no_virt_driver_config_deps)
    register(capital_cfg_help)
    register(no_vi_headers)
    register(no_import_translation_in_tests)
    register(assert_true_instance)
    register(assert_equal_type)
    register(assert_equal_none)
    register(assert_raises_regexp)
    register(no_translate_debug_logs)
    register(no_setting_conf_directly_in_tests)
    register(validate_log_translations)
    register(no_mutable_default_args)
    register(check_explicit_underscore_import)
    register(use_jsonutils)
    register(check_api_version_decorator)
    # AST-based checkers are registered as classes, not functions
    register(CheckForStrUnicodeExc)
    register(CheckForTransAdd)
    register(assert_true_or_false_with_in)
    register(dict_constructor_with_list_copy)
    register(assert_equal_in)
    register(check_http_not_implemented)
|
ayumilong/rethinkdb | refs/heads/next | test/rql_test/connections/http_support/jinja2/exceptions.py | 977 | # -*- coding: utf-8 -*-
"""
jinja2.exceptions
~~~~~~~~~~~~~~~~~
Jinja exceptions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import imap, text_type, PY2, implements_to_string
class TemplateError(Exception):
    """Baseclass for all template errors."""

    # The two implementations below differ only in message encoding:
    # on Python 2 the message is stored utf-8 encoded so str() works.
    if PY2:
        def __init__(self, message=None):
            if message is not None:
                message = text_type(message).encode('utf-8')
            Exception.__init__(self, message)

        @property
        def message(self):
            # decode back to unicode; undecodable bytes are replaced
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message.decode('utf-8', 'replace')

        def __unicode__(self):
            return self.message or u''
    else:
        def __init__(self, message=None):
            Exception.__init__(self, message)

        @property
        def message(self):
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message
@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
    """Raised if a template does not exist."""

    # looks weird, but removes the warning descriptor that just
    # bogusly warns us about message being deprecated
    message = None

    def __init__(self, name, message=None):
        IOError.__init__(self)
        if message is None:
            message = name
        self.message = message
        self.name = name
        # list of template names tried (single entry here; see
        # TemplatesNotFound for the multi-template variant)
        self.templates = [name]

    def __str__(self):
        return self.message
class TemplatesNotFound(TemplateNotFound):
    """Like :class:`TemplateNotFound` but raised if multiple templates
    are selected.  This is a subclass of :class:`TemplateNotFound`
    exception, so just catching the base exception will catch both.

    .. versionadded:: 2.2
    """

    def __init__(self, names=(), message=None):
        if message is None:
            message = u'none of the templates given were found: ' + \
                      u', '.join(imap(text_type, names))
        # report the last name tried (or None if the list is empty)
        TemplateNotFound.__init__(self, names and names[-1] or None, message)
        self.templates = list(names)
@implements_to_string
class TemplateSyntaxError(TemplateError):
    """Raised to tell the user that there is a problem with the template."""

    def __init__(self, message, lineno, name=None, filename=None):
        TemplateError.__init__(self, message)
        self.lineno = lineno
        self.name = name
        self.filename = filename
        # template source text; may be attached later by the debug machinery
        self.source = None

        # this is set to True if the debug.translate_syntax_error
        # function translated the syntax error into a new traceback
        self.translated = False

    def __str__(self):
        # for translated errors we only return the message
        if self.translated:
            return self.message

        # otherwise attach some stuff
        location = 'line %d' % self.lineno
        name = self.filename or self.name
        if name:
            location = 'File "%s", %s' % (name, location)
        lines = [self.message, '  ' + location]

        # if the source is set, add the line to the output
        if self.source is not None:
            try:
                line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                line = None
            if line:
                lines.append('    ' + line.strip())

        return u'\n'.join(lines)
class TemplateAssertionError(TemplateSyntaxError):
    """Like a template syntax error, but covers cases where something in the
    template caused an error at compile time that wasn't necessarily caused
    by a syntax error.  However it's a direct subclass of
    :exc:`TemplateSyntaxError` and has the same attributes.
    """


class TemplateRuntimeError(TemplateError):
    """A generic runtime error in the template engine.  Under some situations
    Jinja may raise this exception.
    """


class UndefinedError(TemplateRuntimeError):
    """Raised if a template tries to operate on :class:`Undefined`."""


class SecurityError(TemplateRuntimeError):
    """Raised if a template tries to do something insecure if the
    sandbox is enabled.
    """


class FilterArgumentError(TemplateRuntimeError):
    """This error is raised if a filter was called with inappropriate
    arguments.
    """
|
crdroid-devices/android_kernel_htc_msm8960 | refs/heads/6.0 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
# futex(2) operation codes and flags, mirroring <linux/futex.h>.
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
# mask that strips the flag bits, leaving just the futex command
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)

NSECS_PER_SEC = 1000000000
def avg(total, n):
    """Mean of *total* accumulated over *n* samples (integer division on py2)."""
    mean = total / n
    return mean
def nsecs(secs, nsecs):
    """Combine a (seconds, nanoseconds) pair into total nanoseconds."""
    total = secs * NSECS_PER_SEC
    return total + nsecs
def nsecs_secs(nsecs):
    """Whole-seconds portion of a nanosecond count (integer division on py2)."""
    secs = nsecs / NSECS_PER_SEC
    return secs
def nsecs_nsecs(nsecs):
    """Sub-second remainder of a nanosecond count."""
    remainder = nsecs % NSECS_PER_SEC
    return remainder
def nsecs_str(nsecs):
    """Format a nanosecond count as a 'seconds.nanoseconds' string.

    The original had a stray trailing comma that made it return a
    1-tuple; callers formatting with a single '%s' saw identical output,
    but the plain string is what the name promises.  Also avoids
    shadowing the builtin ``str``.
    """
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold *value* into the (min, max, avg, count) tuple at dict[key].

    Creates the entry on first use.  Uses ``in`` rather than the removed
    ``dict.has_key()`` so the helper also runs on Python 3; local names no
    longer shadow the builtins min/max or the module-level avg().
    """
    if key not in dict:
        dict[key] = (value, value, value, 1)
    else:
        lo, hi, mean, count = dict[key]
        if value < lo:
            lo = value
        if value > hi:
            hi = value
        # NOTE: pairwise average of the previous average and the new value,
        # not a true running mean -- kept for behavioral compatibility.
        mean = (mean + value) / 2
        dict[key] = (lo, hi, mean, count + 1)
def clear_term():
    # ANSI escapes: cursor home (ESC[H) + erase entire display (ESC[2J)
    print("\x1b[H\x1b[2J")
# Warn only once if the optional audit python bindings are unavailable.
audit_package_warned = False

try:
    import audit
    # map the running machine architecture (os.uname()[4]) to the audit
    # library's machine constant used for syscall-name lookups
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha' : audit.MACH_ALPHA,
        'ia64' : audit.MACH_IA64,
        'ppc' : audit.MACH_PPC,
        'ppc64' : audit.MACH_PPC64,
        's390' : audit.MACH_S390,
        's390x' : audit.MACH_S390X,
        'i386' : audit.MACH_X86,
        'i586' : audit.MACH_X86,
        'i686' : audit.MACH_X86,
    }
    try:
        # MACH_ARMEB is only present in newer audit bindings
        machine_to_id['armeb'] = audit.MACH_ARMEB
    except:
        pass
    machine_id = machine_to_id[os.uname()[4]]
except:
    # audit missing or unknown architecture: fall back to numeric syscalls
    if not audit_package_warned:
        audit_package_warned = True
        print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
    """Map a syscall number to its name, falling back to the number itself."""
    try:
        name = audit.audit_syscall_to_name(id, machine_id)
    except:
        # audit bindings missing or id unknown -- degrade gracefully
        name = str(id)
    return name
def strerror(nr):
    """Return the errno symbol for *nr* (sign ignored) or 'Unknown N errno'.

    Narrows the original bare ``except:`` to KeyError so unrelated errors
    (e.g. a non-numeric argument) are no longer silently swallowed.
    """
    try:
        return errno.errorcode[abs(nr)]
    except KeyError:
        return "Unknown %d errno" % nr
|
petermezei/angular2-cli | refs/heads/master | node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py | 1812 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions shared amongst the Windows generators."""
import copy
import os
# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
'executable': 'exe',
'loadable_module': 'dll',
'shared_library': 'dll',
'static_library': 'lib',
}
def _GetLargePdbShimCcPath():
"""Returns the path of the large_pdb_shim.cc file."""
this_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
win_data_dir = os.path.join(src_dir, 'data', 'win')
large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
return large_pdb_shim_cc
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
Arguments:
in_dict: The dictionary to copy.
keys: The keys to be copied. If a key is in this list and doesn't exist in
|in_dict| this is not an error.
Returns:
The partially deep-copied dictionary.
"""
d = {}
for key in keys:
if key not in in_dict:
continue
d[key] = copy.deepcopy(in_dict[key])
return d
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
Arguments:
name: name of the target (foo#target)
suffix: the suffix to be added
Returns:
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit('#', 1)
parts[0] = '%s_%s' % (parts[0], suffix)
return '#'.join(parts)
def _ShardName(name, number):
  """Add a shard number to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    number: shard number
  Returns:
    Target name with shard added (foo_1#target)
  """
  suffix = str(number)
  return _SuffixName(name, suffix)
def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linkers limits.

  Targets opt in via an integer 'msvs_shard' property giving the number of
  pieces; sources are distributed round-robin across the shards and any
  dependency on a sharded target is rewritten to depend on every shard.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  Returns:
    Tuple of the new sharded versions of the inputs.
  """
  # Gather the targets to shard, and how many pieces.
  targets_to_shard = {}
  for t in target_dicts:
    shards = int(target_dicts[t].get('msvs_shard', 0))
    if shards:
      targets_to_shard[t] = shards
  # Shard target_list.
  new_target_list = []
  for t in target_list:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        new_target_list.append(_ShardName(t, i))
    else:
      new_target_list.append(t)
  # Shard target_dict.
  new_target_dicts = {}
  for t in target_dicts:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        name = _ShardName(t, i)
        new_target_dicts[name] = copy.copy(target_dicts[t])
        new_target_dicts[name]['target_name'] = _ShardName(
             new_target_dicts[name]['target_name'], i)
        # round-robin the sources across the shards
        sources = new_target_dicts[name].get('sources', [])
        new_sources = []
        for pos in range(i, len(sources), targets_to_shard[t]):
          new_sources.append(sources[pos])
        new_target_dicts[name]['sources'] = new_sources
    else:
      new_target_dicts[t] = target_dicts[t]
  # Shard dependencies.
  for t in new_target_dicts:
    for deptype in ('dependencies', 'dependencies_original'):
      dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
      new_dependencies = []
      for d in dependencies:
        if d in targets_to_shard:
          # depend on every shard of a sharded dependency
          for i in range(targets_to_shard[d]):
            new_dependencies.append(_ShardName(d, i))
        else:
          new_dependencies.append(d)
      new_target_dicts[t][deptype] = new_dependencies

  return (new_target_list, new_target_dicts)
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]

    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)

  large_pdb_shim_cc = _GetLargePdbShimCcPath()

  for t in targets_to_shim:
    target_dict = target_dicts[t]
    # NOTE(review): uses .get() rather than [] — presumably 'target_name' is
    # always present for shimmed targets, so this never yields None; confirm.
    target_name = target_dict.get('target_name')

    # Seed both new targets with only the configuration-related keys of the
    # original, so they build with the same toolset/configurations.
    base_dict = _DeepCopySomeKeys(target_dict,
          ['configurations', 'default_configuration', 'toolset'])

    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYPs tree).
    copy_suffix = 'large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]

    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = 'large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]

    # Set up the shim to output its PDB to the same location as the final
    # linker target.
    for config_name, config in shim_dict.get('configurations').iteritems():
      pdb_path = _GetPdbPath(target_dict, config_name, vars)

      # A few keys that we don't want to propagate.
      for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
        config.pop(key, None)

      msvs = config.setdefault('msvs_settings', {})

      # Update the compiler directives in the shim target: emit full debug
      # info ('3' == /Zi) into the same PDB the final link will use.
      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler['DebugInformationFormat'] = '3'
      compiler['ProgramDataBaseFileName'] = pdb_path

      # Set the explicit PDB path in the appropriate configuration of the
      # original target.  ('config'/'msvs' are deliberately rebound here to
      # the original target's configuration, not the shim's.)
      config = target_dict['configurations'][config_name]
      msvs = config.setdefault('msvs_settings', {})
      linker = msvs.setdefault('VCLinkerTool', {})
      linker['GenerateDebugInformation'] = 'true'
      linker['ProgramDatabaseFile'] = pdb_path

    # Add the new targets. They must go to the beginning of the list so that
    # the dependency generation works as expected in ninja.
    target_list.insert(0, full_copy_target_name)
    target_list.insert(0, full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict

    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)

  return (target_list, target_dicts)
|
MikkCZ/kitsune | refs/heads/master | authority/widgets.py | 6 | from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
generic_script = """
<script type="text/javascript">
function showGenericRelatedObjectLookupPopup(ct_select, triggering_link, url_base) {
var url = content_types[ct_select.options[ct_select.selectedIndex].value];
if (url != undefined) {
triggering_link.href = url_base + url;
return showRelatedObjectLookupPopup(triggering_link);
}
return false;
}
</script>
"""
class GenericForeignKeyRawIdWidget(ForeignKeyRawIdWidget):
    """Raw-id admin widget for a generic foreign key.

    Renders a plain text input plus a magnifying-glass lookup link whose
    popup target is chosen at click time from the content type selected in
    the companion ``ct_field`` <select>, using a page-level JS map of
    content-type id -> admin changelist URL.
    """

    def __init__(self, ct_field, cts=None, attrs=None):
        """Store the content-type field name and candidate models.

        Arguments:
          ct_field: Name of the form field holding the selected ContentType.
          cts: Iterable of model classes the lookup may target.  Fix: the
            original default was a mutable ``[]`` shared across all
            instances; a ``None`` sentinel preserves the same behavior
            without that pitfall.
          attrs: Optional HTML attributes for the text input.
        """
        self.ct_field = ct_field
        self.cts = cts if cts is not None else []
        # Deliberately skip ForeignKeyRawIdWidget.__init__ (which expects a
        # rel/admin_site); only the plain TextInput initialization is wanted.
        forms.TextInput.__init__(self, attrs)

    def render(self, name, value, attrs=None):
        """Return the input, lookup link, JS shim and content-type URL map."""
        if attrs is None:
            attrs = {}
        # Relative base from the current admin change page to the app index.
        related_url = '../../../'
        params = self.url_parameters()
        if params:
            url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.iteritems()])
        else:
            url = ''
        if 'class' not in attrs:
            attrs['class'] = 'vForeignKeyRawIdAdminField'
        output = [forms.TextInput.render(self, name, value, attrs)]
        output.append(
            """%(generic_script)s
            <a href="%(related)s%(url)s"
                class="related-lookup"
                id="lookup_id_%(name)s"
                onclick="return showGenericRelatedObjectLookupPopup(
                    document.getElementById('id_%(ct_field)s'), this, '%(related)s%(url)s');">
            """ % {
                'generic_script': generic_script,
                'related': related_url,
                'url': url,
                'name': name,
                'ct_field': self.ct_field
            })
        output.append(
            '<img src="%s/admin/img/selector-search.gif" width="16" height="16" alt="%s" /></a>'
            % (settings.STATIC_URL, _('Lookup')))
        # Imported here rather than at module level — presumably to avoid an
        # import-time dependency on the contenttypes app; confirm before
        # hoisting.
        from django.contrib.contenttypes.models import ContentType
        # Emit the JS map consumed by showGenericRelatedObjectLookupPopup:
        # ContentType id -> '<app_label>/<model_name>/' changelist suffix.
        content_types = """
        <script type="text/javascript">
        var content_types = new Array();
        %s
        </script>
        """ % ('\n'.join([
            "content_types[%s] = '%s/%s/';" % (
                ContentType.objects.get_for_model(ct).id,
                ct._meta.app_label,
                ct._meta.object_name.lower()
            ) for ct in self.cts]))
        return mark_safe(u''.join(output) + content_types)

    def url_parameters(self):
        """Extra querystring parameters for the lookup URL; none by default."""
        return {}
|
akkakks/pygeoip | refs/heads/master | pygeoip/regionname.py | 1 | # -*- coding: utf-8 -*-
"""
Region name functions. Part of the pygeoip package.
@author: Phus Lu <phus.lu@gmail.com>
@license: Copyright(C) 2004 MaxMind LLC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/lgpl.txt>.
"""
__all__ = ['region_name_by_country_and_region']
_country = {
'AD': {'02': 'Canillo',
'03': 'Encamp',
'04': 'La Massana',
'05': 'Ordino',
'06': 'Sant Julia de Loria',
'07': 'Andorra la Vella',
'08': 'Escaldes-Engordany'},
'AE': {'01': 'Abu Dhabi',
'02': 'Ajman',
'03': 'Dubai',
'04': 'Fujairah',
'05': 'Ras Al Khaimah',
'06': 'Sharjah',
'07': 'Umm Al Quwain'},
'AF': {'01': 'Badakhshan',
'02': 'Badghis',
'03': 'Baghlan',
'05': 'Bamian',
'06': 'Farah',
'07': 'Faryab',
'08': 'Ghazni',
'09': 'Ghowr',
'10': 'Helmand',
'11': 'Herat',
'13': 'Kabol',
'14': 'Kapisa',
'17': 'Lowgar',
'18': 'Nangarhar',
'19': 'Nimruz',
'23': 'Kandahar',
'24': 'Kondoz',
'26': 'Takhar',
'27': 'Vardak',
'28': 'Zabol',
'29': 'Paktika',
'30': 'Balkh',
'31': 'Jowzjan',
'32': 'Samangan',
'33': 'Sar-e Pol',
'34': 'Konar',
'35': 'Laghman',
'36': 'Paktia',
'37': 'Khowst',
'38': 'Nurestan',
'39': 'Oruzgan',
'40': 'Parvan',
'41': 'Daykondi',
'42': 'Panjshir'},
'AG': {'01': 'Barbuda',
'03': 'Saint George',
'04': 'Saint John',
'05': 'Saint Mary',
'06': 'Saint Paul',
'07': 'Saint Peter',
'08': 'Saint Philip',
'09': 'Redonda'},
'AL': {'40': 'Berat',
'41': 'Diber',
'42': 'Durres',
'43': 'Elbasan',
'44': 'Fier',
'45': 'Gjirokaster',
'46': 'Korce',
'47': 'Kukes',
'48': 'Lezhe',
'49': 'Shkoder',
'50': 'Tirane',
'51': 'Vlore'},
'AM': {'01': 'Aragatsotn',
'02': 'Ararat',
'03': 'Armavir',
'04': "Geghark'unik'",
'05': "Kotayk'",
'06': 'Lorri',
'07': 'Shirak',
'08': "Syunik'",
'09': 'Tavush',
'10': "Vayots' Dzor",
'11': 'Yerevan'},
'AO': {'01': 'Benguela',
'02': 'Bie',
'03': 'Cabinda',
'04': 'Cuando Cubango',
'05': 'Cuanza Norte',
'06': 'Cuanza Sul',
'07': 'Cunene',
'08': 'Huambo',
'09': 'Huila',
'12': 'Malanje',
'13': 'Namibe',
'14': 'Moxico',
'15': 'Uige',
'16': 'Zaire',
'17': 'Lunda Norte',
'18': 'Lunda Sul',
'19': 'Bengo',
'20': 'Luanda'},
'AR': {'01': 'Buenos Aires',
'02': 'Catamarca',
'03': 'Chaco',
'04': 'Chubut',
'05': 'Cordoba',
'06': 'Corrientes',
'07': 'Distrito Federal',
'08': 'Entre Rios',
'09': 'Formosa',
'10': 'Jujuy',
'11': 'La Pampa',
'12': 'La Rioja',
'13': 'Mendoza',
'14': 'Misiones',
'15': 'Neuquen',
'16': 'Rio Negro',
'17': 'Salta',
'18': 'San Juan',
'19': 'San Luis',
'20': 'Santa Cruz',
'21': 'Santa Fe',
'22': 'Santiago del Estero',
'23': 'Tierra del Fuego',
'24': 'Tucuman'},
'AT': {'01': 'Burgenland',
'02': 'Karnten',
'03': 'Niederosterreich',
'04': 'Oberosterreich',
'05': 'Salzburg',
'06': 'Steiermark',
'07': 'Tirol',
'08': 'Vorarlberg',
'09': 'Wien'},
'AU': {'01': 'Australian Capital Territory',
'02': 'New South Wales',
'03': 'Northern Territory',
'04': 'Queensland',
'05': 'South Australia',
'06': 'Tasmania',
'07': 'Victoria',
'08': 'Western Australia'},
'AZ': {'01': 'Abseron',
'02': 'Agcabadi',
'03': 'Agdam',
'04': 'Agdas',
'05': 'Agstafa',
'06': 'Agsu',
'07': 'Ali Bayramli',
'08': 'Astara',
'09': 'Baki',
'10': 'Balakan',
'11': 'Barda',
'12': 'Beylaqan',
'13': 'Bilasuvar',
'14': 'Cabrayil',
'15': 'Calilabad',
'16': 'Daskasan',
'17': 'Davaci',
'18': 'Fuzuli',
'19': 'Gadabay',
'20': 'Ganca',
'21': 'Goranboy',
'22': 'Goycay',
'23': 'Haciqabul',
'24': 'Imisli',
'25': 'Ismayilli',
'26': 'Kalbacar',
'27': 'Kurdamir',
'28': 'Lacin',
'29': 'Lankaran',
'30': 'Lankaran',
'31': 'Lerik',
'32': 'Masalli',
'33': 'Mingacevir',
'34': 'Naftalan',
'35': 'Naxcivan',
'36': 'Neftcala',
'37': 'Oguz',
'38': 'Qabala',
'39': 'Qax',
'40': 'Qazax',
'41': 'Qobustan',
'42': 'Quba',
'43': 'Qubadli',
'44': 'Qusar',
'45': 'Saatli',
'46': 'Sabirabad',
'47': 'Saki',
'48': 'Saki',
'49': 'Salyan',
'50': 'Samaxi',
'51': 'Samkir',
'52': 'Samux',
'53': 'Siyazan',
'54': 'Sumqayit',
'55': 'Susa',
'56': 'Susa',
'57': 'Tartar',
'58': 'Tovuz',
'59': 'Ucar',
'60': 'Xacmaz',
'61': 'Xankandi',
'62': 'Xanlar',
'63': 'Xizi',
'64': 'Xocali',
'65': 'Xocavand',
'66': 'Yardimli',
'67': 'Yevlax',
'68': 'Yevlax',
'69': 'Zangilan',
'70': 'Zaqatala',
'71': 'Zardab'},
'BA': {'01': 'Federation of Bosnia and Herzegovina',
'02': 'Republika Srpska'},
'BB': {'01': 'Christ Church',
'02': 'Saint Andrew',
'03': 'Saint George',
'04': 'Saint James',
'05': 'Saint John',
'06': 'Saint Joseph',
'07': 'Saint Lucy',
'08': 'Saint Michael',
'09': 'Saint Peter',
'10': 'Saint Philip',
'11': 'Saint Thomas'},
'BD': {'81': 'Dhaka',
'82': 'Khulna',
'83': 'Rajshahi',
'84': 'Chittagong',
'85': 'Barisal',
'86': 'Sylhet'},
'BE': {'01': 'Antwerpen',
'03': 'Hainaut',
'04': 'Liege',
'05': 'Limburg',
'06': 'Luxembourg',
'07': 'Namur',
'08': 'Oost-Vlaanderen',
'09': 'West-Vlaanderen',
'10': 'Brabant Wallon',
'11': 'Brussels Hoofdstedelijk Gewest',
'12': 'Vlaams-Brabant',
'13': 'Flanders',
'14': 'Wallonia'},
'BF': {'15': 'Bam',
'19': 'Boulkiemde',
'20': 'Ganzourgou',
'21': 'Gnagna',
'28': 'Kouritenga',
'33': 'Oudalan',
'34': 'Passore',
'36': 'Sanguie',
'40': 'Soum',
'42': 'Tapoa',
'44': 'Zoundweogo',
'45': 'Bale',
'46': 'Banwa',
'47': 'Bazega',
'48': 'Bougouriba',
'49': 'Boulgou',
'50': 'Gourma',
'51': 'Houet',
'52': 'Ioba',
'53': 'Kadiogo',
'54': 'Kenedougou',
'55': 'Komoe',
'56': 'Komondjari',
'57': 'Kompienga',
'58': 'Kossi',
'59': 'Koulpelogo',
'60': 'Kourweogo',
'61': 'Leraba',
'62': 'Loroum',
'63': 'Mouhoun',
'64': 'Namentenga',
'65': 'Naouri',
'66': 'Nayala',
'67': 'Noumbiel',
'68': 'Oubritenga',
'69': 'Poni',
'70': 'Sanmatenga',
'71': 'Seno',
'72': 'Sissili',
'73': 'Sourou',
'74': 'Tuy',
'75': 'Yagha',
'76': 'Yatenga',
'77': 'Ziro',
'78': 'Zondoma'},
'BG': {'33': 'Mikhaylovgrad',
'38': 'Blagoevgrad',
'39': 'Burgas',
'40': 'Dobrich',
'41': 'Gabrovo',
'42': 'Grad Sofiya',
'43': 'Khaskovo',
'44': 'Kurdzhali',
'45': 'Kyustendil',
'46': 'Lovech',
'47': 'Montana',
'48': 'Pazardzhik',
'49': 'Pernik',
'50': 'Pleven',
'51': 'Plovdiv',
'52': 'Razgrad',
'53': 'Ruse',
'54': 'Shumen',
'55': 'Silistra',
'56': 'Sliven',
'57': 'Smolyan',
'58': 'Sofiya',
'59': 'Stara Zagora',
'60': 'Turgovishte',
'61': 'Varna',
'62': 'Veliko Turnovo',
'63': 'Vidin',
'64': 'Vratsa',
'65': 'Yambol'},
'BH': {'01': 'Al Hadd',
'02': 'Al Manamah',
'05': 'Jidd Hafs',
'06': 'Sitrah',
'08': 'Al Mintaqah al Gharbiyah',
'09': 'Mintaqat Juzur Hawar',
'10': 'Al Mintaqah ash Shamaliyah',
'11': 'Al Mintaqah al Wusta',
'12': 'Madinat',
'13': 'Ar Rifa',
'14': 'Madinat Hamad',
'15': 'Al Muharraq',
'16': 'Al Asimah',
'17': 'Al Janubiyah',
'18': 'Ash Shamaliyah',
'19': 'Al Wusta'},
'BI': {'02': 'Bujumbura',
'09': 'Bubanza',
'10': 'Bururi',
'11': 'Cankuzo',
'12': 'Cibitoke',
'13': 'Gitega',
'14': 'Karuzi',
'15': 'Kayanza',
'16': 'Kirundo',
'17': 'Makamba',
'18': 'Muyinga',
'19': 'Ngozi',
'20': 'Rutana',
'21': 'Ruyigi',
'22': 'Muramvya',
'23': 'Mwaro'},
'BJ': {'07': 'Alibori',
'08': 'Atakora',
'09': 'Atlanyique',
'10': 'Borgou',
'11': 'Collines',
'12': 'Kouffo',
'13': 'Donga',
'14': 'Littoral',
'15': 'Mono',
'16': 'Oueme',
'17': 'Plateau',
'18': 'Zou'},
'BM': {'01': 'Devonshire',
'02': 'Hamilton',
'03': 'Hamilton',
'04': 'Paget',
'05': 'Pembroke',
'06': 'Saint George',
'07': "Saint George's",
'08': 'Sandys',
'09': 'Smiths',
'10': 'Southampton',
'11': 'Warwick'},
'BN': {'07': 'Alibori',
'08': 'Belait',
'09': 'Brunei and Muara',
'10': 'Temburong',
'11': 'Collines',
'12': 'Kouffo',
'13': 'Donga',
'14': 'Littoral',
'15': 'Tutong',
'16': 'Oueme',
'17': 'Plateau',
'18': 'Zou'},
'BO': {'01': 'Chuquisaca',
'02': 'Cochabamba',
'03': 'El Beni',
'04': 'La Paz',
'05': 'Oruro',
'06': 'Pando',
'07': 'Potosi',
'08': 'Santa Cruz',
'09': 'Tarija'},
'BR': {'01': 'Acre',
'02': 'Alagoas',
'03': 'Amapa',
'04': 'Amazonas',
'05': 'Bahia',
'06': 'Ceara',
'07': 'Distrito Federal',
'08': 'Espirito Santo',
'11': 'Mato Grosso do Sul',
'13': 'Maranhao',
'14': 'Mato Grosso',
'15': 'Minas Gerais',
'16': 'Para',
'17': 'Paraiba',
'18': 'Parana',
'20': 'Piaui',
'21': 'Rio de Janeiro',
'22': 'Rio Grande do Norte',
'23': 'Rio Grande do Sul',
'24': 'Rondonia',
'25': 'Roraima',
'26': 'Santa Catarina',
'27': 'Sao Paulo',
'28': 'Sergipe',
'29': 'Goias',
'30': 'Pernambuco',
'31': 'Tocantins'},
'BS': {'05': 'Bimini',
'06': 'Cat Island',
'10': 'Exuma',
'13': 'Inagua',
'15': 'Long Island',
'16': 'Mayaguana',
'18': 'Ragged Island',
'22': 'Harbour Island',
'23': 'New Providence',
'24': 'Acklins and Crooked Islands',
'25': 'Freeport',
'26': 'Fresh Creek',
'27': "Governor's Harbour",
'28': 'Green Turtle Cay',
'29': 'High Rock',
'30': 'Kemps Bay',
'31': 'Marsh Harbour',
'32': 'Nichollstown and Berry Islands',
'33': 'Rock Sound',
'34': 'Sandy Point',
'35': 'San Salvador and Rum Cay'},
'BT': {'05': 'Bumthang',
'06': 'Chhukha',
'07': 'Chirang',
'08': 'Daga',
'09': 'Geylegphug',
'10': 'Ha',
'11': 'Lhuntshi',
'12': 'Mongar',
'13': 'Paro',
'14': 'Pemagatsel',
'15': 'Punakha',
'16': 'Samchi',
'17': 'Samdrup',
'18': 'Shemgang',
'19': 'Tashigang',
'20': 'Thimphu',
'21': 'Tongsa',
'22': 'Wangdi Phodrang'},
'BW': {'01': 'Central',
'03': 'Ghanzi',
'04': 'Kgalagadi',
'05': 'Kgatleng',
'06': 'Kweneng',
'08': 'North-East',
'09': 'South-East',
'10': 'Southern',
'11': 'North-West'},
'BY': {'01': "Brestskaya Voblasts'",
'02': "Homyel'skaya Voblasts'",
'03': "Hrodzyenskaya Voblasts'",
'04': 'Minsk',
'05': "Minskaya Voblasts'",
'06': "Mahilyowskaya Voblasts'",
'07': "Vitsyebskaya Voblasts'"},
'BZ': {'01': 'Belize',
'02': 'Cayo',
'03': 'Corozal',
'04': 'Orange Walk',
'05': 'Stann Creek',
'06': 'Toledo'},
'CA': {'AB': 'Alberta',
'BC': 'British Columbia',
'MB': 'Manitoba',
'NB': 'New Brunswick',
'NL': 'Newfoundland',
'NS': 'Nova Scotia',
'NT': 'Northwest Territories',
'NU': 'Nunavut',
'ON': 'Ontario',
'PE': 'Prince Edward Island',
'QC': 'Quebec',
'SK': 'Saskatchewan',
'YT': 'Yukon Territory'},
'CD': {'01': 'Bandundu',
'02': 'Equateur',
'04': 'Kasai-Oriental',
'05': 'Katanga',
'06': 'Kinshasa',
'08': 'Bas-Congo',
'09': 'Orientale',
'10': 'Maniema',
'11': 'Nord-Kivu',
'12': 'Sud-Kivu'},
'CF': {'01': 'Bamingui-Bangoran',
'02': 'Basse-Kotto',
'03': 'Haute-Kotto',
'04': 'Mambere-Kadei',
'05': 'Haut-Mbomou',
'06': 'Kemo',
'07': 'Lobaye',
'08': 'Mbomou',
'09': 'Nana-Mambere',
'11': 'Ouaka',
'12': 'Ouham',
'13': 'Ouham-Pende',
'14': 'Cuvette-Ouest',
'15': 'Nana-Grebizi',
'16': 'Sangha-Mbaere',
'17': 'Ombella-Mpoko',
'18': 'Bangui'},
'CG': {'01': 'Bouenza',
'04': 'Kouilou',
'05': 'Lekoumou',
'06': 'Likouala',
'07': 'Niari',
'08': 'Plateaux',
'10': 'Sangha',
'11': 'Pool',
'12': 'Brazzaville',
'13': 'Cuvette',
'14': 'Cuvette-Ouest'},
'CH': {'01': 'Aargau',
'02': 'Ausser-Rhoden',
'03': 'Basel-Landschaft',
'04': 'Basel-Stadt',
'05': 'Bern',
'06': 'Fribourg',
'07': 'Geneve',
'08': 'Glarus',
'09': 'Graubunden',
'10': 'Inner-Rhoden',
'11': 'Luzern',
'12': 'Neuchatel',
'13': 'Nidwalden',
'14': 'Obwalden',
'15': 'Sankt Gallen',
'16': 'Schaffhausen',
'17': 'Schwyz',
'18': 'Solothurn',
'19': 'Thurgau',
'20': 'Ticino',
'21': 'Uri',
'22': 'Valais',
'23': 'Vaud',
'24': 'Zug',
'25': 'Zurich',
'26': 'Jura'},
'CI': {'74': 'Agneby',
'75': 'Bafing',
'76': 'Bas-Sassandra',
'77': 'Denguele',
'78': 'Dix-Huit Montagnes',
'79': 'Fromager',
'80': 'Haut-Sassandra',
'81': 'Lacs',
'82': 'Lagunes',
'83': 'Marahoue',
'84': 'Moyen-Cavally',
'85': 'Moyen-Comoe',
'86': "N'zi-Comoe",
'87': 'Savanes',
'88': 'Sud-Bandama',
'89': 'Sud-Comoe',
'90': 'Vallee du Bandama',
'91': 'Worodougou',
'92': 'Zanzan'},
'CL': {'01': 'Valparaiso',
'02': 'Aisen del General Carlos Ibanez del Campo',
'03': 'Antofagasta',
'04': 'Araucania',
'05': 'Atacama',
'06': 'Bio-Bio',
'07': 'Coquimbo',
'08': "Libertador General Bernardo O'Higgins",
'09': 'Los Lagos',
'10': 'Magallanes y de la Antartica Chilena',
'11': 'Maule',
'12': 'Region Metropolitana',
'13': 'Tarapaca',
'14': 'Los Lagos',
'15': 'Tarapaca',
'16': 'Arica y Parinacota',
'17': 'Los Rios'},
'CM': {'04': 'Est',
'05': 'Littoral',
'07': 'Nord-Ouest',
'08': 'Ouest',
'09': 'Sud-Ouest',
'10': 'Adamaoua',
'11': 'Centre',
'12': 'Extreme-Nord',
'13': 'Nord',
'14': 'Sud'},
'CN': {'01': 'Anhui',
'02': 'Zhejiang',
'03': 'Jiangxi',
'04': 'Jiangsu',
'05': 'Jilin',
'06': 'Qinghai',
'07': 'Fujian',
'08': 'Heilongjiang',
'09': 'Henan',
'10': 'Hebei',
'11': 'Hunan',
'12': 'Hubei',
'13': 'Xinjiang',
'14': 'Xizang',
'15': 'Gansu',
'16': 'Guangxi',
'18': 'Guizhou',
'19': 'Liaoning',
'20': 'Nei Mongol',
'21': 'Ningxia',
'22': 'Beijing',
'23': 'Shanghai',
'24': 'Shanxi',
'25': 'Shandong',
'26': 'Shaanxi',
'28': 'Tianjin',
'29': 'Yunnan',
'30': 'Guangdong',
'31': 'Hainan',
'32': 'Sichuan',
'33': 'Chongqing'},
'CO': {'01': 'Amazonas',
'02': 'Antioquia',
'03': 'Arauca',
'04': 'Atlantico',
'08': 'Caqueta',
'09': 'Cauca',
'10': 'Cesar',
'11': 'Choco',
'12': 'Cordoba',
'14': 'Guaviare',
'15': 'Guainia',
'16': 'Huila',
'17': 'La Guajira',
'19': 'Meta',
'20': 'Narino',
'21': 'Norte de Santander',
'22': 'Putumayo',
'23': 'Quindio',
'24': 'Risaralda',
'25': 'San Andres y Providencia',
'26': 'Santander',
'27': 'Sucre',
'28': 'Tolima',
'29': 'Valle del Cauca',
'30': 'Vaupes',
'31': 'Vichada',
'32': 'Casanare',
'33': 'Cundinamarca',
'34': 'Distrito Especial',
'35': 'Bolivar',
'36': 'Boyaca',
'37': 'Caldas',
'38': 'Magdalena'},
'CR': {'01': 'Alajuela',
'02': 'Cartago',
'03': 'Guanacaste',
'04': 'Heredia',
'06': 'Limon',
'07': 'Puntarenas',
'08': 'San Jose'},
'CU': {'01': 'Pinar del Rio',
'02': 'Ciudad de la Habana',
'03': 'Matanzas',
'04': 'Isla de la Juventud',
'05': 'Camaguey',
'07': 'Ciego de Avila',
'08': 'Cienfuegos',
'09': 'Granma',
'10': 'Guantanamo',
'11': 'La Habana',
'12': 'Holguin',
'13': 'Las Tunas',
'14': 'Sancti Spiritus',
'15': 'Santiago de Cuba',
'16': 'Villa Clara'},
'CV': {'01': 'Boa Vista',
'02': 'Brava',
'04': 'Maio',
'05': 'Paul',
'07': 'Ribeira Grande',
'08': 'Sal',
'10': 'Sao Nicolau',
'11': 'Sao Vicente',
'13': 'Mosteiros',
'14': 'Praia',
'15': 'Santa Catarina',
'16': 'Santa Cruz',
'17': 'Sao Domingos',
'18': 'Sao Filipe',
'19': 'Sao Miguel',
'20': 'Tarrafal'},
'CY': {'01': 'Famagusta',
'02': 'Kyrenia',
'03': 'Larnaca',
'04': 'Nicosia',
'05': 'Limassol',
'06': 'Paphos'},
'CZ': {'52': 'Hlavni mesto Praha',
'78': 'Jihomoravsky kraj',
'79': 'Jihocesky kraj',
'80': 'Vysocina',
'81': 'Karlovarsky kraj',
'82': 'Kralovehradecky kraj',
'83': 'Liberecky kraj',
'84': 'Olomoucky kraj',
'85': 'Moravskoslezsky kraj',
'86': 'Pardubicky kraj',
'87': 'Plzensky kraj',
'88': 'Stredocesky kraj',
'89': 'Ustecky kraj',
'90': 'Zlinsky kraj'},
'DE': {'01': 'Baden-Wurttemberg',
'02': 'Bayern',
'03': 'Bremen',
'04': 'Hamburg',
'05': 'Hessen',
'06': 'Niedersachsen',
'07': 'Nordrhein-Westfalen',
'08': 'Rheinland-Pfalz',
'09': 'Saarland',
'10': 'Schleswig-Holstein',
'11': 'Brandenburg',
'12': 'Mecklenburg-Vorpommern',
'13': 'Sachsen',
'14': 'Sachsen-Anhalt',
'15': 'Thuringen',
'16': 'Berlin'},
'DJ': {'01': 'Ali Sabieh',
'04': 'Obock',
'05': 'Tadjoura',
'06': 'Dikhil',
'07': 'Djibouti',
'08': 'Arta'},
'DK': {'17': 'Hovedstaden',
'18': 'Midtjylland',
'19': 'Nordjylland',
'20': 'Sjelland',
'21': 'Syddanmark'},
'DM': {'02': 'Saint Andrew',
'03': 'Saint David',
'04': 'Saint George',
'05': 'Saint John',
'06': 'Saint Joseph',
'07': 'Saint Luke',
'08': 'Saint Mark',
'09': 'Saint Patrick',
'10': 'Saint Paul',
'11': 'Saint Peter'},
'DO': {'01': 'Azua',
'02': 'Baoruco',
'03': 'Barahona',
'04': 'Dajabon',
'05': 'Distrito Nacional',
'06': 'Duarte',
'08': 'Espaillat',
'09': 'Independencia',
'10': 'La Altagracia',
'11': 'Elias Pina',
'12': 'La Romana',
'14': 'Maria Trinidad Sanchez',
'15': 'Monte Cristi',
'16': 'Pedernales',
'17': 'Peravia',
'18': 'Puerto Plata',
'19': 'Salcedo',
'20': 'Samana',
'21': 'Sanchez Ramirez',
'23': 'San Juan',
'24': 'San Pedro De Macoris',
'25': 'Santiago',
'26': 'Santiago Rodriguez',
'27': 'Valverde',
'28': 'El Seibo',
'29': 'Hato Mayor',
'30': 'La Vega',
'31': 'Monsenor Nouel',
'32': 'Monte Plata',
'33': 'San Cristobal',
'34': 'Distrito Nacional',
'35': 'Peravia',
'36': 'San Jose de Ocoa',
'37': 'Santo Domingo'},
'DZ': {'01': 'Alger',
'03': 'Batna',
'04': 'Constantine',
'06': 'Medea',
'07': 'Mostaganem',
'09': 'Oran',
'10': 'Saida',
'12': 'Setif',
'13': 'Tiaret',
'14': 'Tizi Ouzou',
'15': 'Tlemcen',
'18': 'Bejaia',
'19': 'Biskra',
'20': 'Blida',
'21': 'Bouira',
'22': 'Djelfa',
'23': 'Guelma',
'24': 'Jijel',
'25': 'Laghouat',
'26': 'Mascara',
'27': "M'sila",
'29': 'Oum el Bouaghi',
'30': 'Sidi Bel Abbes',
'31': 'Skikda',
'33': 'Tebessa',
'34': 'Adrar',
'35': 'Ain Defla',
'36': 'Ain Temouchent',
'37': 'Annaba',
'38': 'Bechar',
'39': 'Bordj Bou Arreridj',
'40': 'Boumerdes',
'41': 'Chlef',
'42': 'El Bayadh',
'43': 'El Oued',
'44': 'El Tarf',
'45': 'Ghardaia',
'46': 'Illizi',
'47': 'Khenchela',
'48': 'Mila',
'49': 'Naama',
'50': 'Ouargla',
'51': 'Relizane',
'52': 'Souk Ahras',
'53': 'Tamanghasset',
'54': 'Tindouf',
'55': 'Tipaza',
'56': 'Tissemsilt'},
'EC': {'01': 'Galapagos',
'02': 'Azuay',
'03': 'Bolivar',
'04': 'Canar',
'05': 'Carchi',
'06': 'Chimborazo',
'07': 'Cotopaxi',
'08': 'El Oro',
'09': 'Esmeraldas',
'10': 'Guayas',
'11': 'Imbabura',
'12': 'Loja',
'13': 'Los Rios',
'14': 'Manabi',
'15': 'Morona-Santiago',
'17': 'Pastaza',
'18': 'Pichincha',
'19': 'Tungurahua',
'20': 'Zamora-Chinchipe',
'22': 'Sucumbios',
'23': 'Napo',
'24': 'Orellana'},
'EE': {'01': 'Harjumaa',
'02': 'Hiiumaa',
'03': 'Ida-Virumaa',
'04': 'Jarvamaa',
'05': 'Jogevamaa',
'06': 'Kohtla-Jarve',
'07': 'Laanemaa',
'08': 'Laane-Virumaa',
'09': 'Narva',
'10': 'Parnu',
'11': 'Parnumaa',
'12': 'Polvamaa',
'13': 'Raplamaa',
'14': 'Saaremaa',
'15': 'Sillamae',
'16': 'Tallinn',
'17': 'Tartu',
'18': 'Tartumaa',
'19': 'Valgamaa',
'20': 'Viljandimaa',
'21': 'Vorumaa'},
'EG': {'01': 'Ad Daqahliyah',
'02': 'Al Bahr al Ahmar',
'03': 'Al Buhayrah',
'04': 'Al Fayyum',
'05': 'Al Gharbiyah',
'06': 'Al Iskandariyah',
'07': "Al Isma'iliyah",
'08': 'Al Jizah',
'09': 'Al Minufiyah',
'10': 'Al Minya',
'11': 'Al Qahirah',
'12': 'Al Qalyubiyah',
'13': 'Al Wadi al Jadid',
'14': 'Ash Sharqiyah',
'15': 'As Suways',
'16': 'Aswan',
'17': 'Asyut',
'18': 'Bani Suwayf',
'19': "Bur Sa'id",
'20': 'Dumyat',
'21': 'Kafr ash Shaykh',
'22': 'Matruh',
'23': 'Qina',
'24': 'Suhaj',
'26': "Janub Sina'",
'27': "Shamal Sina'"},
'ER': {'01': 'Anseba',
'02': 'Debub',
'03': "Debubawi K'eyih Bahri",
'04': 'Gash Barka',
'05': "Ma'akel",
'06': "Semenawi K'eyih Bahri"},
'ES': {'07': 'Islas Baleares',
'27': 'La Rioja',
'29': 'Madrid',
'31': 'Murcia',
'32': 'Navarra',
'34': 'Asturias',
'39': 'Cantabria',
'51': 'Andalucia',
'52': 'Aragon',
'53': 'Canarias',
'54': 'Castilla-La Mancha',
'55': 'Castilla y Leon',
'56': 'Catalonia',
'57': 'Extremadura',
'58': 'Galicia',
'59': 'Pais Vasco',
'60': 'Comunidad Valenciana'},
'ET': {'44': 'Adis Abeba',
'45': 'Afar',
'46': 'Amara',
'47': 'Binshangul Gumuz',
'48': 'Dire Dawa',
'49': 'Gambela Hizboch',
'50': 'Hareri Hizb',
'51': 'Oromiya',
'52': 'Sumale',
'53': 'Tigray',
'54': 'YeDebub Biheroch Bihereseboch na Hizboch'},
'FI': {'01': 'Aland',
'06': 'Lapland',
'08': 'Oulu',
'13': 'Southern Finland',
'14': 'Eastern Finland',
'15': 'Western Finland'},
'FJ': {'01': 'Central',
'02': 'Eastern',
'03': 'Northern',
'04': 'Rotuma',
'05': 'Western'},
'FM': {'01': 'Kosrae', '02': 'Pohnpei', '03': 'Chuuk', '04': 'Yap'},
'FR': {'97': 'Aquitaine',
'98': 'Auvergne',
'99': 'Basse-Normandie',
'A1': 'Bourgogne',
'A2': 'Bretagne',
'A3': 'Centre',
'A4': 'Champagne-Ardenne',
'A5': 'Corse',
'A6': 'Franche-Comte',
'A7': 'Haute-Normandie',
'A8': 'Ile-de-France',
'A9': 'Languedoc-Roussillon',
'B1': 'Limousin',
'B2': 'Lorraine',
'B3': 'Midi-Pyrenees',
'B4': 'Nord-Pas-de-Calais',
'B5': 'Pays de la Loire',
'B6': 'Picardie',
'B7': 'Poitou-Charentes',
'B8': "Provence-Alpes-Cote d'Azur",
'B9': 'Rhone-Alpes',
'C1': 'Alsace'},
'GA': {'01': 'Estuaire',
'02': 'Haut-Ogooue',
'03': 'Moyen-Ogooue',
'04': 'Ngounie',
'05': 'Nyanga',
'06': 'Ogooue-Ivindo',
'07': 'Ogooue-Lolo',
'08': 'Ogooue-Maritime',
'09': 'Woleu-Ntem'},
'GB': {'A1': 'Barking and Dagenham',
'A2': 'Barnet',
'A3': 'Barnsley',
'A4': 'Bath and North East Somerset',
'A5': 'Bedfordshire',
'A6': 'Bexley',
'A7': 'Birmingham',
'A8': 'Blackburn with Darwen',
'A9': 'Blackpool',
'B1': 'Bolton',
'B2': 'Bournemouth',
'B3': 'Bracknell Forest',
'B4': 'Bradford',
'B5': 'Brent',
'B6': 'Brighton and Hove',
'B7': 'Bristol, City of',
'B8': 'Bromley',
'B9': 'Buckinghamshire',
'C1': 'Bury',
'C2': 'Calderdale',
'C3': 'Cambridgeshire',
'C4': 'Camden',
'C5': 'Cheshire',
'C6': 'Cornwall',
'C7': 'Coventry',
'C8': 'Croydon',
'C9': 'Cumbria',
'D1': 'Darlington',
'D2': 'Derby',
'D3': 'Derbyshire',
'D4': 'Devon',
'D5': 'Doncaster',
'D6': 'Dorset',
'D7': 'Dudley',
'D8': 'Durham',
'D9': 'Ealing',
'E1': 'East Riding of Yorkshire',
'E2': 'East Sussex',
'E3': 'Enfield',
'E4': 'Essex',
'E5': 'Gateshead',
'E6': 'Gloucestershire',
'E7': 'Greenwich',
'E8': 'Hackney',
'E9': 'Halton',
'F1': 'Hammersmith and Fulham',
'F2': 'Hampshire',
'F3': 'Haringey',
'F4': 'Harrow',
'F5': 'Hartlepool',
'F6': 'Havering',
'F7': 'Herefordshire',
'F8': 'Hertford',
'F9': 'Hillingdon',
'G1': 'Hounslow',
'G2': 'Isle of Wight',
'G3': 'Islington',
'G4': 'Kensington and Chelsea',
'G5': 'Kent',
'G6': 'Kingston upon Hull, City of',
'G7': 'Kingston upon Thames',
'G8': 'Kirklees',
'G9': 'Knowsley',
'H1': 'Lambeth',
'H2': 'Lancashire',
'H3': 'Leeds',
'H4': 'Leicester',
'H5': 'Leicestershire',
'H6': 'Lewisham',
'H7': 'Lincolnshire',
'H8': 'Liverpool',
'H9': 'London, City of',
'I1': 'Luton',
'I2': 'Manchester',
'I3': 'Medway',
'I4': 'Merton',
'I5': 'Middlesbrough',
'I6': 'Milton Keynes',
'I7': 'Newcastle upon Tyne',
'I8': 'Newham',
'I9': 'Norfolk',
'J1': 'Northamptonshire',
'J2': 'North East Lincolnshire',
'J3': 'North Lincolnshire',
'J4': 'North Somerset',
'J5': 'North Tyneside',
'J6': 'Northumberland',
'J7': 'North Yorkshire',
'J8': 'Nottingham',
'J9': 'Nottinghamshire',
'K1': 'Oldham',
'K2': 'Oxfordshire',
'K3': 'Peterborough',
'K4': 'Plymouth',
'K5': 'Poole',
'K6': 'Portsmouth',
'K7': 'Reading',
'K8': 'Redbridge',
'K9': 'Redcar and Cleveland',
'L1': 'Richmond upon Thames',
'L2': 'Rochdale',
'L3': 'Rotherham',
'L4': 'Rutland',
'L5': 'Salford',
'L6': 'Shropshire',
'L7': 'Sandwell',
'L8': 'Sefton',
'L9': 'Sheffield',
'M1': 'Slough',
'M2': 'Solihull',
'M3': 'Somerset',
'M4': 'Southampton',
'M5': 'Southend-on-Sea',
'M6': 'South Gloucestershire',
'M7': 'South Tyneside',
'M8': 'Southwark',
'M9': 'Staffordshire',
'N1': 'St. Helens',
'N2': 'Stockport',
'N3': 'Stockton-on-Tees',
'N4': 'Stoke-on-Trent',
'N5': 'Suffolk',
'N6': 'Sunderland',
'N7': 'Surrey',
'N8': 'Sutton',
'N9': 'Swindon',
'O1': 'Tameside',
'O2': 'Telford and Wrekin',
'O3': 'Thurrock',
'O4': 'Torbay',
'O5': 'Tower Hamlets',
'O6': 'Trafford',
'O7': 'Wakefield',
'O8': 'Walsall',
'O9': 'Waltham Forest',
'P1': 'Wandsworth',
'P2': 'Warrington',
'P3': 'Warwickshire',
'P4': 'West Berkshire',
'P5': 'Westminster',
'P6': 'West Sussex',
'P7': 'Wigan',
'P8': 'Wiltshire',
'P9': 'Windsor and Maidenhead',
'Q1': 'Wirral',
'Q2': 'Wokingham',
'Q3': 'Wolverhampton',
'Q4': 'Worcestershire',
'Q5': 'York',
'Q6': 'Antrim',
'Q7': 'Ards',
'Q8': 'Armagh',
'Q9': 'Ballymena',
'R1': 'Ballymoney',
'R2': 'Banbridge',
'R3': 'Belfast',
'R4': 'Carrickfergus',
'R5': 'Castlereagh',
'R6': 'Coleraine',
'R7': 'Cookstown',
'R8': 'Craigavon',
'R9': 'Down',
'S1': 'Dungannon',
'S2': 'Fermanagh',
'S3': 'Larne',
'S4': 'Limavady',
'S5': 'Lisburn',
'S6': 'Derry',
'S7': 'Magherafelt',
'S8': 'Moyle',
'S9': 'Newry and Mourne',
'T1': 'Newtownabbey',
'T2': 'North Down',
'T3': 'Omagh',
'T4': 'Strabane',
'T5': 'Aberdeen City',
'T6': 'Aberdeenshire',
'T7': 'Angus',
'T8': 'Argyll and Bute',
'T9': 'Scottish Borders, The',
'U1': 'Clackmannanshire',
'U2': 'Dumfries and Galloway',
'U3': 'Dundee City',
'U4': 'East Ayrshire',
'U5': 'East Dunbartonshire',
'U6': 'East Lothian',
'U7': 'East Renfrewshire',
'U8': 'Edinburgh, City of',
'U9': 'Falkirk',
'V1': 'Fife',
'V2': 'Glasgow City',
'V3': 'Highland',
'V4': 'Inverclyde',
'V5': 'Midlothian',
'V6': 'Moray',
'V7': 'North Ayrshire',
'V8': 'North Lanarkshire',
'V9': 'Orkney',
'W1': 'Perth and Kinross',
'W2': 'Renfrewshire',
'W3': 'Shetland Islands',
'W4': 'South Ayrshire',
'W5': 'South Lanarkshire',
'W6': 'Stirling',
'W7': 'West Dunbartonshire',
'W8': 'Eilean Siar',
'W9': 'West Lothian',
'X1': 'Isle of Anglesey',
'X2': 'Blaenau Gwent',
'X3': 'Bridgend',
'X4': 'Caerphilly',
'X5': 'Cardiff',
'X6': 'Ceredigion',
'X7': 'Carmarthenshire',
'X8': 'Conwy',
'X9': 'Denbighshire',
'Y1': 'Flintshire',
'Y2': 'Gwynedd',
'Y3': 'Merthyr Tydfil',
'Y4': 'Monmouthshire',
'Y5': 'Neath Port Talbot',
'Y6': 'Newport',
'Y7': 'Pembrokeshire',
'Y8': 'Powys',
'Y9': 'Rhondda Cynon Taff',
'Z1': 'Swansea',
'Z2': 'Torfaen',
'Z3': 'Vale of Glamorgan, The',
'Z4': 'Wrexham',
'Z5': 'Bedfordshire',
'Z6': 'Central Bedfordshire',
'Z7': 'Cheshire East',
'Z8': 'Cheshire West and Chester',
'Z9': 'Isles of Scilly'},
'GD': {'01': 'Saint Andrew',
'02': 'Saint David',
'03': 'Saint George',
'04': 'Saint John',
'05': 'Saint Mark',
'06': 'Saint Patrick'},
'GE': {'01': 'Abashis Raioni',
'02': 'Abkhazia',
'03': 'Adigenis Raioni',
'04': 'Ajaria',
'05': 'Akhalgoris Raioni',
'06': "Akhalk'alak'is Raioni",
'07': "Akhalts'ikhis Raioni",
'08': 'Akhmetis Raioni',
'09': 'Ambrolauris Raioni',
'10': 'Aspindzis Raioni',
'11': "Baghdat'is Raioni",
'12': 'Bolnisis Raioni',
'13': 'Borjomis Raioni',
'14': "Chiat'ura",
'15': "Ch'khorotsqus Raioni",
'16': "Ch'okhatauris Raioni",
'17': "Dedop'listsqaros Raioni",
'18': 'Dmanisis Raioni',
'19': "Dushet'is Raioni",
'20': 'Gardabanis Raioni',
'21': 'Gori',
'22': 'Goris Raioni',
'23': 'Gurjaanis Raioni',
'24': 'Javis Raioni',
'25': "K'arelis Raioni",
'26': 'Kaspis Raioni',
'27': 'Kharagaulis Raioni',
'28': 'Khashuris Raioni',
'29': 'Khobis Raioni',
'30': 'Khonis Raioni',
'31': "K'ut'aisi",
'32': 'Lagodekhis Raioni',
'33': "Lanch'khut'is Raioni",
'34': 'Lentekhis Raioni',
'35': 'Marneulis Raioni',
'36': 'Martvilis Raioni',
'37': 'Mestiis Raioni',
'38': "Mts'khet'is Raioni",
'39': 'Ninotsmindis Raioni',
'40': 'Onis Raioni',
'41': "Ozurget'is Raioni",
'42': "P'ot'i",
'43': 'Qazbegis Raioni',
'44': 'Qvarlis Raioni',
'45': "Rust'avi",
'46': "Sach'kheris Raioni",
'47': 'Sagarejos Raioni',
'48': 'Samtrediis Raioni',
'49': 'Senakis Raioni',
'50': 'Sighnaghis Raioni',
'51': "T'bilisi",
'52': "T'elavis Raioni",
'53': "T'erjolis Raioni",
'54': "T'et'ritsqaros Raioni",
'55': "T'ianet'is Raioni",
'56': 'Tqibuli',
'57': "Ts'ageris Raioni",
'58': 'Tsalenjikhis Raioni',
'59': 'Tsalkis Raioni',
'60': 'Tsqaltubo',
'61': 'Vanis Raioni',
'62': "Zestap'onis Raioni",
'63': 'Zugdidi',
'64': 'Zugdidis Raioni'},
'GH': {'01': 'Greater Accra',
'02': 'Ashanti',
'03': 'Brong-Ahafo',
'04': 'Central',
'05': 'Eastern',
'06': 'Northern',
'08': 'Volta',
'09': 'Western',
'10': 'Upper East',
'11': 'Upper West'},
'GL': {'01': 'Nordgronland', '02': 'Ostgronland', '03': 'Vestgronland'},
'GM': {'01': 'Banjul',
'02': 'Lower River',
'03': 'Central River',
'04': 'Upper River',
'05': 'Western',
'07': 'North Bank'},
'GN': {'01': 'Beyla',
'02': 'Boffa',
'03': 'Boke',
'04': 'Conakry',
'05': 'Dabola',
'06': 'Dalaba',
'07': 'Dinguiraye',
'09': 'Faranah',
'10': 'Forecariah',
'11': 'Fria',
'12': 'Gaoual',
'13': 'Gueckedou',
'15': 'Kerouane',
'16': 'Kindia',
'17': 'Kissidougou',
'18': 'Koundara',
'19': 'Kouroussa',
'21': 'Macenta',
'22': 'Mali',
'23': 'Mamou',
'25': 'Pita',
'27': 'Telimele',
'28': 'Tougue',
'29': 'Yomou',
'30': 'Coyah',
'31': 'Dubreka',
'32': 'Kankan',
'33': 'Koubia',
'34': 'Labe',
'35': 'Lelouma',
'36': 'Lola',
'37': 'Mandiana',
'38': 'Nzerekore',
'39': 'Siguiri'},
'GQ': {'03': 'Annobon',
'04': 'Bioko Norte',
'05': 'Bioko Sur',
'06': 'Centro Sur',
'07': 'Kie-Ntem',
'08': 'Litoral',
'09': 'Wele-Nzas'},
'GR': {'01': 'Evros',
'02': 'Rodhopi',
'03': 'Xanthi',
'04': 'Drama',
'05': 'Serrai',
'06': 'Kilkis',
'07': 'Pella',
'08': 'Florina',
'09': 'Kastoria',
'10': 'Grevena',
'11': 'Kozani',
'12': 'Imathia',
'13': 'Thessaloniki',
'14': 'Kavala',
'15': 'Khalkidhiki',
'16': 'Pieria',
'17': 'Ioannina',
'18': 'Thesprotia',
'19': 'Preveza',
'20': 'Arta',
'21': 'Larisa',
'22': 'Trikala',
'23': 'Kardhitsa',
'24': 'Magnisia',
'25': 'Kerkira',
'26': 'Levkas',
'27': 'Kefallinia',
'28': 'Zakinthos',
'29': 'Fthiotis',
'30': 'Evritania',
'31': 'Aitolia kai Akarnania',
'32': 'Fokis',
'33': 'Voiotia',
'34': 'Evvoia',
'35': 'Attiki',
'36': 'Argolis',
'37': 'Korinthia',
'38': 'Akhaia',
'39': 'Ilia',
'40': 'Messinia',
'41': 'Arkadhia',
'42': 'Lakonia',
'43': 'Khania',
'44': 'Rethimni',
'45': 'Iraklion',
'46': 'Lasithi',
'47': 'Dhodhekanisos',
'48': 'Samos',
'49': 'Kikladhes',
'50': 'Khios',
'51': 'Lesvos'},
'GT': {'01': 'Alta Verapaz',
'02': 'Baja Verapaz',
'03': 'Chimaltenango',
'04': 'Chiquimula',
'05': 'El Progreso',
'06': 'Escuintla',
'07': 'Guatemala',
'08': 'Huehuetenango',
'09': 'Izabal',
'10': 'Jalapa',
'11': 'Jutiapa',
'12': 'Peten',
'13': 'Quetzaltenango',
'14': 'Quiche',
'15': 'Retalhuleu',
'16': 'Sacatepequez',
'17': 'San Marcos',
'18': 'Santa Rosa',
'19': 'Solola',
'20': 'Suchitepequez',
'21': 'Totonicapan',
'22': 'Zacapa'},
'GW': {'01': 'Bafata',
'02': 'Quinara',
'04': 'Oio',
'05': 'Bolama',
'06': 'Cacheu',
'07': 'Tombali',
'10': 'Gabu',
'11': 'Bissau',
'12': 'Biombo'},
'GY': {'10': 'Barima-Waini',
'11': 'Cuyuni-Mazaruni',
'12': 'Demerara-Mahaica',
'13': 'East Berbice-Corentyne',
'14': 'Essequibo Islands-West Demerara',
'15': 'Mahaica-Berbice',
'16': 'Pomeroon-Supenaam',
'17': 'Potaro-Siparuni',
'18': 'Upper Demerara-Berbice',
'19': 'Upper Takutu-Upper Essequibo'},
'HN': {'01': 'Atlantida',
'02': 'Choluteca',
'03': 'Colon',
'04': 'Comayagua',
'05': 'Copan',
'06': 'Cortes',
'07': 'El Paraiso',
'08': 'Francisco Morazan',
'09': 'Gracias a Dios',
'10': 'Intibuca',
'11': 'Islas de la Bahia',
'12': 'La Paz',
'13': 'Lempira',
'14': 'Ocotepeque',
'15': 'Olancho',
'16': 'Santa Barbara',
'17': 'Valle',
'18': 'Yoro'},
'HR': {'01': 'Bjelovarsko-Bilogorska',
'02': 'Brodsko-Posavska',
'03': 'Dubrovacko-Neretvanska',
'04': 'Istarska',
'05': 'Karlovacka',
'06': 'Koprivnicko-Krizevacka',
'07': 'Krapinsko-Zagorska',
'08': 'Licko-Senjska',
'09': 'Medimurska',
'10': 'Osjecko-Baranjska',
'11': 'Pozesko-Slavonska',
'12': 'Primorsko-Goranska',
'13': 'Sibensko-Kninska',
'14': 'Sisacko-Moslavacka',
'15': 'Splitsko-Dalmatinska',
'16': 'Varazdinska',
'17': 'Viroviticko-Podravska',
'18': 'Vukovarsko-Srijemska',
'19': 'Zadarska',
'20': 'Zagrebacka',
'21': 'Grad Zagreb'},
'HT': {'03': 'Nord-Ouest',
'06': 'Artibonite',
'07': 'Centre',
'09': 'Nord',
'10': 'Nord-Est',
'11': 'Ouest',
'12': 'Sud',
'13': 'Sud-Est',
'14': "Grand' Anse",
'15': 'Nippes'},
'HU': {'01': 'Bacs-Kiskun',
'02': 'Baranya',
'03': 'Bekes',
'04': 'Borsod-Abauj-Zemplen',
'05': 'Budapest',
'06': 'Csongrad',
'07': 'Debrecen',
'08': 'Fejer',
'09': 'Gyor-Moson-Sopron',
'10': 'Hajdu-Bihar',
'11': 'Heves',
'12': 'Komarom-Esztergom',
'13': 'Miskolc',
'14': 'Nograd',
'15': 'Pecs',
'16': 'Pest',
'17': 'Somogy',
'18': 'Szabolcs-Szatmar-Bereg',
'19': 'Szeged',
'20': 'Jasz-Nagykun-Szolnok',
'21': 'Tolna',
'22': 'Vas',
'23': 'Veszprem',
'24': 'Zala',
'25': 'Gyor',
'26': 'Bekescsaba',
'27': 'Dunaujvaros',
'28': 'Eger',
'29': 'Hodmezovasarhely',
'30': 'Kaposvar',
'31': 'Kecskemet',
'32': 'Nagykanizsa',
'33': 'Nyiregyhaza',
'34': 'Sopron',
'35': 'Szekesfehervar',
'36': 'Szolnok',
'37': 'Szombathely',
'38': 'Tatabanya',
'39': 'Veszprem',
'40': 'Zalaegerszeg',
'41': 'Salgotarjan',
'42': 'Szekszard',
'43': 'Erd'},
'ID': {'01': 'Aceh',
'02': 'Bali',
'03': 'Bengkulu',
'04': 'Jakarta Raya',
'05': 'Jambi',
'07': 'Jawa Tengah',
'08': 'Jawa Timur',
'10': 'Yogyakarta',
'11': 'Kalimantan Barat',
'12': 'Kalimantan Selatan',
'13': 'Kalimantan Tengah',
'14': 'Kalimantan Timur',
'15': 'Lampung',
'17': 'Nusa Tenggara Barat',
'18': 'Nusa Tenggara Timur',
'21': 'Sulawesi Tengah',
'22': 'Sulawesi Tenggara',
'24': 'Sumatera Barat',
'26': 'Sumatera Utara',
'28': 'Maluku',
'29': 'Maluku Utara',
'30': 'Jawa Barat',
'31': 'Sulawesi Utara',
'32': 'Sumatera Selatan',
'33': 'Banten',
'34': 'Gorontalo',
'35': 'Kepulauan Bangka Belitung',
'36': 'Papua',
'37': 'Riau',
'38': 'Sulawesi Selatan',
'39': 'Irian Jaya Barat',
'40': 'Kepulauan Riau',
'41': 'Sulawesi Barat'},
'IE': {'01': 'Carlow',
'02': 'Cavan',
'03': 'Clare',
'04': 'Cork',
'06': 'Donegal',
'07': 'Dublin',
'10': 'Galway',
'11': 'Kerry',
'12': 'Kildare',
'13': 'Kilkenny',
'14': 'Leitrim',
'15': 'Laois',
'16': 'Limerick',
'18': 'Longford',
'19': 'Louth',
'20': 'Mayo',
'21': 'Meath',
'22': 'Monaghan',
'23': 'Offaly',
'24': 'Roscommon',
'25': 'Sligo',
'26': 'Tipperary',
'27': 'Waterford',
'29': 'Westmeath',
'30': 'Wexford',
'31': 'Wicklow'},
'IL': {'01': 'HaDarom',
'02': 'HaMerkaz',
'03': 'HaZafon',
'04': 'Hefa',
'05': 'Tel Aviv',
'06': 'Yerushalayim'},
'IN': {'01': 'Andaman and Nicobar Islands',
'02': 'Andhra Pradesh',
'03': 'Assam',
'05': 'Chandigarh',
'06': 'Dadra and Nagar Haveli',
'07': 'Delhi',
'09': 'Gujarat',
'10': 'Haryana',
'11': 'Himachal Pradesh',
'12': 'Jammu and Kashmir',
'13': 'Kerala',
'14': 'Lakshadweep',
'16': 'Maharashtra',
'17': 'Manipur',
'18': 'Meghalaya',
'19': 'Karnataka',
'20': 'Nagaland',
'21': 'Orissa',
'22': 'Puducherry',
'23': 'Punjab',
'24': 'Rajasthan',
'25': 'Tamil Nadu',
'26': 'Tripura',
'28': 'West Bengal',
'29': 'Sikkim',
'30': 'Arunachal Pradesh',
'31': 'Mizoram',
'32': 'Daman and Diu',
'33': 'Goa',
'34': 'Bihar',
'35': 'Madhya Pradesh',
'36': 'Uttar Pradesh',
'37': 'Chhattisgarh',
'38': 'Jharkhand',
'39': 'Uttarakhand'},
'IQ': {'01': 'Al Anbar',
'02': 'Al Basrah',
'03': 'Al Muthanna',
'04': 'Al Qadisiyah',
'05': 'As Sulaymaniyah',
'06': 'Babil',
'07': 'Baghdad',
'08': 'Dahuk',
'09': 'Dhi Qar',
'10': 'Diyala',
'11': 'Arbil',
'12': "Karbala'",
'13': "At Ta'mim",
'14': 'Maysan',
'15': 'Ninawa',
'16': 'Wasit',
'17': 'An Najaf',
'18': 'Salah ad Din'},
'IR': {'01': 'Azarbayjan-e Bakhtari',
'03': 'Chahar Mahall va Bakhtiari',
'04': 'Sistan va Baluchestan',
'05': 'Kohkiluyeh va Buyer Ahmadi',
'07': 'Fars',
'08': 'Gilan',
'09': 'Hamadan',
'10': 'Ilam',
'11': 'Hormozgan',
'12': 'Kerman',
'13': 'Bakhtaran',
'15': 'Khuzestan',
'16': 'Kordestan',
'17': 'Mazandaran',
'18': 'Semnan Province',
'19': 'Markazi',
'21': 'Zanjan',
'22': 'Bushehr',
'23': 'Lorestan',
'24': 'Markazi',
'25': 'Semnan',
'26': 'Tehran',
'27': 'Zanjan',
'28': 'Esfahan',
'29': 'Kerman',
'30': 'Khorasan',
'31': 'Yazd',
'32': 'Ardabil',
'33': 'East Azarbaijan',
'34': 'Markazi',
'35': 'Mazandaran',
'36': 'Zanjan',
'37': 'Golestan',
'38': 'Qazvin',
'39': 'Qom',
'40': 'Yazd',
'41': 'Khorasan-e Janubi',
'42': 'Khorasan-e Razavi',
'43': 'Khorasan-e Shemali',
'44': 'Alborz'},
'IS': {'03': 'Arnessysla',
'05': 'Austur-Hunavatnssysla',
'06': 'Austur-Skaftafellssysla',
'07': 'Borgarfjardarsysla',
'09': 'Eyjafjardarsysla',
'10': 'Gullbringusysla',
'15': 'Kjosarsysla',
'17': 'Myrasysla',
'20': 'Nordur-Mulasysla',
'21': 'Nordur-Tingeyjarsysla',
'23': 'Rangarvallasysla',
'28': 'Skagafjardarsysla',
'29': 'Snafellsnes- og Hnappadalssysla',
'30': 'Strandasysla',
'31': 'Sudur-Mulasysla',
'32': 'Sudur-Tingeyjarsysla',
'34': 'Vestur-Bardastrandarsysla',
'35': 'Vestur-Hunavatnssysla',
'36': 'Vestur-Isafjardarsysla',
'37': 'Vestur-Skaftafellssysla',
'38': 'Austurland',
'39': 'Hofuoborgarsvaoio',
'40': 'Norourland Eystra',
'41': 'Norourland Vestra',
'42': 'Suourland',
'43': 'Suournes',
'44': 'Vestfiroir',
'45': 'Vesturland'},
'IT': {'01': 'Abruzzi',
'02': 'Basilicata',
'03': 'Calabria',
'04': 'Campania',
'05': 'Emilia-Romagna',
'06': 'Friuli-Venezia Giulia',
'07': 'Lazio',
'08': 'Liguria',
'09': 'Lombardia',
'10': 'Marche',
'11': 'Molise',
'12': 'Piemonte',
'13': 'Puglia',
'14': 'Sardegna',
'15': 'Sicilia',
'16': 'Toscana',
'17': 'Trentino-Alto Adige',
'18': 'Umbria',
'19': "Valle d'Aosta",
'20': 'Veneto'},
'JM': {'01': 'Clarendon',
'02': 'Hanover',
'04': 'Manchester',
'07': 'Portland',
'08': 'Saint Andrew',
'09': 'Saint Ann',
'10': 'Saint Catherine',
'11': 'Saint Elizabeth',
'12': 'Saint James',
'13': 'Saint Mary',
'14': 'Saint Thomas',
'15': 'Trelawny',
'16': 'Westmoreland',
'17': 'Kingston'},
'JO': {'02': "Al Balqa'",
'09': 'Al Karak',
'12': 'At Tafilah',
'15': 'Al Mafraq',
'16': 'Amman',
'17': 'Az Zaraqa',
'18': 'Irbid',
'19': "Ma'an",
'20': 'Ajlun',
'21': 'Al Aqabah',
'22': 'Jarash',
'23': 'Madaba'},
'JP': {'01': 'Aichi',
'02': 'Akita',
'03': 'Aomori',
'04': 'Chiba',
'05': 'Ehime',
'06': 'Fukui',
'07': 'Fukuoka',
'08': 'Fukushima',
'09': 'Gifu',
'10': 'Gumma',
'11': 'Hiroshima',
'12': 'Hokkaido',
'13': 'Hyogo',
'14': 'Ibaraki',
'15': 'Ishikawa',
'16': 'Iwate',
'17': 'Kagawa',
'18': 'Kagoshima',
'19': 'Kanagawa',
'20': 'Kochi',
'21': 'Kumamoto',
'22': 'Kyoto',
'23': 'Mie',
'24': 'Miyagi',
'25': 'Miyazaki',
'26': 'Nagano',
'27': 'Nagasaki',
'28': 'Nara',
'29': 'Niigata',
'30': 'Oita',
'31': 'Okayama',
'32': 'Osaka',
'33': 'Saga',
'34': 'Saitama',
'35': 'Shiga',
'36': 'Shimane',
'37': 'Shizuoka',
'38': 'Tochigi',
'39': 'Tokushima',
'40': 'Tokyo',
'41': 'Tottori',
'42': 'Toyama',
'43': 'Wakayama',
'44': 'Yamagata',
'45': 'Yamaguchi',
'46': 'Yamanashi',
'47': 'Okinawa'},
'KE': {'01': 'Central',
'02': 'Coast',
'03': 'Eastern',
'05': 'Nairobi Area',
'06': 'North-Eastern',
'07': 'Nyanza',
'08': 'Rift Valley',
'09': 'Western'},
'KG': {'01': 'Bishkek',
'02': 'Chuy',
'03': 'Jalal-Abad',
'04': 'Naryn',
'05': 'Osh',
'06': 'Talas',
'07': 'Ysyk-Kol',
'08': 'Osh',
'09': 'Batken'},
'KH': {'01': 'Batdambang',
'02': 'Kampong Cham',
'03': 'Kampong Chhnang',
'04': 'Kampong Speu',
'05': 'Kampong Thum',
'06': 'Kampot',
'07': 'Kandal',
'08': 'Koh Kong',
'09': 'Kracheh',
'10': 'Mondulkiri',
'11': 'Phnum Penh',
'12': 'Pursat',
'13': 'Preah Vihear',
'14': 'Prey Veng',
'15': 'Ratanakiri Kiri',
'16': 'Siem Reap',
'17': 'Stung Treng',
'18': 'Svay Rieng',
'19': 'Takeo',
'25': 'Banteay Meanchey',
'29': 'Batdambang',
'30': 'Pailin'},
'KI': {'01': 'Gilbert Islands',
'02': 'Line Islands',
'03': 'Phoenix Islands'},
'KM': {'01': 'Anjouan', '02': 'Grande Comore', '03': 'Moheli'},
'KN': {'01': 'Christ Church Nichola Town',
'02': 'Saint Anne Sandy Point',
'03': 'Saint George Basseterre',
'04': 'Saint George Gingerland',
'05': 'Saint James Windward',
'06': 'Saint John Capisterre',
'07': 'Saint John Figtree',
'08': 'Saint Mary Cayon',
'09': 'Saint Paul Capisterre',
'10': 'Saint Paul Charlestown',
'11': 'Saint Peter Basseterre',
'12': 'Saint Thomas Lowland',
'13': 'Saint Thomas Middle Island',
'15': 'Trinity Palmetto Point'},
'KP': {'01': 'Chagang-do',
'03': 'Hamgyong-namdo',
'06': 'Hwanghae-namdo',
'07': 'Hwanghae-bukto',
'08': 'Kaesong-si',
'09': 'Kangwon-do',
'11': "P'yongan-bukto",
'12': "P'yongyang-si",
'13': 'Yanggang-do',
'14': "Namp'o-si",
'15': "P'yongan-namdo",
'17': 'Hamgyong-bukto',
'18': 'Najin Sonbong-si'},
'KR': {'01': 'Cheju-do',
'03': 'Cholla-bukto',
'05': "Ch'ungch'ong-bukto",
'06': 'Kangwon-do',
'10': 'Pusan-jikhalsi',
'11': "Seoul-t'ukpyolsi",
'12': "Inch'on-jikhalsi",
'13': 'Kyonggi-do',
'14': 'Kyongsang-bukto',
'15': 'Taegu-jikhalsi',
'16': 'Cholla-namdo',
'17': "Ch'ungch'ong-namdo",
'18': 'Kwangju-jikhalsi',
'19': 'Taejon-jikhalsi',
'20': 'Kyongsang-namdo',
'21': 'Ulsan-gwangyoksi'},
'KW': {'01': 'Al Ahmadi',
'02': 'Al Kuwayt',
'05': 'Al Jahra',
'07': 'Al Farwaniyah',
'08': 'Hawalli',
'09': 'Mubarak al Kabir'},
'KY': {'01': 'Creek',
'02': 'Eastern',
'03': 'Midland',
'04': 'South Town',
'05': 'Spot Bay',
'06': 'Stake Bay',
'07': 'West End',
'08': 'Western'},
'KZ': {'01': 'Almaty',
'02': 'Almaty City',
'03': 'Aqmola',
'04': 'Aqtobe',
'05': 'Astana',
'06': 'Atyrau',
'07': 'West Kazakhstan',
'08': 'Bayqonyr',
'09': 'Mangghystau',
'10': 'South Kazakhstan',
'11': 'Pavlodar',
'12': 'Qaraghandy',
'13': 'Qostanay',
'14': 'Qyzylorda',
'15': 'East Kazakhstan',
'16': 'North Kazakhstan',
'17': 'Zhambyl'},
'LA': {'01': 'Attapu',
'02': 'Champasak',
'03': 'Houaphan',
'04': 'Khammouan',
'05': 'Louang Namtha',
'07': 'Oudomxai',
'08': 'Phongsali',
'09': 'Saravan',
'10': 'Savannakhet',
'11': 'Vientiane',
'13': 'Xaignabouri',
'14': 'Xiangkhoang',
'17': 'Louangphrabang'},
'LB': {'01': 'Beqaa',
'02': 'Al Janub',
'03': 'Liban-Nord',
'04': 'Beyrouth',
'05': 'Mont-Liban',
'06': 'Liban-Sud',
'07': 'Nabatiye',
'08': 'Beqaa',
'09': 'Liban-Nord',
'10': 'Aakk,r',
'11': 'Baalbek-Hermel'},
'LC': {'01': 'Anse-la-Raye',
'02': 'Dauphin',
'03': 'Castries',
'04': 'Choiseul',
'05': 'Dennery',
'06': 'Gros-Islet',
'07': 'Laborie',
'08': 'Micoud',
'09': 'Soufriere',
'10': 'Vieux-Fort',
'11': 'Praslin'},
'LI': {'01': 'Balzers',
'02': 'Eschen',
'03': 'Gamprin',
'04': 'Mauren',
'05': 'Planken',
'06': 'Ruggell',
'07': 'Schaan',
'08': 'Schellenberg',
'09': 'Triesen',
'10': 'Triesenberg',
'11': 'Vaduz',
'21': 'Gbarpolu',
'22': 'River Gee'},
'LK': {'29': 'Central',
'30': 'North Central',
'32': 'North Western',
'33': 'Sabaragamuwa',
'34': 'Southern',
'35': 'Uva',
'36': 'Western',
'37': 'Eastern',
'38': 'Northern'},
'LR': {'01': 'Bong',
'04': 'Grand Cape Mount',
'05': 'Lofa',
'06': 'Maryland',
'07': 'Monrovia',
'09': 'Nimba',
'10': 'Sino',
'11': 'Grand Bassa',
'12': 'Grand Cape Mount',
'13': 'Maryland',
'14': 'Montserrado',
'17': 'Margibi',
'18': 'River Cess',
'19': 'Grand Gedeh',
'20': 'Lofa',
'21': 'Gbarpolu',
'22': 'River Gee'},
'LS': {'10': 'Berea',
'11': 'Butha-Buthe',
'12': 'Leribe',
'13': 'Mafeteng',
'14': 'Maseru',
'15': 'Mohales Hoek',
'16': 'Mokhotlong',
'17': 'Qachas Nek',
'18': 'Quthing',
'19': 'Thaba-Tseka'},
'LT': {'56': 'Alytaus Apskritis',
'57': 'Kauno Apskritis',
'58': 'Klaipedos Apskritis',
'59': 'Marijampoles Apskritis',
'60': 'Panevezio Apskritis',
'61': 'Siauliu Apskritis',
'62': 'Taurages Apskritis',
'63': 'Telsiu Apskritis',
'64': 'Utenos Apskritis',
'65': 'Vilniaus Apskritis'},
'LU': {'01': 'Diekirch', '02': 'Grevenmacher', '03': 'Luxembourg'},
'LV': {'01': 'Aizkraukles',
'02': 'Aluksnes',
'03': 'Balvu',
'04': 'Bauskas',
'05': 'Cesu',
'06': 'Daugavpils',
'07': 'Daugavpils',
'08': 'Dobeles',
'09': 'Gulbenes',
'10': 'Jekabpils',
'11': 'Jelgava',
'12': 'Jelgavas',
'13': 'Jurmala',
'14': 'Kraslavas',
'15': 'Kuldigas',
'16': 'Liepaja',
'17': 'Liepajas',
'18': 'Limbazu',
'19': 'Ludzas',
'20': 'Madonas',
'21': 'Ogres',
'22': 'Preilu',
'23': 'Rezekne',
'24': 'Rezeknes',
'25': 'Riga',
'26': 'Rigas',
'27': 'Saldus',
'28': 'Talsu',
'29': 'Tukuma',
'30': 'Valkas',
'31': 'Valmieras',
'32': 'Ventspils',
'33': 'Ventspils'},
'LY': {'03': 'Al Aziziyah',
'05': 'Al Jufrah',
'08': 'Al Kufrah',
'13': "Ash Shati'",
'30': 'Murzuq',
'34': 'Sabha',
'41': 'Tarhunah',
'42': 'Tubruq',
'45': 'Zlitan',
'47': 'Ajdabiya',
'48': 'Al Fatih',
'49': 'Al Jabal al Akhdar',
'50': 'Al Khums',
'51': 'An Nuqat al Khams',
'52': 'Awbari',
'53': 'Az Zawiyah',
'54': 'Banghazi',
'55': 'Darnah',
'56': 'Ghadamis',
'57': 'Gharyan',
'58': 'Misratah',
'59': 'Sawfajjin',
'60': 'Surt',
'61': 'Tarabulus',
'62': 'Yafran'},
'MA': {'45': 'Grand Casablanca',
'46': 'Fes-Boulemane',
'47': 'Marrakech-Tensift-Al Haouz',
'48': 'Meknes-Tafilalet',
'49': 'Rabat-Sale-Zemmour-Zaer',
'50': 'Chaouia-Ouardigha',
'51': 'Doukkala-Abda',
'52': 'Gharb-Chrarda-Beni Hssen',
'53': 'Guelmim-Es Smara',
'54': 'Oriental',
'55': 'Souss-Massa-Dr,a',
'56': 'Tadla-Azilal',
'57': 'Tanger-Tetouan',
'58': 'Taza-Al Hoceima-Taounate',
'59': 'La,youne-Boujdour-Sakia El Hamra'},
'MC': {'01': 'La Condamine', '02': 'Monaco', '03': 'Monte-Carlo'},
'MD': {'51': 'Gagauzia',
'57': 'Chisinau',
'58': 'Stinga Nistrului',
'59': 'Anenii Noi',
'60': 'Balti',
'61': 'Basarabeasca',
'62': 'Bender',
'63': 'Briceni',
'64': 'Cahul',
'65': 'Cantemir',
'66': 'Calarasi',
'67': 'Causeni',
'68': 'Cimislia',
'69': 'Criuleni',
'70': 'Donduseni',
'71': 'Drochia',
'72': 'Dubasari',
'73': 'Edinet',
'74': 'Falesti',
'75': 'Floresti',
'76': 'Glodeni',
'77': 'Hincesti',
'78': 'Ialoveni',
'79': 'Leova',
'80': 'Nisporeni',
'81': 'Ocnita',
'82': 'Orhei',
'83': 'Rezina',
'84': 'Riscani',
'85': 'Singerei',
'86': 'Soldanesti',
'87': 'Soroca',
'88': 'Stefan-Voda',
'89': 'Straseni',
'90': 'Taraclia',
'91': 'Telenesti',
'92': 'Ungheni'},
'MG': {'01': 'Antsiranana',
'02': 'Fianarantsoa',
'03': 'Mahajanga',
'04': 'Toamasina',
'05': 'Antananarivo',
'06': 'Toliara'},
'MK': {'01': 'Aracinovo',
'02': 'Bac',
'03': 'Belcista',
'04': 'Berovo',
'05': 'Bistrica',
'06': 'Bitola',
'07': 'Blatec',
'08': 'Bogdanci',
'09': 'Bogomila',
'10': 'Bogovinje',
'11': 'Bosilovo',
'12': 'Brvenica',
'13': 'Cair',
'14': 'Capari',
'15': 'Caska',
'16': 'Cegrane',
'17': 'Centar',
'18': 'Centar Zupa',
'19': 'Cesinovo',
'20': 'Cucer-Sandevo',
'21': 'Debar',
'22': 'Delcevo',
'23': 'Delogozdi',
'24': 'Demir Hisar',
'25': 'Demir Kapija',
'26': 'Dobrusevo',
'27': 'Dolna Banjica',
'28': 'Dolneni',
'29': 'Dorce Petrov',
'30': 'Drugovo',
'31': 'Dzepciste',
'32': 'Gazi Baba',
'33': 'Gevgelija',
'34': 'Gostivar',
'35': 'Gradsko',
'36': 'Ilinden',
'37': 'Izvor',
'38': 'Jegunovce',
'39': 'Kamenjane',
'40': 'Karbinci',
'41': 'Karpos',
'42': 'Kavadarci',
'43': 'Kicevo',
'44': 'Kisela Voda',
'45': 'Klecevce',
'46': 'Kocani',
'47': 'Konce',
'48': 'Kondovo',
'49': 'Konopiste',
'50': 'Kosel',
'51': 'Kratovo',
'52': 'Kriva Palanka',
'53': 'Krivogastani',
'54': 'Krusevo',
'55': 'Kuklis',
'56': 'Kukurecani',
'57': 'Kumanovo',
'58': 'Labunista',
'59': 'Lipkovo',
'60': 'Lozovo',
'61': 'Lukovo',
'62': 'Makedonska Kamenica',
'63': 'Makedonski Brod',
'64': 'Mavrovi Anovi',
'65': 'Meseista',
'66': 'Miravci',
'67': 'Mogila',
'68': 'Murtino',
'69': 'Negotino',
'70': 'Negotino-Polosko',
'71': 'Novaci',
'72': 'Novo Selo',
'73': 'Oblesevo',
'74': 'Ohrid',
'75': 'Orasac',
'76': 'Orizari',
'77': 'Oslomej',
'78': 'Pehcevo',
'79': 'Petrovec',
'80': 'Plasnica',
'81': 'Podares',
'82': 'Prilep',
'83': 'Probistip',
'84': 'Radovis',
'85': 'Rankovce',
'86': 'Resen',
'87': 'Rosoman',
'88': 'Rostusa',
'89': 'Samokov',
'90': 'Saraj',
'91': 'Sipkovica',
'92': 'Sopiste',
'93': 'Sopotnica',
'94': 'Srbinovo',
'95': 'Staravina',
'96': 'Star Dojran',
'97': 'Staro Nagoricane',
'98': 'Stip',
'99': 'Struga',
'A1': 'Strumica',
'A2': 'Studenicani',
'A3': 'Suto Orizari',
'A4': 'Sveti Nikole',
'A5': 'Tearce',
'A6': 'Tetovo',
'A7': 'Topolcani',
'A8': 'Valandovo',
'A9': 'Vasilevo',
'B1': 'Veles',
'B2': 'Velesta',
'B3': 'Vevcani',
'B4': 'Vinica',
'B5': 'Vitoliste',
'B6': 'Vranestica',
'B7': 'Vrapciste',
'B8': 'Vratnica',
'B9': 'Vrutok',
'C1': 'Zajas',
'C2': 'Zelenikovo',
'C3': 'Zelino',
'C4': 'Zitose',
'C5': 'Zletovo',
'C6': 'Zrnovci'},
'ML': {'01': 'Bamako',
'03': 'Kayes',
'04': 'Mopti',
'05': 'Segou',
'06': 'Sikasso',
'07': 'Koulikoro',
'08': 'Tombouctou',
'09': 'Gao',
'10': 'Kidal'},
'MM': {'01': 'Rakhine State',
'02': 'Chin State',
'03': 'Irrawaddy',
'04': 'Kachin State',
'05': 'Karan State',
'06': 'Kayah State',
'07': 'Magwe',
'08': 'Mandalay',
'09': 'Pegu',
'10': 'Sagaing',
'11': 'Shan State',
'12': 'Tenasserim',
'13': 'Mon State',
'14': 'Rangoon',
'17': 'Yangon'},
'MN': {'01': 'Arhangay',
'02': 'Bayanhongor',
'03': 'Bayan-Olgiy',
'05': 'Darhan',
'06': 'Dornod',
'07': 'Dornogovi',
'08': 'Dundgovi',
'09': 'Dzavhan',
'10': 'Govi-Altay',
'11': 'Hentiy',
'12': 'Hovd',
'13': 'Hovsgol',
'14': 'Omnogovi',
'15': 'Ovorhangay',
'16': 'Selenge',
'17': 'Suhbaatar',
'18': 'Tov',
'19': 'Uvs',
'20': 'Ulaanbaatar',
'21': 'Bulgan',
'22': 'Erdenet',
'23': 'Darhan-Uul',
'24': 'Govisumber',
'25': 'Orhon'},
'MO': {'01': 'Ilhas', '02': 'Macau'},
'MR': {'01': 'Hodh Ech Chargui',
'02': 'Hodh El Gharbi',
'03': 'Assaba',
'04': 'Gorgol',
'05': 'Brakna',
'06': 'Trarza',
'07': 'Adrar',
'08': 'Dakhlet Nouadhibou',
'09': 'Tagant',
'10': 'Guidimaka',
'11': 'Tiris Zemmour',
'12': 'Inchiri'},
'MS': {'01': 'Saint Anthony', '02': 'Saint Georges', '03': 'Saint Peter'},
'MU': {'12': 'Black River',
'13': 'Flacq',
'14': 'Grand Port',
'15': 'Moka',
'16': 'Pamplemousses',
'17': 'Plaines Wilhems',
'18': 'Port Louis',
'19': 'Riviere du Rempart',
'20': 'Savanne',
'21': 'Agalega Islands',
'22': 'Cargados Carajos',
'23': 'Rodrigues'},
'MV': {'01': 'Seenu',
'05': 'Laamu',
'30': 'Alifu',
'31': 'Baa',
'32': 'Dhaalu',
'33': 'Faafu ',
'34': 'Gaafu Alifu',
'35': 'Gaafu Dhaalu',
'36': 'Haa Alifu',
'37': 'Haa Dhaalu',
'38': 'Kaafu',
'39': 'Lhaviyani',
'40': 'Maale',
'41': 'Meemu',
'42': 'Gnaviyani',
'43': 'Noonu',
'44': 'Raa',
'45': 'Shaviyani',
'46': 'Thaa',
'47': 'Vaavu'},
'MW': {'02': 'Chikwawa',
'03': 'Chiradzulu',
'04': 'Chitipa',
'05': 'Thyolo',
'06': 'Dedza',
'07': 'Dowa',
'08': 'Karonga',
'09': 'Kasungu',
'11': 'Lilongwe',
'12': 'Mangochi',
'13': 'Mchinji',
'15': 'Mzimba',
'16': 'Ntcheu',
'17': 'Nkhata Bay',
'18': 'Nkhotakota',
'19': 'Nsanje',
'20': 'Ntchisi',
'21': 'Rumphi',
'22': 'Salima',
'23': 'Zomba',
'24': 'Blantyre',
'25': 'Mwanza',
'26': 'Balaka',
'27': 'Likoma',
'28': 'Machinga',
'29': 'Mulanje',
'30': 'Phalombe'},
'MX': {'01': 'Aguascalientes',
'02': 'Baja California',
'03': 'Baja California Sur',
'04': 'Campeche',
'05': 'Chiapas',
'06': 'Chihuahua',
'07': 'Coahuila de Zaragoza',
'08': 'Colima',
'09': 'Distrito Federal',
'10': 'Durango',
'11': 'Guanajuato',
'12': 'Guerrero',
'13': 'Hidalgo',
'14': 'Jalisco',
'15': 'Mexico',
'16': 'Michoacan de Ocampo',
'17': 'Morelos',
'18': 'Nayarit',
'19': 'Nuevo Leon',
'20': 'Oaxaca',
'21': 'Puebla',
'22': 'Queretaro de Arteaga',
'23': 'Quintana Roo',
'24': 'San Luis Potosi',
'25': 'Sinaloa',
'26': 'Sonora',
'27': 'Tabasco',
'28': 'Tamaulipas',
'29': 'Tlaxcala',
'30': 'Veracruz-Llave',
'31': 'Yucatan',
'32': 'Zacatecas'},
'MY': {'01': 'Johor',
'02': 'Kedah',
'03': 'Kelantan',
'04': 'Melaka',
'05': 'Negeri Sembilan',
'06': 'Pahang',
'07': 'Perak',
'08': 'Perlis',
'09': 'Pulau Pinang',
'11': 'Sarawak',
'12': 'Selangor',
'13': 'Terengganu',
'14': 'Kuala Lumpur',
'15': 'Labuan',
'16': 'Sabah',
'17': 'Putrajaya'},
'MZ': {'01': 'Cabo Delgado',
'02': 'Gaza',
'03': 'Inhambane',
'04': 'Maputo',
'05': 'Sofala',
'06': 'Nampula',
'07': 'Niassa',
'08': 'Tete',
'09': 'Zambezia',
'10': 'Manica',
'11': 'Maputo'},
'NA': {'01': 'Bethanien',
'02': 'Caprivi Oos',
'03': 'Boesmanland',
'04': 'Gobabis',
'05': 'Grootfontein',
'06': 'Kaokoland',
'07': 'Karibib',
'08': 'Keetmanshoop',
'09': 'Luderitz',
'10': 'Maltahohe',
'11': 'Okahandja',
'12': 'Omaruru',
'13': 'Otjiwarongo',
'14': 'Outjo',
'15': 'Owambo',
'16': 'Rehoboth',
'17': 'Swakopmund',
'18': 'Tsumeb',
'20': 'Karasburg',
'21': 'Windhoek',
'22': 'Damaraland',
'23': 'Hereroland Oos',
'24': 'Hereroland Wes',
'25': 'Kavango',
'26': 'Mariental',
'27': 'Namaland',
'28': 'Caprivi',
'29': 'Erongo',
'30': 'Hardap',
'31': 'Karas',
'32': 'Kunene',
'33': 'Ohangwena',
'34': 'Okavango',
'35': 'Omaheke',
'36': 'Omusati',
'37': 'Oshana',
'38': 'Oshikoto',
'39': 'Otjozondjupa'},
'NE': {'01': 'Agadez',
'02': 'Diffa',
'03': 'Dosso',
'04': 'Maradi',
'05': 'Niamey',
'06': 'Tahoua',
'07': 'Zinder',
'08': 'Niamey'},
'NG': {'05': 'Lagos',
'11': 'Federal Capital Territory',
'16': 'Ogun',
'21': 'Akwa Ibom',
'22': 'Cross River',
'23': 'Kaduna',
'24': 'Katsina',
'25': 'Anambra',
'26': 'Benue',
'27': 'Borno',
'28': 'Imo',
'29': 'Kano',
'30': 'Kwara',
'31': 'Niger',
'32': 'Oyo',
'35': 'Adamawa',
'36': 'Delta',
'37': 'Edo',
'39': 'Jigawa',
'40': 'Kebbi',
'41': 'Kogi',
'42': 'Osun',
'43': 'Taraba',
'44': 'Yobe',
'45': 'Abia',
'46': 'Bauchi',
'47': 'Enugu',
'48': 'Ondo',
'49': 'Plateau',
'50': 'Rivers',
'51': 'Sokoto',
'52': 'Bayelsa',
'53': 'Ebonyi',
'54': 'Ekiti',
'55': 'Gombe',
'56': 'Nassarawa',
'57': 'Zamfara'},
'NI': {'01': 'Boaco',
'02': 'Carazo',
'03': 'Chinandega',
'04': 'Chontales',
'05': 'Esteli',
'06': 'Granada',
'07': 'Jinotega',
'08': 'Leon',
'09': 'Madriz',
'10': 'Managua',
'11': 'Masaya',
'12': 'Matagalpa',
'13': 'Nueva Segovia',
'14': 'Rio San Juan',
'15': 'Rivas',
'16': 'Zelaya',
'17': 'Autonoma Atlantico Norte',
'18': 'Region Autonoma Atlantico Sur'},
'NL': {'01': 'Drenthe',
'02': 'Friesland',
'03': 'Gelderland',
'04': 'Groningen',
'05': 'Limburg',
'06': 'Noord-Brabant',
'07': 'Noord-Holland',
'09': 'Utrecht',
'10': 'Zeeland',
'11': 'Zuid-Holland',
'15': 'Overijssel',
'16': 'Flevoland'},
'NO': {'01': 'Akershus',
'02': 'Aust-Agder',
'04': 'Buskerud',
'05': 'Finnmark',
'06': 'Hedmark',
'07': 'Hordaland',
'08': 'More og Romsdal',
'09': 'Nordland',
'10': 'Nord-Trondelag',
'11': 'Oppland',
'12': 'Oslo',
'13': 'Ostfold',
'14': 'Rogaland',
'15': 'Sogn og Fjordane',
'16': 'Sor-Trondelag',
'17': 'Telemark',
'18': 'Troms',
'19': 'Vest-Agder',
'20': 'Vestfold'},
'NP': {'01': 'Bagmati',
'02': 'Bheri',
'03': 'Dhawalagiri',
'04': 'Gandaki',
'05': 'Janakpur',
'06': 'Karnali',
'07': 'Kosi',
'08': 'Lumbini',
'09': 'Mahakali',
'10': 'Mechi',
'11': 'Narayani',
'12': 'Rapti',
'13': 'Sagarmatha',
'14': 'Seti'},
'NR': {'01': 'Aiwo',
'02': 'Anabar',
'03': 'Anetan',
'04': 'Anibare',
'05': 'Baiti',
'06': 'Boe',
'07': 'Buada',
'08': 'Denigomodu',
'09': 'Ewa',
'10': 'Ijuw',
'11': 'Meneng',
'12': 'Nibok',
'13': 'Uaboe',
'14': 'Yaren'},
'NZ': {'10': 'Chatham Islands',
'E7': 'Auckland',
'E8': 'Bay of Plenty',
'E9': 'Canterbury',
'F1': 'Gisborne',
'F2': "Hawke's Bay",
'F3': 'Manawatu-Wanganui',
'F4': 'Marlborough',
'F5': 'Nelson',
'F6': 'Northland',
'F7': 'Otago',
'F8': 'Southland',
'F9': 'Taranaki',
'G1': 'Waikato',
'G2': 'Wellington',
'G3': 'West Coast'},
'OM': {'01': 'Ad Dakhiliyah',
'02': 'Al Batinah',
'03': 'Al Wusta',
'04': 'Ash Sharqiyah',
'05': 'Az Zahirah',
'06': 'Masqat',
'07': 'Musandam',
'08': 'Zufar'},
'PA': {'01': 'Bocas del Toro',
'02': 'Chiriqui',
'03': 'Cocle',
'04': 'Colon',
'05': 'Darien',
'06': 'Herrera',
'07': 'Los Santos',
'08': 'Panama',
'09': 'San Blas',
'10': 'Veraguas'},
'PE': {'01': 'Amazonas',
'02': 'Ancash',
'03': 'Apurimac',
'04': 'Arequipa',
'05': 'Ayacucho',
'06': 'Cajamarca',
'07': 'Callao',
'08': 'Cusco',
'09': 'Huancavelica',
'10': 'Huanuco',
'11': 'Ica',
'12': 'Junin',
'13': 'La Libertad',
'14': 'Lambayeque',
'15': 'Lima',
'16': 'Loreto',
'17': 'Madre de Dios',
'18': 'Moquegua',
'19': 'Pasco',
'20': 'Piura',
'21': 'Puno',
'22': 'San Martin',
'23': 'Tacna',
'24': 'Tumbes',
'25': 'Ucayali'},
'PG': {'01': 'Central',
'02': 'Gulf',
'03': 'Milne Bay',
'04': 'Northern',
'05': 'Southern Highlands',
'06': 'Western',
'07': 'North Solomons',
'08': 'Chimbu',
'09': 'Eastern Highlands',
'10': 'East New Britain',
'11': 'East Sepik',
'12': 'Madang',
'13': 'Manus',
'14': 'Morobe',
'15': 'New Ireland',
'16': 'Western Highlands',
'17': 'West New Britain',
'18': 'Sandaun',
'19': 'Enga',
'20': 'National Capital'},
'PH': {'01': 'Abra',
'02': 'Agusan del Norte',
'03': 'Agusan del Sur',
'04': 'Aklan',
'05': 'Albay',
'06': 'Antique',
'07': 'Bataan',
'08': 'Batanes',
'09': 'Batangas',
'10': 'Benguet',
'11': 'Bohol',
'12': 'Bukidnon',
'13': 'Bulacan',
'14': 'Cagayan',
'15': 'Camarines Norte',
'16': 'Camarines Sur',
'17': 'Camiguin',
'18': 'Capiz',
'19': 'Catanduanes',
'20': 'Cavite',
'21': 'Cebu',
'22': 'Basilan',
'23': 'Eastern Samar',
'24': 'Davao',
'25': 'Davao del Sur',
'26': 'Davao Oriental',
'27': 'Ifugao',
'28': 'Ilocos Norte',
'29': 'Ilocos Sur',
'30': 'Iloilo',
'31': 'Isabela',
'32': 'Kalinga-Apayao',
'33': 'Laguna',
'34': 'Lanao del Norte',
'35': 'Lanao del Sur',
'36': 'La Union',
'37': 'Leyte',
'38': 'Marinduque',
'39': 'Masbate',
'40': 'Mindoro Occidental',
'41': 'Mindoro Oriental',
'42': 'Misamis Occidental',
'43': 'Misamis Oriental',
'44': 'Mountain',
'45': 'Negros Occidental',
'46': 'Negros Oriental',
'47': 'Nueva Ecija',
'48': 'Nueva Vizcaya',
'49': 'Palawan',
'50': 'Pampanga',
'51': 'Pangasinan',
'53': 'Rizal',
'54': 'Romblon',
'55': 'Samar',
'56': 'Maguindanao',
'57': 'North Cotabato',
'58': 'Sorsogon',
'59': 'Southern Leyte',
'60': 'Sulu',
'61': 'Surigao del Norte',
'62': 'Surigao del Sur',
'63': 'Tarlac',
'64': 'Zambales',
'65': 'Zamboanga del Norte',
'66': 'Zamboanga del Sur',
'67': 'Northern Samar',
'68': 'Quirino',
'69': 'Siquijor',
'70': 'South Cotabato',
'71': 'Sultan Kudarat',
'72': 'Tawitawi',
'A1': 'Angeles',
'A2': 'Bacolod',
'A3': 'Bago',
'A4': 'Baguio',
'A5': 'Bais',
'A6': 'Basilan City',
'A7': 'Batangas City',
'A8': 'Butuan',
'A9': 'Cabanatuan',
'B1': 'Cadiz',
'B2': 'Cagayan de Oro',
'B3': 'Calbayog',
'B4': 'Caloocan',
'B5': 'Canlaon',
'B6': 'Cavite City',
'B7': 'Cebu City',
'B8': 'Cotabato',
'B9': 'Dagupan',
'C1': 'Danao',
'C2': 'Dapitan',
'C3': 'Davao City',
'C4': 'Dipolog',
'C5': 'Dumaguete',
'C6': 'General Santos',
'C7': 'Gingoog',
'C8': 'Iligan',
'C9': 'Iloilo City',
'D1': 'Iriga',
'D2': 'La Carlota',
'D3': 'Laoag',
'D4': 'Lapu-Lapu',
'D5': 'Legaspi',
'D6': 'Lipa',
'D7': 'Lucena',
'D8': 'Mandaue',
'D9': 'Manila',
'E1': 'Marawi',
'E2': 'Naga',
'E3': 'Olongapo',
'E4': 'Ormoc',
'E5': 'Oroquieta',
'E6': 'Ozamis',
'E7': 'Pagadian',
'E8': 'Palayan',
'E9': 'Pasay',
'F1': 'Puerto Princesa',
'F2': 'Quezon City',
'F3': 'Roxas',
'F4': 'San Carlos',
'F5': 'San Carlos',
'F6': 'San Jose',
'F7': 'San Pablo',
'F8': 'Silay',
'F9': 'Surigao',
'G1': 'Tacloban',
'G2': 'Tagaytay',
'G3': 'Tagbilaran',
'G4': 'Tangub',
'G5': 'Toledo',
'G6': 'Trece Martires',
'G7': 'Zamboanga',
'G8': 'Aurora',
'H2': 'Quezon',
'H3': 'Negros Occidental',
'I6': 'Compostela Valley',
'I7': 'Davao del Norte',
'J7': 'Kalinga',
'K6': 'Malaybalay',
'L9': 'Passi',
'M5': 'San Jose del Monte',
'M6': 'San Juan',
'M8': 'Santiago',
'M9': 'Sarangani',
'N1': 'Sipalay',
'N3': 'Surigao del Norte',
'P1': 'Zambales',
'P2': 'Zamboanga'},
'PK': {'01': 'Federally Administered Tribal Areas',
'02': 'Balochistan',
'03': 'North-West Frontier',
'04': 'Punjab',
'05': 'Sindh',
'06': 'Azad Kashmir',
'07': 'Northern Areas',
'08': 'Islamabad'},
'PL': {'72': 'Dolnoslaskie',
'73': 'Kujawsko-Pomorskie',
'74': 'Lodzkie',
'75': 'Lubelskie',
'76': 'Lubuskie',
'77': 'Malopolskie',
'78': 'Mazowieckie',
'79': 'Opolskie',
'80': 'Podkarpackie',
'81': 'Podlaskie',
'82': 'Pomorskie',
'83': 'Slaskie',
'84': 'Swietokrzyskie',
'85': 'Warminsko-Mazurskie',
'86': 'Wielkopolskie',
'87': 'Zachodniopomorskie'},
'PS': {'GZ': 'Gaza', 'WE': 'West Bank'},
'PT': {'02': 'Aveiro',
'03': 'Beja',
'04': 'Braga',
'05': 'Braganca',
'06': 'Castelo Branco',
'07': 'Coimbra',
'08': 'Evora',
'09': 'Faro',
'10': 'Madeira',
'11': 'Guarda',
'13': 'Leiria',
'14': 'Lisboa',
'16': 'Portalegre',
'17': 'Porto',
'18': 'Santarem',
'19': 'Setubal',
'20': 'Viana do Castelo',
'21': 'Vila Real',
'22': 'Viseu',
'23': 'Azores'},
'PY': {'01': 'Alto Parana',
'02': 'Amambay',
'03': 'Boqueron',
'04': 'Caaguazu',
'05': 'Caazapa',
'06': 'Central',
'07': 'Concepcion',
'08': 'Cordillera',
'10': 'Guaira',
'11': 'Itapua',
'12': 'Misiones',
'13': 'Neembucu',
'15': 'Paraguari',
'16': 'Presidente Hayes',
'17': 'San Pedro',
'19': 'Canindeyu',
'20': 'Chaco',
'21': 'Nueva Asuncion',
'23': 'Alto Paraguay'},
'QA': {'01': 'Ad Dawhah',
'02': 'Al Ghuwariyah',
'03': 'Al Jumaliyah',
'04': 'Al Khawr',
'05': 'Al Wakrah Municipality',
'06': 'Ar Rayyan',
'08': 'Madinat ach Shamal',
'09': 'Umm Salal',
'10': 'Al Wakrah',
'11': 'Jariyan al Batnah',
'12': "Umm Sa'id"},
'RO': {'01': 'Alba',
'02': 'Arad',
'03': 'Arges',
'04': 'Bacau',
'05': 'Bihor',
'06': 'Bistrita-Nasaud',
'07': 'Botosani',
'08': 'Braila',
'09': 'Brasov',
'10': 'Bucuresti',
'11': 'Buzau',
'12': 'Caras-Severin',
'13': 'Cluj',
'14': 'Constanta',
'15': 'Covasna',
'16': 'Dambovita',
'17': 'Dolj',
'18': 'Galati',
'19': 'Gorj',
'20': 'Harghita',
'21': 'Hunedoara',
'22': 'Ialomita',
'23': 'Iasi',
'25': 'Maramures',
'26': 'Mehedinti',
'27': 'Mures',
'28': 'Neamt',
'29': 'Olt',
'30': 'Prahova',
'31': 'Salaj',
'32': 'Satu Mare',
'33': 'Sibiu',
'34': 'Suceava',
'35': 'Teleorman',
'36': 'Timis',
'37': 'Tulcea',
'38': 'Vaslui',
'39': 'Valcea',
'40': 'Vrancea',
'41': 'Calarasi',
'42': 'Giurgiu',
'43': 'Ilfov'},
'RS': {'01': 'Kosovo', '02': 'Vojvodina'},
'RU': {'01': 'Adygeya, Republic of',
'02': 'Aginsky Buryatsky AO',
'03': 'Gorno-Altay',
'04': 'Altaisky krai',
'05': 'Amur',
'06': "Arkhangel'sk",
'07': "Astrakhan'",
'08': 'Bashkortostan',
'09': 'Belgorod',
'10': 'Bryansk',
'11': 'Buryat',
'12': 'Chechnya',
'13': 'Chelyabinsk',
'14': 'Chita',
'15': 'Chukot',
'16': 'Chuvashia',
'17': 'Dagestan',
'18': 'Evenk',
'19': 'Ingush',
'20': 'Irkutsk',
'21': 'Ivanovo',
'22': 'Kabardin-Balkar',
'23': 'Kaliningrad',
'24': 'Kalmyk',
'25': 'Kaluga',
'26': 'Kamchatka',
'27': 'Karachay-Cherkess',
'28': 'Karelia',
'29': 'Kemerovo',
'30': 'Khabarovsk',
'31': 'Khakass',
'32': 'Khanty-Mansiy',
'33': 'Kirov',
'34': 'Komi',
'36': 'Koryak',
'37': 'Kostroma',
'38': 'Krasnodar',
'39': 'Krasnoyarsk',
'40': 'Kurgan',
'41': 'Kursk',
'42': 'Leningrad',
'43': 'Lipetsk',
'44': 'Magadan',
'45': 'Mariy-El',
'46': 'Mordovia',
'47': 'Moskva',
'48': 'Moscow City',
'49': 'Murmansk',
'50': 'Nenets',
'51': 'Nizhegorod',
'52': 'Novgorod',
'53': 'Novosibirsk',
'54': 'Omsk',
'55': 'Orenburg',
'56': 'Orel',
'57': 'Penza',
'58': "Perm'",
'59': "Primor'ye",
'60': 'Pskov',
'61': 'Rostov',
'62': "Ryazan'",
'63': 'Sakha',
'64': 'Sakhalin',
'65': 'Samara',
'66': 'Saint Petersburg City',
'67': 'Saratov',
'68': 'North Ossetia',
'69': 'Smolensk',
'70': "Stavropol'",
'71': 'Sverdlovsk',
'72': 'Tambovskaya oblast',
'73': 'Tatarstan',
'74': 'Taymyr',
'75': 'Tomsk',
'76': 'Tula',
'77': "Tver'",
'78': "Tyumen'",
'79': 'Tuva',
'80': 'Udmurt',
'81': "Ul'yanovsk",
'83': 'Vladimir',
'84': 'Volgograd',
'85': 'Vologda',
'86': 'Voronezh',
'87': 'Yamal-Nenets',
'88': "Yaroslavl'",
'89': 'Yevrey',
'90': 'Permskiy Kray',
'91': 'Krasnoyarskiy Kray',
'92': 'Kamchatskiy Kray',
'93': "Zabaykal'skiy Kray"},
'RW': {'01': 'Butare',
'06': 'Gitarama',
'07': 'Kibungo',
'09': 'Kigali',
'11': 'Est',
'12': 'Kigali',
'13': 'Nord',
'14': 'Ouest',
'15': 'Sud'},
'SA': {'02': 'Al Bahah',
'05': 'Al Madinah',
'06': 'Ash Sharqiyah',
'08': 'Al Qasim',
'10': 'Ar Riyad',
'11': 'Asir Province',
'13': "Ha'il",
'14': 'Makkah',
'15': 'Al Hudud ash Shamaliyah',
'16': 'Najran',
'17': 'Jizan',
'19': 'Tabuk',
'20': 'Al Jawf'},
'SB': {'03': 'Malaita',
'06': 'Guadalcanal',
'07': 'Isabel',
'08': 'Makira',
'09': 'Temotu',
'10': 'Central',
'11': 'Western',
'12': 'Choiseul',
'13': 'Rennell and Bellona'},
'SC': {'01': 'Anse aux Pins',
'02': 'Anse Boileau',
'03': 'Anse Etoile',
'04': 'Anse Louis',
'05': 'Anse Royale',
'06': 'Baie Lazare',
'07': 'Baie Sainte Anne',
'08': 'Beau Vallon',
'09': 'Bel Air',
'10': 'Bel Ombre',
'11': 'Cascade',
'12': 'Glacis',
'13': "Grand' Anse",
'14': "Grand' Anse",
'15': 'La Digue',
'16': 'La Riviere Anglaise',
'17': 'Mont Buxton',
'18': 'Mont Fleuri',
'19': 'Plaisance',
'20': 'Pointe La Rue',
'21': 'Port Glaud',
'22': 'Saint Louis',
'23': 'Takamaka'},
'SD': {'27': 'Al Wusta',
'28': "Al Istiwa'iyah",
'29': 'Al Khartum',
'30': 'Ash Shamaliyah',
'31': 'Ash Sharqiyah',
'32': 'Bahr al Ghazal',
'33': 'Darfur',
'34': 'Kurdufan',
'35': 'Upper Nile',
'40': 'Al Wahadah State',
'44': 'Central Equatoria State'},
'SE': {'02': 'Blekinge Lan',
'03': 'Gavleborgs Lan',
'05': 'Gotlands Lan',
'06': 'Hallands Lan',
'07': 'Jamtlands Lan',
'08': 'Jonkopings Lan',
'09': 'Kalmar Lan',
'10': 'Dalarnas Lan',
'12': 'Kronobergs Lan',
'14': 'Norrbottens Lan',
'15': 'Orebro Lan',
'16': 'Ostergotlands Lan',
'18': 'Sodermanlands Lan',
'21': 'Uppsala Lan',
'22': 'Varmlands Lan',
'23': 'Vasterbottens Lan',
'24': 'Vasternorrlands Lan',
'25': 'Vastmanlands Lan',
'26': 'Stockholms Lan',
'27': 'Skane Lan',
'28': 'Vastra Gotaland'},
'SH': {'01': 'Ascension', '02': 'Saint Helena', '03': 'Tristan da Cunha'},
'SI': {'01': 'Ajdovscina Commune',
'02': 'Beltinci Commune',
'03': 'Bled Commune',
'04': 'Bohinj Commune',
'05': 'Borovnica Commune',
'06': 'Bovec Commune',
'07': 'Brda Commune',
'08': 'Brezice Commune',
'09': 'Brezovica Commune',
'11': 'Celje Commune',
'12': 'Cerklje na Gorenjskem Commune',
'13': 'Cerknica Commune',
'14': 'Cerkno Commune',
'15': 'Crensovci Commune',
'16': 'Crna na Koroskem Commune',
'17': 'Crnomelj Commune',
'19': 'Divaca Commune',
'20': 'Dobrepolje Commune',
'22': 'Dol pri Ljubljani Commune',
'24': 'Dornava Commune',
'25': 'Dravograd Commune',
'26': 'Duplek Commune',
'27': 'Gorenja vas-Poljane Commune',
'28': 'Gorisnica Commune',
'29': 'Gornja Radgona Commune',
'30': 'Gornji Grad Commune',
'31': 'Gornji Petrovci Commune',
'32': 'Grosuplje Commune',
'34': 'Hrastnik Commune',
'35': 'Hrpelje-Kozina Commune',
'36': 'Idrija Commune',
'37': 'Ig Commune',
'38': 'Ilirska Bistrica Commune',
'39': 'Ivancna Gorica Commune',
'40': 'Izola-Isola Commune',
'42': 'Jursinci Commune',
'44': 'Kanal Commune',
'45': 'Kidricevo Commune',
'46': 'Kobarid Commune',
'47': 'Kobilje Commune',
'49': 'Komen Commune',
'50': 'Koper-Capodistria Urban Commune',
'51': 'Kozje Commune',
'52': 'Kranj Commune',
'53': 'Kranjska Gora Commune',
'54': 'Krsko Commune',
'55': 'Kungota Commune',
'57': 'Lasko Commune',
'61': 'Ljubljana Urban Commune',
'62': 'Ljubno Commune',
'64': 'Logatec Commune',
'66': 'Loski Potok Commune',
'68': 'Lukovica Commune',
'71': 'Medvode Commune',
'72': 'Menges Commune',
'73': 'Metlika Commune',
'74': 'Mezica Commune',
'76': 'Mislinja Commune',
'77': 'Moravce Commune',
'78': 'Moravske Toplice Commune',
'79': 'Mozirje Commune',
'80': 'Murska Sobota Urban Commune',
'81': 'Muta Commune',
'82': 'Naklo Commune',
'83': 'Nazarje Commune',
'84': 'Nova Gorica Urban Commune',
'86': 'Odranci Commune',
'87': 'Ormoz Commune',
'88': 'Osilnica Commune',
'89': 'Pesnica Commune',
'91': 'Pivka Commune',
'92': 'Podcetrtek Commune',
'94': 'Postojna Commune',
'97': 'Puconci Commune',
'98': 'Race-Fram Commune',
'99': 'Radece Commune',
'A1': 'Radenci Commune',
'A2': 'Radlje ob Dravi Commune',
'A3': 'Radovljica Commune',
'A6': 'Rogasovci Commune',
'A7': 'Rogaska Slatina Commune',
'A8': 'Rogatec Commune',
'B1': 'Semic Commune',
'B2': 'Sencur Commune',
'B3': 'Sentilj Commune',
'B4': 'Sentjernej Commune',
'B6': 'Sevnica Commune',
'B7': 'Sezana Commune',
'B8': 'Skocjan Commune',
'B9': 'Skofja Loka Commune',
'C1': 'Skofljica Commune',
'C2': 'Slovenj Gradec Urban Commune',
'C4': 'Slovenske Konjice Commune',
'C5': 'Smarje pri Jelsah Commune',
'C6': 'Smartno ob Paki Commune',
'C7': 'Sostanj Commune',
'C8': 'Starse Commune',
'C9': 'Store Commune',
'D1': 'Sveti Jurij Commune',
'D2': 'Tolmin Commune',
'D3': 'Trbovlje Commune',
'D4': 'Trebnje Commune',
'D5': 'Trzic Commune',
'D6': 'Turnisce Commune',
'D7': 'Velenje Urban Commune',
'D8': 'Velike Lasce Commune',
'E1': 'Vipava Commune',
'E2': 'Vitanje Commune',
'E3': 'Vodice Commune',
'E5': 'Vrhnika Commune',
'E6': 'Vuzenica Commune',
'E7': 'Zagorje ob Savi Commune',
'E9': 'Zavrc Commune',
'F1': 'Zelezniki Commune',
'F2': 'Ziri Commune',
'F3': 'Zrece Commune',
'F4': 'Benedikt Commune',
'F5': 'Bistrica ob Sotli Commune',
'F6': 'Bloke Commune',
'F7': 'Braslovce Commune',
'F8': 'Cankova Commune',
'F9': 'Cerkvenjak Commune',
'G1': 'Destrnik Commune',
'G2': 'Dobje Commune',
'G3': 'Dobrna Commune',
'G4': 'Dobrova-Horjul-Polhov Gradec Commune',
'G5': 'Dobrovnik-Dobronak Commune',
'G6': 'Dolenjske Toplice Commune',
'G7': 'Domzale Commune',
'G8': 'Grad Commune',
'G9': 'Hajdina Commune',
'H1': 'Hoce-Slivnica Commune',
'H2': 'Hodos-Hodos Commune',
'H3': 'Horjul Commune',
'H4': 'Jesenice Commune',
'H5': 'Jezersko Commune',
'H6': 'Kamnik Commune',
'H7': 'Kocevje Commune',
'H8': 'Komenda Commune',
'H9': 'Kostel Commune',
'I1': 'Krizevci Commune',
'I2': 'Kuzma Commune',
'I3': 'Lenart Commune',
'I4': 'Lendava-Lendva Commune',
'I5': 'Litija Commune',
'I6': 'Ljutomer Commune',
'I7': 'Loska Dolina Commune',
'I8': 'Lovrenc na Pohorju Commune',
'I9': 'Luce Commune',
'J1': 'Majsperk Commune',
'J2': 'Maribor Commune',
'J3': 'Markovci Commune',
'J4': 'Miklavz na Dravskem polju Commune',
'J5': 'Miren-Kostanjevica Commune',
'J6': 'Mirna Pec Commune',
'J7': 'Novo mesto Urban Commune',
'J8': 'Oplotnica Commune',
'J9': 'Piran-Pirano Commune',
'K1': 'Podlehnik Commune',
'K2': 'Podvelka Commune',
'K3': 'Polzela Commune',
'K4': 'Prebold Commune',
'K5': 'Preddvor Commune',
'K6': 'Prevalje Commune',
'K7': 'Ptuj Urban Commune',
'K8': 'Ravne na Koroskem Commune',
'K9': 'Razkrizje Commune',
'L1': 'Ribnica Commune',
'L2': 'Ribnica na Pohorju Commune',
'L3': 'Ruse Commune',
'L4': 'Salovci Commune',
'L5': 'Selnica ob Dravi Commune',
'L6': 'Sempeter-Vrtojba Commune',
'L7': 'Sentjur pri Celju Commune',
'L8': 'Slovenska Bistrica Commune',
'L9': 'Smartno pri Litiji Commune',
'M1': 'Sodrazica Commune',
'M2': 'Solcava Commune',
'M3': 'Sveta Ana Commune',
'M4': 'Sveti Andraz v Slovenskih goricah Commune',
'M5': 'Tabor Commune',
'M6': 'Tisina Commune',
'M7': 'Trnovska vas Commune',
'M8': 'Trzin Commune',
'M9': 'Velika Polana Commune',
'N1': 'Verzej Commune',
'N2': 'Videm Commune',
'N3': 'Vojnik Commune',
'N4': 'Vransko Commune',
'N5': 'Zalec Commune',
'N6': 'Zetale Commune',
'N7': 'Zirovnica Commune',
'N8': 'Zuzemberk Commune',
'N9': 'Apace Commune',
'O1': 'Cirkulane Commune'},
'SK': {'01': 'Banska Bystrica',
'02': 'Bratislava',
'03': 'Kosice',
'04': 'Nitra',
'05': 'Presov',
'06': 'Trencin',
'07': 'Trnava',
'08': 'Zilina'},
'SL': {'01': 'Eastern',
'02': 'Northern',
'03': 'Southern',
'04': 'Western Area'},
'SM': {'01': 'Acquaviva',
'02': 'Chiesanuova',
'03': 'Domagnano',
'04': 'Faetano',
'05': 'Fiorentino',
'06': 'Borgo Maggiore',
'07': 'San Marino',
'08': 'Monte Giardino',
'09': 'Serravalle'},
'SN': {'01': 'Dakar',
'03': 'Diourbel',
'05': 'Tambacounda',
'07': 'Thies',
'09': 'Fatick',
'10': 'Kaolack',
'11': 'Kolda',
'12': 'Ziguinchor',
'13': 'Louga',
'14': 'Saint-Louis',
'15': 'Matam'},
'SO': {'01': 'Bakool',
'02': 'Banaadir',
'03': 'Bari',
'04': 'Bay',
'05': 'Galguduud',
'06': 'Gedo',
'07': 'Hiiraan',
'08': 'Jubbada Dhexe',
'09': 'Jubbada Hoose',
'10': 'Mudug',
'11': 'Nugaal',
'12': 'Sanaag',
'13': 'Shabeellaha Dhexe',
'14': 'Shabeellaha Hoose',
'16': 'Woqooyi Galbeed',
'18': 'Nugaal',
'19': 'Togdheer',
'20': 'Woqooyi Galbeed',
'21': 'Awdal',
'22': 'Sool'},
'SR': {'10': 'Brokopondo',
'11': 'Commewijne',
'12': 'Coronie',
'13': 'Marowijne',
'14': 'Nickerie',
'15': 'Para',
'16': 'Paramaribo',
'17': 'Saramacca',
'18': 'Sipaliwini',
'19': 'Wanica'},
'SS': {'01': 'Central Equatoria',
'02': 'Eastern Equatoria',
'03': 'Jonglei',
'04': 'Lakes',
'05': 'Northern Bahr el Ghazal',
'06': 'Unity',
'07': 'Upper Nile',
'08': 'Warrap',
'09': 'Western Bahr el Ghazal',
'10': 'Western Equatoria'},
'ST': {'01': 'Principe', '02': 'Sao Tome'},
'SV': {'01': 'Ahuachapan',
'02': 'Cabanas',
'03': 'Chalatenango',
'04': 'Cuscatlan',
'05': 'La Libertad',
'06': 'La Paz',
'07': 'La Union',
'08': 'Morazan',
'09': 'San Miguel',
'10': 'San Salvador',
'11': 'Santa Ana',
'12': 'San Vicente',
'13': 'Sonsonate',
'14': 'Usulutan'},
'SY': {'01': 'Al Hasakah',
'02': 'Al Ladhiqiyah',
'03': 'Al Qunaytirah',
'04': 'Ar Raqqah',
'05': "As Suwayda'",
'06': 'Dar',
'07': 'Dayr az Zawr',
'08': 'Rif Dimashq',
'09': 'Halab',
'10': 'Hamah',
'11': 'Hims',
'12': 'Idlib',
'13': 'Dimashq',
'14': 'Tartus'},
'SZ': {'01': 'Hhohho',
'02': 'Lubombo',
'03': 'Manzini',
'04': 'Shiselweni',
'05': 'Praslin'},
'TD': {'01': 'Batha',
'02': 'Biltine',
'03': 'Borkou-Ennedi-Tibesti',
'04': 'Chari-Baguirmi',
'05': 'Guera',
'06': 'Kanem',
'07': 'Lac',
'08': 'Logone Occidental',
'09': 'Logone Oriental',
'10': 'Mayo-Kebbi',
'11': 'Moyen-Chari',
'12': 'Ouaddai',
'13': 'Salamat',
'14': 'Tandjile'},
'TG': {'22': 'Centrale',
'23': 'Kara',
'24': 'Maritime',
'25': 'Plateaux',
'26': 'Savanes'},
'TH': {'01': 'Mae Hong Son',
'02': 'Chiang Mai',
'03': 'Chiang Rai',
'04': 'Nan',
'05': 'Lamphun',
'06': 'Lampang',
'07': 'Phrae',
'08': 'Tak',
'09': 'Sukhothai',
'10': 'Uttaradit',
'11': 'Kamphaeng Phet',
'12': 'Phitsanulok',
'13': 'Phichit',
'14': 'Phetchabun',
'15': 'Uthai Thani',
'16': 'Nakhon Sawan',
'17': 'Nong Khai',
'18': 'Loei',
'20': 'Sakon Nakhon',
'21': 'Nakhon Phanom',
'22': 'Khon Kaen',
'23': 'Kalasin',
'24': 'Maha Sarakham',
'25': 'Roi Et',
'26': 'Chaiyaphum',
'27': 'Nakhon Ratchasima',
'28': 'Buriram',
'29': 'Surin',
'30': 'Sisaket',
'31': 'Narathiwat',
'32': 'Chai Nat',
'33': 'Sing Buri',
'34': 'Lop Buri',
'35': 'Ang Thong',
'36': 'Phra Nakhon Si Ayutthaya',
'37': 'Saraburi',
'38': 'Nonthaburi',
'39': 'Pathum Thani',
'40': 'Krung Thep',
'41': 'Phayao',
'42': 'Samut Prakan',
'43': 'Nakhon Nayok',
'44': 'Chachoengsao',
'45': 'Prachin Buri',
'46': 'Chon Buri',
'47': 'Rayong',
'48': 'Chanthaburi',
'49': 'Trat',
'50': 'Kanchanaburi',
'51': 'Suphan Buri',
'52': 'Ratchaburi',
'53': 'Nakhon Pathom',
'54': 'Samut Songkhram',
'55': 'Samut Sakhon',
'56': 'Phetchaburi',
'57': 'Prachuap Khiri Khan',
'58': 'Chumphon',
'59': 'Ranong',
'60': 'Surat Thani',
'61': 'Phangnga',
'62': 'Phuket',
'63': 'Krabi',
'64': 'Nakhon Si Thammarat',
'65': 'Trang',
'66': 'Phatthalung',
'67': 'Satun',
'68': 'Songkhla',
'69': 'Pattani',
'70': 'Yala',
'71': 'Ubon Ratchathani',
'72': 'Yasothon',
'73': 'Nakhon Phanom',
'74': 'Prachin Buri',
'75': 'Ubon Ratchathani',
'76': 'Udon Thani',
'77': 'Amnat Charoen',
'78': 'Mukdahan',
'79': 'Nong Bua Lamphu',
'80': 'Sa Kaeo'},
'TJ': {'01': 'Kuhistoni Badakhshon', '02': 'Khatlon', '03': 'Sughd'},
'TM': {'01': 'Ahal',
'02': 'Balkan',
'03': 'Dashoguz',
'04': 'Lebap',
'05': 'Mary'},
'TN': {'02': 'Kasserine',
'03': 'Kairouan',
'06': 'Jendouba',
'10': 'Qafsah',
'14': 'El Kef',
'15': 'Al Mahdia',
'16': 'Al Munastir',
'17': 'Bajah',
'18': 'Bizerte',
'19': 'Nabeul',
'22': 'Siliana',
'23': 'Sousse',
'27': 'Ben Arous',
'28': 'Madanin',
'29': 'Gabes',
'31': 'Kebili',
'32': 'Sfax',
'33': 'Sidi Bou Zid',
'34': 'Tataouine',
'35': 'Tozeur',
'36': 'Tunis',
'37': 'Zaghouan',
'38': 'Aiana',
'39': 'Manouba'},
'TO': {'01': 'Ha', '02': 'Tongatapu', '03': 'Vava'},
'TR': {'02': 'Adiyaman',
'03': 'Afyonkarahisar',
'04': 'Agri',
'05': 'Amasya',
'07': 'Antalya',
'08': 'Artvin',
'09': 'Aydin',
'10': 'Balikesir',
'11': 'Bilecik',
'12': 'Bingol',
'13': 'Bitlis',
'14': 'Bolu',
'15': 'Burdur',
'16': 'Bursa',
'17': 'Canakkale',
'19': 'Corum',
'20': 'Denizli',
'21': 'Diyarbakir',
'22': 'Edirne',
'23': 'Elazig',
'24': 'Erzincan',
'25': 'Erzurum',
'26': 'Eskisehir',
'28': 'Giresun',
'31': 'Hatay',
'32': 'Mersin',
'33': 'Isparta',
'34': 'Istanbul',
'35': 'Izmir',
'37': 'Kastamonu',
'38': 'Kayseri',
'39': 'Kirklareli',
'40': 'Kirsehir',
'41': 'Kocaeli',
'43': 'Kutahya',
'44': 'Malatya',
'45': 'Manisa',
'46': 'Kahramanmaras',
'48': 'Mugla',
'49': 'Mus',
'50': 'Nevsehir',
'52': 'Ordu',
'53': 'Rize',
'54': 'Sakarya',
'55': 'Samsun',
'57': 'Sinop',
'58': 'Sivas',
'59': 'Tekirdag',
'60': 'Tokat',
'61': 'Trabzon',
'62': 'Tunceli',
'63': 'Sanliurfa',
'64': 'Usak',
'65': 'Van',
'66': 'Yozgat',
'68': 'Ankara',
'69': 'Gumushane',
'70': 'Hakkari',
'71': 'Konya',
'72': 'Mardin',
'73': 'Nigde',
'74': 'Siirt',
'75': 'Aksaray',
'76': 'Batman',
'77': 'Bayburt',
'78': 'Karaman',
'79': 'Kirikkale',
'80': 'Sirnak',
'81': 'Adana',
'82': 'Cankiri',
'83': 'Gaziantep',
'84': 'Kars',
'85': 'Zonguldak',
'86': 'Ardahan',
'87': 'Bartin',
'88': 'Igdir',
'89': 'Karabuk',
'90': 'Kilis',
'91': 'Osmaniye',
'92': 'Yalova',
'93': 'Duzce'},
'TT': {'01': 'Arima',
'02': 'Caroni',
'03': 'Mayaro',
'04': 'Nariva',
'05': 'Port-of-Spain',
'06': 'Saint Andrew',
'07': 'Saint David',
'08': 'Saint George',
'09': 'Saint Patrick',
'10': 'San Fernando',
'11': 'Tobago',
'12': 'Victoria'},
'TW': {'01': 'Fu-chien',
'02': 'Kao-hsiung',
'03': "T'ai-pei",
'04': "T'ai-wan"},
'TZ': {'02': 'Pwani',
'03': 'Dodoma',
'04': 'Iringa',
'05': 'Kigoma',
'06': 'Kilimanjaro',
'07': 'Lindi',
'08': 'Mara',
'09': 'Mbeya',
'10': 'Morogoro',
'11': 'Mtwara',
'12': 'Mwanza',
'13': 'Pemba North',
'14': 'Ruvuma',
'15': 'Shinyanga',
'16': 'Singida',
'17': 'Tabora',
'18': 'Tanga',
'19': 'Kagera',
'20': 'Pemba South',
'21': 'Zanzibar Central',
'22': 'Zanzibar North',
'23': 'Dar es Salaam',
'24': 'Rukwa',
'25': 'Zanzibar Urban',
'26': 'Arusha',
'27': 'Manyara'},
'UA': {'01': "Cherkas'ka Oblast'",
'02': "Chernihivs'ka Oblast'",
'03': "Chernivets'ka Oblast'",
'04': "Dnipropetrovs'ka Oblast'",
'05': "Donets'ka Oblast'",
'06': "Ivano-Frankivs'ka Oblast'",
'07': "Kharkivs'ka Oblast'",
'08': "Khersons'ka Oblast'",
'09': "Khmel'nyts'ka Oblast'",
'10': "Kirovohrads'ka Oblast'",
'11': 'Krym',
'12': 'Kyyiv',
'13': "Kyyivs'ka Oblast'",
'14': "Luhans'ka Oblast'",
'15': "L'vivs'ka Oblast'",
'16': "Mykolayivs'ka Oblast'",
'17': "Odes'ka Oblast'",
'18': "Poltavs'ka Oblast'",
'19': "Rivnens'ka Oblast'",
'20': "Sevastopol'",
'21': "Sums'ka Oblast'",
'22': "Ternopil's'ka Oblast'",
'23': "Vinnyts'ka Oblast'",
'24': "Volyns'ka Oblast'",
'25': "Zakarpats'ka Oblast'",
'26': "Zaporiz'ka Oblast'",
'27': "Zhytomyrs'ka Oblast'"},
'UG': {'26': 'Apac',
'28': 'Bundibugyo',
'29': 'Bushenyi',
'30': 'Gulu',
'31': 'Hoima',
'33': 'Jinja',
'36': 'Kalangala',
'37': 'Kampala',
'38': 'Kamuli',
'39': 'Kapchorwa',
'40': 'Kasese',
'41': 'Kibale',
'42': 'Kiboga',
'43': 'Kisoro',
'45': 'Kotido',
'46': 'Kumi',
'47': 'Lira',
'50': 'Masindi',
'52': 'Mbarara',
'56': 'Mubende',
'58': 'Nebbi',
'59': 'Ntungamo',
'60': 'Pallisa',
'61': 'Rakai',
'65': 'Adjumani',
'66': 'Bugiri',
'67': 'Busia',
'69': 'Katakwi',
'70': 'Luwero',
'71': 'Masaka',
'72': 'Moyo',
'73': 'Nakasongola',
'74': 'Sembabule',
'76': 'Tororo',
'77': 'Arua',
'78': 'Iganga',
'79': 'Kabarole',
'80': 'Kaberamaido',
'81': 'Kamwenge',
'82': 'Kanungu',
'83': 'Kayunga',
'84': 'Kitgum',
'85': 'Kyenjojo',
'86': 'Mayuge',
'87': 'Mbale',
'88': 'Moroto',
'89': 'Mpigi',
'90': 'Mukono',
'91': 'Nakapiripirit',
'92': 'Pader',
'93': 'Rukungiri',
'94': 'Sironko',
'95': 'Soroti',
'96': 'Wakiso',
'97': 'Yumbe'},
'US': {'AA': 'Armed Forces Americas',
'AE': 'Armed Forces Europe, Middle East, & Canada',
'AK': 'Alaska',
'AL': 'Alabama',
'AP': 'Armed Forces Pacific',
'AR': 'Arkansas',
'AS': 'American Samoa',
'AZ': 'Arizona',
'CA': 'California',
'CO': 'Colorado',
'CT': 'Connecticut',
'DC': 'District of Columbia',
'DE': 'Delaware',
'FL': 'Florida',
'FM': 'Federated States of Micronesia',
'GA': 'Georgia',
'GU': 'Guam',
'HI': 'Hawaii',
'IA': 'Iowa',
'ID': 'Idaho',
'IL': 'Illinois',
'IN': 'Indiana',
'KS': 'Kansas',
'KY': 'Kentucky',
'LA': 'Louisiana',
'MA': 'Massachusetts',
'MD': 'Maryland',
'ME': 'Maine',
'MH': 'Marshall Islands',
'MI': 'Michigan',
'MN': 'Minnesota',
'MO': 'Missouri',
'MP': 'Northern Mariana Islands',
'MS': 'Mississippi',
'MT': 'Montana',
'NC': 'North Carolina',
'ND': 'North Dakota',
'NE': 'Nebraska',
'NH': 'New Hampshire',
'NJ': 'New Jersey',
'NM': 'New Mexico',
'NV': 'Nevada',
'NY': 'New York',
'OH': 'Ohio',
'OK': 'Oklahoma',
'OR': 'Oregon',
'PA': 'Pennsylvania',
'PW': 'Palau',
'RI': 'Rhode Island',
'SC': 'South Carolina',
'SD': 'South Dakota',
'TN': 'Tennessee',
'TX': 'Texas',
'UT': 'Utah',
'VA': 'Virginia',
'VI': 'Virgin Islands',
'VT': 'Vermont',
'WA': 'Washington',
'WI': 'Wisconsin',
'WV': 'West Virginia',
'WY': 'Wyoming'},
'UY': {'01': 'Artigas',
'02': 'Canelones',
'03': 'Cerro Largo',
'04': 'Colonia',
'05': 'Durazno',
'06': 'Flores',
'07': 'Florida',
'08': 'Lavalleja',
'09': 'Maldonado',
'10': 'Montevideo',
'11': 'Paysandu',
'12': 'Rio Negro',
'13': 'Rivera',
'14': 'Rocha',
'15': 'Salto',
'16': 'San Jose',
'17': 'Soriano',
'18': 'Tacuarembo',
'19': 'Treinta y Tres'},
'UZ': {'01': 'Andijon',
'02': 'Bukhoro',
'03': 'Farghona',
'04': 'Jizzakh',
'05': 'Khorazm',
'06': 'Namangan',
'07': 'Nawoiy',
'08': 'Qashqadaryo',
'09': 'Qoraqalpoghiston',
'10': 'Samarqand',
'11': 'Sirdaryo',
'12': 'Surkhondaryo',
'13': 'Toshkent',
'14': 'Toshkent'},
'VC': {'01': 'Charlotte',
'02': 'Saint Andrew',
'03': 'Saint David',
'04': 'Saint George',
'05': 'Saint Patrick',
'06': 'Grenadines'},
'VE': {'01': 'Amazonas',
'02': 'Anzoategui',
'03': 'Apure',
'04': 'Aragua',
'05': 'Barinas',
'06': 'Bolivar',
'07': 'Carabobo',
'08': 'Cojedes',
'09': 'Delta Amacuro',
'11': 'Falcon',
'12': 'Guarico',
'13': 'Lara',
'14': 'Merida',
'15': 'Miranda',
'16': 'Monagas',
'17': 'Nueva Esparta',
'18': 'Portuguesa',
'19': 'Sucre',
'20': 'Tachira',
'21': 'Trujillo',
'22': 'Yaracuy',
'23': 'Zulia',
'24': 'Dependencias Federales',
'25': 'Distrito Federal',
'26': 'Vargas'},
'VN': {'01': 'An Giang',
'03': 'Ben Tre',
'05': 'Cao Bang',
'09': 'Dong Thap',
'13': 'Hai Phong',
'20': 'Ho Chi Minh',
'21': 'Kien Giang',
'23': 'Lam Dong',
'24': 'Long An',
'30': 'Quang Ninh',
'32': 'Son La',
'33': 'Tay Ninh',
'34': 'Thanh Hoa',
'35': 'Thai Binh',
'37': 'Tien Giang',
'39': 'Lang Son',
'43': 'Dong Nai',
'44': 'Ha Noi',
'45': 'Ba Ria-Vung Tau',
'46': 'Binh Dinh',
'47': 'Binh Thuan',
'49': 'Gia Lai',
'50': 'Ha Giang',
'52': 'Ha Tinh',
'53': 'Hoa Binh',
'54': 'Khanh Hoa',
'55': 'Kon Tum',
'58': 'Nghe An',
'59': 'Ninh Binh',
'60': 'Ninh Thuan',
'61': 'Phu Yen',
'62': 'Quang Binh',
'63': 'Quang Ngai',
'64': 'Quang Tri',
'65': 'Soc Trang',
'66': 'Thua Thien-Hue',
'67': 'Tra Vinh',
'68': 'Tuyen Quang',
'69': 'Vinh Long',
'70': 'Yen Bai',
'71': 'Bac Giang',
'72': 'Bac Kan',
'73': 'Bac Lieu',
'74': 'Bac Ninh',
'75': 'Binh Duong',
'76': 'Binh Phuoc',
'77': 'Ca Mau',
'78': 'Da Nang',
'79': 'Hai Duong',
'80': 'Ha Nam',
'81': 'Hung Yen',
'82': 'Nam Dinh',
'83': 'Phu Tho',
'84': 'Quang Nam',
'85': 'Thai Nguyen',
'86': 'Vinh Phuc',
'87': 'Can Tho',
'88': 'Dac Lak',
'89': 'Lai Chau',
'90': 'Lao Cai',
'91': 'Dak Nong',
'92': 'Dien Bien',
'93': 'Hau Giang'},
'VU': {'05': 'Ambrym',
'06': 'Aoba',
'07': 'Torba',
'08': 'Efate',
'09': 'Epi',
'10': 'Malakula',
'11': 'Paama',
'12': 'Pentecote',
'13': 'Sanma',
'14': 'Shepherd',
'15': 'Tafea',
'16': 'Malampa',
'17': 'Penama',
'18': 'Shefa'},
'WS': {'02': 'Aiga-i-le-Tai',
'03': 'Atua',
'04': 'Fa',
'05': 'Gaga',
'06': 'Va',
'07': 'Gagaifomauga',
'08': 'Palauli',
'09': 'Satupa',
'10': 'Tuamasaga',
'11': 'Vaisigano'},
'YE': {'01': 'Abyan',
'02': 'Adan',
'03': 'Al Mahrah',
'04': 'Hadramawt',
'05': 'Shabwah',
'06': 'Lahij',
'07': "Al Bayda'",
'08': 'Al Hudaydah',
'09': 'Al Jawf',
'10': 'Al Mahwit',
'11': 'Dhamar',
'12': 'Hajjah',
'13': 'Ibb',
'14': "Ma'rib",
'15': "Sa'dah",
'16': "San'a'",
'17': 'Taizz',
'18': 'Ad Dali',
'19': 'Amran',
'20': "Al Bayda'",
'21': 'Al Jawf',
'22': 'Hajjah',
'23': 'Ibb',
'24': 'Lahij',
'25': 'Taizz'},
'ZA': {'01': 'North-Western Province',
'02': 'KwaZulu-Natal',
'03': 'Free State',
'05': 'Eastern Cape',
'06': 'Gauteng',
'07': 'Mpumalanga',
'08': 'Northern Cape',
'09': 'Limpopo',
'10': 'North-West',
'11': 'Western Cape'},
'ZM': {'01': 'Western',
'02': 'Central',
'03': 'Eastern',
'04': 'Luapula',
'05': 'Northern',
'06': 'North-Western',
'07': 'Southern',
'08': 'Copperbelt',
'09': 'Lusaka'},
'ZW': {'01': 'Manicaland',
'02': 'Midlands',
'03': 'Mashonaland Central',
'04': 'Mashonaland East',
'05': 'Mashonaland West',
'06': 'Matabeleland North',
'07': 'Matabeleland South',
'08': 'Masvingo',
'09': 'Bulawayo',
'10': 'Harare'}}
def region_name_by_country_and_region(country_code, region_name):
    """Resolve a GeoIP region code to its human-readable name.

    Looks up *region_name* (a region code such as ``'01'`` or ``'CA'``)
    in the mapping registered for *country_code*.  When either the
    country or the region code is unknown, the code is returned
    unchanged so callers always get a displayable string back.
    """
    return _country.get(country_code, {}).get(region_name, region_name)
|
mjschultz/django-password-policies | refs/heads/master | password_policies/views.py | 1 | from django.utils import timezone
from django.contrib.auth.decorators import login_required
from django.contrib.auth import get_user_model
from django.core import signing
from django.core.urlresolvers import reverse
from django.shortcuts import resolve_url
from django.utils.decorators import method_decorator
from django.utils.http import base36_to_int
from django.views.defaults import permission_denied
from django.views.generic import TemplateView
from django.views.generic.base import View
from django.views.generic.edit import FormView
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from password_policies.conf import settings
from password_policies.forms import PasswordPoliciesForm
from password_policies.forms import PasswordPoliciesChangeForm
from password_policies.forms import PasswordResetForm
class LoggedOutMixin(View):
    """
    A view mixin that only serves anonymous (not logged in) users.

    Authenticated users receive a 403 response rendered with the
    template named by ``settings.TEMPLATE_403_PAGE``.

    .. note::
        This should be the left-most mixin of a view.
    """
    def dispatch(self, request, *args, **kwargs):
        # Anonymous visitors proceed normally; everyone else is refused.
        if not request.user.is_authenticated():
            return super(LoggedOutMixin, self).dispatch(request,
                                                        *args,
                                                        **kwargs)
        return permission_denied(
            request, template_name=settings.TEMPLATE_403_PAGE)
class PasswordChangeDoneView(TemplateView):
    """
    Displays a confirmation page after a user's password was changed.
    """
    #: The template rendered by this view; identical to the one used by
    #: :func:`django.contrib.views.password_change_done`.
    template_name = 'registration/password_change_done.html'
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Only authenticated users may see the confirmation page.
        parent = super(PasswordChangeDoneView, self)
        return parent.dispatch(*args, **kwargs)
class PasswordChangeFormView(FormView):
    """
    A view that allows logged in users to change their password.
    """
    #: The form used by this view.
    form_class = PasswordPoliciesChangeForm
    #: An URL to redirect to after the form has been successfully
    #: validated.
    success_url = None
    #: The template used by this view. Defaults to
    #: the same template used
    #: by :func:`django.contrib.views.password_change`.
    template_name = 'registration/password_change_form.html'
    #: Name of the request parameter that carries the URL to redirect to
    #: after the change. Defaults to ``settings.REDIRECT_FIELD_NAME``.
    redirect_field_name = settings.REDIRECT_FIELD_NAME
    # NOTE(review): ``sensitive_post_parameters`` is commented out, so
    # submitted passwords may leak into error reports — confirm this is
    # intentional before release.
    # @method_decorator(sensitive_post_parameters)
    @method_decorator(csrf_protect)
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        """
        Dispatches the request, optionally overriding
        :attr:`redirect_field_name` via a ``redirect_field_name`` keyword
        argument.
        """
        # Pop the kwarg before delegating so the parent dispatch does not
        # receive an unexpected keyword argument.
        redirect_field_name = kwargs.pop('redirect_field_name', None)
        if redirect_field_name:
            self.redirect_field_name = redirect_field_name
        return super(PasswordChangeFormView, self).dispatch(*args, **kwargs)
    def form_valid(self, form):
        """Saves the new password, then redirects as usual."""
        form.save()
        return super(PasswordChangeFormView, self).form_valid(form)
    def get_form(self, form_class):
        """Instantiates the form with the current user as first argument."""
        return form_class(self.request.user, **self.get_form_kwargs())
    def get_success_url(self):
        """
        Returns a query string field with a previous URL if available (Mimicing
        the login view. Used on forced password changes, to know which URL the
        user was requesting before the password change.)
        If not returns the :attr:`~PasswordChangeFormView.success_url` attribute
        if set, otherwise the URL to the :class:`PasswordChangeDoneView`.
        """
        # Record in the session that the password was just changed, so the
        # middleware/decorators reading these keys stop forcing a change.
        checked = '_password_policies_last_checked'
        last = '_password_policies_last_changed'
        required = '_password_policies_change_required'
        now = timezone.now()
        self.request.session[checked] = now
        self.request.session[last] = now
        self.request.session[required] = False
        # Prefer an explicit redirect target posted with the form.
        redirect_to = self.request.POST.get(self.redirect_field_name, '')
        if redirect_to:
            url = redirect_to
        elif self.success_url:
            url = self.success_url
        else:
            url = reverse('password_change_done')
        return url
    def get_context_data(self, **kwargs):
        """
        Adds the redirect target (under :attr:`redirect_field_name`) to the
        template context so the form can round-trip it.
        """
        name = self.redirect_field_name
        # NOTE(review): ``request.REQUEST`` was removed in Django 1.9;
        # this code assumes an older Django — verify the supported range.
        kwargs[name] = self.request.REQUEST.get(name, '')
        return super(PasswordChangeFormView, self).get_context_data(**kwargs)
class PasswordResetCompleteView(LoggedOutMixin, TemplateView):
    """
    Final page of the password reset flow, shown once the new password
    has been set successfully.
    """
    #: The template rendered by this view; identical to the one used by
    #: :func:`django.contrib.views.password_reset_complete`.
    template_name = 'registration/password_reset_complete.html'
    def get_context_data(self, **kwargs):
        """
        Adds ``login_url`` (resolved from ``settings.LOGIN_URL``) to the
        context so the template can link back to the login page.
        """
        kwargs['login_url'] = resolve_url(settings.LOGIN_URL)
        parent = super(PasswordResetCompleteView, self)
        return parent.get_context_data(**kwargs)
class PasswordResetConfirmView(LoggedOutMixin, FormView):
    """
    Lets an anonymous user set a new password after following a signed
    password-reset link (``uidb36``/``timestamp``/``signature`` URL args).
    """
    #: The form used by this view.
    form_class = PasswordPoliciesForm
    #: An URL to redirect to after the form has been successfully
    #: validated.
    success_url = None
    #: The template used by this view. Defaults to
    #: the same template used
    #: by :func:`django.contrib.views.password_reset_confirm`.
    template_name = 'registration/password_reset_confirm.html'
    # NOTE(review): ``sensitive_post_parameters`` is commented out, so
    # submitted passwords may leak into error reports — confirm intentional.
    # @method_decorator(sensitive_post_parameters)
    @method_decorator(never_cache)
    def dispatch(self, request, *args, **kwargs):
        """
        Validates the reset token from the URL before any GET/POST handling.

        Sets :attr:`validlink` to ``True`` only when the user exists and the
        timestamped signature over the user's current password hash verifies
        within ``PASSWORD_RESET_TIMEOUT_DAYS``. Signing over the password
        hash makes each link single-use: changing the password invalidates it.
        """
        # Positional URL args: (uidb36, timestamp, signature).
        # NOTE(review): if any arg is falsy, ``self.user`` is never set and
        # ``get_context_data`` would raise AttributeError — presumably the
        # URLconf guarantees all three; confirm.
        self.uidb36 = args[0]
        self.timestamp = args[1]
        self.signature = args[2]
        self.validlink = False
        if self.uidb36 and self.timestamp and self.signature:
            try:
                uid_int = base36_to_int(self.uidb36)
                self.user = get_user_model().objects.get(id=uid_int)
            except (ValueError, get_user_model().DoesNotExist):
                self.user = None
            else:
                signer = signing.TimestampSigner()
                max_age = settings.PASSWORD_RESET_TIMEOUT_DAYS * 24 * 60 * 60
                # Recreate the signed value exactly as the reset email
                # built it: "<password hash>:<timestamp>:<signature>".
                l = (self.user.password, self.timestamp, self.signature)
                try:
                    signer.unsign(':'.join(l), max_age=max_age)
                except (signing.BadSignature, signing.SignatureExpired):
                    # Invalid or expired link; fall through with
                    # validlink == False so the template shows an error.
                    pass
                else:
                    self.validlink = True
        return super(PasswordResetConfirmView, self).dispatch(request,
                                                              *args,
                                                              **kwargs)
    def form_valid(self, form):
        """Saves the new password, then redirects as usual."""
        form.save()
        return super(PasswordResetConfirmView, self).form_valid(form)
    def get(self, request, *args, **kwargs):
        """Renders the form only for valid links, an error page otherwise."""
        if self.validlink:
            return super(PasswordResetConfirmView, self).get(request,
                                                             *args,
                                                             **kwargs)
        return self.render_to_response(self.get_context_data())
    def get_context_data(self, **kwargs):
        """Exposes ``user`` and ``validlink`` to the template."""
        kwargs['user'] = self.user
        kwargs['validlink'] = self.validlink
        return super(PasswordResetConfirmView, self).get_context_data(**kwargs)
    def get_form(self, form_class):
        """Instantiates the form with the target user as first argument."""
        return form_class(self.user, **self.get_form_kwargs())
    def get_success_url(self):
        """
        Redirects to :attr:`~PasswordResetConfirmView.success_url`
        if set, otherwise to the :class:`PasswordResetCompleteView`.
        """
        if self.success_url:
            url = self.success_url
        else:
            url = reverse('password_reset_complete')
        return url
    def post(self, request, *args, **kwargs):
        """Processes the form only for valid links, an error page otherwise."""
        if self.validlink:
            return super(PasswordResetConfirmView, self).post(request,
                                                              *args,
                                                              **kwargs)
        return self.render_to_response(self.get_context_data())
class PasswordResetDoneView(LoggedOutMixin, TemplateView):
    """
    Informs the user that a password reset has been requested.

    Only anonymous users may access it (see :class:`LoggedOutMixin`).
    """
    #: The template rendered by this view; identical to the one used by
    #: :func:`django.contrib.views.password_reset_done`.
    template_name = 'registration/password_reset_done.html'
class PasswordResetFormView(LoggedOutMixin, FormView):
    """
    A view that allows registered users to change their password.
    """
    #: Relative template path used for the plain-text body of the mail.
    email_template_name = 'registration/password_reset_email.txt'
    #: Relative template path used for the HTML attachment of the mail.
    email_html_template_name = 'registration/password_reset_email.html'
    #: The form used by this view.
    form_class = PasswordResetForm
    #: The email address used as the sender of the mail.
    from_email = None
    #: When ``True`` the view behaves as part of an admin site: the
    #: request's HTTP host overrides the configured domain and site name.
    is_admin_site = False
    #: Relative template path used to build the subject of the mail.
    subject_template_name = 'registration/password_reset_subject.txt'
    #: URL to redirect to after the form validates successfully.
    success_url = None
    #: The template rendered by this view. Defaults to the same template
    #: used by :func:`django.contrib.views.password_reset`.
    template_name = 'registration/password_reset_form.html'

    @method_decorator(csrf_protect)
    def dispatch(self, request, *args, **kwargs):
        """CSRF-protect the whole view."""
        return super(PasswordResetFormView, self).dispatch(
            request, *args, **kwargs)

    def form_valid(self, form):
        """Send the reset mail via the form, then redirect as usual."""
        save_kwargs = {
            'use_https': self.request.is_secure(),
            'from_email': self.from_email,
            'email_template_name': self.email_template_name,
            'email_html_template_name': self.email_html_template_name,
            'subject_template_name': self.subject_template_name,
            'request': self.request,
        }
        if self.is_admin_site:
            # Admin sites use the request's host for the links in the mail.
            save_kwargs['domain_override'] = self.request.META['HTTP_HOST']
        form.save(**save_kwargs)
        return super(PasswordResetFormView, self).form_valid(form)

    def get_success_url(self):
        """
        Redirects to :attr:`~PasswordResetFormView.success_url`
        if set, otherwise to the :class:`PasswordResetDoneView`.
        """
        return self.success_url or reverse('password_reset_done')
|
horken7/openhack | refs/heads/master | pinguin/data_collection/collect_data.py | 1 | import requests, json
import numpy as np
from pinguin.models import Jobs, Heatmap, Housing
from .arbetsformedlingen_api import HandlerArbetsformedlingenAPI
from .google_geocoding_api import HandlerGoogleGeocodingAPI
from .booli_api import HandlerBooliAPI
from geopy.geocoders import Nominatim
class CollectData:
    """Harvest data from external APIs and persist it via the Django ORM.

    Instantiating this class immediately performs every network call
    (Arbetsformedlingen job ads, Google Geocoding, Booli housing
    listings, Nominatim geocoding) and writes Heatmap, Jobs and Housing
    rows to the database as a side effect of ``__init__``.
    """
    def __init__(self):
        # One handler per external service.
        self.abfAPI = HandlerArbetsformedlingenAPI()
        self.ggcAPI = HandlerGoogleGeocodingAPI()
        self.houseAPI = HandlerBooliAPI()
        # Cities/occupations to query; population figures are hard-coded
        # constants (their source is not recorded here).
        self.cities = ['stockholm', 'göteborg', 'malmö', 'visby', 'karlstad']
        self.population = {'stockholm': 942370, 'göteborg': 572779, 'malmö': 342457, 'visby': 23576, 'karlstad': 61685}
        self.occupations = ['lärare', 'kock', 'mjukvaruutvecklare', 'elektriker', 'vård']
        self.geolocator = Nominatim()
        # Run all three collection passes immediately.
        self.get_heatmap()
        self.get_jobs_city()
        self.get_houses()
    def get_heatmap(self):
        """Compute a per-city and per-occupation "heat" score and save it.

        The raw score is (open positions * housing listings) / population.
        One 'all'-occupation Heatmap row is saved per city; afterwards a
        normalised per-occupation matrix is saved row by row.
        """
        heats = []
        for city in self.cities:
            listings = self.houseAPI.listings_city(city)
            amount_houses = listings['totalCount']
            inner_heat = []
            amount_city = 0
            for occupation in self.occupations:
                json_data = self.abfAPI.get_occupation_city(occupation, city)
                amount_occupation = json_data['matchningslista']['antal_platserTotal']
                amount_city = amount_city + amount_occupation
                # Heat metric: vacancies weighted by housing supply,
                # scaled down by the city's population.
                value = (amount_occupation * amount_houses) / self.population[city]
                inner_heat.append(value)
            heats.append(inner_heat)
            # location = self.geolocator.geocode(city)
            # latitude = location.latitude
            # longitude = location.longitude
            latitude, longitude = self.ggcAPI.get_coordinates_city(city)
            value_city = (amount_city * amount_houses) / self.population[city]
            h = Heatmap(city=city, occupation='all', longitude=longitude, latitude=latitude, heat=value_city)
            h.save()
            print(city)
        heatmatrix = np.asmatrix(heats)
        # NOTE(review): len(heatmatrix) is the ROW count (cities) but this
        # loop indexes COLUMNS (occupations); it only works because both
        # lists happen to contain 5 entries. TODO: confirm and use
        # heatmatrix.shape[1] instead.
        for i in range(len(heatmatrix)):
            heatmatrix[:, i] = heatmatrix[:, i] / max(heatmatrix[:, i])
        for i, city in enumerate(self.cities):
            location = self.geolocator.geocode(city)
            latitude = location.latitude
            longitude = location.longitude
            # latitude, longitude = self.ggcAPI.get_coordinates_city('stockholm')
            for j, occupation in enumerate(self.occupations):
                heat = heatmatrix[i,j]
                h = Heatmap(city=city, occupation=occupation, longitude=longitude, latitude=latitude, heat=heat)
                h.save()
    def get_jobs_city(self):
        """Save up to 15 geocoded job ads per (city, occupation) pair."""
        for city in self.cities:
            for occupation in self.occupations:
                try:
                    json_data = self.abfAPI.get_occupation_city(occupation, city)
                    ads = json_data['matchningslista']['matchningdata']
                    for job in ads[:15]:
                        ad_id = job['annonsid']
                        json_data = self.abfAPI.get_platsannons(ad_id)
                        company = json_data['platsannons']['arbetsplats']['arbetsplatsnamn']
                        address = json_data['platsannons']['arbetsplats']['besoksadress']
                        zipcode = json_data['platsannons']['arbetsplats']['postnummer']
                        city2 = json_data['platsannons']['arbetsplats']['postort']
                        try:
                            location = self.geolocator.geocode(address + ' ' + zipcode + ' ' + city2)
                            latitude = location.latitude
                            longitude = location.longitude
                            j = Jobs(company=company, type=occupation, city=city, latitude=latitude, longitude=longitude, ad_id=ad_id)
                            j.save()
                        # NOTE(review): bare except silently drops ads whose
                        # address cannot be geocoded -- and every other error
                        # too, including KeyboardInterrupt. Prefer
                        # `except Exception`.
                        except:
                            pass
                    print(city, occupation)
                # NOTE(review): bare except also hides API/JSON failures.
                except:
                    pass
    def get_houses(self):
        """Save up to 15 Booli housing listings per city."""
        for city in self.cities:
            listings = self.houseAPI.listings_city(city)
            houses = listings['listings']
            for h in houses[:15]:
                try:
                    address = h['location']['address']['streetAddress']
                    longitude = h['location']['position']['longitude']
                    latitude = h['location']['position']['latitude']
                    prize = h['listPrice']
                    squaremeters = h['livingArea']
                    ad_id = h['booliId']
                    ha = Housing(address=address, city=city, longitude=longitude, latitude=latitude, prize=prize, squaremeters=squaremeters, ad_id=ad_id)
                    ha.save()
                # NOTE(review): listings missing any field are skipped
                # silently; bare except is too broad here as well.
                except:
                    pass
xiami9916057/volatility | refs/heads/master | volatility/plugins/mac/check_trap_table.py | 44 | # Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.plugins.mac.common as common
class mac_check_trap_table(common.AbstractMacCommand):
    """ Checks to see if mach trap table entries are hooked """
    def _set_vtypes(self):
        """Register a profile-appropriate 'mach_trap' vtype.

        The size of a mach trap table entry differs by address size
        (32 vs 64 bit) and by the profile's 'major' version metadata,
        so the correct layout is selected and compiled into the
        profile at runtime.
        """
        # 32-bit, major == 10: 16-byte entries, 4-byte function pointer.
        x86_10_vtypes = {
            'mach_trap' : [ 16, {
            'mach_trap_function': [ 4, ['pointer', ['void']]]
            }]}
        # 32-bit, other majors: 8-byte entries.
        x86_other_vtypes = {
            'mach_trap' : [ 8, {
            'mach_trap_function': [ 4, ['pointer', ['void']]]
            }]}
        # 64-bit, major == 10: 40-byte entries, 8-byte function pointer.
        x64_10_vtypes = {
            'mach_trap' : [ 40, {
            'mach_trap_function': [ 8, ['pointer', ['void']]]
            }]}
        # 64-bit, other majors: 16-byte entries.
        x64_other_vtypes = {
            'mach_trap' : [ 16, {
            'mach_trap_function': [ 8, ['pointer', ['void']]]
            }]}
        arch = self.addr_space.profile.metadata.get('memory_model', '32bit')
        major = self.addr_space.profile.metadata.get('major', 0)
        if arch == "32bit":
            if major == 10:
                vtypes = x86_10_vtypes
            else:
                vtypes = x86_other_vtypes
        else:
            if major == 10:
                vtypes = x64_10_vtypes
            else:
                vtypes = x64_other_vtypes
        # Recompile so the newly-registered type can be instantiated.
        self.addr_space.profile.vtypes.update(vtypes)
        self.addr_space.profile.compile()
    def calculate(self):
        """Walk the mach trap table and flag entries that point outside
        the kernel's known symbol addresses.

        Yields:
            (table address, table name, index, entry address,
             symbol name or "HOOKED", hooked flag) tuples.
        """
        common.set_plugin_members(self)
        self._set_vtypes()
        # Every known kernel symbol address; an entry pointing anywhere
        # else is treated as hooked.
        sym_addrs = self.profile.get_all_addresses()
        table_addr = self.addr_space.profile.get_symbol("_mach_trap_table")
        ntraps = obj.Object("int", offset = self.addr_space.profile.get_symbol("_mach_trap_count"), vm = self.addr_space)
        traps = obj.Object(theType = "Array", offset = table_addr, vm = self.addr_space, count = ntraps, targetType = "mach_trap")
        for (i, trap) in enumerate(traps):
            ent_addr = trap.mach_trap_function.v()
            # Skip null/unreadable entries.
            if not ent_addr:
                continue
            hooked = ent_addr not in sym_addrs
            if hooked == False:
                sym_name = self.profile.get_symbol_by_address("kernel", ent_addr)
            else:
                sym_name = "HOOKED"
            yield (table_addr, "TrapTable", i, ent_addr, sym_name, hooked)
    def render_text(self, outfd, data):
        """Render the tuples produced by calculate() as a text table."""
        self.table_header(outfd, [("Table Name", "15"),
                                  ("Index", "6"),
                                  ("Address", "[addrpad]"),
                                  ("Symbol", "<50")])
        for (_, table_name, i, call_addr, sym_name, _) in data:
            self.table_row(outfd, table_name, i, call_addr, sym_name)
|
eyohansa/django | refs/heads/master | django/contrib/gis/maps/google/overlays.py | 151 | from __future__ import unicode_literals
from functools import total_ordering
from django.contrib.gis.geos import (
LinearRing, LineString, Point, Polygon, fromstr,
)
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import html_safe
@html_safe
@python_2_unicode_compatible
class GEvent(object):
    """
    A Python wrapper for the Google GEvent object.

    Events can be attached to any object derived from GOverlayBase via
    its add_event() method.

    For more information please see the Google Maps API Reference:
     https://developers.google.com/maps/documentation/javascript/reference#event

    Example:

      from django.shortcuts import render_to_response
      from django.contrib.gis.maps.google import GoogleMap, GEvent, GPolyline

      def sample_request(request):
          polyline = GPolyline('LINESTRING(101 26, 112 26, 102 31)')
          event = GEvent('click',
            'function() { location.href = "http://www.google.com"}')
          polyline.add_event(event)
          return render_to_response('mytemplate.html',
          {'google' : GoogleMap(polylines=[polyline])})
    """

    def __init__(self, event, action):
        """
        Initializes a GEvent object.

        Parameters:

        event:
          The event name (e.g. 'click') as understood by the Google Maps
          API for the object the event is attached to.  Django performs
          no validation of the event type.

        action:
          A string holding a JavaScript function, such as
          'function() { location.href = "newurl";}'.  The string must be
          valid JavaScript; again, Django performs no validation of the
          function.
        """
        self.event = event
        self.action = action

    def __str__(self):
        "Returns the parameter part of a GEvent."
        return '"{0}", {1}'.format(self.event, self.action)
@html_safe
@python_2_unicode_compatible
class GOverlayBase(object):
    """Common machinery shared by the Google overlay wrappers."""

    def __init__(self):
        self.events = []

    def latlng_from_coords(self, coords):
        "Generates a JavaScript array of GLatLng objects for the given coordinates."
        latlngs = ['new GLatLng(%s,%s)' % (y, x) for x, y in coords]
        return '[%s]' % ','.join(latlngs)

    def add_event(self, event):
        "Attaches a GEvent to the overlay object."
        self.events.append(event)

    def __str__(self):
        "The string representation is the JavaScript API call."
        return '{0}({1})'.format(self.__class__.__name__, self.js_params)
class GPolygon(GOverlayBase):
    """
    A Python wrapper for the Google GPolygon object.  For more information
    please see the Google Maps API Reference:
     https://developers.google.com/maps/documentation/javascript/reference#Polygon
    """

    def __init__(self, poly,
                 stroke_color='#0000ff', stroke_weight=2, stroke_opacity=1,
                 fill_color='#0000ff', fill_opacity=0.4):
        """
        Initializes on a GEOS Polygon, or on anything a Polygon can be
        instantiated from (a WKT/hex string or a sequence of rings).
        A Polygon's internal rings are not depicted.

        Keyword Options:

          stroke_color:
            Outline color; defaults to '#0000ff' (blue).

          stroke_weight:
            Outline width, in pixels; defaults to 2.

          stroke_opacity:
            Outline opacity, between 0 and 1; defaults to 1.

          fill_color:
            Fill color; defaults to '#0000ff' (blue).

          fill_opacity:
            Fill opacity; defaults to 0.4.
        """
        # Coerce strings and coordinate sequences into a GEOS Polygon.
        if isinstance(poly, six.string_types):
            poly = fromstr(poly)
        if isinstance(poly, (tuple, list)):
            poly = Polygon(poly)
        if not isinstance(poly, Polygon):
            raise TypeError('GPolygon may only initialize on GEOS Polygons.')

        # The envelope drives the automatic zoom-level computation.
        self.envelope = poly.envelope

        # JavaScript array of `GLatLng` objects for the exterior ring.
        self.points = self.latlng_from_coords(poly.shell.coords)

        # Stroke settings.
        self.stroke_color = stroke_color
        self.stroke_opacity = stroke_opacity
        self.stroke_weight = stroke_weight

        # Fill settings.
        self.fill_color = fill_color
        self.fill_opacity = fill_opacity

        super(GPolygon, self).__init__()

    @property
    def js_params(self):
        params = (self.points, self.stroke_color, self.stroke_weight,
                  self.stroke_opacity, self.fill_color, self.fill_opacity)
        return '%s, "%s", %s, %s, "%s", %s' % params
class GPolyline(GOverlayBase):
    """
    A Python wrapper for the Google GPolyline object.  For more information
    please see the Google Maps API Reference:
     https://developers.google.com/maps/documentation/javascript/reference#Polyline
    """

    def __init__(self, geom, color='#0000ff', weight=2, opacity=1):
        """
        Initializes on a GEOS LineString, LinearRing or Polygon (internal
        rings are not supported), or on anything such a geometry can be
        instantiated from.

        Keyword Options:

          color:
            Polyline color; defaults to '#0000ff' (blue).

          weight:
            Polyline width, in pixels; defaults to 2.

          opacity:
            Polyline opacity, between 0 and 1; defaults to 1.
        """
        # Coerce strings and coordinate sequences into GEOS geometries.
        if isinstance(geom, six.string_types):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):
            geom = Polygon(geom)

        # Pull out the coordinates to render as lat/lng pairs.
        if isinstance(geom, Polygon):
            coords = geom.shell.coords
        elif isinstance(geom, (LineString, LinearRing)):
            coords = geom.coords
        else:
            raise TypeError('GPolyline may only initialize on GEOS LineString, LinearRing, and/or Polygon geometries.')
        self.latlngs = self.latlng_from_coords(coords)

        # The envelope drives the automatic zoom-level computation.
        self.envelope = geom.envelope

        self.color = color
        self.weight = weight
        self.opacity = opacity
        super(GPolyline, self).__init__()

    @property
    def js_params(self):
        return '%s, "%s", %s, %s' % (self.latlngs, self.color, self.weight, self.opacity)
@total_ordering
class GIcon(object):
    """
    Creates a GIcon object to pass into a Gmarker object.

    The keyword arguments map to instance attributes of the same name. These,
    in turn, correspond to a subset of the attributes of the official GIcon
    javascript object:

    https://developers.google.com/maps/documentation/javascript/reference#Icon

    Because a Google map often uses several different icons, a name field has
    been added to the required arguments.

    Required Arguments:
        varname:
            A string which will become the basis for the js variable name of
            the marker, for this reason, your code should assign a unique
            name for each GIcon you instantiate, otherwise there will be
            name space collisions in your javascript.

    Keyword Options:
        image:
            The url of the image to be used as the icon on the map defaults
            to 'G_DEFAULT_ICON'

        iconsize:
            a tuple representing the pixel size of the foreground (not the
            shadow) image of the icon, in the format: (width, height) ex.:

            GIcon('fast_food',
                  image="/media/icon/star.png",
                  iconsize=(15,10))

            Would indicate your custom icon was 15px wide and 10px height.

        shadow:
            the url of the image of the icon's shadow

        shadowsize:
            a tuple representing the pixel size of the shadow image, format is
            the same as ``iconsize``

        iconanchor:
            a tuple representing the pixel coordinate relative to the top left
            corner of the icon image at which this icon is anchored to the map.
            In (x, y) format.  x increases to the right in the Google Maps
            coordinate system and y increases downwards in the Google Maps
            coordinate system.)

        infowindowanchor:
            The pixel coordinate relative to the top left corner of the icon
            image at which the info window is anchored to this icon.

    Identity is based solely on ``varname``: two GIcons with the same
    varname compare equal regardless of their other attributes.
    """
    def __init__(self, varname, image=None, iconsize=None,
                 shadow=None, shadowsize=None, iconanchor=None,
                 infowindowanchor=None):
        self.varname = varname
        self.image = image
        self.iconsize = iconsize
        self.shadow = shadow
        self.shadowsize = shadowsize
        self.iconanchor = iconanchor
        self.infowindowanchor = infowindowanchor

    def __eq__(self, other):
        # Returning NotImplemented for non-GIcon operands lets Python fall
        # back to the other operand's comparison (and ultimately to False
        # for ==) instead of raising AttributeError on `other.varname`.
        if not isinstance(other, GIcon):
            return NotImplemented
        return self.varname == other.varname

    def __lt__(self, other):
        # total_ordering derives <=, > and >= from __eq__ and __lt__.
        if not isinstance(other, GIcon):
            return NotImplemented
        return self.varname < other.varname

    def __hash__(self):
        # XOR with hash of GIcon type so that hash('varname') won't
        # equal hash(GIcon('varname')).
        return hash(self.__class__) ^ hash(self.varname)
class GMarker(GOverlayBase):
    """
    A Python wrapper for the Google GMarker object.  For more information
    please see the Google Maps API Reference:
     https://developers.google.com/maps/documentation/javascript/reference#Marker

    Example:

      from django.shortcuts import render_to_response
      from django.contrib.gis.maps.google.overlays import GMarker, GEvent

      def sample_request(request):
          marker = GMarker('POINT(101 26)')
          event = GEvent('click',
                         'function() { location.href = "http://www.google.com"}')
          marker.add_event(event)
          return render_to_response('mytemplate.html',
                 {'google' : GoogleMap(markers=[marker])})
    """

    def __init__(self, geom, title=None, draggable=False, icon=None):
        """
        Initializes on a GEOS Point, or on anything a Point can be
        instantiated from.  Keyword options map to GMarkerOptions.

        Keyword Options:

          title:
            Tooltip text displayed for the marker.

          draggable:
            Whether the marker may be dragged; disabled by default.
        """
        # Coerce strings and coordinate sequences into a GEOS Point.
        if isinstance(geom, six.string_types):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):
            geom = Point(geom)
        if not isinstance(geom, Point):
            raise TypeError('GMarker may only initialize on GEOS Point geometry.')
        self.latlng = self.latlng_from_coords(geom.coords)

        # The envelope drives the automatic zoom-level computation.
        self.envelope = geom.envelope

        # TODO: Add support for more GMarkerOptions
        self.title = title
        self.draggable = draggable
        self.icon = icon
        super(GMarker, self).__init__()

    def latlng_from_coords(self, coords):
        "Returns the JavaScript GLatLng constructor call for this point."
        lng, lat = coords[0], coords[1]
        return 'new GLatLng(%s,%s)' % (lat, lng)

    def options(self):
        "Builds the GMarkerOptions object literal for this marker."
        opts = []
        if self.title:
            opts.append('title: "%s"' % self.title)
        if self.icon:
            opts.append('icon: %s' % self.icon.varname)
        if self.draggable:
            opts.append('draggable: true')
        return '{%s}' % ','.join(opts)

    @property
    def js_params(self):
        return '%s, %s' % (self.latlng, self.options())
|
chylli/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/__init__.py | 6014 | # Required for Python to search this directory for module files
|
Chilledheart/chromium | refs/heads/master | third_party/typ/typ/tests/main_test.py | 33 | # Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import json
import os
import sys
import textwrap
from typ import main
from typ import test_case
from typ import Host
from typ import VERSION
from typ.fakes import test_result_server_fake
# True when running under Python 3; used to alias the py2 `unicode` name.
is_python3 = bool(sys.version_info.major == 3)
if is_python3: # pragma: python3
    # pylint: disable=redefined-builtin,invalid-name
    unicode = str
# Shorthand: strip the common leading whitespace from the triple-quoted
# expectation literals used throughout these tests.
d = textwrap.dedent
# The fixtures below are tiny unittest modules written to disk by
# MainTestCase.check(); each *_FILES dict maps a filename to its contents.
# A single always-passing test.
PASS_TEST_PY = """
import unittest
class PassingTest(unittest.TestCase):
    def test_pass(self):
        pass
"""
PASS_TEST_FILES = {'pass_test.py': PASS_TEST_PY}
# A single always-failing test.
FAIL_TEST_PY = """
import unittest
class FailingTest(unittest.TestCase):
    def test_fail(self):
        self.fail()
"""
FAIL_TEST_FILES = {'fail_test.py': FAIL_TEST_PY}
# Tests that write to stdout/stderr while passing and failing.
OUTPUT_TEST_PY = """
import sys
import unittest
class PassTest(unittest.TestCase):
    def test_out(self):
        sys.stdout.write("hello on stdout\\n")
        sys.stdout.flush()
    def test_err(self):
        sys.stderr.write("hello on stderr\\n")
class FailTest(unittest.TestCase):
    def test_out_err_fail(self):
        sys.stdout.write("hello on stdout\\n")
        sys.stdout.flush()
        sys.stderr.write("hello on stderr\\n")
        self.fail()
"""
OUTPUT_TEST_FILES = {'output_test.py': OUTPUT_TEST_PY}
# Exercises skip decorators, skips from setUp/setUpClass, and
# expected failures.
SF_TEST_PY = """
import sys
import unittest
class SkipMethods(unittest.TestCase):
    @unittest.skip('reason')
    def test_reason(self):
        self.fail()
    @unittest.skipIf(True, 'reason')
    def test_skip_if_true(self):
        self.fail()
    @unittest.skipIf(False, 'reason')
    def test_skip_if_false(self):
        self.fail()
class SkipSetup(unittest.TestCase):
    def setUp(self):
        self.skipTest('setup failed')
    def test_notrun(self):
        self.fail()
@unittest.skip('skip class')
class SkipClass(unittest.TestCase):
    def test_method(self):
        self.fail()
class SetupClass(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        sys.stdout.write('in setupClass\\n')
        sys.stdout.flush()
        assert False, 'setupClass failed'
    def test_method1(self):
        pass
    def test_method2(self):
        pass
class ExpectedFailures(unittest.TestCase):
    @unittest.expectedFailure
    def test_fail(self):
        self.fail()
    @unittest.expectedFailure
    def test_pass(self):
        pass
"""
SF_TEST_FILES = {'sf_test.py': SF_TEST_PY}
# A module that builds its suite dynamically via the load_tests protocol.
LOAD_TEST_PY = """
import unittest
def load_tests(_, _2, _3):
    class BaseTest(unittest.TestCase):
        pass
    def method_fail(self):
        self.fail()
    def method_pass(self):
        pass
    setattr(BaseTest, "test_fail", method_fail)
    setattr(BaseTest, "test_pass", method_pass)
    suite = unittest.TestSuite()
    suite.addTest(BaseTest("test_fail"))
    suite.addTest(BaseTest("test_pass"))
    return suite
"""
LOAD_TEST_FILES = {'load_test.py': LOAD_TEST_PY}
# Path to typ's CLI entry point (runner.py one directory above this package).
path_to_main = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    'runner.py')
class TestCli(test_case.MainTestCase):
prog = [sys.executable, path_to_main]
files_to_ignore = ['*.pyc']
def test_bad_arg(self):
self.check(['--bad-arg'], ret=2, out='',
rerr='.*: error: unrecognized arguments: --bad-arg\n')
self.check(['-help'], ret=2, out='',
rerr=(".*: error: argument -h/--help: "
"ignored explicit argument 'elp'\n"))
def test_bad_metadata(self):
self.check(['--metadata', 'foo'], ret=2, err='',
out='Error: malformed --metadata "foo"\n')
def test_basic(self):
self.check([], files=PASS_TEST_FILES,
ret=0,
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'), err='')
def test_coverage(self):
try:
import coverage # pylint: disable=W0612
files = {
'pass_test.py': PASS_TEST_PY,
'fail_test.py': FAIL_TEST_PY,
}
self.check(['-c', 'pass_test'], files=files, ret=0, err='',
out=d("""\
[1/1] pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
Name Stmts Miss Cover
-------------------------------
fail_test 4 4 0%
pass_test 4 0 100%
-------------------------------
TOTAL 8 4 50%
"""))
except ImportError: # pragma: no cover
# We can never cover this line, since running coverage means
# that import will succeed.
self.check(['-c'], files=PASS_TEST_FILES, ret=1,
out='Error: coverage is not installed\n', err='')
def test_debugger(self):
if sys.version_info.major == 3: # pragma: python3
return
else: # pragma: python2
_, out, _, _ = self.check(['-d'], stdin='quit()\n',
files=PASS_TEST_FILES, ret=0, err='')
self.assertIn('(Pdb) ', out)
def test_dryrun(self):
self.check(['-n'], files=PASS_TEST_FILES, ret=0, err='',
out=d("""\
[1/1] pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
"""))
def test_error(self):
files = {'err_test.py': d("""\
import unittest
class ErrTest(unittest.TestCase):
def test_err(self):
foo = bar
""")}
_, out, _, _ = self.check([''], files=files, ret=1, err='')
self.assertIn('[1/1] err_test.ErrTest.test_err failed unexpectedly',
out)
self.assertIn('1 test run, 1 failure', out)
def test_fail(self):
_, out, _, _ = self.check([], files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('fail_test.FailingTest.test_fail failed unexpectedly',
out)
def test_fail_then_pass(self):
files = {'fail_then_pass_test.py': d("""\
import unittest
count = 0
class FPTest(unittest.TestCase):
def test_count(self):
global count
count += 1
if count == 1:
self.fail()
""")}
_, out, _, files = self.check(['--retry-limit', '3',
'--write-full-results-to',
'full_results.json'],
files=files, ret=0, err='')
self.assertIn('Retrying failed tests (attempt #1 of 3)', out)
self.assertNotIn('Retrying failed tests (attempt #2 of 3)', out)
self.assertIn('1 test run, 0 failures.\n', out)
results = json.loads(files['full_results.json'])
self.assertEqual(
results['tests'][
'fail_then_pass_test']['FPTest']['test_count']['actual'],
'FAIL PASS')
def test_failures_are_not_elided(self):
_, out, _, _ = self.check(['--terminal-width=20'],
files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('[1/1] fail_test.FailingTest.test_fail failed '
'unexpectedly:\n', out)
def test_file_list(self):
files = PASS_TEST_FILES
self.check(['-f', '-'], files=files, stdin='pass_test\n', ret=0)
self.check(['-f', '-'], files=files, stdin='pass_test.PassingTest\n',
ret=0)
self.check(['-f', '-'], files=files,
stdin='pass_test.PassingTest.test_pass\n',
ret=0)
files = {'pass_test.py': PASS_TEST_PY,
'test_list.txt': 'pass_test.PassingTest.test_pass\n'}
self.check(['-f', 'test_list.txt'], files=files, ret=0)
def test_find(self):
files = PASS_TEST_FILES
self.check(['-l'], files=files, ret=0,
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test.py'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', './pass_test.py'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', '.'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test.PassingTest.test_pass'], files=files,
ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', '.'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_find_from_subdirs(self):
files = {
'foo/__init__.py': '',
'foo/pass_test.py': PASS_TEST_PY,
'bar/__init__.py': '',
'bar/tmp': '',
}
self.check(['-l', '../foo/pass_test.py'], files=files, cwd='bar',
ret=0, err='',
out='foo.pass_test.PassingTest.test_pass\n')
self.check(['-l', 'foo'], files=files, cwd='bar',
ret=0, err='',
out='foo.pass_test.PassingTest.test_pass\n')
self.check(['-l', '--path', '../foo', 'pass_test'],
files=files, cwd='bar', ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_help(self):
self.check(['--help'], ret=0, rout='.*', err='')
def test_import_failure_missing_file(self):
self.check(['-l', 'foo'], ret=1, err='',
rout='Failed to load "foo".*')
def test_import_failure_missing_package(self):
files = {'foo.py': d("""\
import unittest
import package_that_does_not_exist
class ImportFailureTest(unittest.TestCase):
def test_case(self):
pass
""")}
self.check(['-l', 'foo.py'], files=files, ret=1, err='',
rout=('Failed to load "foo.py": No module named '
'\'?package_that_does_not_exist\'?\n'))
def test_import_failure_no_tests(self):
files = {'foo.py': 'import unittest'}
self.check(['-l', 'foo.bar'], files=files, ret=1, err='',
rout='Failed to load "foo.bar":.*')
def test_import_failure_syntax_error(self):
files = {'syn_test.py': d("""\
import unittest
class SyntaxErrorTest(unittest.TestCase):
def test_syntax_error_in_test(self):
syntax error
""")}
_, out, _, _ = self.check([], files=files, ret=1, err='')
self.assertIn('Failed to import test module: syn_test', out)
self.assertIn((' syntax error\n'
' ^\n'
'SyntaxError: invalid syntax\n'), out)
def test_interrupt(self):
files = {'interrupt_test.py': d("""\
import unittest
class Foo(unittest.TestCase):
def test_interrupt(self):
raise KeyboardInterrupt()
""")}
self.check(['-j', '1'], files=files, ret=130, out='',
err='interrupted, exiting\n')
def test_isolate(self):
self.check(['--isolate', '*test_pass*'], files=PASS_TEST_FILES, ret=0,
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'), err='')
def test_load_tests_failure(self):
files = {'foo_test.py': d("""\
import unittest
def load_tests(_, _2, _3):
raise ValueError('this should fail')
""")}
self.check([], files=files, ret=1, err='',
out=('foo_test.load_tests() failed: this should fail\n'))
def test_load_tests_single_worker(self):
files = LOAD_TEST_FILES
_, out, _, _ = self.check(['-j', '1', '-v'], files=files, ret=1,
err='')
self.assertIn('[1/2] load_test.BaseTest.test_fail failed', out)
self.assertIn('[2/2] load_test.BaseTest.test_pass passed', out)
self.assertIn('2 tests run, 1 failure.\n', out)
def test_load_tests_multiple_workers(self):
_, out, _, _ = self.check([], files=LOAD_TEST_FILES, ret=1, err='')
# The output for this test is nondeterministic since we may run
# two tests in parallel. So, we just test that some of the substrings
# we care about are present.
self.assertIn('test_pass passed', out)
self.assertIn('test_fail failed', out)
self.assertIn('2 tests run, 1 failure.\n', out)
def test_missing_builder_name(self):
self.check(['--test-results-server', 'localhost'], ret=2,
out=('Error: --builder-name must be specified '
'along with --test-result-server\n'
'Error: --master-name must be specified '
'along with --test-result-server\n'
'Error: --test-type must be specified '
'along with --test-result-server\n'), err='')
def test_ninja_status_env(self):
self.check(['-v', 'output_test.PassTest.test_out'],
files=OUTPUT_TEST_FILES, aenv={'NINJA_STATUS': 'ns: '},
out=d("""\
ns: output_test.PassTest.test_out passed
1 test run, 0 failures.
"""), err='')
def test_output_for_failures(self):
_, out, _, _ = self.check(['output_test.FailTest'],
files=OUTPUT_TEST_FILES,
ret=1, err='')
self.assertIn('[1/1] output_test.FailTest.test_out_err_fail '
'failed unexpectedly:\n'
' hello on stdout\n'
' hello on stderr\n', out)
def test_quiet(self):
self.check(['-q'], files=PASS_TEST_FILES, ret=0, err='', out='')
def test_retry_limit(self):
_, out, _, _ = self.check(['--retry-limit', '2'],
files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('Retrying failed tests', out)
lines = out.splitlines()
self.assertEqual(len([l for l in lines
if 'test_fail failed unexpectedly:' in l]),
3)
def test_skip(self):
self.check(['--skip', '*test_fail*'], files=FAIL_TEST_FILES, ret=1,
out='No tests to run.\n', err='')
files = {'fail_test.py': FAIL_TEST_PY,
'pass_test.py': PASS_TEST_PY}
self.check(['-j', '1', '--skip', '*test_fail*'], files=files, ret=0,
out=('[1/2] fail_test.FailingTest.test_fail was skipped\n'
'[2/2] pass_test.PassingTest.test_pass passed\n'
'2 tests run, 0 failures.\n'), err='')
# This tests that we print test_started updates for skipped tests
# properly. It also tests how overwriting works.
_, out, _, _ = self.check(['-j', '1', '--overwrite', '--skip',
'*test_fail*'], files=files, ret=0,
err='', universal_newlines=False)
# We test this string separately and call out.strip() to
# avoid the trailing \r\n we get on windows, while keeping
# the \r's elsewhere in the string.
self.assertMultiLineEqual(
out.strip(),
('[0/2] fail_test.FailingTest.test_fail\r'
' \r'
'[1/2] fail_test.FailingTest.test_fail was skipped\r'
' \r'
'[1/2] pass_test.PassingTest.test_pass\r'
' \r'
'[2/2] pass_test.PassingTest.test_pass passed\r'
' \r'
'2 tests run, 0 failures.'))
def test_skips_and_failures(self):
    """Expected failures, class/method skips and setup failures all report."""
    _, out, _, _ = self.check(['-j', '1', '-v', '-v'], files=SF_TEST_FILES,
                              ret=1, err='')

    # We do a bunch of assertIn()'s to work around the non-portable
    # tracebacks.
    self.assertIn(('[1/9] sf_test.ExpectedFailures.test_fail failed:\n'
                   '  Traceback '), out)
    self.assertIn(('[2/9] sf_test.ExpectedFailures.test_pass '
                   'passed unexpectedly'), out)
    self.assertIn(('[3/9] sf_test.SetupClass.test_method1 '
                   'failed unexpectedly:\n'
                   '  in setupClass\n'), out)
    self.assertIn(('[4/9] sf_test.SetupClass.test_method2 '
                   'failed unexpectedly:\n'
                   '  in setupClass\n'), out)
    self.assertIn(('[5/9] sf_test.SkipClass.test_method was skipped:\n'
                   '  skip class\n'), out)
    self.assertIn(('[6/9] sf_test.SkipMethods.test_reason was skipped:\n'
                   '  reason\n'), out)
    self.assertIn(('[7/9] sf_test.SkipMethods.test_skip_if_false '
                   'failed unexpectedly:\n'
                   '  Traceback'), out)
    self.assertIn(('[8/9] sf_test.SkipMethods.test_skip_if_true '
                   'was skipped:\n'
                   '  reason\n'
                   '[9/9] sf_test.SkipSetup.test_notrun was skipped:\n'
                   '  setup failed\n'
                   '9 tests run, 4 failures.\n'), out)
def test_skip_and_all(self):
    """--all overrides --skip so everything is listed/run again."""
    # --all should override --skip
    self.check(['-l', '--skip', '*test_pass'],
               files=PASS_TEST_FILES, ret=1, err='',
               out='No tests to run.\n')
    self.check(['-l', '--all', '--skip', '*test_pass'],
               files=PASS_TEST_FILES, ret=0, err='',
               out='pass_test.PassingTest.test_pass\n')
def test_skip_decorators_and_all(self):
    """--all overrides skip decorators but not explicit skipTest() calls."""
    _, out, _, _ = self.check(['--all', '-j', '1', '-v', '-v'],
                              files=SF_TEST_FILES, ret=1, err='')
    self.assertIn('sf_test.SkipClass.test_method failed', out)
    self.assertIn('sf_test.SkipMethods.test_reason failed', out)
    self.assertIn('sf_test.SkipMethods.test_skip_if_true failed', out)
    self.assertIn('sf_test.SkipMethods.test_skip_if_false failed', out)

    # --all does not override explicit calls to skipTest(), only
    # the decorators.
    self.assertIn('sf_test.SkipSetup.test_notrun was skipped', out)
def test_subdir(self):
    """Tests found under a package subdirectory get dotted module names."""
    files = {
        'foo/__init__.py': '',
        'foo/bar/__init__.py': '',
        'foo/bar/pass_test.py': PASS_TEST_PY
    }
    self.check(['foo/bar'], files=files, ret=0, err='',
               out=d("""\
                     [1/1] foo.bar.pass_test.PassingTest.test_pass passed
                     1 test run, 0 failures.
                     """))
def test_timing(self):
    """-t appends per-test and total wall-clock timings (matched by regex)."""
    self.check(['-t'], files=PASS_TEST_FILES, ret=0, err='',
               rout=(r'\[1/1\] pass_test.PassingTest.test_pass passed '
                     r'\d+.\d+s\n'
                     r'1 test run in \d+.\d+s, 0 failures.'))
def test_test_results_server(self):
    """Results are POSTed once, as multipart JSON, to --test-results-server."""
    server = test_result_server_fake.start()
    self.assertNotEqual(server, None, 'could not start fake server')

    try:
        self.check(['--test-results-server',
                    '%s:%d' % server.server_address,
                    '--master-name', 'fake_master',
                    '--builder-name', 'fake_builder',
                    '--test-type', 'typ_tests',
                    '--metadata', 'foo=bar'],
                   files=PASS_TEST_FILES, ret=0, err='',
                   out=('[1/1] pass_test.PassingTest.test_pass passed\n'
                        '1 test run, 0 failures.\n'))
    finally:
        # Always shut the fake server down, even if check() raised.
        posts = server.stop()

    self.assertEqual(len(posts), 1)
    payload = posts[0][2].decode('utf8')
    self.assertIn('"test_pass": {"expected": "PASS", "actual": "PASS"}',
                  payload)
    self.assertTrue(payload.endswith('--\r\n'))
    self.assertNotEqual(server.log.getvalue(), '')
def test_test_results_server_error(self):
    """An HTTP 500 from the results server is reported and fails the run."""
    server = test_result_server_fake.start(code=500)
    self.assertNotEqual(server, None, 'could not start fake server')

    try:
        self.check(['--test-results-server',
                    '%s:%d' % server.server_address,
                    '--master-name', 'fake_master',
                    '--builder-name', 'fake_builder',
                    '--test-type', 'typ_tests',
                    '--metadata', 'foo=bar'],
                   files=PASS_TEST_FILES, ret=1, err='',
                   out=('[1/1] pass_test.PassingTest.test_pass passed\n'
                        '1 test run, 0 failures.\n'
                        'Uploading the JSON results raised '
                        '"HTTP Error 500: Internal Server Error"\n'))
    finally:
        _ = server.stop()
def test_test_results_server_not_running(self):
    """An unreachable results server is reported (message matched by regex)."""
    self.check(['--test-results-server', 'localhost:99999',
                '--master-name', 'fake_master',
                '--builder-name', 'fake_builder',
                '--test-type', 'typ_tests',
                '--metadata', 'foo=bar'],
               files=PASS_TEST_FILES, ret=1, err='',
               rout=(r'\[1/1\] pass_test.PassingTest.test_pass passed\n'
                     '1 test run, 0 failures.\n'
                     'Uploading the JSON results raised .*\n'))
def test_verbose_2(self):
    """-vv echoes captured output even for passing tests."""
    self.check(['-vv', '-j', '1', 'output_test.PassTest'],
               files=OUTPUT_TEST_FILES, ret=0,
               out=d("""\
                     [1/2] output_test.PassTest.test_err passed:
                       hello on stderr
                     [2/2] output_test.PassTest.test_out passed:
                       hello on stdout
                     2 tests run, 0 failures.
                     """), err='')
def test_verbose_3(self):
    """-vvv additionally prints 'queued' updates before each test runs."""
    self.check(['-vvv', '-j', '1', 'output_test.PassTest'],
               files=OUTPUT_TEST_FILES, ret=0,
               out=d("""\
                     [0/2] output_test.PassTest.test_err queued
                     [1/2] output_test.PassTest.test_err passed:
                       hello on stderr
                     [1/2] output_test.PassTest.test_out queued
                     [2/2] output_test.PassTest.test_out passed:
                       hello on stdout
                     2 tests run, 0 failures.
                     """), err='')
def test_version(self):
    """--version prints the typ VERSION string and exits 0."""
    self.check('--version', ret=0, out=(VERSION + '\n'))
def test_write_full_results_to(self):
    """--write-full-results-to emits the Chromium JSON results format."""
    _, _, _, files = self.check(['--write-full-results-to',
                                 'results.json'], files=PASS_TEST_FILES)
    self.assertIn('results.json', files)
    results = json.loads(files['results.json'])
    self.assertEqual(results['interrupted'], False)
    self.assertEqual(results['path_delimiter'], '.')
    # 'tests' is a trie keyed on the dotted test path.
    self.assertEqual(results['tests'],
                     {u'pass_test': {
                         u'PassingTest': {
                             u'test_pass': {
                                 u'actual': u'PASS',
                                 u'expected': u'PASS',
                             }
                         }
                     }})
def test_write_trace_to(self):
    """--write-trace-to emits a Chrome trace-event JSON file."""
    _, _, _, files = self.check(['--write-trace-to', 'trace.json'],
                                files=PASS_TEST_FILES)
    self.assertIn('trace.json', files)
    trace_obj = json.loads(files['trace.json'])
    self.assertEqual(trace_obj['otherData'], {})
    self.assertEqual(len(trace_obj['traceEvents']), 5)
    event = trace_obj['traceEvents'][0]
    self.assertEqual(event['name'], 'pass_test.PassingTest.test_pass')
    # 'X' == complete event; see the trace-event format spec.
    self.assertEqual(event['ph'], 'X')
    self.assertEqual(event['tid'], 1)
    self.assertEqual(event['args']['expected'], ['Pass'])
    self.assertEqual(event['args']['actual'], 'Pass')
class TestMain(TestCli):
    """Re-runs the TestCli suite by calling main() in-process instead of
    spawning a subprocess."""

    prog = []

    def make_host(self):
        return Host()

    def call(self, host, argv, stdin, env):
        """Invoke main() with captured stdio; returns (ret, out, err).

        Restores sys.path and unloads any modules imported by the run so
        each test starts from a clean interpreter state.
        """
        # NOTE(review): `unicode` is the Python 2 builtin — this path is
        # py2-only; confirm whether a py3 shim exists elsewhere.
        stdin = unicode(stdin)
        host.stdin = io.StringIO(stdin)
        if env:
            host.getenv = env.get
        host.capture_output()
        orig_sys_path = sys.path[:]
        orig_sys_modules = list(sys.modules.keys())
        try:
            ret = main(argv + ['-j', '1'], host)
        finally:
            out, err = host.restore_output()
            # Unload modules the tested run imported (snapshot first:
            # deleting while iterating sys.modules would break).
            modules_to_unload = []
            for k in sys.modules:
                if k not in orig_sys_modules:
                    modules_to_unload.append(k)
            for k in modules_to_unload:
                del sys.modules[k]
            sys.path = orig_sys_path

        return ret, out, err

    def test_debugger(self):
        # TODO: this test seems to hang under coverage.
        pass
|
owaiskhan/Retransmission-Combining | refs/heads/master | gr-digital/examples/ofdm/benchmark_tx.py | 1 | #!/usr/bin/env python
#
# Copyright 2005,2006,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import time, struct, sys
from gnuradio import digital
# from current dir
from transmit_path import transmit_path
from uhd_interface import uhd_transmitter
import struct, sys, os
# Print the PID so a profiler/debugger can be attached to this process.
print os.getpid()
#raw_input("Press enter to continue")
class my_top_block(gr.top_block):
    """GNU Radio flow graph: OFDM transmit path -> UHD / file / null sink.

    Sink selection: a UHD transmitter when --tx-freq is given, a file sink
    when --to-file is given, otherwise a null sink.
    """

    def __init__(self, options):
        gr.top_block.__init__(self)

        if(options.tx_freq is not None):
            self.sink = uhd_transmitter(options.args,
                                        options.bandwidth,
                                        options.tx_freq, options.tx_gain,
                                        options.spec, options.antenna,
                                        options.verbose)
        elif(options.to_file is not None):
            self.sink = gr.file_sink(gr.sizeof_gr_complex, options.to_file)
        else:
            self.sink = gr.null_sink(gr.sizeof_gr_complex)

        # do this after for any adjustments to the options that may
        # occur in the sinks (specifically the UHD sink)
        self.txpath = transmit_path(options)

        self.connect(self.txpath, self.sink)
# /////////////////////////////////////////////////////////////////////////////
# main
# /////////////////////////////////////////////////////////////////////////////
def main():
def send_pkt(payload='', eof=False):
return tb.txpath.send_pkt(payload, 0, eof)
parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
expert_grp = parser.add_option_group("Expert")
parser.add_option("-s", "--size", type="eng_float", default=400,
help="set packet size [default=%default]")
parser.add_option("-M", "--megabytes", type="eng_float", default=1.0,
help="set megabytes to transmit [default=%default]")
parser.add_option("","--discontinuous", action="store_true", default=False,
help="enable discontinuous mode")
parser.add_option("","--from-file", default=None,
help="use intput file for packet contents")
parser.add_option("","--to-file", default=None,
help="Output file for modulated samples")
transmit_path.add_options(parser, expert_grp)
digital.ofdm_mod.add_options(parser, expert_grp)
uhd_transmitter.add_options(parser)
(options, args) = parser.parse_args ()
# build the graph
tb = my_top_block(options)
r = gr.enable_realtime_scheduling()
if r != gr.RT_OK:
print "Warning: failed to enable realtime scheduling"
tb.start() # start flow graph
# generate and send packets
nbytes = int(1e6 * options.megabytes)
n = 0
pktno = 0
pkt_size = int(options.size)
while n < nbytes:
if options.from_file is None:
if(pktno % 2 == 0):
data = (pkt_size) * chr(3 & 0xff)
else:
data = (pkt_size) * chr(3 & 0xff)
#data = (pkt_size - 2) * chr(pktno & 0xff)
#data = (pkt_size - 2) * chr(0x34)
else:
data = source_file.read(pkt_size - 2)
if data == '':
break;
#payload = struct.pack('!H', pktno & 0xffff) + data
payload = data
send_pkt(payload)
n += len(payload)
sys.stderr.write('.')
#if options.discontinuous and pktno % 5 == 4:
# time.sleep(1)
pktno += 1
time.sleep(1.5)
#time.sleep(0.1)
send_pkt(eof=True)
tb.wait() # wait for it to finish
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C stops the transmitter without printing a traceback.
        pass
|
iut-ibk/DynaMind-ToolBox | refs/heads/master | DynaMind-Performance-Assessment/3rdparty/CD3Waterbalance/WaterDemandModel/C_WaterDemandModel.py | 1 | __author__ = 'Jeddah'
__project__ = "WaterDemandMdeol"
import sampling_db as DB
import C_ApplianceType as Appliance
import C_UnitType as Unit
import C_ResidentialUnit as residential
import C_CommercialUnit as commercial
class WaterDemandModel():
    """Generates hourly (24-step) water-demand timeseries for a set of
    residential and commercial units.

    Inputs are arrays where each element is the number of occupants of one
    unit, e.g. residential_units=[3, 4] models two households of 3 and 4
    people.  Results are retrieved with getDemands().
    """

    # NOTE(review): these are CLASS-level attributes, shared by every
    # instance.  __timeseries in particular is a mutable dict that
    # __init__ re-binds via getInitialisedTimeseries(), so all instances
    # alias the same dictionary — confirm this sharing is intended.
    __residential_units = []   # occupants per residential unit
    __commercial_units = []    # occupants per commercial unit
    __unit_type = str
    __timeseries = {}          # {unit_type: {appliance: [24 hourly volumes], "actors": n}}
    __demand_output = {}       # {unit_id (e.g. "R1"): timeseries dict}

    def __init__(self,residential_units,commercial_units):
        self.__residential_units = residential_units
        self.__commercial_units = commercial_units
        self.__unit_type = str
        self.__demand_output = {}
        print residential_units
        print self.__residential_units
        # NOTE(review): this returns the shared class-level dict, not a
        # per-instance copy — TODO confirm.
        self.__timeseries = WaterDemandModel.getInitialisedTimeseries(self)
        #initialise methods:
        self.initialiseTimeseries()
        self.run()

    def initialiseTimeseries(self):
        """Build zeroed 24-hour volume arrays for every appliance of every
        unit type, plus an "actors" counter, inside self.__timeseries.

        Shape: {unit_type: {appliance: [0.0]*24, "actors": 0.0}}.
        """
        for u_type in Unit.unit_types:
            self.__timeseries[u_type] = {}
            self.__timeseries[u_type]["actors"]= 0.0
            # Appliance list depends on the unit type.
            if u_type == "RESIDENTIAL":
                appliances = Appliance.residential_appliance_types
            elif u_type == "COMMERCIAL":
                appliances = Appliance.commercial_appliance_types
            else:
                print "error: the unit type does not exist"
            # One zeroed slot per hour of the day for each appliance.
            for a_type in appliances:
                array = []
                for i in range(24):
                    array.append(0.0)
                self.__timeseries[u_type][a_type] = array

    def getInitialisedTimeseries(self):
        # Returns the (shared) timeseries dictionary.
        return self.__timeseries

    def createTimeSeries(self,events,unit_type,numberOfActors):
        """Accumulate event volumes into the hourly timeseries.

        For each event object the appliance's 24-slot array (selected by the
        event's own unit type) is incremented at the event's start hour.
        Also records numberOfActors under the given unit_type.

        NOTE(review): volumes are ADDED to whatever is already in the
        arrays; run() calls this once per unit without re-zeroing, so
        later units appear to accumulate earlier units' volumes — TODO
        confirm intended.
        """
        self.__timeseries[unit_type]["actors"] = numberOfActors
        for e in events:
            u_type = e.getUnitType()
            # Array for this event's appliance within its unit type.
            volumes = self.__timeseries[u_type][e.getAppliance()]
            start = e.getStartTime()
            volume = e.getVolume()
            # Hour index == int(start); add this event's volume to that slot.
            volumes[int(start)] += volume

    def run(self):
        """Generate events for every configured unit and fill
        self.__demand_output keyed "R1".."Rn" / "C1".."Cn".

        NOTE(review): each output entry stores a REFERENCE to the shared
        self.__timeseries sub-dict, so all residential entries alias one
        object (likewise commercial) — every unit ends up reporting the
        same (last/accumulated) data.  Looks like a bug; confirm.
        """
        "RESIDENTIAL EVENTS"
        res_name_counter = 1   # builds unit IDs R1..Rn
        res_actor_counter = 0  # index into the occupants array
        print self.__residential_units
        for i in range(len(self.__residential_units)):
            res_name = "R" + str(res_name_counter)
            self.__demand_output[res_name] = {}
            res_name_counter += 1
            number_of_residential_actors = self.__residential_units[res_actor_counter]
            res_actor_counter += 1
            r = residential.ResidentialUnit(number_of_residential_actors)
            all_resident_unit_events = r.getAllUnitEvents()
            self.createTimeSeries(all_resident_unit_events,"RESIDENTIAL",number_of_residential_actors)
            self.__demand_output[res_name] = self.__timeseries["RESIDENTIAL"]

        "COMMERCIAL EVENTS"
        com_name_counter = 1   # builds unit IDs C1..Cn
        com_actor_counter = 0  # index into the occupants array
        for j in range(len(self.__commercial_units)):
            comm_name = "C" + str(com_name_counter)
            self.__demand_output[comm_name] = {}
            com_name_counter += 1
            number_of_commercial_actors = self.__commercial_units[com_actor_counter]
            com_actor_counter += 1
            c = commercial.CommercialUnit(number_of_commercial_actors)
            all_commercial_unit_events = c.getAllUnitEvents()
            self.createTimeSeries(all_commercial_unit_events,"COMMERCIAL",number_of_commercial_actors)
            self.__demand_output[comm_name] = self.__timeseries["COMMERCIAL"]

    def getDemands(self):
        # Per-unit demand dictionaries produced by run().
        return self.__demand_output
if __name__ == "__main__":
res_units = [2]
com_units = [8]
run = WaterDemandModel(res_units,com_units)
a = run.getDemands()
print run.getDemands()
|
shsingh/ansible | refs/heads/devel | lib/ansible/plugins/action/enos.py | 9 | # (C) 2017 Red Hat Inc.
# Copyright (C) 2017 Lenovo.
#
# GNU General Public License v3.0+
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Contains Action Plugin methods for ENOS Config Module
# Lenovo Networking
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
from ansible import constants as C
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
from ansible.module_utils.network.enos.enos import enos_provider_spec
from ansible.module_utils.network.common.utils import load_provider
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionNetworkModule):
    """Action plugin for Lenovo ENOS modules: rewrites a 'local' connection
    into a persistent network_cli connection built from provider settings."""

    def run(self, tmp=None, task_vars=None):
        """Set up the persistent connection (if needed) and delegate to
        ActionNetworkModule.run().

        Returns the module result dict, or a 'failed' dict when no shell
        socket could be opened.
        """
        del tmp  # tmp no longer has any effect

        module_name = self._task.action.split('.')[-1]
        self._config_module = True if module_name == 'enos_config' else False

        if self._play_context.connection == 'local':
            provider = load_provider(enos_provider_spec, self._task.args)
            # Copy so the original play context is left untouched.
            pc = copy.deepcopy(self._play_context)
            pc.connection = 'network_cli'
            pc.network_os = 'enos'
            # Provider values take precedence; fall back to the play context.
            pc.remote_addr = provider['host'] or self._play_context.remote_addr
            pc.port = provider['port'] or self._play_context.port or 22
            pc.remote_user = provider['username'] or self._play_context.connection_user
            pc.password = provider['password'] or self._play_context.password
            pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
            command_timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
            # NOTE(review): `or True` means become is always enabled here,
            # regardless of provider['authorize'] — confirm intended.
            pc.become = provider['authorize'] or True
            pc.become_pass = provider['auth_pass']
            pc.become_method = 'enable'

            display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr)
            connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin, task_uuid=self._task._uuid)
            connection.set_options(direct={'persistent_command_timeout': command_timeout})

            socket_path = connection.run()
            display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
            if not socket_path:
                return {'failed': True,
                        'msg': 'unable to open shell. Please see: ' +
                               'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}

            task_vars['ansible_socket'] = socket_path

        result = super(ActionModule, self).run(task_vars=task_vars)
        return result
|
cnsoft/kbengine-cocos2dx | refs/heads/cocos2dx-cnsoft | kbe/src/lib/python/Tools/scripts/pdeps.py | 66 | #! /usr/bin/env python3
# pdeps
#
# Find dependencies between a bunch of Python modules.
#
# Usage:
# pdeps file1.py file2.py ...
#
# Output:
# Four tables separated by lines like '--- Closure ---':
# 1) Direct dependencies, listing which module imports which other modules
# 2) The inverse of (1)
# 3) Indirect dependencies, or the closure of the above
# 4) The inverse of (3)
#
# To do:
# - command line options to select output type
# - option to automatically scan the Python library for referenced modules
# - option to limit output to particular modules
import sys
import re
import os
# Main program
#
def main():
    """Command-line entry point.

    Scans every file named on the command line for import statements and
    prints four tables: direct uses, direct used-by, and the transitive
    closure of each.  Returns a shell-style exit status.
    """
    filenames = sys.argv[1:]
    if not filenames:
        print('usage: pdeps file.py file.py ...')
        return 2

    table = {}
    for filename in filenames:
        process(filename, table)

    # The closure is computed lazily below; print order matches the
    # original report layout.
    print('--- Uses ---')
    printresults(table)

    print('--- Used By ---')
    printresults(inverse(table))

    reach = closure(table)
    print('--- Closure of Uses ---')
    printresults(reach)

    print('--- Closure of Used By ---')
    printresults(inverse(reach))

    return 0
# Compiled regular expressions to search for import statements.
#
# NOTE(review): the names appear swapped — m_import matches
# "from X ..." lines while m_from matches "import X" lines.  Both are
# always tried together, so behaviour is unaffected; confirm before
# renaming, since other code references these names.
m_import = re.compile('^[ \t]*from[ \t]+([^ \t]+)[ \t]+')
m_from = re.compile('^[ \t]*import[ \t]+([^#]+)')
# Collect data from one file
#
def process(filename, table):
    """Record the modules imported by *filename* into *table*.

    table[module_name] becomes a list of imported module names (in first
    occurrence order, without duplicates), where module_name is the file's
    basename minus any '.py' suffix.

    Fixes over the original: the file is closed via ``with`` even on
    error, the builtin ``list`` is no longer shadowed, and
    backslash-continued lines are actually joined (the old check compared
    the trailing newline against '\\' and so never matched).
    """
    # Patterns compiled locally: group(1) is the module list / source module.
    pat_from = re.compile('^[ \t]*from[ \t]+([^ \t]+)[ \t]+')
    pat_import = re.compile('^[ \t]*import[ \t]+([^#]+)')

    mod = os.path.basename(filename)
    if mod.endswith('.py'):
        mod = mod[:-3]
    deps = table[mod] = []

    with open(filename, 'r') as fp:
        while True:
            line = fp.readline()
            if not line:
                break
            # Join backslash-continued logical lines before matching.
            while line.rstrip('\r\n').endswith('\\'):
                nextline = fp.readline()
                if not nextline:
                    break
                line = line.rstrip('\r\n')[:-1] + nextline
            m_found = pat_from.match(line) or pat_import.match(line)
            if not m_found:
                continue
            start, end = m_found.span(1)
            for word in line[start:end].split(','):
                word = word.strip()
                if word not in deps:
                    deps.append(word)
# Compute closure (this is in fact totally general)
#
def closure(table):
    """Return the transitive closure of *table*.

    *table* maps a key to a list of directly-reachable keys; the result
    maps each key to every key reachable in one or more steps, preserving
    discovery order.  The input table is not modified.

    Improvements: proper booleans instead of 0/1 flags, and membership is
    tested against the dict (O(1)) rather than a list (O(n)).
    """
    modules = list(table.keys())

    # Start from a shallow per-key copy of the direct-reachability lists.
    reach = {mod: table[mod][:] for mod in modules}

    # Iterate to a fixed point: keep propagating until nothing changes.
    changed = True
    while changed:
        changed = False
        for mod in modules:
            # Appending while iterating is deliberate: newly discovered
            # entries are picked up within the same pass.
            for mo in reach[mod]:
                if mo in reach:
                    for m in reach[mo]:
                        if m not in reach[mod]:
                            reach[mod].append(m)
                            changed = True
    return reach
# Invert a table (this is again totally general).
# All keys of the original table are made keys of the inverse,
# so there may be empty lists in the inverse.
#
def inverse(table):
    """Return the inverted mapping of *table*.

    Every key of *table* appears in the result (possibly with an empty
    list), and every item appears as a key listing the keys that
    referenced it.  Uses dict.setdefault instead of the manual
    create-if-missing helper.
    """
    inv = {}
    for key, items in table.items():
        # Ensure the key itself is present, even if nothing refers to it.
        inv.setdefault(key, [])
        for item in items:
            inv.setdefault(item, []).append(key)
    return inv
# Store "item" in "dict" under "key".
# The dictionary maps keys to lists of items.
# If there is no list for the key yet, it is created.
#
def store(dict, key, item):
if key in dict:
dict[key].append(item)
else:
dict[key] = [item]
# Tabulate results neatly
#
def printresults(table):
    """Print *table* as aligned 'module : ref ref ...' lines.

    Modules and their reference lists are sorted; a self-reference is
    marked with '(*)'.
    """
    modules = sorted(table.keys())
    maxlen = max((len(mod) for mod in modules), default=0)
    for mod in modules:
        refs = sorted(table[mod])
        print(mod.ljust(maxlen), ':', end=' ')
        if mod in refs:
            print('(*)', end=' ')
        for ref in refs:
            print(ref, end=' ')
        print()
# Call main and honor exit status
if __name__ == '__main__':
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        # Exit quietly with status 1 on Ctrl-C.
        sys.exit(1)
|
40223133/2015W11 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/unittest/test/support.py | 770 | import unittest
class TestEquality(object):
"""Used as a mixin for TestCase"""
# Check for a valid __eq__ implementation
def test_eq(self):
for obj_1, obj_2 in self.eq_pairs:
self.assertEqual(obj_1, obj_2)
self.assertEqual(obj_2, obj_1)
# Check for a valid __ne__ implementation
def test_ne(self):
for obj_1, obj_2 in self.ne_pairs:
self.assertNotEqual(obj_1, obj_2)
self.assertNotEqual(obj_2, obj_1)
class TestHashing(object):
"""Used as a mixin for TestCase"""
# Check for a valid __hash__ implementation
def test_hash(self):
for obj_1, obj_2 in self.eq_pairs:
try:
if not hash(obj_1) == hash(obj_2):
self.fail("%r and %r do not hash equal" % (obj_1, obj_2))
except KeyboardInterrupt:
raise
except Exception as e:
self.fail("Problem hashing %r and %r: %s" % (obj_1, obj_2, e))
for obj_1, obj_2 in self.ne_pairs:
try:
if hash(obj_1) == hash(obj_2):
self.fail("%s and %s hash equal, but shouldn't" %
(obj_1, obj_2))
except KeyboardInterrupt:
raise
except Exception as e:
self.fail("Problem hashing %s and %s: %s" % (obj_1, obj_2, e))
class LoggingResult(unittest.TestResult):
    """TestResult that appends the name of every callback to a log list.

    The caller supplies the list, so tests can assert on the exact event
    sequence afterwards.  Consistency fix: the original mixed zero-arg
    ``super()`` with explicit ``super(LoggingResult, self)`` calls; all
    calls are normalized to the zero-arg form.
    """

    def __init__(self, log):
        # Shared list owned by the caller; events are appended in order.
        self._events = log
        super().__init__()

    def startTest(self, test):
        self._events.append('startTest')
        super().startTest(test)

    def startTestRun(self):
        self._events.append('startTestRun')
        super().startTestRun()

    def stopTest(self, test):
        self._events.append('stopTest')
        super().stopTest(test)

    def stopTestRun(self):
        self._events.append('stopTestRun')
        super().stopTestRun()

    def addFailure(self, *args):
        self._events.append('addFailure')
        super().addFailure(*args)

    def addSuccess(self, *args):
        self._events.append('addSuccess')
        super().addSuccess(*args)

    def addError(self, *args):
        self._events.append('addError')
        super().addError(*args)

    def addSkip(self, *args):
        self._events.append('addSkip')
        super().addSkip(*args)

    def addExpectedFailure(self, *args):
        self._events.append('addExpectedFailure')
        super().addExpectedFailure(*args)

    def addUnexpectedSuccess(self, *args):
        self._events.append('addUnexpectedSuccess')
        super().addUnexpectedSuccess(*args)
class ResultWithNoStartTestRunStopTestRun(object):
    """An object honouring the TestResult interface from before
    startTestRun/stopTestRun existed; every callback is a no-op."""

    def __init__(self):
        # Mirror the attribute surface of a fresh unittest.TestResult.
        self.failures = []
        self.errors = []
        self.testsRun = 0
        self.skipped = []
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        self.shouldStop = False

    def startTest(self, test):
        """No-op callback."""

    def stopTest(self, test):
        """No-op callback."""

    def addError(self, test):
        """No-op callback."""

    def addFailure(self, test):
        """No-op callback."""

    def addSuccess(self, test):
        """No-op callback."""

    def wasSuccessful(self):
        """Always reports success; nothing is ever recorded."""
        return True
|
Sorsly/subtle | refs/heads/master | google-cloud-sdk/lib/surface/compute/backend_services/list.py | 6 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for listing backend services."""
from googlecloudsdk.api_lib.compute import base_classes
class List(base_classes.GlobalRegionalLister):
    """List backend services (global and regional)."""

    def Collection(self):
        # Alpha collection name used for default output/URI formatting.
        return 'compute.backendServices.alpha'

    @property
    def aggregation_service(self):
        # Aggregated listing goes through the global service.
        return self.compute.backendServices

    @property
    def global_service(self):
        # Service for globally-scoped backend services.
        return self.compute.backendServices

    @property
    def regional_service(self):
        # Service for regionally-scoped backend services.
        return self.compute.regionBackendServices

    @property
    def resource_type(self):
        return 'backendServices'

    @property
    def allowed_filtering_types(self):
        # Both regional and global collections may be filtered on.
        return ['regionBackendServices', 'backendServices']
# Attach the standard global/regional lister help text to the command.
List.detailed_help = base_classes.GetGlobalRegionalListerHelp(
    'backend services')
|
schiesbn/backupRotate | refs/heads/master | backuprotate.py | 1 | #!/usr/bin/python
"""
(c) Copyright 2013 Bjoern Schiessle <bjoern@schiessle.org>
This program is free software released under the MIT License, for more details
see LICENSE.txt or http://opensource.org/licenses/MIT
This program was written for personal usage. So don't expect any active
development beside adjustments to my own needs. Feel free to reuse it and
adjust it to your own needs.
This small program allows you to specify a 'path' where your backups
are located and a 'number' of backups you want to keep. The program will read
the directory 'path' and delete all files beside the latest 'number' of files.
The list of files will be sorted alphabetically, the program assumes that the
oldest file is at the top of the list and the newest file is at the bottom of
the list.
"""
import os, sys, getopt
class BackupRotate:
def __init__(self, path="/mnt/backup/databases", number=120, dryRun=False):
self.path = path
self.number = number
self.dryRun = dryRun
def setPath(self, path):
self.path = path
def setNumber(self, number):
self.number = number
def setDryRun(self, dryRun):
self.dryRun = dryRun
def delOldBackups(self):
filesList = sorted(os.listdir(self.path))
numberOfDeletedFiles = len(filesList) - self.number
if (numberOfDeletedFiles > 0):
for file in filesList[0: numberOfDeletedFiles]:
if (self.dryRun):
print "I would delete:", file
else:
print "Delete:", file
os.remove(self.path + '/' + file)
else:
print "Nothing to do!"
def printHelp():
    """Print the command line usage summary to stdout."""
    print 'optional parameters: '
    print '-h, --help show help'
    print '-d, --dryRun don\'t delete any files'
    print '-p, --path=PATH specifiy the path of the backup files'
    print '-n, --number=NUMBER specifiy the number of kept backups'
def main(argv):
    """Parse command line options and run the backup rotation.

    Exits with status 2 on bad options, 0 after printing help.
    """
    backupRotate = BackupRotate()
    try:
        opts, args = getopt.getopt(argv,"hdp:n:",["help","dryRun", "path=", "number="])
    except getopt.GetoptError:
        printHelp()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            printHelp()
            sys.exit()
        elif opt in ("-p", "--path"):
            backupRotate.setPath(arg)
        elif opt in ("-d", "--dryRun"):
            backupRotate.setDryRun(True)
        elif opt in ("-n", "--number"):
            backupRotate.setNumber(int(arg))
    backupRotate.delOldBackups()
if __name__ == "__main__":
main(sys.argv[1:])
|
Just-D/chromium-1 | refs/heads/master | tools/perf/measurements/page_cycler_unittest.py | 6 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import unittest
from telemetry.internal.browser import browser_options
from telemetry.internal.results import page_test_results
from telemetry.internal import story_runner
from telemetry.testing import simple_mock
from measurements import page_cycler
from metrics import keychain_metric
# Allow testing protected members in the unit test.
# pylint: disable=W0212
class MockMemoryMetric(object):
    """Used instead of simple_mock.MockObject so that the precise order and
    number of calls need not be specified; every hook is a no-op."""

    def __init__(self):
        """Nothing to set up."""

    def Start(self, page, tab):
        """No-op measurement hook."""

    def Stop(self, page, tab):
        """No-op measurement hook."""

    def AddResults(self, tab, results):
        """No-op results hook."""

    def AddSummaryResults(self, tab, results):
        """No-op summary hook."""
class FakePage(object):
    """Used to mock loading a page; records the URL and whether it is a
    local file:// URL."""

    def __init__(self, url):
        self.url = url
        # file:// pages are treated differently by the page cycler.
        self.is_file = url.startswith('file://')
class FakeTab(object):
  """Mock browser tab that records cache clears and navigations."""
  def __init__(self):
    self.clear_cache_calls = 0
    self.navigated_urls = []
  def ClearCache(self, force=False):
    # Callers are expected to always force the cache clear.
    assert force
    self.clear_cache_calls = self.clear_cache_calls + 1
  def EvaluateJavaScript(self, script):
    # When the page cycler asks about keychain accesses, answer with a
    # small JSON dictionary of fake data; any other script evaluates to 1.
    keychain_histogram_name = keychain_metric.KeychainMetric.HISTOGRAM_NAME
    if keychain_histogram_name not in script:
      return 1
    return '{{ "{0}" : 0 }}'.format(keychain_histogram_name)
  def Navigate(self, url):
    self.navigated_urls.append(url)
  def WaitForJavaScriptExpression(self, _, __):
    pass
  @property
  def browser(self):
    return FakeBrowser()
class FakeBrowser(object):
  """Mock browser exposing deterministic fake CPU stats, a fake platform
  and a fake HTTP server."""
  _iteration = 0
  @property
  def cpu_stats(self):
    # Every read advances the shared counter so successive samples differ.
    FakeBrowser._iteration += 1
    tick = FakeBrowser._iteration
    return {
        'Browser': {'CpuProcessTime': tick, 'TotalTime': tick * 2},
        'Renderer': {'CpuProcessTime': tick, 'TotalTime': tick * 3},
        'Gpu': {'CpuProcessTime': tick, 'TotalTime': tick * 4}
    }
  @property
  def platform(self):
    return FakePlatform()
  @property
  def http_server(self):
    class FakeHttpServer(object):
      def UrlOf(self, url_path):
        return 'http://fakeserver:99999/%s' % url_path
    return FakeHttpServer()
  @property
  def supports_cpu_metrics(self):
    return True
  @property
  def supports_memory_metrics(self):
    return True
  @property
  def supports_power_metrics(self):
    return True
class FakePlatform(object):
  # Minimal platform stub: fixed OS name, power monitoring disabled.
  def GetOSName(self):
    return 'fake'
  def CanMonitorPower(self):
    # Disabled so the cycler never tries to collect real power metrics.
    return False
class PageCyclerUnitTest(unittest.TestCase):
  """Tests for page_cycler.PageCycler using the fake page/tab/browser above."""
  def SetUpCycler(self, page_repeat=1, pageset_repeat=10, cold_load_percent=50,
                  report_speed_index=False, setup_memory_module=False):
    # Build a PageCycler wired to fully-parsed command-line options;
    # optionally swap page_cycler.memory for a mock so no browser is needed.
    cycler = page_cycler.PageCycler(
        page_repeat = page_repeat,
        pageset_repeat = pageset_repeat,
        cold_load_percent = cold_load_percent,
        report_speed_index = report_speed_index)
    options = browser_options.BrowserFinderOptions()
    options.browser_options.platform = FakePlatform()
    parser = options.CreateParser()
    story_runner.AddCommandLineArgs(parser)
    args = ['--page-repeat=%i' % page_repeat,
            '--pageset-repeat=%i' % pageset_repeat]
    parser.parse_args(args)
    story_runner.ProcessCommandLineArgs(parser, options)
    cycler.CustomizeBrowserOptions(options.browser_options)
    if setup_memory_module:
      # Mock out memory metrics; the real ones require a real browser.
      mock_memory_metric = MockMemoryMetric()
      mock_memory_module = simple_mock.MockObject()
      mock_memory_module.ExpectCall(
          'MemoryMetric').WithArgs(simple_mock.DONT_CARE).WillReturn(
              mock_memory_metric)
      real_memory_module = page_cycler.memory
      try:
        page_cycler.memory = mock_memory_module
        browser = FakeBrowser()
        cycler.WillStartBrowser(options.browser_options.platform)
        cycler.DidStartBrowser(browser)
      finally:
        # Always restore the real module so other tests see it.
        page_cycler.memory = real_memory_module
    return cycler
  def testOptionsColdLoadNoArgs(self):
    # Default 50% cold load over 10 pageset repeats: cold runs start at 5.
    cycler = self.SetUpCycler()
    self.assertEquals(cycler._cold_run_start_index, 5)
  def testOptionsColdLoadPagesetRepeat(self):
    cycler = self.SetUpCycler(pageset_repeat=20, page_repeat=2)
    self.assertEquals(cycler._cold_run_start_index, 20)
  def testOptionsColdLoadRequested(self):
    cycler = self.SetUpCycler(pageset_repeat=21, page_repeat=2,
                              cold_load_percent=40)
    self.assertEquals(cycler._cold_run_start_index, 26)
  def testCacheHandled(self):
    # The cache must only be cleared for cold runs (iterations 3 and 4 here).
    cycler = self.SetUpCycler(pageset_repeat=5,
                              cold_load_percent=50,
                              setup_memory_module=True)
    url_name = 'http://fakepage.com'
    page = FakePage(url_name)
    tab = FakeTab()
    for i in range(5):
      results = page_test_results.PageTestResults()
      results.WillRunPage(page)
      cycler.WillNavigateToPage(page, tab)
      self.assertEqual(max(0, i - 2), tab.clear_cache_calls,
                       'Iteration %d tab.clear_cache_calls %d' %
                       (i, tab.clear_cache_calls))
      cycler.ValidateAndMeasurePage(page, tab, results)
      results.DidRunPage(page)
      values = results.all_page_specific_values
      self.assertGreater(len(values), 2)
      self.assertEqual(values[0].page, page)
      # First run and post-threshold runs are reported as cold loads.
      chart_name = 'cold_times' if i == 0 or i > 2 else 'warm_times'
      self.assertEqual(values[0].name, '%s.page_load_time' % chart_name)
      self.assertEqual(values[0].units, 'ms')
      cycler.DidNavigateToPage(page, tab)
  def testColdWarm(self):
    # With 3 repeats the middle pass is warm; first and last are cold.
    cycler = self.SetUpCycler(pageset_repeat=3, setup_memory_module=True)
    pages = [FakePage('http://fakepage1.com'), FakePage('http://fakepage2.com')]
    tab = FakeTab()
    for i in range(3):
      for page in pages:
        results = page_test_results.PageTestResults()
        results.WillRunPage(page)
        cycler.WillNavigateToPage(page, tab)
        cycler.ValidateAndMeasurePage(page, tab, results)
        results.DidRunPage(page)
        values = results.all_page_specific_values
        self.assertGreater(len(values), 2)
        self.assertEqual(values[0].page, page)
        chart_name = 'cold_times' if i == 0 or i > 1 else 'warm_times'
        self.assertEqual(values[0].name, '%s.page_load_time' % chart_name)
        self.assertEqual(values[0].units, 'ms')
        cycler.DidNavigateToPage(page, tab)
  def testResults(self):
    cycler = self.SetUpCycler(setup_memory_module=True)
    pages = [FakePage('http://fakepage1.com'), FakePage('http://fakepage2.com')]
    tab = FakeTab()
    for i in range(2):
      for page in pages:
        results = page_test_results.PageTestResults()
        results.WillRunPage(page)
        cycler.WillNavigateToPage(page, tab)
        cycler.ValidateAndMeasurePage(page, tab, results)
        results.DidRunPage(page)
        values = results.all_page_specific_values
        # On Mac, there is an additional measurement: the number of keychain
        # accesses.
        value_count = 3
        if sys.platform == 'darwin':
          value_count += 1
        self.assertEqual(value_count, len(values))
        self.assertEqual(values[0].page, page)
        chart_name = 'cold_times' if i == 0 else 'warm_times'
        self.assertEqual(values[0].name, '%s.page_load_time' % chart_name)
        self.assertEqual(values[0].units, 'ms')
        # CPU utilization values follow the load time, one per process type.
        expected_values = ['gpu', 'browser']
        for value, expected in zip(values[1:len(expected_values) + 1],
                                   expected_values):
          self.assertEqual(value.page, page)
          self.assertEqual(value.name,
                           'cpu_utilization.cpu_utilization_%s' % expected)
          self.assertEqual(value.units, '%')
        cycler.DidNavigateToPage(page, tab)
  def testLegacyPagesAvoidCrossRenderNavigation(self):
    # For legacy page cyclers with file URLs, verify that WillNavigateToPage
    # does an initial navigate to avoid paying for a cross-renderer navigation.
    cycler = self.SetUpCycler(setup_memory_module=True)
    pages = [FakePage('file://fakepage1.com'), FakePage('file://fakepage2.com')]
    tab = FakeTab()
    self.assertEqual([], tab.navigated_urls)
    for page in pages * 2:
      cycler.WillNavigateToPage(page, tab)
      # Only one warm-up navigation total, no matter how many pages run.
      self.assertEqual(
          ['http://fakeserver:99999/nonexistent.html'], tab.navigated_urls)
|
tienfuc/gdcmdtools | refs/heads/master | gdcmdtools/about.py | 2 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import sys
import urllib
import requests
import json
import pprint
import re
import logging
logger = logging.getLogger("gdabout")
import random
import os
import json
from gdcmdtools.base import GDBase
from gdcmdtools.perm import GDPerm
from gdcmdtools.auth import GDAuth
class GDAbout:
    """Query the Google Drive 'about' resource for the authorized user."""
    def __init__(self, args):
        # Copy every parsed CLI argument onto the instance as an attribute.
        for key, value in vars(args).items():
            setattr(self, key, value)
        auth = GDAuth()
        creds = auth.get_credentials()
        # Bug fix: check for missing credentials BEFORE dereferencing them.
        # The original read creds.id_token first, which raised AttributeError
        # on a failed auth instead of the intended exception below.
        if creds is None:
            raise Exception("Failed to retrieve credentials")
        self.auth_user = creds.id_token.get("email", None)
        self.http = auth.get_authorized_http()
        base = GDBase()
        self.service = base.get_drive_service(self.http)
        self.root = base.get_root()
    def run(self):
        """Return the Drive 'about' response; log and re-raise on failure."""
        try:
            response = self.service.about().get().execute()
        except Exception as e:
            logger.error(e)
            raise
        else:
            return response
|
taroplus/spark | refs/heads/master | examples/src/main/python/ml/min_hash_lsh_example.py | 52 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import MinHashLSH
from pyspark.ml.linalg import Vectors
from pyspark.sql.functions import col
# $example off$
from pyspark.sql import SparkSession
"""
An example demonstrating MinHashLSH.
Run with:
bin/spark-submit examples/src/main/python/ml/min_hash_lsh_example.py
"""
if __name__ == "__main__":
    spark = SparkSession \
        .builder \
        .appName("MinHashLSHExample") \
        .getOrCreate()
    # $example on$
    # Two small datasets of 6-dimensional sparse binary feature vectors.
    dataA = [(0, Vectors.sparse(6, [0, 1, 2], [1.0, 1.0, 1.0]),),
             (1, Vectors.sparse(6, [2, 3, 4], [1.0, 1.0, 1.0]),),
             (2, Vectors.sparse(6, [0, 2, 4], [1.0, 1.0, 1.0]),)]
    dfA = spark.createDataFrame(dataA, ["id", "features"])
    dataB = [(3, Vectors.sparse(6, [1, 3, 5], [1.0, 1.0, 1.0]),),
             (4, Vectors.sparse(6, [2, 3, 5], [1.0, 1.0, 1.0]),),
             (5, Vectors.sparse(6, [1, 2, 4], [1.0, 1.0, 1.0]),)]
    dfB = spark.createDataFrame(dataB, ["id", "features"])
    key = Vectors.sparse(6, [1, 3], [1.0, 1.0])
    # More hash tables lower the false-negative rate of the approximate joins.
    mh = MinHashLSH(inputCol="features", outputCol="hashes", numHashTables=5)
    model = mh.fit(dfA)
    # Feature Transformation
    print("The hashed dataset where hashed values are stored in the column 'hashes':")
    model.transform(dfA).show()
    # Compute the locality sensitive hashes for the input rows, then perform approximate
    # similarity join.
    # We could avoid computing hashes by passing in the already-transformed dataset, e.g.
    # `model.approxSimilarityJoin(transformedA, transformedB, 0.6)`
    print("Approximately joining dfA and dfB on distance smaller than 0.6:")
    model.approxSimilarityJoin(dfA, dfB, 0.6, distCol="JaccardDistance")\
        .select(col("datasetA.id").alias("idA"),
                col("datasetB.id").alias("idB"),
                col("JaccardDistance")).show()
    # Compute the locality sensitive hashes for the input rows, then perform approximate nearest
    # neighbor search.
    # We could avoid computing hashes by passing in the already-transformed dataset, e.g.
    # `model.approxNearestNeighbors(transformedA, key, 2)`
    # It may return less than 2 rows when not enough approximate near-neighbor candidates are
    # found.
    print("Approximately searching dfA for 2 nearest neighbors of the key:")
    model.approxNearestNeighbors(dfA, key, 2).show()
    # $example off$
    spark.stop()
|
xingyepei/edx-platform | refs/heads/release | common/lib/xmodule/xmodule/open_ended_grading_classes/__init__.py | 248 | __author__ = 'vik'
|
ar7z1/ansible | refs/heads/devel | lib/ansible/modules/net_tools/nios/nios_host_record.py | 17 | #!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_host_record
version_added: "2.5"
author: "Peter Sprygada (@privateip)"
short_description: Configure Infoblox NIOS host records
description:
- Adds and/or removes instances of host record objects from
Infoblox NIOS servers. This module manages NIOS C(record:host) objects
using the Infoblox WAPI interface over REST.
- Updates instances of host record object from Infoblox NIOS servers.
requirements:
- infoblox-client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system. User can also update the hostname as it is possible
to pass a dict containing I(new_name), I(old_name). See examples.
required: true
view:
description:
- Sets the DNS view to associate this host record with. The DNS
view must already be configured on the system
required: true
default: default
aliases:
- dns_view
configure_for_dns:
version_added: "2.7"
description:
- Sets the DNS to particular parent. If user needs to bypass DNS
user can make the value to false.
type: bool
required: false
default: true
aliases:
- dns
ipv4addrs:
description:
- Configures the IPv4 addresses for this host record. This argument
accepts a list of values (see suboptions)
aliases:
- ipv4
suboptions:
ipv4addr:
description:
- Configures the IPv4 address for the host record
required: true
aliases:
- address
configure_for_dhcp:
description:
          - Configure the host_record over DHCP instead of DNS. If the user
            changes it to true, the MAC address must be provided.
required: false
aliases:
- dhcp
mac:
description:
          - Configures the hardware MAC address for the host record. If the
            user sets DHCP to true, the MAC address must be provided.
required: false
aliases:
- mac
ipv6addrs:
description:
      - Configures the IPv6 addresses for the host record. This argument
        accepts a list of values (see suboptions)
aliases:
- ipv6
suboptions:
ipv6addr:
description:
- Configures the IPv6 address for the host record
required: true
aliases:
- address
configure_for_dhcp:
description:
- Configure the host_record over DHCP instead of DNS, if user
changes it to true, user need to mention MAC address to configure
required: false
aliases:
- dhcp
aliases:
version_added: "2.6"
description:
- Configures an optional list of additional aliases to add to the host
record. These are equivalent to CNAMEs but held within a host
record. Must be in list format.
ttl:
description:
- Configures the TTL to be associated with this host record
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure an ipv4 host record
nios_host_record:
name: host.ansible.com
ipv4:
- address: 192.168.10.1
aliases:
- cname.ansible.com
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: add a comment to an existing host record
nios_host_record:
name: host.ansible.com
ipv4:
- address: 192.168.10.1
comment: this is a test comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove a host record from the system
nios_host_record:
name: host.ansible.com
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update an ipv4 host record
nios_host_record:
name: {new_name: host-new.ansible.com, old_name: host.ansible.com}
ipv4:
- address: 192.168.10.1
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: create an ipv4 host record bypassing DNS
nios_host_record:
name: new_host
ipv4:
- address: 192.168.10.1
dns: false
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: create an ipv4 host record over DHCP
nios_host_record:
name: host.ansible.com
ipv4:
- address: 192.168.10.1
dhcp: true
mac: 00-80-C8-E3-4C-BD
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.net_tools.nios.api import WapiModule
from ansible.module_utils.net_tools.nios.api import NIOS_HOST_RECORD
def ipaddr(module, key, filtered_keys=None):
    ''' Transforms the input value into a struct supported by WAPI

    Builds, for every entry of ``module.params[key]``, a dict of the form:

        {
            ipv4addr: <value>,
            mac: <value>
        }

    keeping only keys that carry a value and are not listed in
    ``filtered_keys``.  No validation of format or range happens here;
    that is left to WAPI.
    '''
    skipped = filtered_keys or list()
    structs = list()
    for entry in module.params[key]:
        cleaned = dict(
            (name, value) for name, value in iteritems(entry)
            if value is not None and name not in skipped
        )
        structs.append(cleaned)
    return structs
def ipv4addrs(module):
    # Build WAPI ipv4addrs structs; 'address' and 'dhcp' are playbook-side
    # aliases that must be stripped before the data is sent to WAPI.
    return ipaddr(module, 'ipv4addrs', filtered_keys=['address', 'dhcp'])
def ipv6addrs(module):
    # Build WAPI ipv6addrs structs; same alias stripping as ipv4addrs.
    return ipaddr(module, 'ipv6addrs', filtered_keys=['address', 'dhcp'])
def main():
    ''' Main entry point for module execution
    '''
    # Suboption schema for entries of the ipv4addrs list.
    ipv4addr_spec = dict(
        ipv4addr=dict(required=True, aliases=['address'], ib_req=True),
        configure_for_dhcp=dict(type='bool', required=False, aliases=['dhcp'], ib_req=True),
        mac=dict(required=False, aliases=['mac'], ib_req=True)
    )
    # Suboption schema for entries of the ipv6addrs list.
    ipv6addr_spec = dict(
        ipv6addr=dict(required=True, aliases=['address'], ib_req=True),
        configure_for_dhcp=dict(type='bool', required=False, aliases=['configure_for_dhcp'], ib_req=True),
        mac=dict(required=False, aliases=['mac'], ib_req=True)
    )
    # Argument spec describing the NIOS host record object itself; the
    # transform callables turn playbook lists into WAPI structs.
    ib_spec = dict(
        name=dict(required=True, ib_req=True),
        view=dict(default='default', aliases=['dns_view'], ib_req=True),
        ipv4addrs=dict(type='list', aliases=['ipv4'], elements='dict', options=ipv4addr_spec, transform=ipv4addrs),
        ipv6addrs=dict(type='list', aliases=['ipv6'], elements='dict', options=ipv6addr_spec, transform=ipv6addrs),
        configure_for_dns=dict(type='bool', default=True, required=False, aliases=['dns'], ib_req=True),
        aliases=dict(type='list'),
        ttl=dict(type='int'),
        extattrs=dict(type='dict'),
        comment=dict(),
    )
    argument_spec = dict(
        provider=dict(required=True),
        state=dict(default='present', choices=['present', 'absent'])
    )
    argument_spec.update(ib_spec)
    argument_spec.update(WapiModule.provider_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    # Delegate create/update/delete handling to the shared WAPI helper.
    wapi = WapiModule(module)
    result = wapi.run(NIOS_HOST_RECORD, ib_spec)
    module.exit_json(**result)
# Entry point when Ansible executes the module directly.
if __name__ == '__main__':
    main()
|
wenderen/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/pytest/testing/code/test_code.py | 169 | import sys
import _pytest._code
import py
import pytest
def test_ne():
    # Identical Code objects compare equal; different compiled source unequal.
    code1 = _pytest._code.Code(compile('foo = "bar"', '', 'exec'))
    assert code1 == code1
    code2 = _pytest._code.Code(compile('foo = "baz"', '', 'exec'))
    assert code2 != code1
def test_code_gives_back_name_for_not_existing_file():
    # When co_filename points at no real file, Code.path falls back to the
    # raw name and fullsource is unavailable.
    name = 'abc-123'
    co_code = compile("pass\n", name, 'exec')
    assert co_code.co_filename == name
    code = _pytest._code.Code(co_code)
    assert str(code.path) == name
    assert code.fullsource is None
def test_code_with_class():
    # Wrapping a class (not a function/code object) must raise TypeError.
    class A:
        pass
    pytest.raises(TypeError, "_pytest._code.Code(A)")
# `x` is deliberately defined inside an `if` so the tests below can exercise
# source extraction of a nested (indented) definition.  Its body must remain
# exactly these two lines: test_code_source asserts the exact text.
if True:
    def x():
        pass
def test_code_fullsource():
    # fullsource spans the whole module, so it contains this test's own name.
    code = _pytest._code.Code(x)
    full = code.fullsource
    assert 'test_code_fullsource()' in str(full)
def test_code_source():
    # source() must return exactly the (dedented) two-line definition of `x`.
    code = _pytest._code.Code(x)
    src = code.source()
    expected = """def x():
    pass"""
    assert str(src) == expected
def test_frame_getsourcelineno_myself():
    def func():
        return sys._getframe(0)
    f = func()
    f = _pytest._code.Frame(f)
    # lineno indexes into the module's fullsource (0-based) and must point
    # at the `return` line of `func` above.
    source, lineno = f.code.fullsource, f.lineno
    assert source[lineno].startswith("        return sys._getframe(0)")
def test_getstatement_empty_fullsource():
    def func():
        return sys._getframe(0)
    f = func()
    f = _pytest._code.Frame(f)
    prop = f.code.__class__.fullsource
    try:
        # Simulate missing source: statement must degrade to an empty Source.
        f.code.__class__.fullsource = None
        assert f.statement == _pytest._code.Source("")
    finally:
        # Restore the class-level property so other tests are unaffected.
        f.code.__class__.fullsource = prop
def test_code_from_func():
    # A Code built from a function exposes its first line number and path.
    co = _pytest._code.Code(test_frame_getsourcelineno_myself)
    assert co.firstlineno
    assert co.path
def test_builtin_patch_unpatch(monkeypatch):
    cpy_builtin = py.builtin.builtins
    comp = cpy_builtin.compile
    def mycompile(*args, **kwargs):
        return comp(*args, **kwargs)
    class Sub(AssertionError):
        pass
    monkeypatch.setattr(cpy_builtin, 'AssertionError', Sub)
    monkeypatch.setattr(cpy_builtin, 'compile', mycompile)
    # patch_builtins() installs pytest's own AssertionError/compile ...
    _pytest._code.patch_builtins()
    assert cpy_builtin.AssertionError != Sub
    assert cpy_builtin.compile != mycompile
    # ... and unpatch_builtins() restores whatever was installed before.
    _pytest._code.unpatch_builtins()
    assert cpy_builtin.AssertionError is Sub
    assert cpy_builtin.compile == mycompile
def test_unicode_handling():
    # Raising with non-ASCII bytes must not break excinfo stringification.
    value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
    def f():
        raise Exception(value)
    excinfo = pytest.raises(Exception, f)
    str(excinfo)
    if sys.version_info[0] < 3:
        # Python 2 only: the unicode() conversion must also succeed.
        unicode(excinfo)
@pytest.mark.skipif(sys.version_info[0] >= 3, reason='python 2 only issue')
def test_unicode_handling_syntax_error():
    # Same as above but for SyntaxError, whose args carry the source bytes.
    value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
    def f():
        raise SyntaxError('invalid syntax', (None, 1, 3, value))
    excinfo = pytest.raises(Exception, f)
    str(excinfo)
    if sys.version_info[0] < 3:
        unicode(excinfo)
def test_code_getargs():
    # getargs(var=True) also includes *args and **kwargs parameter names.
    def f1(x):
        pass
    c1 = _pytest._code.Code(f1)
    assert c1.getargs(var=True) == ('x',)
    def f2(x, *y):
        pass
    c2 = _pytest._code.Code(f2)
    assert c2.getargs(var=True) == ('x', 'y')
    def f3(x, **z):
        pass
    c3 = _pytest._code.Code(f3)
    assert c3.getargs(var=True) == ('x', 'z')
    def f4(x, *y, **z):
        pass
    c4 = _pytest._code.Code(f4)
    assert c4.getargs(var=True) == ('x', 'y', 'z')
def test_frame_getargs():
    # Frame.getargs(var=True) pairs each parameter name with its bound value,
    # collecting *args into a tuple and **kwargs into a dict.
    def f1(x):
        return sys._getframe(0)
    fr1 = _pytest._code.Frame(f1('a'))
    assert fr1.getargs(var=True) == [('x', 'a')]
    def f2(x, *y):
        return sys._getframe(0)
    fr2 = _pytest._code.Frame(f2('a', 'b', 'c'))
    assert fr2.getargs(var=True) == [('x', 'a'), ('y', ('b', 'c'))]
    def f3(x, **z):
        return sys._getframe(0)
    fr3 = _pytest._code.Frame(f3('a', b='c'))
    assert fr3.getargs(var=True) == [('x', 'a'), ('z', {'b': 'c'})]
    def f4(x, *y, **z):
        return sys._getframe(0)
    fr4 = _pytest._code.Frame(f4('a', 'b', c='d'))
    assert fr4.getargs(var=True) == [('x', 'a'), ('y', ('b',)),
                                     ('z', {'c': 'd'})]
class TestExceptionInfo:
    # Regression check: getrepr() must work even when source retrieval is
    # awkward (failing assert inside an `else` on a one-line branch).
    def test_bad_getsource(self):
        try:
            if False: pass
            else: assert False
        except AssertionError:
            exci = _pytest._code.ExceptionInfo()
        assert exci.getrepr()
class TestTracebackEntry:
    def test_getsource(self):
        try:
            if False: pass
            else: assert False
        except AssertionError:
            exci = _pytest._code.ExceptionInfo()
        entry = exci.traceback[0]
        # getsource() returns the failing statement plus leading context.
        source = entry.getsource()
        assert len(source) == 4
        assert 'else: assert False' in source[3]
|
samzhang111/scikit-learn | refs/heads/master | examples/model_selection/plot_precision_recall.py | 249 | """
================
Precision-Recall
================
Example of Precision-Recall metric to evaluate classifier output quality.
In information retrieval, precision is a measure of result relevancy, while
recall is a measure of how many truly relevant results are returned. A high
area under the curve represents both high recall and high precision, where high
precision relates to a low false positive rate, and high recall relates to a
low false negative rate. High scores for both show that the classifier is
returning accurate results (high precision), as well as returning a majority of
all positive results (high recall).
A system with high recall but low precision returns many results, but most of
its predicted labels are incorrect when compared to the training labels. A
system with high precision but low recall is just the opposite, returning very
few results, but most of its predicted labels are correct when compared to the
training labels. An ideal system with high precision and high recall will
return many results, with all results labeled correctly.
Precision (:math:`P`) is defined as the number of true positives (:math:`T_p`)
over the number of true positives plus the number of false positives
(:math:`F_p`).
:math:`P = \\frac{T_p}{T_p+F_p}`
Recall (:math:`R`) is defined as the number of true positives (:math:`T_p`)
over the number of true positives plus the number of false negatives
(:math:`F_n`).
:math:`R = \\frac{T_p}{T_p + F_n}`
These quantities are also related to the (:math:`F_1`) score, which is defined
as the harmonic mean of precision and recall.
:math:`F1 = 2\\frac{P \\times R}{P+R}`
It is important to note that the precision may not decrease with recall. The
definition of precision (:math:`\\frac{T_p}{T_p + F_p}`) shows that lowering
the threshold of a classifier may increase the denominator, by increasing the
number of results returned. If the threshold was previously set too high, the
new results may all be true positives, which will increase precision. If the
previous threshold was about right or too low, further lowering the threshold
will introduce false positives, decreasing precision.
Recall is defined as :math:`\\frac{T_p}{T_p+F_n}`, where :math:`T_p+F_n` does
not depend on the classifier threshold. This means that lowering the classifier
threshold may increase recall, by increasing the number of true positive
results. It is also possible that lowering the threshold may leave recall
unchanged, while the precision fluctuates.
The relationship between recall and precision can be observed in the
stairstep area of the plot - at the edges of these steps a small change
in the threshold considerably reduces precision, with only a minor gain in
recall. See the corner at recall = .59, precision = .8 for an example of this
phenomenon.
Precision-recall curves are typically used in binary classification to study
the output of a classifier. In order to extend Precision-recall curve and
average precision to multi-class or multi-label classification, it is necessary
to binarize the output. One curve can be drawn per label, but one can also draw
a precision-recall curve by considering each element of the label indicator
matrix as a binary prediction (micro-averaging).
.. note::
See also :func:`sklearn.metrics.average_precision_score`,
:func:`sklearn.metrics.recall_score`,
:func:`sklearn.metrics.precision_score`,
:func:`sklearn.metrics.f1_score`
"""
print(__doc__)
import matplotlib.pyplot as plt
import numpy as np
from sklearn import svm, datasets
from sklearn.metrics import precision_recall_curve
from sklearn.metrics import average_precision_score
# NOTE(review): sklearn.cross_validation was deprecated and later removed;
# the modern import is sklearn.model_selection.train_test_split — update
# when upgrading scikit-learn.
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier
# import some data to play with
iris = datasets.load_iris()
X = iris.data
y = iris.target
# Binarize the output
y = label_binarize(y, classes=[0, 1, 2])
n_classes = y.shape[1]
# Add noisy features
random_state = np.random.RandomState(0)
n_samples, n_features = X.shape
X = np.c_[X, random_state.randn(n_samples, 200 * n_features)]
# Split into training and test
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5,
                                                    random_state=random_state)
# Run classifier
classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True,
                                 random_state=random_state))
y_score = classifier.fit(X_train, y_train).decision_function(X_test)
# Compute Precision-Recall and plot curve
precision = dict()
recall = dict()
average_precision = dict()
for i in range(n_classes):
    precision[i], recall[i], _ = precision_recall_curve(y_test[:, i],
                                                        y_score[:, i])
    average_precision[i] = average_precision_score(y_test[:, i], y_score[:, i])
# Compute the micro-averaged precision-recall curve and average precision
# (original comment incorrectly said "ROC curve"; this is precision-recall).
precision["micro"], recall["micro"], _ = precision_recall_curve(y_test.ravel(),
    y_score.ravel())
average_precision["micro"] = average_precision_score(y_test, y_score,
                                                     average="micro")
# Plot Precision-Recall curve
plt.clf()
plt.plot(recall[0], precision[0], label='Precision-Recall curve')
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.title('Precision-Recall example: AUC={0:0.2f}'.format(average_precision[0]))
plt.legend(loc="lower left")
plt.show()
# Plot Precision-Recall curve for each class
plt.clf()
plt.plot(recall["micro"], precision["micro"],
         label='micro-average Precision-recall curve (area = {0:0.2f})'
               ''.format(average_precision["micro"]))
for i in range(n_classes):
    plt.plot(recall[i], precision[i],
             label='Precision-recall curve of class {0} (area = {1:0.2f})'
                   ''.format(i, average_precision[i]))
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.title('Extension of Precision-Recall curve to multi-class')
plt.legend(loc="lower right")
plt.show()
|
ryfeus/lambda-packs | refs/heads/master | Selenium_Chromium/source/setuptools/ssl_support.py | 64 | import os
import socket
import atexit
import re
import functools
from setuptools.extern.six.moves import urllib, http_client, map, filter
from pkg_resources import ResolutionError, ExtractionError
try:
    import ssl
except ImportError:
    # ssl can be absent on minimal Python builds; degrade gracefully below.
    ssl = None
__all__ = [
    'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
    'opener_for'
]
# Well-known CA bundle locations across Linux/BSD/macOS distributions,
# probed in order when no explicit bundle is supplied.
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
/etc/ssl/ca-bundle.pem
""".strip().split()
try:
    HTTPSHandler = urllib.request.HTTPSHandler
    HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
    # Python compiled without SSL: fall back to plain object so issubclass
    # checks and the is_available test below still work.
    HTTPSHandler = HTTPSConnection = object
is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
try:
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Third-party backport of ssl.match_hostname for old Pythons.
        from backports.ssl_match_hostname import CertificateError
        from backports.ssl_match_hostname import match_hostname
    except ImportError:
        CertificateError = None
        match_hostname = None
if not CertificateError:
    class CertificateError(ValueError):
        # Local stand-in matching the stdlib exception's type hierarchy.
        pass
if not match_hostname:
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate")
    dnsnames = []
    # subjectAltName dNSName entries take precedence over the subject.
    for key, value in cert.get('subjectAltName', ()):
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only consulted when subjectAltName carried no
        # dNSName entries at all.
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    # Nothing matched: report every candidate name we considered.
    if len(dnsnames) > 1:
        raise CertificateError(
            "hostname %r doesn't match either of %s"
            % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError(
            "hostname %r doesn't match %r"
            % (hostname, dnsnames[0]))
    else:
        raise CertificateError(
            "no appropriate commonName or "
            "subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
    """Simple verifying handler: no auth, subclasses, timeouts, etc."""

    def __init__(self, ca_bundle):
        # The CA bundle path is forwarded to every connection we open.
        self.ca_bundle = ca_bundle
        HTTPSHandler.__init__(self)

    def https_open(self, req):
        def conn_factory(host, **kw):
            # Inject the CA bundle while keeping the (host, **kw)
            # call signature that do_open() expects.
            return VerifyingHTTPSConn(host, self.ca_bundle, **kw)
        return self.do_open(conn_factory, req)
class VerifyingHTTPSConn(HTTPSConnection):
    """Simple verifying connection: no auth, subclasses, timeouts, etc."""
    def __init__(self, host, ca_bundle, **kw):
        HTTPSConnection.__init__(self, host, **kw)
        # Path of the CA bundle used to verify the server certificate.
        self.ca_bundle = ca_bundle
    def connect(self):
        # Open the raw TCP connection first; TLS is layered on below.
        sock = socket.create_connection(
            (self.host, self.port), getattr(self, 'source_address', None)
        )
        # Handle the socket if a (proxy) tunnel is present
        if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
            self.sock = sock
            self._tunnel()
            # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
            # change self.host to mean the proxy server host when tunneling is
            # being used. Adapt, since we are interested in the destination
            # host for the match_hostname() comparison.
            actual_host = self._tunnel_host
        else:
            actual_host = self.host
        if hasattr(ssl, 'create_default_context'):
            # Modern path: the default context verifies the certificate
            # chain and checks the hostname during the handshake, so no
            # manual match_hostname() call is needed here.
            ctx = ssl.create_default_context(cafile=self.ca_bundle)
            self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)
        else:
            # This is for python < 2.7.9 and < 3.4?
            self.sock = ssl.wrap_socket(
                sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
            )
            try:
                # Legacy path: hostname checking must be done by hand.
                match_hostname(self.sock.getpeercert(), actual_host)
            except CertificateError:
                # Handshake succeeded but the peer is not who we wanted:
                # tear the socket down before propagating the error.
                self.sock.shutdown(socket.SHUT_RDWR)
                self.sock.close()
                raise
def opener_for(ca_bundle=None):
    """Get a urlopen() replacement that uses ca_bundle for verification"""
    # Fall back to an auto-discovered bundle when none is supplied.
    handler = VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
    return urllib.request.build_opener(handler).open
# from jaraco.functools
def once(func):
    """Decorator: call *func* at most once and replay its first result.

    The cached value is stored on the wrapped function object itself
    (attribute ``always_returns``), so every later call — whatever its
    arguments — returns the result of the first call.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func.always_returns
        except AttributeError:
            func.always_returns = func(*args, **kwargs)
            return func.always_returns
    return wrapper
@once
def get_win_certfile():
    """Return the path of a temp file holding Windows CA certs, or None.

    Requires the optional ``wincertstore`` package; when it is missing
    (e.g. on non-Windows platforms) this returns None.  The result is
    cached by @once, and the backing file is closed at interpreter exit.
    """
    try:
        import wincertstore
    except ImportError:
        return None
    class CertFile(wincertstore.CertFile):
        def __init__(self):
            super(CertFile, self).__init__()
            # Ensure the temp cert file is cleaned up when Python exits.
            atexit.register(self.close)
        def close(self):
            try:
                super(CertFile, self).close()
            except OSError:
                # Already closed/removed; nothing left to clean up.
                pass
    _wincerts = CertFile()
    # Pull both the CA and ROOT system stores into the bundle file.
    _wincerts.addstore('CA')
    _wincerts.addstore('ROOT')
    return _wincerts.name
def find_ca_bundle():
    """Return an existing CA bundle path, or None.

    Preference order: the Windows certificate store, the first existing
    path from the module-level ``cert_paths`` list, then certifi.
    """
    existing = (path for path in cert_paths if os.path.isfile(path))
    return get_win_certfile() or next(existing, None) or _certifi_where()
def _certifi_where():
    """Return certifi's CA bundle path, or None if certifi is unusable."""
    try:
        return __import__('certifi').where()
    except (ImportError, ResolutionError, ExtractionError):
        # certifi is missing or its data file cannot be extracted —
        # treat both as "no bundle available".
        return None
|
svanschalkwyk/datafari | refs/heads/master | windows/python/Lib/unittest/test/test_loader.py | 40 | import sys
import types
import unittest
class Test_TestLoader(unittest.TestCase):
### Tests for TestLoader.loadTestsFromTestCase
################################################################
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
def test_loadTestsFromTestCase(self):
    """All test_*-prefixed methods of a TestCase end up in the suite."""
    class Foo(unittest.TestCase):
        def test_1(self): pass
        def test_2(self): pass
        def foo_bar(self): pass

    expected = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
    self.assertEqual(unittest.TestLoader().loadTestsFromTestCase(Foo),
                     expected)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure it does the right thing even if no tests were found
def test_loadTestsFromTestCase__no_matches(self):
    """A TestCase with no test_* methods yields an empty suite."""
    class Foo(unittest.TestCase):
        def foo_bar(self): pass

    loader = unittest.TestLoader()
    self.assertEqual(loader.loadTestsFromTestCase(Foo),
                     unittest.TestSuite())
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# What happens if loadTestsFromTestCase() is given an object
# that isn't a subclass of TestCase? Specifically, what happens
# if testCaseClass is a subclass of TestSuite?
#
# This is checked for specifically in the code, so we better add a
# test for it.
def test_loadTestsFromTestCase__TestSuite_subclass(self):
    """Passing a TestSuite subclass (not a TestCase) raises TypeError."""
    class NotATestCase(unittest.TestSuite):
        pass

    try:
        unittest.TestLoader().loadTestsFromTestCase(NotATestCase)
    except TypeError:
        pass
    else:
        self.fail('Should raise TypeError')
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure loadTestsFromTestCase() picks up the default test method
# name (as specified by TestCase), even though the method name does
# not match the default TestLoader.testMethodPrefix string
def test_loadTestsFromTestCase__default_method_name(self):
    """The default 'runTest' method is picked up despite the prefix."""
    class Foo(unittest.TestCase):
        def runTest(self):
            pass

    loader = unittest.TestLoader()
    # This has to be false for the test to succeed
    self.assertFalse('runTest'.startswith(loader.testMethodPrefix))
    found = loader.loadTestsFromTestCase(Foo)
    self.assertIsInstance(found, loader.suiteClass)
    self.assertEqual(list(found), [Foo('runTest')])
################################################################
### /Tests for TestLoader.loadTestsFromTestCase
### Tests for TestLoader.loadTestsFromModule
################################################################
# "This method searches `module` for classes derived from TestCase"
def test_loadTestsFromModule__TestCase_subclass(self):
    """TestCase subclasses attached to a module are collected."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromModule(m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite),
                     [loader.suiteClass([MyTestCase('test')])])
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (no TestCase instances)?
def test_loadTestsFromModule__no_TestCase_instances(self):
    """A module without any TestCase subclasses yields an empty suite."""
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromModule(types.ModuleType('m'))
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [])
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (TestCases instances, but no tests)?
def test_loadTestsFromModule__no_TestCase_tests(self):
    """A TestCase with no tests still contributes an empty sub-suite."""
    class MyTestCase(unittest.TestCase):
        pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromModule(m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [loader.suiteClass()])
# "This method searches `module` for classes derived from TestCase"s
#
# What happens if loadTestsFromModule() is given something other
# than a module?
#
# XXX Currently, it succeeds anyway. This flexibility
# should either be documented or loadTestsFromModule() should
# raise a TypeError
#
# XXX Certain people are using this behaviour. We'll add a test for it
def test_loadTestsFromModule__not_a_module(self):
    """Any object carrying TestCase attributes is accepted as a module."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    class NotAModule(object):
        test_2 = MyTestCase

    suite = unittest.TestLoader().loadTestsFromModule(NotAModule)
    self.assertEqual(list(suite),
                     [unittest.TestSuite([MyTestCase('test')])])
# Check that loadTestsFromModule honors (or not) a module
# with a load_tests function.
def test_loadTestsFromModule__load_tests(self):
    """A module-level load_tests() hook is honored — and skippable."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase

    load_tests_args = []
    def load_tests(loader, tests, pattern):
        self.assertIsInstance(tests, unittest.TestSuite)
        load_tests_args.extend((loader, tests, pattern))
        return tests
    m.load_tests = load_tests

    loader = unittest.TestLoader()
    suite = loader.loadTestsFromModule(m)
    self.assertIsInstance(suite, unittest.TestSuite)
    # The hook must have been called exactly once, with these arguments.
    self.assertEqual(load_tests_args, [loader, suite, None])

    # With use_load_tests=False the hook must not run at all.
    load_tests_args = []
    suite = loader.loadTestsFromModule(m, use_load_tests=False)
    self.assertEqual(load_tests_args, [])
def test_loadTestsFromModule__faulty_load_tests(self):
    # A load_tests() hook that raises must not abort loading: the loader
    # captures the exception and synthesizes a single "failed" test that
    # re-raises it when run.
    m = types.ModuleType('m')
    def load_tests(loader, tests, pattern):
        raise TypeError('some failure')
    m.load_tests = load_tests
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromModule(m)
    self.assertIsInstance(suite, unittest.TestSuite)
    self.assertEqual(suite.countTestCases(), 1)
    test = list(suite)[0]
    # The synthesized failure test's method is named after the module
    # ('m'), so invoking ``test.m`` replays the original TypeError.
    self.assertRaisesRegexp(TypeError, "some failure", test.m)
################################################################
### /Tests for TestLoader.loadTestsFromModule()
### Tests for TestLoader.loadTestsFromName()
################################################################
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
def test_loadTestsFromName__empty_name(self):
    """An empty name must raise ValueError with a specific message."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromName('')
    except ValueError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "Empty module name")
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the name contains invalid characters?
def test_loadTestsFromName__malformed_name(self):
    """A syntactically impossible name raises ValueError or ImportError."""
    # XXX Should this raise ValueError or ImportError?
    try:
        unittest.TestLoader().loadTestsFromName('abc () //')
    except (ValueError, ImportError):
        pass
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve ... to a
# module"
#
# What happens when a module by that name can't be found?
def test_loadTestsFromName__unknown_module_name(self):
    """A name for a nonexistent module raises ImportError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromName('sdasfasfasdf')
    except ImportError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "No module named sdasfasfasdf")
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module is found, but the attribute can't?
def test_loadTestsFromName__unknown_attr_name(self):
    """A found module with a missing attribute raises AttributeError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromName('unittest.sdasfasfasdf')
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when we provide the module, but the attribute can't be
# found?
def test_loadTestsFromName__relative_unknown_name(self):
    """A missing attribute relative to a module raises AttributeError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromName('sdasfasfasdf', unittest)
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise ValueError when passed an empty
# name relative to a provided module?
#
# XXX Should probably raise a ValueError instead of an AttributeError
def test_loadTestsFromName__relative_empty_name(self):
    """An empty relative name currently raises AttributeError."""
    try:
        unittest.TestLoader().loadTestsFromName('', unittest)
    except AttributeError:
        pass
    else:
        self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when an impossible name is given, relative to the provided
# `module`?
def test_loadTestsFromName__relative_malformed_name(self):
    """A malformed relative name raises ValueError or AttributeError."""
    # XXX Should this raise AttributeError or ValueError?
    try:
        unittest.TestLoader().loadTestsFromName('abc () //', unittest)
    except (ValueError, AttributeError):
        pass
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise TypeError when the `module` argument
# isn't a module object?
#
# XXX Accepts the not-a-module object, ignoring the object's type
# This should raise an exception or the method name should be changed
#
# XXX Some people are relying on this, so keep it for now
def test_loadTestsFromName__relative_not_a_module(self):
    """A plain object with test attributes works in place of a module."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    class NotAModule(object):
        test_2 = MyTestCase

    suite = unittest.TestLoader().loadTestsFromName('test_2', NotAModule)
    self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromName__relative_bad_object(self):
    """Resolving to a non-test object raises TypeError."""
    m = types.ModuleType('m')
    m.testcase_1 = object()
    try:
        unittest.TestLoader().loadTestsFromName('testcase_1', m)
    except TypeError:
        pass
    else:
        self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may
# resolve either to ... a test case class"
def test_loadTestsFromName__relative_TestCase_subclass(self):
    """A name resolving to a TestCase subclass loads its tests."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromName('testcase_1', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
def test_loadTestsFromName__relative_TestSuite(self):
    """A name resolving to a TestSuite instance is loaded as-is."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testsuite = unittest.TestSuite([MyTestCase('test')])
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromName('testsuite', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
def test_loadTestsFromName__relative_testmethod(self):
    """A dotted name may address a single test method."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromName('testcase_1.test', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does loadTestsFromName() raise the proper exception when trying to
# resolve "a test method within a test case class" that doesn't exist
# for the given name (relative to a provided module)?
def test_loadTestsFromName__relative_invalid_testmethod(self):
    """A dotted name naming a missing test method raises AttributeError."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromName('testcase_1.testfoo', m)
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
    else:
        self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromName__callable__TestSuite(self):
    """A callable returning a TestSuite is invoked and its tests used."""
    testcase_1 = unittest.FunctionTestCase(lambda: None)
    testcase_2 = unittest.FunctionTestCase(lambda: None)
    def return_TestSuite():
        return unittest.TestSuite([testcase_1, testcase_2])

    m = types.ModuleType('m')
    m.return_TestSuite = return_TestSuite
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromName('return_TestSuite', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [testcase_1, testcase_2])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromName__callable__TestCase_instance(self):
    """A callable returning a TestCase instance is invoked and wrapped."""
    testcase_1 = unittest.FunctionTestCase(lambda: None)
    def return_TestCase():
        return testcase_1

    m = types.ModuleType('m')
    m.return_TestCase = return_TestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromName('return_TestCase', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
#*****************************************************************
#Override the suiteClass attribute to ensure that the suiteClass
#attribute is used
def test_loadTestsFromName__callable__TestCase_instance_ProperSuiteClass(self):
    """A custom suiteClass is used to wrap a callable's TestCase."""
    class SubTestSuite(unittest.TestSuite):
        pass

    testcase_1 = unittest.FunctionTestCase(lambda: None)
    def return_TestCase():
        return testcase_1

    m = types.ModuleType('m')
    m.return_TestCase = return_TestCase
    loader = unittest.TestLoader()
    loader.suiteClass = SubTestSuite
    suite = loader.loadTestsFromName('return_TestCase', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
#*****************************************************************
#Override the suiteClass attribute to ensure that the suiteClass
#attribute is used
def test_loadTestsFromName__relative_testmethod_ProperSuiteClass(self):
    """A custom suiteClass is used when addressing a single method."""
    class SubTestSuite(unittest.TestSuite):
        pass

    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    loader.suiteClass = SubTestSuite
    suite = loader.loadTestsFromName('testcase_1.test', m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens if the callable returns something else?
def test_loadTestsFromName__callable__wrong_type(self):
    """A callable returning a non-test object raises TypeError."""
    def return_wrong():
        return 6

    m = types.ModuleType('m')
    m.return_wrong = return_wrong
    try:
        unittest.TestLoader().loadTestsFromName('return_wrong', m)
    except TypeError:
        pass
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
def test_loadTestsFromName__module_not_loaded(self):
    """Loading by name imports the module as a side effect."""
    # We're going to try to load this module as a side-effect, so it
    # better not be loaded before we try.
    module_name = 'unittest.test.dummy'
    sys.modules.pop(module_name, None)

    loader = unittest.TestLoader()
    try:
        suite = loader.loadTestsFromName(module_name)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [])
        # module should now be loaded, thanks to loadTestsFromName()
        self.assertIn(module_name, sys.modules)
    finally:
        # Leave sys.modules the way we found it.
        sys.modules.pop(module_name, None)
################################################################
### Tests for TestLoader.loadTestsFromName()
### Tests for TestLoader.loadTestsFromNames()
################################################################
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
#
# What happens if that sequence of names is empty?
def test_loadTestsFromNames__empty_name_list(self):
    """An empty list of names produces an empty suite."""
    loader = unittest.TestLoader()
    result = loader.loadTestsFromNames([])
    self.assertIsInstance(result, loader.suiteClass)
    self.assertEqual(list(result), [])
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens if that sequence of names is empty?
#
# XXX Should this raise a ValueError or just return an empty TestSuite?
def test_loadTestsFromNames__relative_empty_name_list(self):
    """An empty list of relative names also produces an empty suite."""
    loader = unittest.TestLoader()
    result = loader.loadTestsFromNames([], unittest)
    self.assertIsInstance(result, loader.suiteClass)
    self.assertEqual(list(result), [])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
def test_loadTestsFromNames__empty_name(self):
    """An empty name in the list must raise ValueError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromNames([''])
    except ValueError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "Empty module name")
    else:
        self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when presented with an impossible module name?
def test_loadTestsFromNames__malformed_name(self):
    """An impossible module name raises ValueError or ImportError."""
    # XXX Should this raise ValueError or ImportError?
    try:
        unittest.TestLoader().loadTestsFromNames(['abc () //'])
    except (ValueError, ImportError):
        pass
    else:
        self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when no module can be found for the given name?
def test_loadTestsFromNames__unknown_module_name(self):
    """A name for a nonexistent module raises ImportError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromNames(['sdasfasfasdf'])
    except ImportError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "No module named sdasfasfasdf")
    else:
        self.fail("TestLoader.loadTestsFromNames failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module can be found, but not the attribute?
def test_loadTestsFromNames__unknown_attr_name(self):
    """A found module with a missing attribute raises AttributeError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromNames(['unittest.sdasfasfasdf', 'unittest'])
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
    else:
        self.fail("TestLoader.loadTestsFromNames failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when given an unknown attribute on a specified `module`
# argument?
def test_loadTestsFromNames__unknown_name_relative_1(self):
    """An unknown attribute relative to a module raises AttributeError."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromNames(['sdasfasfasdf'], unittest)
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Do unknown attributes (relative to a provided module) still raise an
# exception even in the presence of valid attribute names?
def test_loadTestsFromNames__unknown_name_relative_2(self):
    """An unknown name still raises even alongside valid names."""
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromNames(['TestCase', 'sdasfasfasdf'], unittest)
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
    else:
        self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when faced with the empty string?
#
# XXX This currently raises AttributeError, though ValueError is probably
# more appropriate
def test_loadTestsFromNames__relative_empty_name(self):
    """An empty relative name currently raises AttributeError."""
    try:
        unittest.TestLoader().loadTestsFromNames([''], unittest)
    except AttributeError:
        pass
    else:
        self.fail("Failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when presented with an impossible attribute name?
def test_loadTestsFromNames__relative_malformed_name(self):
    """A malformed relative name raises AttributeError or ValueError."""
    # XXX Should this raise AttributeError or ValueError?
    try:
        unittest.TestLoader().loadTestsFromNames(['abc () //'], unittest)
    except (AttributeError, ValueError):
        pass
    else:
        self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromNames() make sure the provided `module` is in fact
# a module?
#
# XXX This validation is currently not done. This flexibility should
# either be documented or a TypeError should be raised.
def test_loadTestsFromNames__relative_not_a_module(self):
    """A non-module object also works with loadTestsFromNames()."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    class NotAModule(object):
        test_2 = MyTestCase

    suite = unittest.TestLoader().loadTestsFromNames(['test_2'], NotAModule)
    self.assertEqual(list(suite),
                     [unittest.TestSuite([MyTestCase('test')])])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromNames__relative_bad_object(self):
    """Resolving to a non-test object raises TypeError."""
    m = types.ModuleType('m')
    m.testcase_1 = object()
    try:
        unittest.TestLoader().loadTestsFromNames(['testcase_1'], m)
    except TypeError:
        pass
    else:
        self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test case class"
def test_loadTestsFromNames__relative_TestCase_subclass(self):
    """Each name's tests are wrapped in a sub-suite."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromNames(['testcase_1'], m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite),
                     [loader.suiteClass([MyTestCase('test')])])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a TestSuite instance"
def test_loadTestsFromNames__relative_TestSuite(self):
    """A name resolving to a TestSuite instance is loaded as-is."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testsuite = unittest.TestSuite([MyTestCase('test')])
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromNames(['testsuite'], m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite), [m.testsuite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
def test_loadTestsFromNames__relative_testmethod(self):
    """A dotted name may address a single test method."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromNames(['testcase_1.test'], m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite),
                     [unittest.TestSuite([MyTestCase('test')])])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
#
# Does the method gracefully handle names that initially look like they
# resolve to "a test method within a test case class" but don't?
def test_loadTestsFromNames__relative_invalid_testmethod(self):
    """A dotted name naming a missing test method raises AttributeError."""
    class MyTestCase(unittest.TestCase):
        def test(self):
            pass

    m = types.ModuleType('m')
    m.testcase_1 = MyTestCase
    loader = unittest.TestLoader()
    try:
        loader.loadTestsFromNames(['testcase_1.testfoo'], m)
    except AttributeError as e:  # 'as' form: valid on Python 2.6+ and 3.x
        self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
    else:
        self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromNames__callable__TestSuite(self):
    """A callable returning a TestSuite is invoked and wrapped."""
    testcase_1 = unittest.FunctionTestCase(lambda: None)
    testcase_2 = unittest.FunctionTestCase(lambda: None)
    def return_TestSuite():
        return unittest.TestSuite([testcase_1, testcase_2])

    m = types.ModuleType('m')
    m.return_TestSuite = return_TestSuite
    loader = unittest.TestLoader()
    suite = loader.loadTestsFromNames(['return_TestSuite'], m)
    self.assertIsInstance(suite, loader.suiteClass)
    self.assertEqual(list(suite),
                     [unittest.TestSuite([testcase_1, testcase_2])])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromNames__callable__TestCase_instance(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestCase'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# Are staticmethods handled correctly?
def test_loadTestsFromNames__callable__call_staticmethod(self):
m = types.ModuleType('m')
class Test1(unittest.TestCase):
def test(self):
pass
testcase_1 = Test1('test')
class Foo(unittest.TestCase):
@staticmethod
def foo():
return testcase_1
m.Foo = Foo
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['Foo.foo'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens when the callable returns something else?
def test_loadTestsFromNames__callable__wrong_type(self):
m = types.ModuleType('m')
def return_wrong():
return 6
m.return_wrong = return_wrong
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['return_wrong'], m)
except TypeError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise TypeError")
    # "The specifier can refer to modules and packages which have not been
    # imported; they will be imported as a side-effect"
    def test_loadTestsFromNames__module_not_loaded(self):
        """Names may refer to modules that get imported as a side-effect."""
        # We're going to try to load this module as a side-effect, so it
        # better not be loaded before we try.
        #
        module_name = 'unittest.test.dummy'
        sys.modules.pop(module_name, None)
        loader = unittest.TestLoader()
        try:
            suite = loader.loadTestsFromNames([module_name])
            self.assertIsInstance(suite, loader.suiteClass)
            self.assertEqual(list(suite), [unittest.TestSuite()])
            # module should now be loaded, thanks to loadTestsFromName()
            self.assertIn(module_name, sys.modules)
        finally:
            # Leave sys.modules the way we found it.
            if module_name in sys.modules:
                del sys.modules[module_name]
################################################################
### /Tests for TestLoader.loadTestsFromNames()
### Tests for TestLoader.getTestCaseNames()
################################################################
# "Return a sorted sequence of method names found within testCaseClass"
#
# Test.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames(self):
class Test(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), ['test_1', 'test_2'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Does getTestCaseNames() behave appropriately if no tests are found?
def test_getTestCaseNames__no_tests(self):
class Test(unittest.TestCase):
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), [])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Are not-TestCases handled gracefully?
#
# XXX This should raise a TypeError, not return a list
#
# XXX It's too late in the 2.5 release cycle to fix this, but it should
# probably be revisited for 2.6
def test_getTestCaseNames__not_a_TestCase(self):
class BadCase(int):
def test_foo(self):
pass
loader = unittest.TestLoader()
names = loader.getTestCaseNames(BadCase)
self.assertEqual(names, ['test_foo'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Make sure inherited names are handled.
#
# TestP.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames__inheritance(self):
class TestP(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
class TestC(TestP):
def test_1(self): pass
def test_3(self): pass
loader = unittest.TestLoader()
names = ['test_1', 'test_2', 'test_3']
self.assertEqual(loader.getTestCaseNames(TestC), names)
################################################################
### /Tests for TestLoader.getTestCaseNames()
### Tests for TestLoader.testMethodPrefix
################################################################
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = [unittest.TestSuite([Foo('foo_bar')])]
tests_2 = [unittest.TestSuite([Foo('test_1'), Foo('test_2')])]
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([unittest.TestSuite([Foo('foo_bar')])])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
tests_2 = unittest.TestSuite([tests_2])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_2)
# "The default value is 'test'"
def test_testMethodPrefix__default_value(self):
loader = unittest.TestLoader()
self.assertTrue(loader.testMethodPrefix == 'test')
    ################################################################
    ### /Tests for TestLoader.testMethodPrefix
    ### Tests for TestLoader.sortTestMethodsUsing
    ################################################################
    # "Function to be used to compare method names when sorting them in
    # getTestCaseNames() and all the loadTestsFromX() methods"
    def test_sortTestMethodsUsing__loadTestsFromTestCase(self):
        """loadTestsFromTestCase() sorts with the custom comparator."""
        def reversed_cmp(x, y):
            # Relies on the Python 2 builtin cmp().
            return -cmp(x, y)
        class Foo(unittest.TestCase):
            def test_1(self): pass
            def test_2(self): pass
        loader = unittest.TestLoader()
        loader.sortTestMethodsUsing = reversed_cmp
        tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
        self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromModule(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromModule(m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromName(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromNames(self):
def reversed_cmp(x, y):
return -cmp(x, y)
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromNames(['Foo'], m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames()"
#
# Does it actually affect getTestCaseNames()?
def test_sortTestMethodsUsing__getTestCaseNames(self):
def reversed_cmp(x, y):
return -cmp(x, y)
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
test_names = ['test_2', 'test_1']
self.assertEqual(loader.getTestCaseNames(Foo), test_names)
# "The default value is the built-in cmp() function"
def test_sortTestMethodsUsing__default_value(self):
loader = unittest.TestLoader()
self.assertTrue(loader.sortTestMethodsUsing is cmp)
# "it can be set to None to disable the sort."
#
# XXX How is this different from reassigning cmp? Are the tests returned
# in a random order or something? This behaviour should die
def test_sortTestMethodsUsing__None(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = None
test_names = ['test_2', 'test_1']
self.assertEqual(set(loader.getTestCaseNames(Foo)), set(test_names))
################################################################
### /Tests for TestLoader.sortTestMethodsUsing
### Tests for TestLoader.suiteClass
################################################################
# "Callable object that constructs a test suite from a list of tests."
def test_suiteClass__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
    # It is implicit in the documentation for TestLoader.suiteClass that
    # all TestLoader.loadTestsFrom* methods respect it. Let's make sure
    def test_suiteClass__loadTestsFromModule(self):
        """loadTestsFromModule() builds its result with suiteClass."""
        m = types.ModuleType('m')
        class Foo(unittest.TestCase):
            def test_1(self): pass
            def test_2(self): pass
            def foo_bar(self): pass
        m.Foo = Foo
        # Outer list is the module suite; inner list the class's tests.
        tests = [[Foo('test_1'), Foo('test_2')]]
        loader = unittest.TestLoader()
        loader.suiteClass = list
        self.assertEqual(loader.loadTestsFromModule(m), tests)
    # It is implicit in the documentation for TestLoader.suiteClass that
    # all TestLoader.loadTestsFrom* methods respect it. Let's make sure
    def test_suiteClass__loadTestsFromName(self):
        """loadTestsFromName() builds its result with suiteClass."""
        m = types.ModuleType('m')
        class Foo(unittest.TestCase):
            def test_1(self): pass
            def test_2(self): pass
            def foo_bar(self): pass
        m.Foo = Foo
        tests = [Foo('test_1'), Foo('test_2')]
        loader = unittest.TestLoader()
        loader.suiteClass = list
        self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
    # It is implicit in the documentation for TestLoader.suiteClass that
    # all TestLoader.loadTestsFrom* methods respect it. Let's make sure
    def test_suiteClass__loadTestsFromNames(self):
        """loadTestsFromNames() builds its result with suiteClass."""
        m = types.ModuleType('m')
        class Foo(unittest.TestCase):
            def test_1(self): pass
            def test_2(self): pass
            def foo_bar(self): pass
        m.Foo = Foo
        # Outer list for the names, inner list for the class's tests.
        tests = [[Foo('test_1'), Foo('test_2')]]
        loader = unittest.TestLoader()
        loader.suiteClass = list
        self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests)
# "The default value is the TestSuite class"
def test_suiteClass__default_value(self):
loader = unittest.TestLoader()
self.assertIs(loader.suiteClass, unittest.TestSuite)
    # Make sure the dotted name resolution works even if the actual
    # function doesn't have the same name as is used to find it.
    def test_loadTestsFromName__function_with_different_name_than_method(self):
        """Resolution goes by attribute name, not the function's __name__."""
        # lambdas have the name '<lambda>'.
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            # The lambda takes no arguments, so it could not actually run as
            # a bound method; this test only loads it, it never runs it.
            test = lambda: 1
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromNames(['testcase_1.test'], m)
        self.assertIsInstance(suite, loader.suiteClass)
        ref_suite = unittest.TestSuite([MyTestCase('test')])
        self.assertEqual(list(suite), [ref_suite])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
angstwad/ansible | refs/heads/devel | test/units/playbook/__init__.py | 7690 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
jamesrenfro/sharealike | refs/heads/master | shareproject/apps/shareserver/constants.py | 1 |
# Display names for the share-server model classes, keyed by an
# upper-case identifier.
MODEL_NAMES = {
    'DOG': 'Dog',
    'PICTURE': 'Picture',
    'PERSON': 'Person',
    'SEARCH_INDEX': 'SearchIndex',
}
|
dudepare/django | refs/heads/master | django/contrib/sessions/backends/base.py | 298 | from __future__ import unicode_literals
import base64
import logging
import string
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.sessions.exceptions import SuspiciousSession
from django.core.exceptions import SuspiciousOperation
from django.utils import timezone
from django.utils.crypto import (
constant_time_compare, get_random_string, salted_hmac,
)
from django.utils.encoding import force_bytes, force_text
from django.utils.module_loading import import_string
# session_key should not be case sensitive because some backends can store it
# on case insensitive file systems.
# 36-character alphabet (a-z, 0-9) used by _get_new_session_key().
VALID_KEY_CHARS = string.ascii_lowercase + string.digits
class CreateError(Exception):
    """
    Consistent exception type raised by save() when a new session could not
    be created (see the docstring for SessionBase.save() for details).
    """
    pass
class SessionBase(object):
    """
    Base class for all Session classes.
    Provides lazy loading of session data, dict-style access, expiry policy
    and signed serialization; storage backends override exists(), create(),
    save(), delete() and load().
    """
    # Cookie name/value pair used by set_test_cookie()/test_cookie_worked()
    # to probe whether the client accepts cookies.
    TEST_COOKIE_NAME = 'testcookie'
    TEST_COOKIE_VALUE = 'worked'
    def __init__(self, session_key=None):
        # session_key may be None for a brand-new session; it is validated
        # on assignment (see _set_session_key below).
        self._session_key = session_key
        self.accessed = False
        self.modified = False
        self.serializer = import_string(settings.SESSION_SERIALIZER)
    # -- Dict-style access; all of these operate on the lazily loaded
    # _session dict and flag the session as modified where appropriate.
    def __contains__(self, key):
        return key in self._session
    def __getitem__(self, key):
        return self._session[key]
    def __setitem__(self, key, value):
        self._session[key] = value
        self.modified = True
    def __delitem__(self, key):
        del self._session[key]
        self.modified = True
    def get(self, key, default=None):
        return self._session.get(key, default)
    def pop(self, key, default=None):
        # Only mark as modified if the key was actually present.
        self.modified = self.modified or key in self._session
        return self._session.pop(key, default)
    def setdefault(self, key, value):
        if key in self._session:
            return self._session[key]
        else:
            self.modified = True
            self._session[key] = value
            return value
    def set_test_cookie(self):
        self[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE
    def test_cookie_worked(self):
        return self.get(self.TEST_COOKIE_NAME) == self.TEST_COOKIE_VALUE
    def delete_test_cookie(self):
        del self[self.TEST_COOKIE_NAME]
    def _hash(self, value):
        # HMAC the serialized payload with a class-specific salt so that
        # tampering can be detected in decode().
        key_salt = "django.contrib.sessions" + self.__class__.__name__
        return salted_hmac(key_salt, value).hexdigest()
    def encode(self, session_dict):
        "Returns the given session dictionary serialized and encoded as a string."
        serialized = self.serializer().dumps(session_dict)
        hash = self._hash(serialized)
        return base64.b64encode(hash.encode() + b":" + serialized).decode('ascii')
    def decode(self, session_data):
        # Inverse of encode(): verify the embedded HMAC (in constant time)
        # before deserializing; any failure yields a fresh empty session.
        encoded_data = base64.b64decode(force_bytes(session_data))
        try:
            # could produce ValueError if there is no ':'
            hash, serialized = encoded_data.split(b':', 1)
            expected_hash = self._hash(serialized)
            if not constant_time_compare(hash.decode(), expected_hash):
                raise SuspiciousSession("Session data corrupted")
            else:
                return self.serializer().loads(serialized)
        except Exception as e:
            # ValueError, SuspiciousOperation, unpickling exceptions. If any of
            # these happen, just return an empty dictionary (an empty session).
            if isinstance(e, SuspiciousOperation):
                logger = logging.getLogger('django.security.%s' %
                        e.__class__.__name__)
                logger.warning(force_text(e))
            return {}
    def update(self, dict_):
        self._session.update(dict_)
        self.modified = True
    def has_key(self, key):
        # Python 2 style alias for ``key in session``.
        return key in self._session
    def keys(self):
        return self._session.keys()
    def values(self):
        return self._session.values()
    def items(self):
        return self._session.items()
    # iter* variants delegate to the dict's Python 2 iterator methods;
    # plain dicts have no iter* methods on Python 3.
    def iterkeys(self):
        return self._session.iterkeys()
    def itervalues(self):
        return self._session.itervalues()
    def iteritems(self):
        return self._session.iteritems()
    def clear(self):
        # To avoid unnecessary persistent storage accesses, we set up the
        # internals directly (loading data wastes time, since we are going to
        # set it to an empty dict anyway).
        self._session_cache = {}
        self.accessed = True
        self.modified = True
    def is_empty(self):
        "Returns True when there is no session_key and the session is empty"
        try:
            return not bool(self._session_key) and not self._session_cache
        except AttributeError:
            # _session_cache not set yet: nothing has been loaded or stored.
            return True
    def _get_new_session_key(self):
        "Returns session key that isn't being used."
        while True:
            session_key = get_random_string(32, VALID_KEY_CHARS)
            if not self.exists(session_key):
                break
        return session_key
    def _get_or_create_session_key(self):
        if self._session_key is None:
            self._session_key = self._get_new_session_key()
        return self._session_key
    def _validate_session_key(self, key):
        """
        Key must be truthy and at least 8 characters long. 8 characters is an
        arbitrary lower bound for some minimal key security.
        """
        return key and len(key) >= 8
    def _get_session_key(self):
        return self.__session_key
    def _set_session_key(self, value):
        """
        Validate session key on assignment. Invalid values will set to None.
        """
        if self._validate_session_key(value):
            self.__session_key = value
        else:
            self.__session_key = None
    # session_key is read-only for external users; _session_key also allows
    # (validated) assignment for internal use.
    session_key = property(_get_session_key)
    _session_key = property(_get_session_key, _set_session_key)
    def _get_session(self, no_load=False):
        """
        Lazily loads session from storage (unless "no_load" is True, when only
        an empty dict is stored) and stores it in the current instance.
        """
        self.accessed = True
        try:
            return self._session_cache
        except AttributeError:
            if self.session_key is None or no_load:
                self._session_cache = {}
            else:
                self._session_cache = self.load()
        return self._session_cache
    _session = property(_get_session)
    def get_expiry_age(self, **kwargs):
        """Get the number of seconds until the session expires.
        Optionally, this function accepts `modification` and `expiry` keyword
        arguments specifying the modification and expiry of the session.
        """
        try:
            modification = kwargs['modification']
        except KeyError:
            modification = timezone.now()
        # Make the difference between "expiry=None passed in kwargs" and
        # "expiry not passed in kwargs", in order to guarantee not to trigger
        # self.load() when expiry is provided.
        try:
            expiry = kwargs['expiry']
        except KeyError:
            expiry = self.get('_session_expiry')
        if not expiry:   # Checks both None and 0 cases
            return settings.SESSION_COOKIE_AGE
        if not isinstance(expiry, datetime):
            return expiry
        delta = expiry - modification
        return delta.days * 86400 + delta.seconds
    def get_expiry_date(self, **kwargs):
        """Get session the expiry date (as a datetime object).
        Optionally, this function accepts `modification` and `expiry` keyword
        arguments specifying the modification and expiry of the session.
        """
        try:
            modification = kwargs['modification']
        except KeyError:
            modification = timezone.now()
        # Same comment as in get_expiry_age
        try:
            expiry = kwargs['expiry']
        except KeyError:
            expiry = self.get('_session_expiry')
        if isinstance(expiry, datetime):
            return expiry
        if not expiry:   # Checks both None and 0 cases
            expiry = settings.SESSION_COOKIE_AGE
        return modification + timedelta(seconds=expiry)
    def set_expiry(self, value):
        """
        Sets a custom expiration for the session. ``value`` can be an integer,
        a Python ``datetime`` or ``timedelta`` object or ``None``.
        If ``value`` is an integer, the session will expire after that many
        seconds of inactivity. If set to ``0`` then the session will expire on
        browser close.
        If ``value`` is a ``datetime`` or ``timedelta`` object, the session
        will expire at that specific future time.
        If ``value`` is ``None``, the session uses the global session expiry
        policy.
        """
        if value is None:
            # Remove any custom expiration for this session.
            try:
                del self['_session_expiry']
            except KeyError:
                pass
            return
        if isinstance(value, timedelta):
            value = timezone.now() + value
        self['_session_expiry'] = value
    def get_expire_at_browser_close(self):
        """
        Returns ``True`` if the session is set to expire when the browser
        closes, and ``False`` if there's an expiry date. Use
        ``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry
        date/age, if there is one.
        """
        if self.get('_session_expiry') is None:
            return settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
        return self.get('_session_expiry') == 0
    def flush(self):
        """
        Removes the current session data from the database and regenerates the
        key.
        """
        self.clear()
        self.delete()
        self._session_key = None
    def cycle_key(self):
        """
        Creates a new session key, whilst retaining the current session data.
        """
        data = self._session_cache
        key = self.session_key
        self.create()
        self._session_cache = data
        self.delete(key)
    # Methods that child classes must implement.
    def exists(self, session_key):
        """
        Returns True if the given session_key already exists.
        """
        raise NotImplementedError('subclasses of SessionBase must provide an exists() method')
    def create(self):
        """
        Creates a new session instance. Guaranteed to create a new object with
        a unique key and will have saved the result once (with empty data)
        before the method returns.
        """
        raise NotImplementedError('subclasses of SessionBase must provide a create() method')
    def save(self, must_create=False):
        """
        Saves the session data. If 'must_create' is True, a new session object
        is created (otherwise a CreateError exception is raised). Otherwise,
        save() can update an existing object with the same key.
        """
        raise NotImplementedError('subclasses of SessionBase must provide a save() method')
    def delete(self, session_key=None):
        """
        Deletes the session data under this key. If the key is None, the
        current session key value is used.
        """
        raise NotImplementedError('subclasses of SessionBase must provide a delete() method')
    def load(self):
        """
        Loads the session data and returns a dictionary.
        """
        raise NotImplementedError('subclasses of SessionBase must provide a load() method')
    @classmethod
    def clear_expired(cls):
        """
        Remove expired sessions from the session store.
        If this operation isn't possible on a given backend, it should raise
        NotImplementedError. If it isn't necessary, because the backend has
        a built-in expiration mechanism, it should be a no-op.
        """
        raise NotImplementedError('This backend does not support clear_expired().')
|
mediaessenz/TYPO3-metaseo | refs/heads/develop | Documentation/_make/conf.py | 2 | # -*- coding: utf-8 -*-
#
# MetaSEO Import from tq_seo documentation build configuration file, created by
# TYPO3 extension sphinx on Tue, 01 Apr 2014 17:53:40 +0200.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- PHP highlighting configuration --------------------------------------------
from sphinx.highlighting import lexers
if lexers:
    # NOTE(review): this guards on the lexer mapping being non-empty rather
    # than on the import having succeeded -- presumably always truthy in
    # practice, but confirm against the Sphinx version in use.
    from pygments.lexers.web import PhpLexer
    lexers['php'] = PhpLexer(startinline=True)
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['../_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'Index'
# General information about the project.
project = u'MetaSEO Import from tq_seo'
copyright = u'2014, Markus Blaschke'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%Y-%m-%d %H:%M'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_make']
# exclude_trees is the pre-Sphinx-1.0 spelling of the same setting; both are
# set here so old and new Sphinx versions skip the _make directory.
exclude_trees = ['_make']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['../_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'metaseo_tqseo_importdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# typo3.sty provides the TYPO3 documentation look for PDF output.
'preamble': '\\usepackage{typo3}'
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('Index', 'metaseo_tqseo_import.tex', u'MetaSEO Import from tq_seo',
   u'Markus Blaschke', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for rst2pdf output ------------------------------------------------
# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author, options).
#
# If there is more than one author, separate them with \\.
# For example: r'Guido van Rossum\\Fred L. Drake, Jr., editor'
#
# The options element is a dictionary that lets you override
# this config per-document.
# For example,
# ('index', u'MyProject', u'My Project', u'Author Name',
# dict(pdf_compressed = True))
# would mean that specific document would be compressed
# regardless of the global pdf_compressed setting.
# rst2pdf: one PDF document, built from the master doc 'Index'.
pdf_documents = [
    ('Index', 'metaseo_tqseo_import', u'MetaSEO Import from tq_seo',
     u'Markus Blaschke'),
]
# A comma-separated list of custom stylesheets. Example:
pdf_stylesheets = ['sphinx','kerning','a4']
# A list of folders to search for stylesheets. Example:
pdf_style_path = ['.', '_styles']
# Create a compressed PDF
# Use True/False or 1/0
# Example: compressed=True
#pdf_compressed = False
# A colon-separated list of folders to search for fonts. Example:
# pdf_font_path = ['/usr/share/fonts', '/usr/share/texmf-dist/fonts/']
# Language to be used for hyphenation support
#pdf_language = "en_US"
# Mode for literal blocks wider than the frame. Can be
# overflow, shrink or truncate
#pdf_fit_mode = "shrink"
# Section level that forces a break page.
# For example: 1 means top-level sections start in a new page
# 0 means disabled
#pdf_break_level = 0
# When a section starts in a new page, force it to be 'even', 'odd',
# or just use 'any'
#pdf_breakside = 'any'
# Insert footnotes where they are defined instead of
# at the end.
#pdf_inline_footnotes = True
# verbosity level. 0 1 or 2
#pdf_verbosity = 0
# If false, no index is generated.
#pdf_use_index = True
# If false, no modindex is generated.
#pdf_use_modindex = True
# If false, no coverpage is generated.
#pdf_use_coverpage = True
# Name of the cover page template to use
#pdf_cover_template = 'sphinxcover.tmpl'
# Documents to append as an appendix to all manuals.
#pdf_appendices = []
# Enable experimental feature to split table cells. Use it
# if you get "DelayedTable too big" errors
#pdf_splittables = False
# Set the default DPI for images
#pdf_default_dpi = 72
# Enable rst2pdf extension modules (default is only vectorpdf)
# you need vectorpdf if you want to use sphinx's graphviz support
#pdf_extensions = ['vectorpdf']
# Page template name for "regular" pages
#pdf_page_template = 'cutePage'
# Show Table Of Contents at the beginning?
#pdf_use_toc = True
# How many levels deep should the table of contents be?
# Effectively unlimited TOC depth for the PDF build.
pdf_toc_depth = 9999
# Add section number to section references
pdf_use_numbered_links = False
# Background images fitting mode
pdf_fit_background_mode = 'scale'
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('Index', 'metaseo_tqseo_import', u'MetaSEO Import from tq_seo',
     [u'Markus Blaschke'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# Texinfo build: single document with dir-menu entry metadata.
texinfo_documents = [
    ('Index', 'metaseo_tqseo_import', u'MetaSEO Import from tq_seo',
     u'Markus Blaschke', 'MetaSEO Import from tq_seo', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
#=================================================
#
# TYPO3 codeblock BEGIN:
#
# Insert this codeblock at the end of your Sphinx
# builder configuration file 'conf.py'.
# This may enable TYPO3 specific features like
# TYPO3 themes. It makes Yaml settings files work.
#
#-------------------------------------------------
# NOTE(review): this is the standard TYPO3 conf.py footer snippet. It relies
# on names defined earlier in conf.py (`os`, `html_theme_path`) that are not
# visible in this chunk -- confirm they exist before editing. The bare
# `except:` clauses are deliberate best-effort fallbacks; do not tighten
# without testing all deployment variants.
if 1 and "TYPO3 specific":
    # Ensure the shared settings dict exists even if an earlier section of
    # conf.py (or a second run of this snippet) already created it.
    try:
        t3DocTeam
    except NameError:
        t3DocTeam = {}
    # Prefer the TYPO3 Sphinx theme when the t3sphinx package is installed;
    # otherwise fall back to Sphinx's builtin 'default' theme.
    try:
        import t3sphinx
        html_theme_path.insert(0, t3sphinx.themes_dir)
        html_theme = 'typo3sphinx'
    except:
        html_theme = 'default'
    # Locate this conf.py on disk. __file__ can be missing when conf.py is
    # exec()'d by Sphinx, hence the inspect-based fallback.
    t3DocTeam['conf_py_file'] = None
    try:
        t3DocTeam['conf_py_file'] = __file__
    except:
        import inspect
        t3DocTeam['conf_py_file'] = inspect.getfile(
            inspect.currentframe())
    t3DocTeam['conf_py_package_dir'] = os.path.abspath(os.path.dirname(
        t3DocTeam['conf_py_file']))
    # Layout assumed by the TYPO3 tooling: the master doc lives one level
    # up; build logs go into a non-versioned folder next to conf.py.
    t3DocTeam['relpath_to_master_doc'] = '..'
    t3DocTeam['relpath_to_logdir'] = '_not_versioned'
    t3DocTeam['path_to_logdir'] = os.path.join(
        t3DocTeam['conf_py_package_dir'],
        t3DocTeam['relpath_to_logdir'])
    # Per-project YAML settings file (Settings.yml next to the master doc).
    t3DocTeam['pathToYamlSettings'] = os.path.join(
        t3DocTeam['conf_py_package_dir'],
        t3DocTeam['relpath_to_master_doc'], 'Settings.yml')
    # Global YAML settings: taken from t3sphinx when available, otherwise
    # a GlobalSettings.yml next to this conf.py.
    try:
        t3DocTeam['pathToGlobalYamlSettings'] = \
            t3sphinx.pathToGlobalYamlSettings
    except:
        t3DocTeam['pathToGlobalYamlSettings'] = None
    if not t3DocTeam['pathToGlobalYamlSettings']:
        t3DocTeam['pathToGlobalYamlSettings'] = os.path.join(
            t3DocTeam['conf_py_package_dir'], 'GlobalSettings.yml')
    # Find a processYamlSettings() implementation -- first inside t3sphinx,
    # then in a standalone 'yamlsettings' module -- and, if found, let it
    # merge the YAML settings into this module's globals().
    try:
        __function = t3sphinx.yamlsettings.processYamlSettings
    except:
        __function = None
    if not __function:
        try:
            import yamlsettings
            __function = yamlsettings.processYamlSettings
        except:
            __function = None
    if __function:
        __function(globals(), t3DocTeam)
#-------------------------------------------------
#
# TYPO3 codeblock END.
#
#=================================================
|
DrPaulBrewer/rtlsdr-automated-wxsat-capture | refs/heads/master | noaacapture.py | 1 | import time
import pypredict
import subprocess
# Satellites to track and their APT downlink frequencies in Hz; the two
# lists are parallel -- freqs[i] is the frequency for satellites[i].
satellites = ['NOAA-18','NOAA-19','NOAA-15']
freqs = [137912500, 137100000, 137620000]
# rtl_fm capture sample rate (Hz, kept as a string for the command line).
sample = '44100'
# Sample rate of the transcoded WAV handed to the APT decoder.
wavrate='11025'
def runForDuration(cmdline, duration):
    """Run `cmdline` as a child process for `duration` seconds, then
    terminate it.

    Used for programs that stream until killed (rtl_fm, rtl_power).
    Launch failures (e.g. executable not found) are reported to stdout
    rather than raised, so the caller's main loop keeps running.
    Note: this is a Python 2 script (print statements).
    """
    try:
        child = subprocess.Popen(cmdline)
        time.sleep(duration)
        child.terminate()
    except OSError as e:
        print "OS Error during command: "+" ".join(cmdline)
        print "OS Error: "+e.strerror
def recordFM(freq, fname, duration):
    """Capture `duration` seconds of narrow-band FM at `freq` Hz with
    rtl_fm, writing raw samples to `fname`.raw.

    Still experimenting with options - unsure as to best settings.
    """
    tuner_options = [
        '-f', str(freq),
        '-s', sample,
        '-g', '43',
        '-F', '9',
        '-A', 'fast',
        '-E', 'dc',
    ]
    command = ['rtl_fm'] + tuner_options + [fname + '.raw']
    runForDuration(command, duration)
def transcode(fname):
    """Convert the raw rtl_fm capture `fname`.raw into `fname`.wav at the
    decoder's sample rate, using sox."""
    input_format = ['-t', 'raw', '-r', sample, '-es', '-b', '16', '-c', '1', '-V1']
    command = ['sox'] + input_format
    command += [fname + '.raw', fname + '.wav', 'rate', wavrate]
    subprocess.call(command)
def decode(fname):
    """Run the atpdec APT decoder on `fname`.wav to produce image output."""
    decoder_binary = '/root/atpdec-1.7/atpdec'
    subprocess.call([decoder_binary, fname + '.wav'])
def recordWAV(freq,fname,duration):
    """Record `duration` seconds at `freq` Hz and transcode the raw
    capture to `fname`.wav (see recordFM and transcode)."""
    recordFM(freq,fname,duration)
    transcode(fname)
def spectrum(fname, duration):
    """Log power measurements across 137-138 MHz to `fname`.csv with
    rtl_power for `duration` seconds (1 kHz bins, 1-minute integration)."""
    scan = ['rtl_power',
            '-f', '137000000:138000000:1000',
            '-i', '1m',
            '-g', '40',
            fname + '.csv']
    runForDuration(scan, duration)
def findNextPass():
    """Predict the next pass for every tracked satellite and return the
    soonest one as (name, frequency, prediction), where prediction is the
    (aos, los) tuple from pypredict.aoslos()."""
    passes = [pypredict.aoslos(sat) for sat in satellites]
    # Pick the index with the earliest acquisition-of-signal time; on a
    # tie, min() keeps the first index, matching list.index(min(...)).
    soonest = min(range(len(passes)), key=lambda i: passes[i][0])
    return (satellites[soonest], freqs[soonest], passes[soonest])
# Main loop: sleep until the next predicted pass, record and transcode it,
# then decode the APT image. Runs forever. Python 2 print statements.
while True:
    (satName, freq, (aosTime, losTime)) = findNextPass()
    now = time.time()
    towait = aosTime-now
    if towait>0:
        print "waiting "+str(towait)+" seconds for "+satName
        time.sleep(towait)
    # dir= sat name and filename = start time
    fname='./'+satName+'/'+str(aosTime)
    print "beginning pass "+fname+" predicted end "+str(losTime)
    recordWAV(freq,fname,losTime-aosTime)
    decode(fname) # make picture
    # spectrum(fname,losTime-aosTime)
    print "finished pass "+fname+" at "+str(time.time())
    # Pause so the pass just handled drops out of the prediction window
    # before asking for the next one.
    time.sleep(60.0)
|
rruebner/odoo | refs/heads/master | addons/hr_timesheet_sheet/wizard/__init__.py | 443 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet_current
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.