repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
UManPychron/pychron
refs/heads/develop
alembic_dvc/versions/69484796482_sample_prep_updates.py
2
"""sample prep updates Revision ID: 69484796482 Revises: 35388de3b2c1 Create Date: 2018-02-26 15:56:36.885264 """ # revision identifiers, used by Alembic. import datetime from sqlalchemy import func revision = '69484796482' down_revision = '508864cbfc71' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('SampleTbl', sa.Column('storage_location', sa.String(140))) op.add_column('SampleTbl', sa.Column('lithology', sa.String(140))) op.add_column('SampleTbl', sa.Column('location', sa.String(140))) op.add_column('SampleTbl', sa.Column('approximate_age', sa.Float)) op.add_column('SampleTbl', sa.Column('elevation', sa.Float)) op.add_column('SampleTbl', sa.Column('create_date', sa.DateTime)) op.add_column('SampleTbl', sa.Column('update_date', sa.DateTime)) def downgrade(): op.drop_column('SampleTbl', 'storage_location') op.drop_column('SampleTbl', 'lithology') op.drop_column('SampleTbl', 'approximate_age') op.drop_column('SampleTbl', 'elevation') op.drop_column('SampleTbl', 'create_date') op.drop_column('SampleTbl', 'location') op.drop_column('SampleTbl', 'update_date')
JioCloud/cinder
refs/heads/master
cinder/tests/unit/api/extensions/__init__.py
12133432
vesmbo/alexsash
refs/heads/master
app/onesite/migrations/__init__.py
12133432
pzfreo/ox-clo
refs/heads/master
code/wind-and-sfpd/python3-wind-sfpd.py
1
from pyspark import SparkContext, SparkConf from pyspark.sql import SQLContext, Row from dateutil.parser import parse from datetime import datetime from numpy import array from scipy import spatial from pyspark.mllib.linalg import Vectors from pyspark.mllib.stat import Statistics #spark sql setup conf = SparkConf().setAppName("wind-sfpd") #comment out this next line if running in pyspark / jupyter sc = SparkContext(conf=conf) sqlc = SQLContext(sc) # a useful function to parse and clean date/time def date_and_hour(s): dt = parse(s.replace('?',' ')) hour = dt.hour return dt.strftime("%Y-%m-%d")+":" +str(hour) # start by reading the wind and temperature date df = sqlc.read.format('csv').options(header='true').load('/home/oxclo/datafiles/wind2014/*.csv') tidied = df.rdd.map(lambda r: Row(station = r.Station_ID, datehour =date_and_hour(r.Interval_End_Time), temp=r.Ambient_Temperature_Deg_C, wind=r.Wind_Velocity_Mtr_Sec)).toDF() nonulls = tidied.filter(tidied.temp.isNotNull()).filter(tidied.wind.isNotNull()) numbered = nonulls.rdd.map(lambda row: Row(station=row.station, datehour=row.datehour, wind=float(row.wind), temp=float(row.temp))).toDF() averages = numbered.groupBy(['station','datehour']).agg({'temp':'avg', 'wind':'avg'}) cleanedaverages = averages.rdd.map(lambda row: Row(station=row.station, datehour=row.datehour, temp=row['avg(temp)'], wind=row['avg(wind)'])).toDF() print("wind and temp is now available in cleanedaverages") cleanedaverages.show(10) # now read the incident data and clean idf = sqlc.read.format('csv').options(header='true').load('/home/oxclo/datafiles/incidents/sfpd.csv') withyx2014 = idf.filter(idf.X.isNotNull()).filter(idf.Y.isNotNull()).filter(idf.Date.contains('2014')) tidy = withyx2014.rdd.map(lambda row: Row(datehour = date_and_hour(row.Date+" "+row.Time),yx=[float(row.Y),float(row.X)])).toDF() # need to associate incidents with nearest weather station def locate(l,index,locations): distance,i = index.query(l) return locations[i] def 
map_yx_to_station(yx): return locate(yx, spatial.KDTree(array( [[37.7816834,-122.3887657], [37.7469112,-122.4821759], [37.7411022,-120.804151], [37.4834543,-122.3187302], [37.7576436,-122.3916382], [37.7970013,-122.4140409], [37.748496,-122.4567461], [37.7288155,-122.4210133], [37.5839487,-121.9499339], [37.7157156,-122.4145311], [37.7329613,-122.5051491], [37.7575891,-122.3923824], [37.7521169,-122.4497687]])), ["SF18", "SF04", "SF15", "SF17", "SF36", "SF37",\ "SF07", "SF11", "SF12", "SF14", "SF16", "SF19", "SF34"] ) withstations = tidy.rdd.map(lambda row: Row(station=map_yx_to_station(row.yx), datehour=row.datehour)).toDF() withstations.registerTempTable('stationincidents') incidentcount = sqlc.sql("select station, datehour, count(1) as incidents from stationincidents group by station, datehour") print("we now have incidents by station/hour in incidentcount") incidentcount.show(10) # now join the two tables joined = cleanedaverages.join(incidentcount, ['station', 'datehour'], 'outer') # if incident data doesn't exist for that station/datehour, then it is 0 zeroed = joined.rdd.map(lambda row: Row(station = row.station, datehour=row.datehour, temp = row.temp, wind = row.wind, incidents = row.incidents if row.incidents else 0)).toDF() # if temp/wind data doesn't exist for that station/datehour, then we can't use that row final = zeroed.filter(zeroed.temp.isNotNull()).filter(zeroed.wind.isNotNull()).filter(zeroed.temp!=0) # finally apply correlation test vecs = final.rdd.map(lambda row: Vectors.dense([row.temp,row.wind,row.incidents])) print(Statistics.corr(vecs))
rajarammallya/melange
refs/heads/master
melange/tests/unit/test_ipam_views.py
1
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from melange import tests from melange.ipam import models from melange.ipam import views from melange.tests.factories import models as factory_models class TestIpConfigurationView(tests.BaseTest): def test_data_returns_block_ip_info(self): block1 = factory_models.IpBlockFactory() block2 = factory_models.IpBlockFactory() interface = factory_models.InterfaceFactory(vif_id_on_device="123") ip1 = factory_models.IpAddressFactory(ip_block_id=block1.id, interface_id=interface.id) ip2 = factory_models.IpAddressFactory(ip_block_id=block2.id, interface_id=interface.id) expected_ip1_config = _ip_data(ip1, block1) expected_ip2_config = _ip_data(ip2, block2) ip_configuration_view = views.IpConfigurationView(ip1, ip2) self.assertEqual(expected_ip1_config, ip_configuration_view.data()[0]) self.assertEqual(expected_ip2_config, ip_configuration_view.data()[1]) def test_data_returns_deallocated_ip_info(self): block = factory_models.IpBlockFactory() interface = factory_models.InterfaceFactory(vif_id_on_device="123") ip = factory_models.IpAddressFactory(ip_block_id=block.id, interface_id=interface.id) ip.deallocate() deallocated_ip = models.IpAddress.find(ip.id) expected_ip_config = _ip_data(deallocated_ip, block) ip_configuration_view = views.IpConfigurationView(deallocated_ip) self.assertEqual(expected_ip_config, ip_configuration_view.data()[0]) def 
test_data_returns_route_info(self): block = factory_models.IpBlockFactory() interface = factory_models.InterfaceFactory(vif_id_on_device="123") route1 = factory_models.IpRouteFactory(source_block_id=block.id) route2 = factory_models.IpRouteFactory(source_block_id=block.id) ip = factory_models.IpAddressFactory(ip_block_id=block.id, interface_id=interface.id) expected_ip_config_routes = [_route_data(route1), _route_data(route2)] ip_configuration_view = views.IpConfigurationView(ip).data()[0] ip1_config_routes = ip_configuration_view['ip_block'].pop('ip_routes') self.assertItemsEqual(expected_ip_config_routes, ip1_config_routes) def _ip_data(ip, block): return { 'id': ip.id, 'interface_id': ip.virtual_interface_id, 'address': ip.address, 'version': ip.version, 'ip_block': { 'id': block.id, 'cidr': block.cidr, 'network_id': block.network_id, 'broadcast': block.broadcast, 'gateway': block.gateway, 'netmask': block.netmask, 'dns1': block.dns1, 'dns2': block.dns2, 'ip_routes': [], }, } def _route_data(route): return { 'id': route.id, 'destination': route.destination, 'gateway': route.gateway, 'netmask': route.netmask, } class TestInterfaceConfigurationView(tests.BaseTest): def test_data_returns_mac_address(self): interface = factory_models.InterfaceFactory() mac = models.MacAddress.create(interface_id=interface.id, address="ab-bc-cd-12-23-34") data = views.InterfaceConfigurationView(interface).data() self.assertEqual(data['mac_address'], mac.unix_format) self.assertEqual(data['id'], interface.virtual_interface_id) def test_data_returns_ip_address_configuration_information(self): interface = factory_models.InterfaceFactory() ip1 = factory_models.IpAddressFactory(interface_id=interface.id) ip2 = factory_models.IpAddressFactory(interface_id=interface.id) data = views.InterfaceConfigurationView(interface).data() self.assertEqual(len(data['ip_addresses']), 2) self.assertItemsEqual(data['ip_addresses'], views.IpConfigurationView(ip1, ip2).data())
ychen820/microblog
refs/heads/master
y/google-cloud-sdk/platform/gsutil/third_party/boto/boto/gs/lifecycle.py
157
# Copyright 2013 Google Inc. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from boto.exception import InvalidLifecycleConfigError # Relevant tags for the lifecycle configuration XML document. LIFECYCLE_CONFIG = 'LifecycleConfiguration' RULE = 'Rule' ACTION = 'Action' DELETE = 'Delete' CONDITION = 'Condition' AGE = 'Age' CREATED_BEFORE = 'CreatedBefore' NUM_NEWER_VERSIONS = 'NumberOfNewerVersions' IS_LIVE = 'IsLive' # List of all action elements. LEGAL_ACTIONS = [DELETE] # List of all action parameter elements. LEGAL_ACTION_PARAMS = [] # List of all condition elements. LEGAL_CONDITIONS = [AGE, CREATED_BEFORE, NUM_NEWER_VERSIONS, IS_LIVE] # Dictionary mapping actions to supported action parameters for each action. LEGAL_ACTION_ACTION_PARAMS = { DELETE: [], } class Rule(object): """ A lifecycle rule for a bucket. :ivar action: Action to be taken. :ivar action_params: A dictionary of action specific parameters. Each item in the dictionary represents the name and value of an action parameter. 
:ivar conditions: A dictionary of conditions that specify when the action should be taken. Each item in the dictionary represents the name and value of a condition. """ def __init__(self, action=None, action_params=None, conditions=None): self.action = action self.action_params = action_params or {} self.conditions = conditions or {} # Name of the current enclosing tag (used to validate the schema). self.current_tag = RULE def validateStartTag(self, tag, parent): """Verify parent of the start tag.""" if self.current_tag != parent: raise InvalidLifecycleConfigError( 'Invalid tag %s found inside %s tag' % (tag, self.current_tag)) def validateEndTag(self, tag): """Verify end tag against the start tag.""" if tag != self.current_tag: raise InvalidLifecycleConfigError( 'Mismatched start and end tags (%s/%s)' % (self.current_tag, tag)) def startElement(self, name, attrs, connection): if name == ACTION: self.validateStartTag(name, RULE) elif name in LEGAL_ACTIONS: self.validateStartTag(name, ACTION) # Verify there is only one action tag in the rule. if self.action is not None: raise InvalidLifecycleConfigError( 'Only one action tag is allowed in each rule') self.action = name elif name in LEGAL_ACTION_PARAMS: # Make sure this tag is found in an action tag. if self.current_tag not in LEGAL_ACTIONS: raise InvalidLifecycleConfigError( 'Tag %s found outside of action' % name) # Make sure this tag is allowed for the current action tag. if name not in LEGAL_ACTION_ACTION_PARAMS[self.action]: raise InvalidLifecycleConfigError( 'Tag %s not allowed in action %s' % (name, self.action)) elif name == CONDITION: self.validateStartTag(name, RULE) elif name in LEGAL_CONDITIONS: self.validateStartTag(name, CONDITION) # Verify there is no duplicate conditions. 
if name in self.conditions: raise InvalidLifecycleConfigError( 'Found duplicate conditions %s' % name) else: raise InvalidLifecycleConfigError('Unsupported tag ' + name) self.current_tag = name def endElement(self, name, value, connection): self.validateEndTag(name) if name == RULE: # We have to validate the rule after it is fully populated because # the action and condition elements could be in any order. self.validate() elif name == ACTION: self.current_tag = RULE elif name in LEGAL_ACTIONS: self.current_tag = ACTION elif name in LEGAL_ACTION_PARAMS: self.current_tag = self.action # Add the action parameter name and value to the dictionary. self.action_params[name] = value.strip() elif name == CONDITION: self.current_tag = RULE elif name in LEGAL_CONDITIONS: self.current_tag = CONDITION # Add the condition name and value to the dictionary. self.conditions[name] = value.strip() else: raise InvalidLifecycleConfigError('Unsupported end tag ' + name) def validate(self): """Validate the rule.""" if not self.action: raise InvalidLifecycleConfigError( 'No action was specified in the rule') if not self.conditions: raise InvalidLifecycleConfigError( 'No condition was specified for action %s' % self.action) def to_xml(self): """Convert the rule into XML string representation.""" s = '<' + RULE + '>' s += '<' + ACTION + '>' if self.action_params: s += '<' + self.action + '>' for param in LEGAL_ACTION_PARAMS: if param in self.action_params: s += ('<' + param + '>' + self.action_params[param] + '</' + param + '>') s += '</' + self.action + '>' else: s += '<' + self.action + '/>' s += '</' + ACTION + '>' s += '<' + CONDITION + '>' for condition in LEGAL_CONDITIONS: if condition in self.conditions: s += ('<' + condition + '>' + self.conditions[condition] + '</' + condition + '>') s += '</' + CONDITION + '>' s += '</' + RULE + '>' return s class LifecycleConfig(list): """ A container of rules associated with a lifecycle configuration. 
""" def __init__(self): # Track if root tag has been seen. self.has_root_tag = False def startElement(self, name, attrs, connection): if name == LIFECYCLE_CONFIG: if self.has_root_tag: raise InvalidLifecycleConfigError( 'Only one root tag is allowed in the XML') self.has_root_tag = True elif name == RULE: if not self.has_root_tag: raise InvalidLifecycleConfigError('Invalid root tag ' + name) rule = Rule() self.append(rule) return rule else: raise InvalidLifecycleConfigError('Unsupported tag ' + name) def endElement(self, name, value, connection): if name == LIFECYCLE_CONFIG: pass else: raise InvalidLifecycleConfigError('Unsupported end tag ' + name) def to_xml(self): """Convert LifecycleConfig object into XML string representation.""" s = '<?xml version="1.0" encoding="UTF-8"?>' s += '<' + LIFECYCLE_CONFIG + '>' for rule in self: s += rule.to_xml() s += '</' + LIFECYCLE_CONFIG + '>' return s def add_rule(self, action, action_params, conditions): """ Add a rule to this Lifecycle configuration. This only adds the rule to the local copy. To install the new rule(s) on the bucket, you need to pass this Lifecycle config object to the configure_lifecycle method of the Bucket object. :type action: str :param action: Action to be taken. :type action_params: dict :param action_params: A dictionary of action specific parameters. Each item in the dictionary represents the name and value of an action parameter. :type conditions: dict :param conditions: A dictionary of conditions that specify when the action should be taken. Each item in the dictionary represents the name and value of a condition. """ rule = Rule(action, action_params, conditions) self.append(rule)
2014c2g2/2014c2
refs/heads/master
exts/sphinx.search.py
38
# -*- coding: utf-8 -*- """ sphinx.search ~~~~~~~~~~~~~ Create a search index for offline search. :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. 採用結巴套件 for Python3 進行中文繁體內容的分詞 https://github.com/fxsjy/jieba """ import re import cPickle as pickle from docutils.nodes import comment, Text, NodeVisitor, SkipNode from sphinx.util import jsdump, rpartition try: # http://bitbucket.org/methane/porterstemmer/ from porterstemmer import Stemmer as CStemmer CSTEMMER = True except ImportError: from sphinx.util.stemmer import PorterStemmer CSTEMMER = False # 採用結巴分詞套件 import jieba word_re = re.compile(r'\w+(?u)') stopwords = set(""" a and are as at be but by for if in into is it near no not of on or such that the their then there these they this to was will with """.split()) class _JavaScriptIndex(object): """ The search index as javascript file that calls a function on the documentation search object to register the index. """ PREFIX = 'Search.setIndex(' SUFFIX = ')' def dumps(self, data): return self.PREFIX + jsdump.dumps(data) + self.SUFFIX def loads(self, s): data = s[len(self.PREFIX):-len(self.SUFFIX)] if not data or not s.startswith(self.PREFIX) or not \ s.endswith(self.SUFFIX): raise ValueError('invalid data') return jsdump.loads(data) def dump(self, data, f): f.write(self.dumps(data)) def load(self, f): return self.loads(f.read()) js_index = _JavaScriptIndex() if CSTEMMER: class Stemmer(CStemmer): def stem(self, word): return self(word.lower()) else: class Stemmer(PorterStemmer): """ All those porter stemmer implementations look hideous. make at least the stem method nicer. """ def stem(self, word): word = word.lower() return PorterStemmer.stem(self, word, 0, len(word) - 1) class WordCollector(NodeVisitor): """ A special visitor that collects words for the `IndexBuilder`. 
""" def __init__(self, document): NodeVisitor.__init__(self, document) self.found_words = [] def dispatch_visit(self, node): if node.__class__ is comment: raise SkipNode if node.__class__ is Text: # 採用結巴套件 words = jieba.cut(node.astext().encode("utf8"),cut_all=True) words.reverse() self.found_words.extend(words) #testfile.write(node.astext().encode("utf8")+"\n") #testfile.write(u",".join(words).encode("utf8") + "\n") class IndexBuilder(object): """ Helper class that creates a searchindex based on the doctrees passed to the `feed` method. """ formats = { 'jsdump': jsdump, 'pickle': pickle } def __init__(self, env): self.env = env self._stemmer = Stemmer() # filename -> title self._titles = {} # stemmed word -> set(filenames) self._mapping = {} # objtype -> index self._objtypes = {} # objtype index -> objname (localized) self._objnames = {} def load(self, stream, format): """Reconstruct from frozen data.""" if isinstance(format, basestring): format = self.formats[format] frozen = format.load(stream) # if an old index is present, we treat it as not existing. if not isinstance(frozen, dict): raise ValueError('old format') index2fn = frozen['filenames'] self._titles = dict(zip(index2fn, frozen['titles'])) self._mapping = {} for k, v in frozen['terms'].iteritems(): if isinstance(v, int): self._mapping[k] = set([index2fn[v]]) else: self._mapping[k] = set(index2fn[i] for i in v) # no need to load keywords/objtypes def dump(self, stream, format): """Dump the frozen index to a stream.""" if isinstance(format, basestring): format = self.formats[format] format.dump(self.freeze(), stream) def get_objects(self, fn2index): rv = {} otypes = self._objtypes onames = self._objnames for domainname, domain in self.env.domains.iteritems(): for fullname, dispname, type, docname, anchor, prio in \ domain.get_objects(): # XXX use dispname? 
if docname not in fn2index: continue if prio < 0: continue # XXX splitting at dot is kind of Python specific prefix, name = rpartition(fullname, '.') pdict = rv.setdefault(prefix, {}) try: i = otypes[domainname, type] except KeyError: i = len(otypes) otypes[domainname, type] = i otype = domain.object_types.get(type) if otype: # use unicode() to fire translation proxies onames[i] = unicode(domain.get_type_name(otype)) else: onames[i] = type pdict[name] = (fn2index[docname], i, prio) return rv def get_terms(self, fn2index): rv = {} for k, v in self._mapping.iteritems(): if len(v) == 1: fn, = v if fn in fn2index: rv[k] = fn2index[fn] else: rv[k] = [fn2index[fn] for fn in v if fn in fn2index] return rv def freeze(self): """Create a usable data structure for serializing.""" filenames = self._titles.keys() titles = self._titles.values() fn2index = dict((f, i) for (i, f) in enumerate(filenames)) terms = self.get_terms(fn2index) objects = self.get_objects(fn2index) # populates _objtypes objtypes = dict((v, k[0] + ':' + k[1]) for (k, v) in self._objtypes.iteritems()) objnames = self._objnames return dict(filenames=filenames, titles=titles, terms=terms, objects=objects, objtypes=objtypes, objnames=objnames) def prune(self, filenames): """Remove data for all filenames not in the list.""" new_titles = {} for filename in filenames: if filename in self._titles: new_titles[filename] = self._titles[filename] self._titles = new_titles for wordnames in self._mapping.itervalues(): wordnames.intersection_update(filenames) def feed(self, filename, title, doctree): """Feed a doctree to the index.""" self._titles[filename] = title visitor = WordCollector(doctree) doctree.walk(visitor) def add_term(word, stem=self._stemmer.stem): word = stem(word) if len(word) < 2 or word in stopwords or word.isdigit(): return self._mapping.setdefault(word, set()).add(filename) # 使用結巴套件 words = jieba.cut(title.encode("utf8"),cut_all=True) for word in words: add_term(word) for word in visitor.found_words: 
add_term(word) def load_indexer(self): def func(docnames): import os.path as path print "############### CHINESE INDEXER ###############" self.indexer = IndexBuilder(self.env) keep = set(self.env.all_docs) - set(docnames) try: f = open(path.join(self.outdir, self.searchindex_filename), 'rb') try: self.indexer.load(f, self.indexer_format) finally: f.close() except (IOError, OSError, ValueError): if keep: self.warn('search index couldn\'t be loaded, but not all ' 'documents will be built: the index will be ' 'incomplete.') # delete all entries for files that will be rebuilt self.indexer.prune(keep) return func def builder_inited(app): if app.builder.name == 'html': print "****************************" app.builder.load_indexer = load_indexer(app.builder) def setup(app): app.connect('builder-inited', builder_inited)
waheedahmed/edx-platform
refs/heads/master
lms/djangoapps/commerce/urls.py
53
""" Defines the URL routes for this app. """ from django.conf.urls import patterns, url from commerce import views urlpatterns = patterns( '', url(r'^checkout/cancel/$', views.checkout_cancel, name='checkout_cancel'), url(r'^checkout/error/$', views.checkout_error, name='checkout_error'), url(r'^checkout/receipt/$', views.checkout_receipt, name='checkout_receipt'), )
ai-ku/langvis
refs/heads/master
jython-2.1/Lib/test/test_time.py
7
from test_support import * import time print_test('time (test_time.py)', 1) time.altzone time.clock() t = 1.0e9 print_test('gmtime', 2) assert time.gmtime(t)[0] == 2001 #(2001, 9, 9, 1, 46, 40, 6, 252, 0) print_test('asctime') assert time.asctime((2001, 9, 9, 1, 46, 40, 6, 252, 0))[-4:] == '2001' #'Sun Sep 09 01:46:40 2001' print_test('ctime') print_test('localtime') assert time.ctime(t) == time.asctime(time.localtime(t)) time.daylight print_test('mktime') assert time.mktime(time.localtime(t)) == t print_test('time', 2) print_test('sleep', 2) t0 = time.time() time.sleep(1.2) t1 = time.time() assert abs(t1-t0 - 1.2) < 0.5 time.timezone time.tzname # expected errors try: time.asctime(0) except TypeError: pass try: time.mktime((999999, 999999, 999999, 999999, 999999, 999999, 999999, 999999, 999999)) except OverflowError: pass
nandoflorestan/babel
refs/heads/master
docs/conf.py
6
# -*- coding: utf-8 -*- # # Babel documentation build configuration file, created by # sphinx-quickstart on Wed Jul 3 17:53:01 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) sys.path.append(os.path.abspath('_themes')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Babel' copyright = u'2013, The Babel Team' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.0' # The full version, including alpha/beta/rc tags. release = '1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'babel' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['_themes'] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { 'index': ['sidebar-about.html', 'localtoc.html', 'sidebar-links.html', 'searchbox.html'], '**': ['sidebar-logo.html', 'localtoc.html', 'relations.html', 'searchbox.html'] } # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. 
htmlhelp_basename = 'Babeldoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Needed for unicode symbol conversion. 'fontpkg': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'Babel.tex', u'Babel Documentation', u'The Babel Team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. latex_logo = '_static/logo.png' # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index_', 'babel', u'Babel Documentation', [u'The Babel Team'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index_', 'Babel', u'Babel Documentation', u'The Babel Team', 'Babel', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. 
#texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' intersphinx_mapping = { 'http://docs.python.org/2': None, }
jackytu/newbrandx
refs/heads/rankx
tests/_site/shipping/methods.py
118
class Free(object): pass
mohamedadaly/trex
refs/heads/master
build/msvc/gen.py
1
from __future__ import print_function import sys import os vcppguid = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942" # C++ project siguid = "2150E333-8FDC-42A3-9474-1A3956D46DE8" # project group # to generate a new uuid: # # import uuid # uuid.uuid4().__str__().upper() def create_mex_project(name, uuid11, uuid09): return { "type": vcppguid, "name": name, "file11": "matlab\\mex\\" + name + "_vc11.vcxproj", "file09": "matlab\\mex\\" + name + "_vc09.vcproj", "uuid11": uuid11, "uuid09": uuid09, "files": [] } P_astra = { "type": vcppguid, "name": "astra_vc11", "file11": "astra_vc11.vcxproj", "file09": "astra_vc09.vcproj", "uuid11": "BE9F1326-527C-4284-AE2C-D1E25D539CEA", "uuid09": "12926444-6723-46A8-B388-12E65E0577FA" } P0 = create_mex_project("astra_mex", "3FDA35E0-0D54-4663-A3E6-5ABA96F32221", "3FDA35E0-0D54-4663-A3E6-5ABA96F32221") P1 = create_mex_project("astra_mex_algorithm", "056BF7A9-294D-487C-8CC3-BE629077CA94", "056BF7A9-294D-487C-8CC3-BE629077CA94") P2 = create_mex_project("astra_mex_data2d", "E4092269-B19C-46F7-A84E-4F146CC70E44", "E4092269-B19C-46F7-A84E-4F146CC70E44") P3 = create_mex_project("astra_mex_data3d", "0BEC029B-0929-4BF9-BD8B-9C9806A52065", "0BEC029B-0929-4BF9-BD8B-9C9806A52065") P4 = create_mex_project("astra_mex_matrix", "9D041710-2119-4230-BCF2-5FBE753FDE49", "9D041710-2119-4230-BCF2-5FBE753FDE49") P5 = create_mex_project("astra_mex_projector", "4DD6056F-8EEE-4C9A-B2A9-923F01A32E97", "4DD6056F-8EEE-4C9A-B2A9-923F01A32E97") P6 = create_mex_project("astra_mex_projector3d", "F94CCD79-AA11-42DF-AC8A-6C9D2238A883", "F94CCD79-AA11-42DF-AC8A-6C9D2238A883") P7 = create_mex_project("astra_mex_log", "03B833F5-4FD6-4FBE-AAF4-E3305CD56D2E", "CA2840B3-DA68-41B5-AC57-F5DFD20ED8F8") P8 = create_mex_project("astra_mex_direct", "0F68F4E2-BE1B-4A9A-B101-AECF4C069CC7", "85FE09A6-FA49-4314-A2B1-59D77C7442A8") F_astra_mex = { "type": siguid, "name": "astra_mex", "file11": "astra_mex", "file09": "astra_mex", "uuid11": "5E99A109-374E-4102-BE9B-99BA1FA8AA30", "uuid09": 
"33EF0AC5-B475-40BF-BAE5-67075B204D10", "entries": [ P0, P1, P2, P3, P4, P5, P6, P7, P8 ] } P0["files"] = [ "astra_mex_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P1["files"] = [ "astra_mex_algorithm_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P2["files"] = [ "astra_mex_data2d_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexCopyDataHelpFunctions.cpp", "mexCopyDataHelpFunctions.h", "mexDataManagerHelpFunctions.cpp", "mexDataManagerHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P3["files"] = [ "astra_mex_data3d_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexCopyDataHelpFunctions.cpp", "mexCopyDataHelpFunctions.h", "mexDataManagerHelpFunctions.cpp", "mexDataManagerHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P4["files"] = [ "astra_mex_matrix_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P5["files"] = [ "astra_mex_projector_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P6["files"] = [ "astra_mex_projector3d_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P7["files"] = [ "astra_mex_log_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P8["files"] = [ "astra_mex_direct_c.cpp", "mexHelpFunctions.cpp", "mexHelpFunctions.h", "mexCopyDataHelpFunctions.cpp", "mexCopyDataHelpFunctions.h", "mexDataManagerHelpFunctions.cpp", "mexDataManagerHelpFunctions.h", "mexInitFunctions.cpp", "mexInitFunctions.h", ] P_astra["filter_names"] = [ "Algorithms", "Data Structures", "Projectors", "CUDA", "Global &amp; Other", "Geometries", "Algorithms\\headers", "Algorithms\\source", "Data Structures\\headers", "Data Structures\\source", "Global &amp; Other\\headers", "Global &amp; Other\\source", 
"Geometries\\headers", "Geometries\\source", "Projectors\\headers", "Projectors\\inline", "Projectors\\source", "CUDA\\astra headers", "CUDA\\astra source", "CUDA\\cuda headers", "CUDA\\cuda source", ] P_astra["filters"] = {} P_astra["filters"]["Algorithms"] = [ "262b0d17-774a-4cb1-b51a-b358d2d02791" ] P_astra["filters"]["Data Structures"] = [ "76d6d672-670b-4454-b3ab-10dc8f9b8710" ] P_astra["filters"]["Projectors"] = [ "77a581a9-60da-4265-97c0-80cdf97408c0" ] P_astra["filters"]["CUDA"] = [ "c1af0e56-5fcc-4e75-b5db-88eeb4148185" ] P_astra["filters"]["Global &amp; Other"] = [ "72fbe846-10ef-4c52-88df-13bd66c4cbfc" ] P_astra["filters"]["Geometries"] = [ "7ef37c12-c98c-4dd6-938d-12f49279eae0" ] P_astra["filters"]["CUDA\\cuda source"] = [ "04a878ed-77b4-4525-9bc2-38ccd65282c5", "cuda\\2d\\algo.cu", "cuda\\2d\\arith.cu", "cuda\\2d\\astra.cu", "cuda\\2d\\cgls.cu", "cuda\\2d\\darthelper.cu", "cuda\\2d\\em.cu", "cuda\\2d\\fan_bp.cu", "cuda\\2d\\fan_fp.cu", "cuda\\2d\\fft.cu", "cuda\\2d\\par_bp.cu", "cuda\\2d\\par_fp.cu", "cuda\\2d\\sart.cu", "cuda\\2d\\sirt.cu", "cuda\\2d\\util.cu", "cuda\\3d\\algo3d.cu", "cuda\\3d\\arith3d.cu", "cuda\\3d\\astra3d.cu", "cuda\\3d\\cgls3d.cu", "cuda\\3d\\cone_bp.cu", "cuda\\3d\\cone_fp.cu", "cuda\\3d\\darthelper3d.cu", "cuda\\3d\\fdk.cu", "cuda\\3d\\mem3d.cu", "cuda\\3d\\par3d_bp.cu", "cuda\\3d\\par3d_fp.cu", "cuda\\3d\\sirt3d.cu", "cuda\\3d\\util3d.cu", ] P_astra["filters"]["Algorithms\\source"] = [ "9df653ab-26c3-4bec-92a2-3dda22fda761", "src\\Algorithm.cpp", "src\\ArtAlgorithm.cpp", "src\\AsyncAlgorithm.cpp", "src\\BackProjectionAlgorithm.cpp", "src\\CglsAlgorithm.cpp", "src\\FilteredBackProjectionAlgorithm.cpp", "src\\ForwardProjectionAlgorithm.cpp", "src\\PluginAlgorithm.cpp", "src\\ReconstructionAlgorithm2D.cpp", "src\\ReconstructionAlgorithm3D.cpp", "src\\SartAlgorithm.cpp", "src\\SirtAlgorithm.cpp", ] P_astra["filters"]["Data Structures\\source"] = [ "95346487-8185-487b-a794-3e7fb5fcbd4c", "src\\Float32Data.cpp", 
"src\\Float32Data2D.cpp", "src\\Float32Data3D.cpp", "src\\Float32Data3DMemory.cpp", "src\\Float32ProjectionData2D.cpp", "src\\Float32ProjectionData3D.cpp", "src\\Float32ProjectionData3DMemory.cpp", "src\\Float32VolumeData2D.cpp", "src\\Float32VolumeData3D.cpp", "src\\Float32VolumeData3DMemory.cpp", "src\\SparseMatrix.cpp", ] P_astra["filters"]["Global &amp; Other\\source"] = [ "1546cb47-7e5b-42c2-b695-ef172024c14b", "src\\AstraObjectFactory.cpp", "src\\AstraObjectManager.cpp", "src\\CompositeGeometryManager.cpp", "src\\Config.cpp", "src\\Fourier.cpp", "src\\Globals.cpp", "src\\Logging.cpp", "src\\PlatformDepSystemCode.cpp", "src\\Utilities.cpp", "src\\XMLDocument.cpp", "src\\XMLNode.cpp", ] P_astra["filters"]["Geometries\\source"] = [ "dc27bff7-4256-4311-a131-47612a44af20", "src\\ConeProjectionGeometry3D.cpp", "src\\ConeVecProjectionGeometry3D.cpp", "src\\FanFlatProjectionGeometry2D.cpp", "src\\FanFlatVecProjectionGeometry2D.cpp", "src\\GeometryUtil3D.cpp", "src\\ParallelProjectionGeometry2D.cpp", "src\\ParallelProjectionGeometry3D.cpp", "src\\ParallelVecProjectionGeometry3D.cpp", "src\\ProjectionGeometry2D.cpp", "src\\ProjectionGeometry3D.cpp", "src\\SparseMatrixProjectionGeometry2D.cpp", "src\\VolumeGeometry2D.cpp", "src\\VolumeGeometry3D.cpp", ] P_astra["filters"]["Projectors\\source"] = [ "2d60e3c8-7874-4cee-b139-991ac15e811d", "src\\DataProjector.cpp", "src\\DataProjectorPolicies.cpp", "src\\FanFlatBeamLineKernelProjector2D.cpp", "src\\FanFlatBeamStripKernelProjector2D.cpp", "src\\ParallelBeamBlobKernelProjector2D.cpp", "src\\ParallelBeamLinearKernelProjector2D.cpp", "src\\ParallelBeamLineKernelProjector2D.cpp", "src\\ParallelBeamStripKernelProjector2D.cpp", "src\\Projector2D.cpp", "src\\Projector3D.cpp", "src\\SparseMatrixProjector2D.cpp", ] P_astra["filters"]["CUDA\\astra source"] = [ "bbef012e-598a-456f-90d8-416bdcb4221c", "src\\CudaBackProjectionAlgorithm.cpp", "src\\CudaBackProjectionAlgorithm3D.cpp", "src\\CudaCglsAlgorithm.cpp", 
"src\\CudaCglsAlgorithm3D.cpp", "src\\CudaDartMaskAlgorithm.cpp", "src\\CudaDartMaskAlgorithm3D.cpp", "src\\CudaDartSmoothingAlgorithm.cpp", "src\\CudaDartSmoothingAlgorithm3D.cpp", "src\\CudaDataOperationAlgorithm.cpp", "src\\CudaEMAlgorithm.cpp", "src\\CudaFDKAlgorithm3D.cpp", "src\\CudaFilteredBackProjectionAlgorithm.cpp", "src\\CudaForwardProjectionAlgorithm.cpp", "src\\CudaForwardProjectionAlgorithm3D.cpp", "src\\CudaProjector2D.cpp", "src\\CudaProjector3D.cpp", "src\\CudaReconstructionAlgorithm2D.cpp", "src\\CudaRoiSelectAlgorithm.cpp", "src\\CudaSartAlgorithm.cpp", "src\\CudaSirtAlgorithm.cpp", "src\\CudaSirtAlgorithm3D.cpp", ] P_astra["filters"]["CUDA\\cuda headers"] = [ "4e17872e-db7d-41bc-9760-fad1c253b583", "cuda\\2d\\algo.h", "cuda\\2d\\arith.h", "cuda\\2d\\astra.h", "cuda\\2d\\cgls.h", "cuda\\2d\\darthelper.h", "cuda\\2d\\dims.h", "cuda\\2d\\em.h", "cuda\\2d\\fan_bp.h", "cuda\\2d\\fan_fp.h", "cuda\\2d\\fbp_filters.h", "cuda\\2d\\fft.h", "cuda\\2d\\par_bp.h", "cuda\\2d\\par_fp.h", "cuda\\2d\\sart.h", "cuda\\2d\\sirt.h", "cuda\\2d\\util.h", "cuda\\3d\\algo3d.h", "cuda\\3d\\arith3d.h", "cuda\\3d\\astra3d.h", "cuda\\3d\\cgls3d.h", "cuda\\3d\\cone_bp.h", "cuda\\3d\\cone_fp.h", "cuda\\3d\\darthelper3d.h", "cuda\\3d\\dims3d.h", "cuda\\3d\\fdk.h", "cuda\\3d\\mem3d.h", "cuda\\3d\\par3d_bp.h", "cuda\\3d\\par3d_fp.h", "cuda\\3d\\sirt3d.h", "cuda\\3d\\util3d.h", ] P_astra["filters"]["Algorithms\\headers"] = [ "a76ffd6d-3895-4365-b27e-fc9a72f2ed75", "include\\astra\\Algorithm.h", "include\\astra\\AlgorithmTypelist.h", "include\\astra\\ArtAlgorithm.h", "include\\astra\\AsyncAlgorithm.h", "include\\astra\\BackProjectionAlgorithm.h", "include\\astra\\CglsAlgorithm.h", "include\\astra\\CudaBackProjectionAlgorithm.h", "include\\astra\\CudaBackProjectionAlgorithm3D.h", "include\\astra\\FilteredBackProjectionAlgorithm.h", "include\\astra\\ForwardProjectionAlgorithm.h", "include\\astra\\PluginAlgorithm.h", "include\\astra\\ReconstructionAlgorithm2D.h", 
"include\\astra\\ReconstructionAlgorithm3D.h", "include\\astra\\SartAlgorithm.h", "include\\astra\\SirtAlgorithm.h", ] P_astra["filters"]["Data Structures\\headers"] = [ "444c44b0-6454-483a-be26-7cb9c8ab0b98", "include\\astra\\Float32Data.h", "include\\astra\\Float32Data2D.h", "include\\astra\\Float32Data3D.h", "include\\astra\\Float32Data3DMemory.h", "include\\astra\\Float32ProjectionData2D.h", "include\\astra\\Float32ProjectionData3D.h", "include\\astra\\Float32ProjectionData3DMemory.h", "include\\astra\\Float32VolumeData2D.h", "include\\astra\\Float32VolumeData3D.h", "include\\astra\\Float32VolumeData3DMemory.h", "include\\astra\\SparseMatrix.h", ] P_astra["filters"]["Global &amp; Other\\headers"] = [ "1c52efc8-a77e-4c72-b9be-f6429a87e6d7", "include\\astra\\AstraObjectFactory.h", "include\\astra\\AstraObjectManager.h", "include\\astra\\clog.h", "include\\astra\\CompositeGeometryManager.h", "include\\astra\\Config.h", "include\\astra\\Fourier.h", "include\\astra\\Globals.h", "include\\astra\\Logging.h", "include\\astra\\PlatformDepSystemCode.h", "include\\astra\\Singleton.h", "include\\astra\\TypeList.h", "include\\astra\\Utilities.h", "include\\astra\\Vector3D.h", "include\\astra\\XMLDocument.h", "include\\astra\\XMLNode.h", ] P_astra["filters"]["Geometries\\headers"] = [ "eddb31ba-0db7-4ab1-a490-36623aaf8901", "include\\astra\\ConeProjectionGeometry3D.h", "include\\astra\\ConeVecProjectionGeometry3D.h", "include\\astra\\FanFlatProjectionGeometry2D.h", "include\\astra\\FanFlatVecProjectionGeometry2D.h", "include\\astra\\GeometryUtil2D.h", "include\\astra\\GeometryUtil3D.h", "include\\astra\\ParallelProjectionGeometry2D.h", "include\\astra\\ParallelProjectionGeometry3D.h", "include\\astra\\ParallelVecProjectionGeometry3D.h", "include\\astra\\ProjectionGeometry2D.h", "include\\astra\\ProjectionGeometry3D.h", "include\\astra\\SparseMatrixProjectionGeometry2D.h", "include\\astra\\VolumeGeometry2D.h", "include\\astra\\VolumeGeometry3D.h", ] 
P_astra["filters"]["Projectors\\headers"] = [ "91ae2cfd-6b45-46eb-ad99-2f16e5ce4b1e", "include\\astra\\DataProjector.h", "include\\astra\\DataProjectorPolicies.h", "include\\astra\\FanFlatBeamLineKernelProjector2D.h", "include\\astra\\FanFlatBeamStripKernelProjector2D.h", "include\\astra\\ParallelBeamBlobKernelProjector2D.h", "include\\astra\\ParallelBeamLinearKernelProjector2D.h", "include\\astra\\ParallelBeamLineKernelProjector2D.h", "include\\astra\\ParallelBeamStripKernelProjector2D.h", "include\\astra\\Projector2D.h", "include\\astra\\Projector3D.h", "include\\astra\\ProjectorTypelist.h", "include\\astra\\SparseMatrixProjector2D.h", ] P_astra["filters"]["CUDA\\astra headers"] = [ "bd4e1f94-2f56-4db6-b946-20c29d65a351", "include\\astra\\CudaCglsAlgorithm.h", "include\\astra\\CudaCglsAlgorithm3D.h", "include\\astra\\CudaDartMaskAlgorithm.h", "include\\astra\\CudaDartMaskAlgorithm3D.h", "include\\astra\\CudaDartSmoothingAlgorithm.h", "include\\astra\\CudaDartSmoothingAlgorithm3D.h", "include\\astra\\CudaDataOperationAlgorithm.h", "include\\astra\\CudaEMAlgorithm.h", "include\\astra\\CudaFDKAlgorithm3D.h", "include\\astra\\CudaFilteredBackProjectionAlgorithm.h", "include\\astra\\CudaForwardProjectionAlgorithm.h", "include\\astra\\CudaForwardProjectionAlgorithm3D.h", "include\\astra\\CudaProjector2D.h", "include\\astra\\CudaProjector3D.h", "include\\astra\\CudaReconstructionAlgorithm2D.h", "include\\astra\\CudaRoiSelectAlgorithm.h", "include\\astra\\CudaSartAlgorithm.h", "include\\astra\\CudaSirtAlgorithm.h", "include\\astra\\CudaSirtAlgorithm3D.h", ] P_astra["filters"]["Projectors\\inline"] = [ "0daffd63-ba49-4a5f-8d7a-5322e0e74f22", "include\\astra\\DataProjectorPolicies.inl", "include\\astra\\FanFlatBeamLineKernelProjector2D.inl", "include\\astra\\FanFlatBeamStripKernelProjector2D.inl", "include\\astra\\ParallelBeamBlobKernelProjector2D.inl", "include\\astra\\ParallelBeamLinearKernelProjector2D.inl", "include\\astra\\ParallelBeamLineKernelProjector2D.inl", 
"include\\astra\\ParallelBeamStripKernelProjector2D.inl", "include\\astra\\SparseMatrixProjector2D.inl", ] P_astra["files"] = [] for f in P_astra["filters"]: P_astra["files"].extend(P_astra["filters"][f][1:]) P_astra["files"].sort() projects = [ P_astra, F_astra_mex, P0, P1, P2, P3, P4, P5, P6, P7, P8 ] bom = "\xef\xbb\xbf" class Configuration: def __init__(self, debug, cuda, x64): self.debug = debug self.cuda = cuda self.x64 = x64 def type(self): if self.debug: return "Debug" else: return "Release" def config(self): n = self.type() if self.cuda: n += "_CUDA" return n def platform(self): if self.x64: n = "x64" else: n = "Win32" return n def name(self): n = self.config() n += "|" n += self.platform() return n def target(self): n = "Astra" if self.cuda: n += "Cuda" if self.x64: n += "64" else: n += "32" if self.debug: n += "D" return n configs = [ Configuration(a,b,c) for a in [ True, False ] for b in [ True, False ] for c in [ False, True ] ] def write_sln(version): main_project = P_astra if version == 9: F = open("astra_vc09.sln", "w") elif version == 11: F = open("astra_vc11.sln", "w") else: assert(False) print(bom, file=F) if version == 9: print("Microsoft Visual Studio Solution File, Format Version 10.00", file=F) print("# Visual Studio 2008", file=F) uuid = "uuid09" file_ = "file09" elif version == 11: print("Microsoft Visual Studio Solution File, Format Version 12.00", file=F) print("# Visual Studio 2012", file=F) uuid = "uuid11" file_ = "file11" for p in projects: s = '''Project("{%s}") = "%s", "%s", "{%s}"''' % (p["type"], p["name"], p[file_], p[uuid]) print(s, file=F) if "mex" in p["name"]: print("\tProjectSection(ProjectDependencies) = postProject", file=F) print("\t\t{%s} = {%s}" % (main_project[uuid], main_project[uuid]), file=F) print("\tEndProjectSection", file=F) print("EndProject", file=F) print("Global", file=F) print("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution", file=F) for c in configs: print("\t\t" + c.name() + " = " + 
c.name(), file=F) print("\tEndGlobalSection", file=F) print("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution", file=F) for p in projects: if "entries" in p: continue for c in configs: print("\t\t{" + p[uuid] + "}." + c.name() + ".ActiveCfg = " + c.name(), file=F) print("\t\t{" + p[uuid] + "}." + c.name() + ".Build.0 = " + c.name(), file=F) print("\tEndGlobalSection", file=F) print("\tGlobalSection(SolutionProperties) = preSolution", file=F) print("\t\tHideSolutionNode = FALSE", file=F) print("\tEndGlobalSection", file=F) print("\tGlobalSection(NestedProjects) = preSolution", file=F) for p in projects: if "entries" not in p: continue for e in p["entries"]: print("\t\t{" + e[uuid] + "} = {" + p[uuid] + "}", file=F) print("\tEndGlobalSection", file=F) print("EndGlobal", file=F) F.close() def write_project11_start(P, F): print(bom + '<?xml version="1.0" encoding="utf-8"?>', file=F) print('<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">', file=F) print(' <ItemGroup Label="ProjectConfigurations">', file=F) for c in configs: print(' <ProjectConfiguration Include="' + c.name() + '">', file=F) print(' <Configuration>' + c.config() + '</Configuration>', file=F) print(' <Platform>' + c.platform() + '</Platform>', file=F) print(' </ProjectConfiguration>', file=F) print(' </ItemGroup>', file=F) print(' <PropertyGroup Label="Globals">', file=F) if 'mex' in P["name"]: print(' <ProjectName>' + P["name"] + '</ProjectName>', file=F) print(' <ProjectGuid>{' + P["uuid11"] + '}</ProjectGuid>', file=F) if 'mex' in P["name"]: print(' <RootNamespace>astraMatlab</RootNamespace>', file=F) else: print(' <RootNamespace>' + P["name"] + '</RootNamespace>', file=F) print(' </PropertyGroup>', file=F) print(' <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />', file=F) for c in configs: print(''' <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='%s'" Label="Configuration">''' % (c.name(), ), 
file=F) print(' <ConfigurationType>DynamicLibrary</ConfigurationType>', file=F) if 'mex' not in P["name"]: if c.debug: print(' <UseDebugLibraries>true</UseDebugLibraries>', file=F) else: print(' <UseDebugLibraries>false</UseDebugLibraries>', file=F) print(' <PlatformToolset>v110</PlatformToolset>', file=F) if 'mex' not in P["name"]: if not c.debug: print(' <WholeProgramOptimization>true</WholeProgramOptimization>', file=F) print(' <CharacterSet>MultiByte</CharacterSet>', file=F) print(' </PropertyGroup>', file=F) print(' <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />', file=F) print(' <ImportGroup Label="ExtensionSettings">', file=F) if "mex" not in P["name"]: print(' <Import Project="$(VCTargetsPath)\BuildCustomizations\CUDA 5.5.props" />', file=F) print(' </ImportGroup>', file=F) for c in configs: print(''' <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='%s'">''' % (c.name(), ), file=F) print(''' <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />''', file=F) print(''' </ImportGroup>''', file=F) print(' <PropertyGroup Label="UserMacros" />', file=F) def write_project11_end(P, F): l = [ f for f in P["files"] if len(f) > 4 and f[-4:] == ".cpp" ] if l: print(' <ItemGroup>', file=F) for f in l: if ("cuda" in f) or ("Cuda" in f): print(' <ClCompile Include="' + f + '">', file=F) for c in configs: if not c.cuda: print(''' <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='%s'">true</ExcludedFromBuild>''' % (c.name(), ), file=F) print(' </ClCompile>', file=F) else: print(' <ClCompile Include="' + f + '" />', file=F) print(' </ItemGroup>', file=F) l = [ f for f in P["files"] if len(f) > 2 and f[-2:] == ".h" ] if l: print(' <ItemGroup>', file=F) for f in l: print(' <ClInclude Include="' + f + '" />', file=F) print(' </ItemGroup>', file=F) l = [ f for f in P["files"] if len(f) > 3 and f[-3:] == 
".cu" ] if l: print(' <ItemGroup>', file=F) for f in l: print(' <CudaCompile Include="' + f + '">', file=F) for c in configs: if not c.cuda: print(''' <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='%s'">true</ExcludedFromBuild>''' % (c.name(), ), file=F) print(' </CudaCompile>', file=F) print(' </ItemGroup>', file=F) l = [ f for f in P["files"] if len(f) > 4 and f[-4:] == ".inl" ] if l: print(' <ItemGroup>', file=F) for f in l: print(' <None Include="' + f + '" />', file=F) print(' </ItemGroup>', file=F) print(' <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />', file=F) print(' <ImportGroup Label="ExtensionTargets">', file=F) if "mex" not in P["name"]: print(' <Import Project="$(VCTargetsPath)\BuildCustomizations\CUDA 5.5.targets" />', file=F) print(' </ImportGroup>', file=F) print('</Project>', end="", file=F) def write_main_project11(): P = P_astra; F = open(P["file11"], "w") write_project11_start(P, F) for c in configs: print(''' <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='%s'">''' % (c.name(), ), file=F) if c.cuda: print(' <IncludePath>$(CUDA_INC_PATH);$(IncludePath)</IncludePath>', file=F) print(' <LibraryPath>$(CUDA_LIB_PATH);$(LibraryPath)</LibraryPath>', file=F) print(' <OutDir>$(SolutionDir)bin\\$(Platform)\\' + c.config() + '\\</OutDir>', file=F) print(' <IntDir>$(OutDir)obj\\</IntDir>', file=F) print(' <TargetExt>.dll</TargetExt>', file=F) print(' <TargetName>' + c.target() + '</TargetName>', file=F) print(' <GenerateManifest>true</GenerateManifest>', file=F) print(' </PropertyGroup>', file=F) for c in configs: print(''' <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='%s'">''' % (c.name(), ), file=F) print(' <ClCompile>', file=F) if c.debug: print(' <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>', file=F) else: print(' <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>', file=F) print(' <WarningLevel>Level3</WarningLevel>', file=F) print(' 
<AdditionalIncludeDirectories>lib\include;include\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>', file=F) print(' <OpenMPSupport>true</OpenMPSupport>', file=F) if not c.x64: # /arch:SSE2 is implicit on x64 print(' <EnableEnhancedInstructionSet>StreamingSIMDExtensions2</EnableEnhancedInstructionSet>', file=F) if c.debug: print(' <Optimization>Disabled</Optimization>', file=F) else: print(' <Optimization>MaxSpeed</Optimization>', file=F) print(' <FunctionLevelLinking>true</FunctionLevelLinking>', file=F) print(' <IntrinsicFunctions>true</IntrinsicFunctions>', file=F) print(' <InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>', file=F) print(' <FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>', file=F) d=' <PreprocessorDefinitions>' if c.cuda: d+="ASTRA_CUDA;" d+="__SSE2__;" d+="DLL_EXPORTS;_CRT_SECURE_NO_WARNINGS;" d+='%(PreprocessorDefinitions)</PreprocessorDefinitions>' print(d, file=F) print(' <MultiProcessorCompilation>true</MultiProcessorCompilation>', file=F) print(' <SDLCheck>true</SDLCheck>', file=F) print(' </ClCompile>', file=F) print(' <Link>', file=F) print(' <GenerateDebugInformation>true</GenerateDebugInformation>', file=F) if not c.debug: print(' <EnableCOMDATFolding>true</EnableCOMDATFolding>', file=F) print(' <OptimizeReferences>true</OptimizeReferences>', file=F) print(' <OutputFile>bin\\' + c.platform() + '\\' + c.config() + '\\' + c.target() + '.dll</OutputFile>', file=F) if c.cuda: print(' <AdditionalDependencies>cudart.lib;cufft.lib;%(AdditionalDependencies)</AdditionalDependencies>', file=F) l = ' <AdditionalLibraryDirectories>'; if c.x64: l += 'lib\\x64' else: l += 'lib\\win32' l += ';%(AdditionalLibraryDirectories)' if c.cuda: l += ';$(CudaToolkitLibDir)' l += '</AdditionalLibraryDirectories>' print(l, file=F) print(' </Link>', file=F) if c.cuda: print(' <CudaCompile>', file=F) if c.x64: print(' <TargetMachinePlatform>64</TargetMachinePlatform>', file=F) else: print(' <TargetMachinePlatform>32</TargetMachinePlatform>', 
file=F) print(' <GenerateLineInfo>true</GenerateLineInfo>', file=F) print(' <CodeGeneration>compute_20,sm_20;compute_30,sm_30;compute_30,sm_35;compute_30,compute_30</CodeGeneration>', file=F) print(' </CudaCompile>', file=F) print(' </ItemDefinitionGroup>', file=F) write_project11_end(P, F) F.close() def write_mex_project11(P): F = open("matlab/mex/" + P["name"] + "_vc11.vcxproj", "w") write_project11_start(P, F) print(' <PropertyGroup>', file=F) print(' <_ProjectFileVersion>11.0.60610.1</_ProjectFileVersion>', file=F) print(' </PropertyGroup>', file=F) for c in configs: print(''' <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='%s'">''' % (c.name(), ), file=F) print(' <OutDir>$(SolutionDir)bin\\$(Platform)\\$(Configuration)\\</OutDir>', file=F) print(' <IntDir>$(OutDir)obj\\$(ProjectName)\\</IntDir>', file=F) print(' <TargetName>$(ProjectName)_c</TargetName>', file=F) if c.x64: print(' <TargetExt>.mexw64</TargetExt>', file=F) else: print(' <TargetExt>.mexw32</TargetExt>', file=F) print(' </PropertyGroup>', file=F) for c in configs: print(''' <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='%s'">''' % (c.name(), ), file=F) print(' <ClCompile>', file=F) if c.debug: print(' <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>', file=F) else: print(' <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>', file=F) # print(' <WarningLevel>Level3</WarningLevel>', file=F) #print(' <AdditionalIncludeDirectories>$(MATLAB_ROOT)\extern\include\;..\..\lib\include;..\..\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>', file=F) # FIXME: This CUDA_PATH shouldn't be necessary print(' <AdditionalIncludeDirectories>$(MATLAB_ROOT)\extern\include\;$(CUDA_PATH)\include;..\..\lib\include;..\..\include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>', file=F) print(' <OpenMPSupport>true</OpenMPSupport>', file=F) if not c.x64: # /arch:SSE2 is implicit on x64 print(' 
<EnableEnhancedInstructionSet>StreamingSIMDExtensions2</EnableEnhancedInstructionSet>', file=F) if c.debug: print(' <Optimization>Disabled</Optimization>', file=F) else: print(' <Optimization>MaxSpeed</Optimization>', file=F) # print(' <FunctionLevelLinking>true</FunctionLevelLinking>', file=F) # print(' <IntrinsicFunctions>true</IntrinsicFunctions>', file=F) # print(' <InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>', file=F) # print(' <FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>', file=F) d=' <PreprocessorDefinitions>' if c.cuda: d+="ASTRA_CUDA;" d+="__SSE2__;" # d+="DLL_EXPORTS;_CRT_SECURE_NO_WARNINGS;" d+='%(PreprocessorDefinitions)</PreprocessorDefinitions>' print(d, file=F) print(' <MultiProcessorCompilation>true</MultiProcessorCompilation>', file=F) # print(' <SDLCheck>true</SDLCheck>', file=F) # if c.debug: # <DebugInformationFormat>EditAndContinue</DebugInformationFormat> ?? print(' </ClCompile>', file=F) print(' <Link>', file=F) # if not c.debug: # print(' <EnableCOMDATFolding>true</EnableCOMDATFolding>', file=F) # print(' <OptimizeReferences>true</OptimizeReferences>', file=F) if c.x64: print(' <OutputFile>$(OutDir)$(ProjectName)_c.mexw64</OutputFile>', file=F) else: print(' <OutputFile>$(OutDir)$(ProjectName)_c.mexw32</OutputFile>', file=F) print(' <AdditionalDependencies>%s.lib;libmex.lib;libmx.lib;libut.lib;%%(AdditionalDependencies)</AdditionalDependencies>' % (c.target(), ), file=F) l = ' <AdditionalLibraryDirectories>'; if c.x64: l += '..\\..\\lib\\x64\\;..\\..\\bin\\x64\\' else: l += '..\\..\\lib\\win32\\;..\\..\\bin\\win32\\' l += c.config() if c.x64: l += ';$(MATLAB_ROOT)\extern\lib\win64\microsoft' else: l += ';$(MATLAB_ROOT)\extern\lib\win32\microsoft' l += ';%(AdditionalLibraryDirectories)' l += '</AdditionalLibraryDirectories>' print(l, file=F) print(' <ModuleDefinitionFile>mex.def</ModuleDefinitionFile>', file=F) print(' <GenerateDebugInformation>true</GenerateDebugInformation>', file=F) print(' </Link>', file=F) print(' 
</ItemDefinitionGroup>', file=F) write_project11_end(P, F) F.close() def write_main_filters11(): P = P_astra F = open(P["name"] + ".vcxproj.filters", "w") print(bom + '<?xml version="1.0" encoding="utf-8"?>', file=F) print('<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">', file=F) print(' <ItemGroup>', file=F) for Filter in P_astra["filter_names"]: L = P_astra["filters"][Filter][1:] l = [ f for f in L if len(f) > 3 and f[-3:] == ".cu" ] for f in l: print(' <CudaCompile Include="' + f + '">', file=F) print(' <Filter>' + Filter + '</Filter>', file=F) print(' </CudaCompile>', file=F) print(' </ItemGroup>', file=F) print(' <ItemGroup>', file=F) for Filter in P_astra["filter_names"]: L = P_astra["filters"][Filter][1:] l = [ f for f in L if len(f) > 4 and f[-4:] == ".cpp" ] for f in l: print(' <ClCompile Include="' + f + '">', file=F) print(' <Filter>' + Filter + '</Filter>', file=F) print(' </ClCompile>', file=F) print(' </ItemGroup>', file=F) print(' <ItemGroup>', file=F) for Filter in P_astra["filter_names"]: L = P_astra["filters"][Filter][1:] l = [ f for f in L if len(f) > 2 and f[-2:] == ".h" ] for f in l: print(' <ClInclude Include="' + f + '">', file=F) print(' <Filter>' + Filter + '</Filter>', file=F) print(' </ClInclude>', file=F) print(' </ItemGroup>', file=F) print(' <ItemGroup>', file=F) for Filter in P_astra["filter_names"]: L = P_astra["filters"][Filter][1:] l = [ f for f in L if len(f) > 4 and f[-4:] == ".inl" ] for f in l: print(' <None Include="' + f + '">', file=F) print(' <Filter>' + Filter + '</Filter>', file=F) print(' </None>', file=F) print(' </ItemGroup>', file=F) print(' <ItemGroup>', file=F) for f in P["filter_names"]: print(' <Filter Include="' + f + '">', file=F) print(' <UniqueIdentifier>{' + P["filters"][f][0] + '}</UniqueIdentifier>', file=F) print(' </Filter>', file=F) print(' </ItemGroup>', file=F) print('</Project>', end="", file=F) F.close() def write_project09_start(P, F): print('<?xml 
version="1.0" encoding="Windows-1252"?>', file=F) print('<VisualStudioProject', file=F) print('\tProjectType="Visual C++"', file=F) print('\tVersion="9.00"', file=F) if "mex" in P["name"]: print('\tName="%s"' % (P["name"], ), file=F) print('\tProjectGUID="{%s}"' % (P["uuid09"],), file=F) if "mex" in P["name"]: print('\tRootNamespace="astraMatlab"', file=F) else: print('\tRootNamespace="astra"', file=F) print('\tTargetFrameworkVersion="131072"', file=F) print('\t>', file=F) print(r''' <Platforms> <Platform Name="Win32" /> <Platform Name="x64" /> </Platforms>''', file=F) def write_project09_unused_tools(F): print(r''' <Tool Name="VCPreBuildEventTool" /> <Tool Name="VCCustomBuildTool" /> <Tool Name="VCXMLDataGeneratorTool" /> <Tool Name="VCWebServiceProxyGeneratorTool" /> <Tool Name="VCMIDLTool" /> <Tool Name="VCManagedResourceCompilerTool" /> <Tool Name="VCResourceCompilerTool" /> <Tool Name="VCPreLinkEventTool" /> <Tool Name="VCALinkTool" /> <Tool Name="VCManifestTool" /> <Tool Name="VCXDCMakeTool" /> <Tool Name="VCBscMakeTool" /> <Tool Name="VCFxCopTool" /> <Tool Name="VCAppVerifierTool" /> <Tool Name="VCPostBuildEventTool" />''', file=F) def write_main_project09(): P = P_astra; F = open(P["file09"], "w") write_project09_start(P, F) print(r''' <ToolFiles> <DefaultToolFile FileName="NvCudaRuntimeApi.v5.5.rules" /> </ToolFiles>''', file=F) print('\t<Configurations>', file=F) for c in configs: print('\t\t<Configuration', file=F) print('\t\t\tName="%s"' % (c.name(), ), file=F) print('\t\t\tOutputDirectory="$(SolutionDir)bin\$(PlatformName)\%s"' % (c.config(), ), file=F) print(r''' IntermediateDirectory="$(OutDir)/obj" ConfigurationType="2" >''', file=F) write_project09_unused_tools(F) print('\t\t\t<Tool', file=F) print('\t\t\t\tName="VCCLCompilerTool"', file=F) if c.cuda: print('\t\t\t\tAdditionalIncludeDirectories="&quot;$(CUDA_INC_PATH)&quot;;lib\\include;include"', file=F) print('\t\t\t\tPreprocessorDefinitions="ASTRA_CUDA;DLL_EXPORTS;__SSE2__"', file=F) else: 
print('\t\t\t\tAdditionalIncludeDirectories="lib\\include;include"', file=F) print('\t\t\t\tPreprocessorDefinitions="DLL_EXPORTS;__SSE2__"', file=F) if c.debug: print(r''' Optimization="0" InlineFunctionExpansion="0" FavorSizeOrSpeed="0" EnableFiberSafeOptimizations="false" WholeProgramOptimization="false" RuntimeLibrary="3"''', file=F) else: print(r''' Optimization="3" InlineFunctionExpansion="2" FavorSizeOrSpeed="1" RuntimeLibrary="2"''', file=F) if not c.x64: # /arch:SSE2 is implicit on x64 print('\t\t\t\tEnableEnhancedInstructionSet="2"', file=F) # SSE2 print('\t\t\t\tOpenMP="true"', file=F) print('\t\t\t\tAdditionalOptions="/MP"', file=F) # build with multiple processes print('\t\t\t/>', file=F) print('\t\t\t<Tool', file=F) print('\t\t\t\tName="VCLinkerTool"', file=F) if c.cuda: print('\t\t\t\tAdditionalDependencies="cudart.lib cufft.lib"', file=F) print('\t\t\t\tOutputFile="bin\\%s\\%s.dll"' % (c.platform(), c.target()), file=F) if c.cuda: print('\t\t\t\tAdditionalLibraryDirectories="&quot;.\\lib\\%s&quot;;&quot;$(CUDA_LIB_PATH)&quot;"' % (c.platform(), ), file=F) else: print('\t\t\t\tAdditionalLibraryDirectories="&quot;.\\lib\\%s&quot;"' % (c.platform(), ), file=F) print('\t\t\t\tGenerateManifest="true"', file=F) print('\t\t\t\tModuleDefinitionFile=""', file=F) if c.debug: print('\t\t\t\tGenerateDebugInformation="true"', file=F) if c.x64: print('\t\t\t\tTargetMachine="17"', file=F) # x64 else: print('\t\t\t\tTargetMachine="1"', file=F) # x86 print('\t\t\t/>', file=F) print('\t\t\t<Tool', file=F) print('\t\t\t\tName="Cudart Build Rule"', file=F) print('\t\t\t\tArch1="20"', file=F) print('\t\t\t\tArch2="30"', file=F) print('\t\t\t\tArch3="35"', file=F) if c.x64: print('\t\t\t\tTargetMachinePlatform="1"', file=F) # x64 else: print('\t\t\t\tTargetMachinePlatform="0"', file=F) # x86 if c.debug: print('\t\t\t\tRuntime="3"', file=F) # MDD else: print('\t\t\t\tRuntime="2"', file=F) # MD print('\t\t\t\tExtraCppOptions="-Iinclude -Ilib/include"', file=F) if c.cuda: 
print('\t\t\t\tDefines="ASTRA_CUDA;DLL_EXPORTS"', file=F) else: # This 'else' doesn't make much sense print('\t\t\t\tDefines="DLL_EXPORTS"', file=F) # TODO!!! print('\t\t\t/>', file=F) print('\t\t</Configuration>', file=F) print('\t</Configurations>', file=F) print('\t<References>', file=F) print('\t</References>', file=F) print('\t<Files>', file=F) print(r''' <Filter Name="Resource Files" Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav" UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}" > <File RelativePath=".\src\astra.def" > </File> </Filter>''', file=F) curgroup = None for Filter in P["filter_names"]: if "\\" not in Filter: continue # TODO [ group, subgroup ] = Filter.split("\\") if group != curgroup: if curgroup != None: print('\t\t</Filter>', file=F) print('\t\t<Filter', file=F) print('\t\t\tName="%s"' % (group, ), file=F) print('\t\t\t>', file=F) curgroup = group print('\t\t\t<Filter', file=F) print('\t\t\t\tName="%s"' % (subgroup, ), file=F) print('\t\t\t\t>', file=F) for f in P["filters"][Filter][1:]: print('\t\t\t\t<File', file=F) print('\t\t\t\t\tRelativePath=".\\%s"' % (f, ), file=F) print('\t\t\t\t\t>', file=F) if (("Cuda" in f) or ("cuda" in f)) and not (f[-2:] == ".h"): for c in configs: if not c.cuda: print('\t\t\t\t\t<FileConfiguration', file=F) print('\t\t\t\t\t\tName="%s"' % (c.name(), ), file=F) print('\t\t\t\t\t\tExcludedFromBuild="true"', file=F) print('\t\t\t\t\t\t>', file=F) print('\t\t\t\t\t\t<Tool', file=F) if len(f) > 3 and f[-3:] == ".cu": print('\t\t\t\t\t\t\tName="Cudart Build Rule"', file=F) else: print('\t\t\t\t\t\t\tName="VCCLCompilerTool"', file=F) print('\t\t\t\t\t\t/>', file=F) print('\t\t\t\t\t</FileConfiguration>', file=F) print('\t\t\t\t</File>', file=F) print('\t\t\t</Filter>', file=F) print('\t\t</Filter>', file=F) print('\t</Files>', file=F) print('\t<Globals>', file=F) print('\t</Globals>', file=F) print('</VisualStudioProject>', file=F) F.close() def write_mex_project09(P): F = 
open("matlab/mex/" + P["name"] + "_vc09.vcproj", "w") write_project09_start(P, F) print('\t<ToolFiles>', file=F) print('\t</ToolFiles>', file=F) print('\t<Configurations>', file=F) for c in configs: print('\t\t<Configuration', file=F) print('\t\t\tName="%s"' % (c.name(), ), file=F) print('\t\t\tOutputDirectory="$(SolutionDir)bin\$(PlatformName)\$(ConfigurationName)"', file=F) print(r''' IntermediateDirectory="$(OutDir)\obj\$(ProjectName)" ConfigurationType="2" >''', file=F) write_project09_unused_tools(F) print('\t\t\t<Tool', file=F) print('\t\t\t\tName="VCCLCompilerTool"', file=F) if c.cuda: print('\t\t\t\tAdditionalIncludeDirectories="$(MATLAB_ROOT)\\extern\\include\\;&quot;$(CUDA_INC_PATH)&quot;;..\\..\\lib\\include;..\\..\\include"', file=F) print('\t\t\t\tPreprocessorDefinitions="ASTRA_CUDA;__SSE2__"', file=F) else: print('\t\t\t\tAdditionalIncludeDirectories="$(MATLAB_ROOT)\\extern\\include\\;..\\..\\lib\\include;..\\..\\include"', file=F) print('\t\t\t\tPreprocessorDefinitions="__SSE2__"', file=F) if c.debug: print(r''' Optimization="0" RuntimeLibrary="3"''', file=F) else: print(r''' Optimization="2" RuntimeLibrary="2"''', file=F) if not c.x64: # /arch:SSE2 is implicit on x64 print('\t\t\t\tEnableEnhancedInstructionSet="2"', file=F) # SSE2 print('\t\t\t\tOpenMP="true"', file=F) print('\t\t\t\tAdditionalOptions="/MP"', file=F) # build with multiple processes print('\t\t\t/>', file=F) print('\t\t\t<Tool', file=F) print('\t\t\t\tName="VCLinkerTool"', file=F) print('\t\t\t\tAdditionalDependencies="%s.lib libmex.lib libmx.lib libut.lib"' % (c.target(), ), file=F) if c.x64: print('\t\t\t\tOutputFile="$(OutDir)\\$(ProjectName)_c.mexw64"', file=F) else: print('\t\t\t\tOutputFile="$(OutDir)\\$(ProjectName)_c.mexw32"', file=F) if c.x64: print('\t\t\t\tAdditionalLibraryDirectories="..\\..\\bin\\x64;$(MATLAB_ROOT)\\extern\\lib\\win64\\microsoft;..\\..\\lib\\x64"', file=F) else: 
print('\t\t\t\tAdditionalLibraryDirectories="..\\..\\bin\\win32;$(MATLAB_ROOT)\\extern\\lib\\win32\\microsoft;..\\..\\lib\\win32"', file=F) print('\t\t\t\tModuleDefinitionFile="mex.def"', file=F) if c.debug: print('\t\t\t\tGenerateDebugInformation="true"', file=F) else: print('\t\t\t\tGenerateDebugInformation="false"', file=F) if c.x64: print('\t\t\t\tTargetMachine="17"', file=F) # x64 else: print('\t\t\t\tTargetMachine="1"', file=F) # x86 print('\t\t\t/>', file=F) print('\t\t</Configuration>', file=F) print('\t</Configurations>', file=F) print('\t<References>', file=F) print('\t</References>', file=F) print('\t<Files>', file=F) for f in P["files"]: print('\t\t<File', file=F) print('\t\t\tRelativePath=".\\%s"' % (f, ), file=F) print('\t\t\t>', file=F) print('\t\t</File>', file=F) print('\t</Files>', file=F) print('\t<Globals>', file=F) print('\t</Globals>', file=F) print('</VisualStudioProject>', file=F) if (len(sys.argv) != 2) or (sys.argv[1] not in ["vc09", "vc11", "all"]): print("Usage: python gen.py [vc09|vc11|all]", file=sys.stderr) sys.exit(1) try: open("../../src/AstraObjectManager.cpp", "r") except IOError: print("Run gen.py from the build/msvc directory", file=sys.stderr) sys.exit(1) # Change directory to main dir os.chdir("../..") if sys.argv[1] in ["vc11", "all"]: # HACK P_astra["name"] = "astra_vc11" write_sln(11) write_main_project11() write_main_filters11() write_mex_project11(P0) write_mex_project11(P1) write_mex_project11(P2) write_mex_project11(P3) write_mex_project11(P4) write_mex_project11(P5) write_mex_project11(P6) write_mex_project11(P7) write_mex_project11(P8) if sys.argv[1] in ["vc09", "all"]: # HACK P_astra["name"] = "astra" write_sln(9) write_main_project09() write_mex_project09(P0) write_mex_project09(P1) write_mex_project09(P2) write_mex_project09(P3) write_mex_project09(P4) write_mex_project09(P5) write_mex_project09(P6) write_mex_project09(P7) write_mex_project09(P8)
selfcommit/simian
refs/heads/master
src/tests/simian/mac/munki/handlers/icons_test.py
2
#!/usr/bin/env python # # Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import base64 import httplib import mock import stubout import webtest import cloudstorage as gcs from google.apputils import app from google.apputils import basetest from simian import settings from simian.mac.common import auth from tests.simian.mac.common import test from simian.mac.urls import app as gae_app @mock.patch.object(auth, 'DoAnyAuth') class IconsModuleTest(test.AppengineTest): def setUp(self): super(IconsModuleTest, self).setUp() self.testapp = webtest.TestApp(gae_app) def testNotFound(self, *_): settings.ICONS_GCS_BUCKET = 'test' self.testapp.get('/icons/filename.png', status=httplib.NOT_FOUND) def testSuccess(self, *_): settings.ICONS_GCS_BUCKET = 'test' content = 'IMAGE_CONTENT' with gcs.open( '/test/%s.png' % base64.urlsafe_b64encode('filename'), 'w') as f: f.write(content) resp = self.testapp.get('/icons/filename.png', status=httplib.OK) self.assertEqual(content, resp.body) def main(unused_argv): basetest.main() if __name__ == '__main__': app.run()
codeworldprodigy/lab4
refs/heads/master
lib/werkzeug/datastructures.py
314
# -*- coding: utf-8 -*- """ werkzeug.datastructures ~~~~~~~~~~~~~~~~~~~~~~~ This module provides mixins and classes with an immutable interface. :copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import re import sys import codecs import mimetypes from itertools import repeat from werkzeug._internal import _missing, _empty_stream from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \ PY2, text_type, integer_types, string_types, make_literal_wrapper _locale_delim_re = re.compile(r'[_-]') def is_immutable(self): raise TypeError('%r objects are immutable' % self.__class__.__name__) def iter_multi_items(mapping): """Iterates over the items of a mapping yielding keys and values without dropping any from more complex structures. """ if isinstance(mapping, MultiDict): for item in iteritems(mapping, multi=True): yield item elif isinstance(mapping, dict): for key, value in iteritems(mapping): if isinstance(value, (tuple, list)): for value in value: yield key, value else: yield key, value else: for item in mapping: yield item def native_itermethods(names): if not PY2: return lambda x: x def setmethod(cls, name): itermethod = getattr(cls, name) setattr(cls, 'iter%s' % name, itermethod) listmethod = lambda self, *a, **kw: list(itermethod(self, *a, **kw)) listmethod.__doc__ = \ 'Like :py:meth:`iter%s`, but returns a list.' % name setattr(cls, name, listmethod) def wrap(cls): for name in names: setmethod(cls, name) return cls return wrap class ImmutableListMixin(object): """Makes a :class:`list` immutable. .. 
versionadded:: 0.5 :private: """ _hash_cache = None def __hash__(self): if self._hash_cache is not None: return self._hash_cache rv = self._hash_cache = hash(tuple(self)) return rv def __reduce_ex__(self, protocol): return type(self), (list(self),) def __delitem__(self, key): is_immutable(self) def __delslice__(self, i, j): is_immutable(self) def __iadd__(self, other): is_immutable(self) __imul__ = __iadd__ def __setitem__(self, key, value): is_immutable(self) def __setslice__(self, i, j, value): is_immutable(self) def append(self, item): is_immutable(self) remove = append def extend(self, iterable): is_immutable(self) def insert(self, pos, value): is_immutable(self) def pop(self, index=-1): is_immutable(self) def reverse(self): is_immutable(self) def sort(self, cmp=None, key=None, reverse=None): is_immutable(self) class ImmutableList(ImmutableListMixin, list): """An immutable :class:`list`. .. versionadded:: 0.5 :private: """ def __repr__(self): return '%s(%s)' % ( self.__class__.__name__, dict.__repr__(self), ) class ImmutableDictMixin(object): """Makes a :class:`dict` immutable. .. 
versionadded:: 0.5 :private: """ _hash_cache = None @classmethod def fromkeys(cls, keys, value=None): instance = super(cls, cls).__new__(cls) instance.__init__(zip(keys, repeat(value))) return instance def __reduce_ex__(self, protocol): return type(self), (dict(self),) def _iter_hashitems(self): return iteritems(self) def __hash__(self): if self._hash_cache is not None: return self._hash_cache rv = self._hash_cache = hash(frozenset(self._iter_hashitems())) return rv def setdefault(self, key, default=None): is_immutable(self) def update(self, *args, **kwargs): is_immutable(self) def pop(self, key, default=None): is_immutable(self) def popitem(self): is_immutable(self) def __setitem__(self, key, value): is_immutable(self) def __delitem__(self, key): is_immutable(self) def clear(self): is_immutable(self) class ImmutableMultiDictMixin(ImmutableDictMixin): """Makes a :class:`MultiDict` immutable. .. versionadded:: 0.5 :private: """ def __reduce_ex__(self, protocol): return type(self), (list(iteritems(self, multi=True)),) def _iter_hashitems(self): return iteritems(self, multi=True) def add(self, key, value): is_immutable(self) def popitemlist(self): is_immutable(self) def poplist(self, key): is_immutable(self) def setlist(self, key, new_list): is_immutable(self) def setlistdefault(self, key, default_list=None): is_immutable(self) class UpdateDictMixin(object): """Makes dicts call `self.on_update` on modifications. .. 
versionadded:: 0.5 :private: """ on_update = None def calls_update(name): def oncall(self, *args, **kw): rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw) if self.on_update is not None: self.on_update(self) return rv oncall.__name__ = name return oncall def setdefault(self, key, default=None): modified = key not in self rv = super(UpdateDictMixin, self).setdefault(key, default) if modified and self.on_update is not None: self.on_update(self) return rv def pop(self, key, default=_missing): modified = key in self if default is _missing: rv = super(UpdateDictMixin, self).pop(key) else: rv = super(UpdateDictMixin, self).pop(key, default) if modified and self.on_update is not None: self.on_update(self) return rv __setitem__ = calls_update('__setitem__') __delitem__ = calls_update('__delitem__') clear = calls_update('clear') popitem = calls_update('popitem') update = calls_update('update') del calls_update class TypeConversionDict(dict): """Works like a regular dict but the :meth:`get` method can perform type conversions. :class:`MultiDict` and :class:`CombinedMultiDict` are subclasses of this class and provide the same feature. .. versionadded:: 0.5 """ def get(self, key, default=None, type=None): """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In this case the function will return the default as if the value was not found: >>> d = TypeConversionDict(foo='42', bar='blub') >>> d.get('foo', type=int) 42 >>> d.get('bar', -1, type=int) -1 :param key: The key to be looked up. :param default: The default value to be returned if the key can't be looked up. If not further specified `None` is returned. :param type: A callable that is used to cast the value in the :class:`MultiDict`. If a :exc:`ValueError` is raised by this callable the default value is returned. 
""" try: rv = self[key] if type is not None: rv = type(rv) except (KeyError, ValueError): rv = default return rv class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): """Works like a :class:`TypeConversionDict` but does not support modifications. .. versionadded:: 0.5 """ def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return TypeConversionDict(self) def __copy__(self): return self @native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) class MultiDict(TypeConversionDict): """A :class:`MultiDict` is a dictionary subclass customized to deal with multiple values for the same key which is for example used by the parsing functions in the wrappers. This is necessary because some HTML form elements pass multiple values for the same key. :class:`MultiDict` implements all standard dictionary methods. Internally, it saves all values for a key as a list, but the standard dict access methods will only return the first value for a key. If you want to gain access to the other values, too, you have to use the `list` methods as explained below. Basic Usage: >>> d = MultiDict([('a', 'b'), ('a', 'c')]) >>> d MultiDict([('a', 'b'), ('a', 'c')]) >>> d['a'] 'b' >>> d.getlist('a') ['b', 'c'] >>> 'a' in d True It behaves like a normal dict thus all dict functions will only return the first value when multiple values for one key are found. From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP exceptions. A :class:`MultiDict` can be constructed from an iterable of ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2 onwards some keyword parameters. 
:param mapping: the initial value for the :class:`MultiDict`. Either a regular dict, an iterable of ``(key, value)`` tuples or `None`. """ def __init__(self, mapping=None): if isinstance(mapping, MultiDict): dict.__init__(self, ((k, l[:]) for k, l in iterlists(mapping))) elif isinstance(mapping, dict): tmp = {} for key, value in iteritems(mapping): if isinstance(value, (tuple, list)): value = list(value) else: value = [value] tmp[key] = value dict.__init__(self, tmp) else: tmp = {} for key, value in mapping or (): tmp.setdefault(key, []).append(value) dict.__init__(self, tmp) def __getstate__(self): return dict(self.lists()) def __setstate__(self, value): dict.clear(self) dict.update(self, value) def __getitem__(self, key): """Return the first data value for this key; raises KeyError if not found. :param key: The key to be looked up. :raise KeyError: if the key does not exist. """ if key in self: return dict.__getitem__(self, key)[0] raise exceptions.BadRequestKeyError(key) def __setitem__(self, key, value): """Like :meth:`add` but removes an existing key first. :param key: the key for the value. :param value: the value to set. """ dict.__setitem__(self, key, [value]) def add(self, key, value): """Adds a new value for the key. .. versionadded:: 0.6 :param key: the key for the value. :param value: the value to add. """ dict.setdefault(self, key, []).append(value) def getlist(self, key, type=None): """Return the list of items for a given key. If that key is not in the `MultiDict`, the return value will be an empty list. Just as `get` `getlist` accepts a `type` parameter. All items will be converted with the callable defined there. :param key: The key to be looked up. :param type: A callable that is used to cast the value in the :class:`MultiDict`. If a :exc:`ValueError` is raised by this callable the value will be removed from the list. :return: a :class:`list` of all the values for the key. 
""" try: rv = dict.__getitem__(self, key) except KeyError: return [] if type is None: return list(rv) result = [] for item in rv: try: result.append(type(item)) except ValueError: pass return result def setlist(self, key, new_list): """Remove the old values for a key and add new ones. Note that the list you pass the values in will be shallow-copied before it is inserted in the dictionary. >>> d = MultiDict() >>> d.setlist('foo', ['1', '2']) >>> d['foo'] '1' >>> d.getlist('foo') ['1', '2'] :param key: The key for which the values are set. :param new_list: An iterable with the new values for the key. Old values are removed first. """ dict.__setitem__(self, key, list(new_list)) def setdefault(self, key, default=None): """Returns the value for the key if it is in the dict, otherwise it returns `default` and sets that value for `key`. :param key: The key to be looked up. :param default: The default value to be returned if the key is not in the dict. If not further specified it's `None`. """ if key not in self: self[key] = default else: default = self[key] return default def setlistdefault(self, key, default_list=None): """Like `setdefault` but sets multiple values. The list returned is not a copy, but the list that is actually used internally. This means that you can put new values into the dict by appending items to the list: >>> d = MultiDict({"foo": 1}) >>> d.setlistdefault("foo").extend([2, 3]) >>> d.getlist("foo") [1, 2, 3] :param key: The key to be looked up. :param default: An iterable of default values. It is either copied (in case it was a list) or converted into a list before returned. :return: a :class:`list` """ if key not in self: default_list = list(default_list or ()) dict.__setitem__(self, key, default_list) else: default_list = dict.__getitem__(self, key) return default_list def items(self, multi=False): """Return an iterator of ``(key, value)`` pairs. :param multi: If set to `True` the iterator returned will have a pair for each value of each key. 
Otherwise it will only contain pairs for the first value of each key. """ for key, values in iteritems(dict, self): if multi: for value in values: yield key, value else: yield key, values[0] def lists(self): """Return a list of ``(key, values)`` pairs, where values is the list of all values associated with the key.""" for key, values in iteritems(dict, self): yield key, list(values) def keys(self): return iterkeys(dict, self) __iter__ = keys def values(self): """Returns an iterator of the first value on every key's value list.""" for values in itervalues(dict, self): yield values[0] def listvalues(self): """Return an iterator of all values associated with a key. Zipping :meth:`keys` and this is the same as calling :meth:`lists`: >>> d = MultiDict({"foo": [1, 2, 3]}) >>> zip(d.keys(), d.listvalues()) == d.lists() True """ return itervalues(dict, self) def copy(self): """Return a shallow copy of this object.""" return self.__class__(self) def to_dict(self, flat=True): """Return the contents as regular dict. If `flat` is `True` the returned dict will only have the first item present, if `flat` is `False` all values will be returned as lists. :param flat: If set to `False` the dict returned will have lists with all the values in it. Otherwise it will only contain the first value for each key. :return: a :class:`dict` """ if flat: return dict(iteritems(self)) return dict(self.lists()) def update(self, other_dict): """update() extends rather than replaces existing key lists.""" for key, value in iter_multi_items(other_dict): MultiDict.add(self, key, value) def pop(self, key, default=_missing): """Pop the first item for a list on the dict. Afterwards the key is removed from the dict, so additional values are discarded: >>> d = MultiDict({"foo": [1, 2, 3]}) >>> d.pop("foo") 1 >>> "foo" in d False :param key: the key to pop. :param default: if provided the value to return if the key was not in the dictionary. 
""" try: return dict.pop(self, key)[0] except KeyError as e: if default is not _missing: return default raise exceptions.BadRequestKeyError(str(e)) def popitem(self): """Pop an item from the dict.""" try: item = dict.popitem(self) return (item[0], item[1][0]) except KeyError as e: raise exceptions.BadRequestKeyError(str(e)) def poplist(self, key): """Pop the list for a key from the dict. If the key is not in the dict an empty list is returned. .. versionchanged:: 0.5 If the key does no longer exist a list is returned instead of raising an error. """ return dict.pop(self, key, []) def popitemlist(self): """Pop a ``(key, list)`` tuple from the dict.""" try: return dict.popitem(self) except KeyError as e: raise exceptions.BadRequestKeyError(str(e)) def __copy__(self): return self.copy() def __repr__(self): return '%s(%r)' % (self.__class__.__name__, list(iteritems(self, multi=True))) class _omd_bucket(object): """Wraps values in the :class:`OrderedMultiDict`. This makes it possible to keep an order over multiple different keys. It requires a lot of extra memory and slows down access a lot, but makes it possible to access elements in O(1) and iterate in O(n). """ __slots__ = ('prev', 'key', 'value', 'next') def __init__(self, omd, key, value): self.prev = omd._last_bucket self.key = key self.value = value self.next = None if omd._first_bucket is None: omd._first_bucket = self if omd._last_bucket is not None: omd._last_bucket.next = self omd._last_bucket = self def unlink(self, omd): if self.prev: self.prev.next = self.next if self.next: self.next.prev = self.prev if omd._first_bucket is self: omd._first_bucket = self.next if omd._last_bucket is self: omd._last_bucket = self.prev @native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) class OrderedMultiDict(MultiDict): """Works like a regular :class:`MultiDict` but preserves the order of the fields. 
To convert the ordered multi dict into a list you can use the :meth:`items` method and pass it ``multi=True``. In general an :class:`OrderedMultiDict` is an order of magnitude slower than a :class:`MultiDict`. .. admonition:: note Due to a limitation in Python you cannot convert an ordered multi dict into a regular dict by using ``dict(multidict)``. Instead you have to use the :meth:`to_dict` method, otherwise the internal bucket objects are exposed. """ def __init__(self, mapping=None): dict.__init__(self) self._first_bucket = self._last_bucket = None if mapping is not None: OrderedMultiDict.update(self, mapping) def __eq__(self, other): if not isinstance(other, MultiDict): return NotImplemented if isinstance(other, OrderedMultiDict): iter1 = iteritems(self, multi=True) iter2 = iteritems(other, multi=True) try: for k1, v1 in iter1: k2, v2 = next(iter2) if k1 != k2 or v1 != v2: return False except StopIteration: return False try: next(iter2) except StopIteration: return True return False if len(self) != len(other): return False for key, values in iterlists(self): if other.getlist(key) != values: return False return True def __ne__(self, other): return not self.__eq__(other) def __reduce_ex__(self, protocol): return type(self), (list(iteritems(self, multi=True)),) def __getstate__(self): return list(iteritems(self, multi=True)) def __setstate__(self, values): dict.clear(self) for key, value in values: self.add(key, value) def __getitem__(self, key): if key in self: return dict.__getitem__(self, key)[0].value raise exceptions.BadRequestKeyError(key) def __setitem__(self, key, value): self.poplist(key) self.add(key, value) def __delitem__(self, key): self.pop(key) def keys(self): return (key for key, value in iteritems(self)) __iter__ = keys def values(self): return (value for key, value in iteritems(self)) def items(self, multi=False): ptr = self._first_bucket if multi: while ptr is not None: yield ptr.key, ptr.value ptr = ptr.next else: returned_keys = set() while 
ptr is not None: if ptr.key not in returned_keys: returned_keys.add(ptr.key) yield ptr.key, ptr.value ptr = ptr.next def lists(self): returned_keys = set() ptr = self._first_bucket while ptr is not None: if ptr.key not in returned_keys: yield ptr.key, self.getlist(ptr.key) returned_keys.add(ptr.key) ptr = ptr.next def listvalues(self): for key, values in iterlists(self): yield values def add(self, key, value): dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) def getlist(self, key, type=None): try: rv = dict.__getitem__(self, key) except KeyError: return [] if type is None: return [x.value for x in rv] result = [] for item in rv: try: result.append(type(item.value)) except ValueError: pass return result def setlist(self, key, new_list): self.poplist(key) for value in new_list: self.add(key, value) def setlistdefault(self, key, default_list=None): raise TypeError('setlistdefault is unsupported for ' 'ordered multi dicts') def update(self, mapping): for key, value in iter_multi_items(mapping): OrderedMultiDict.add(self, key, value) def poplist(self, key): buckets = dict.pop(self, key, ()) for bucket in buckets: bucket.unlink(self) return [x.value for x in buckets] def pop(self, key, default=_missing): try: buckets = dict.pop(self, key) except KeyError as e: if default is not _missing: return default raise exceptions.BadRequestKeyError(str(e)) for bucket in buckets: bucket.unlink(self) return buckets[0].value def popitem(self): try: key, buckets = dict.popitem(self) except KeyError as e: raise exceptions.BadRequestKeyError(str(e)) for bucket in buckets: bucket.unlink(self) return key, buckets[0].value def popitemlist(self): try: key, buckets = dict.popitem(self) except KeyError as e: raise exceptions.BadRequestKeyError(str(e)) for bucket in buckets: bucket.unlink(self) return key, [x.value for x in buckets] def _options_header_vkw(value, kw): return dump_options_header(value, dict((k.replace('_', '-'), v) for k, v in kw.items())) def 
_unicodify_header_value(value): if isinstance(value, bytes): value = value.decode('latin-1') if not isinstance(value, text_type): value = text_type(value) return value @native_itermethods(['keys', 'values', 'items']) class Headers(object): """An object that stores some headers. It has a dict-like interface but is ordered and can store the same keys multiple times. This data structure is useful if you want a nicer way to handle WSGI headers which are stored as tuples in a list. From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is also a subclass of the :class:`~exceptions.BadRequest` HTTP exception and will render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP exceptions. Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers` class, with the exception of `__getitem__`. :mod:`wsgiref` will return `None` for ``headers['missing']``, whereas :class:`Headers` will raise a :class:`KeyError`. To create a new :class:`Headers` object pass it a list or dict of headers which are used as default values. This does not reuse the list passed to the constructor for internal usage. :param defaults: The list of default values for the :class:`Headers`. .. versionchanged:: 0.9 This data structure now stores unicode values similar to how the multi dicts do it. The main difference is that bytes can be set as well which will automatically be latin1 decoded. .. versionchanged:: 0.9 The :meth:`linked` function was removed without replacement as it was an API that does not support the changes to the encoding model. 
""" def __init__(self, defaults=None): self._list = [] if defaults is not None: if isinstance(defaults, (list, Headers)): self._list.extend(defaults) else: self.extend(defaults) def __getitem__(self, key, _get_mode=False): if not _get_mode: if isinstance(key, integer_types): return self._list[key] elif isinstance(key, slice): return self.__class__(self._list[key]) if not isinstance(key, string_types): raise exceptions.BadRequestKeyError(key) ikey = key.lower() for k, v in self._list: if k.lower() == ikey: return v # micro optimization: if we are in get mode we will catch that # exception one stack level down so we can raise a standard # key error instead of our special one. if _get_mode: raise KeyError() raise exceptions.BadRequestKeyError(key) def __eq__(self, other): return other.__class__ is self.__class__ and \ set(other._list) == set(self._list) def __ne__(self, other): return not self.__eq__(other) def get(self, key, default=None, type=None, as_bytes=False): """Return the default value if the requested data doesn't exist. If `type` is provided and is a callable it should convert the value, return it or raise a :exc:`ValueError` if that is not possible. In this case the function will return the default as if the value was not found: >>> d = Headers([('Content-Length', '42')]) >>> d.get('Content-Length', type=int) 42 If a headers object is bound you must not add unicode strings because no encoding takes place. .. versionadded:: 0.9 Added support for `as_bytes`. :param key: The key to be looked up. :param default: The default value to be returned if the key can't be looked up. If not further specified `None` is returned. :param type: A callable that is used to cast the value in the :class:`Headers`. If a :exc:`ValueError` is raised by this callable the default value is returned. :param as_bytes: return bytes instead of unicode strings. 
""" try: rv = self.__getitem__(key, _get_mode=True) except KeyError: return default if as_bytes: rv = rv.encode('latin1') if type is None: return rv try: return type(rv) except ValueError: return default def getlist(self, key, type=None, as_bytes=False): """Return the list of items for a given key. If that key is not in the :class:`Headers`, the return value will be an empty list. Just as :meth:`get` :meth:`getlist` accepts a `type` parameter. All items will be converted with the callable defined there. .. versionadded:: 0.9 Added support for `as_bytes`. :param key: The key to be looked up. :param type: A callable that is used to cast the value in the :class:`Headers`. If a :exc:`ValueError` is raised by this callable the value will be removed from the list. :return: a :class:`list` of all the values for the key. :param as_bytes: return bytes instead of unicode strings. """ ikey = key.lower() result = [] for k, v in self: if k.lower() == ikey: if as_bytes: v = v.encode('latin1') if type is not None: try: v = type(v) except ValueError: continue result.append(v) return result def get_all(self, name): """Return a list of all the values for the named field. This method is compatible with the :mod:`wsgiref` :meth:`~wsgiref.headers.Headers.get_all` method. """ return self.getlist(name) def items(self, lower=False): for key, value in self: if lower: key = key.lower() yield key, value def keys(self, lower=False): for key, _ in iteritems(self, lower): yield key def values(self): for _, value in iteritems(self): yield value def extend(self, iterable): """Extend the headers with a dict or an iterable yielding keys and values. 
""" if isinstance(iterable, dict): for key, value in iteritems(iterable): if isinstance(value, (tuple, list)): for v in value: self.add(key, v) else: self.add(key, value) else: for key, value in iterable: self.add(key, value) def __delitem__(self, key, _index_operation=True): if _index_operation and isinstance(key, (integer_types, slice)): del self._list[key] return key = key.lower() new = [] for k, v in self._list: if k.lower() != key: new.append((k, v)) self._list[:] = new def remove(self, key): """Remove a key. :param key: The key to be removed. """ return self.__delitem__(key, _index_operation=False) def pop(self, key=None, default=_missing): """Removes and returns a key or index. :param key: The key to be popped. If this is an integer the item at that position is removed, if it's a string the value for that key is. If the key is omitted or `None` the last item is removed. :return: an item. """ if key is None: return self._list.pop() if isinstance(key, integer_types): return self._list.pop(key) try: rv = self[key] self.remove(key) except KeyError: if default is not _missing: return default raise return rv def popitem(self): """Removes a key or index and returns a (key, value) item.""" return self.pop() def __contains__(self, key): """Check if a key is present.""" try: self.__getitem__(key, _get_mode=True) except KeyError: return False return True has_key = __contains__ def __iter__(self): """Yield ``(key, value)`` tuples.""" return iter(self._list) def __len__(self): return len(self._list) def add(self, _key, _value, **kw): """Add a new header tuple to the list. Keyword arguments can specify additional parameters for the header value, with underscores converted to dashes:: >>> d = Headers() >>> d.add('Content-Type', 'text/plain') >>> d.add('Content-Disposition', 'attachment', filename='foo.png') The keyword argument dumping uses :func:`dump_options_header` behind the scenes. .. versionadded:: 0.4.1 keyword arguments were added for :mod:`wsgiref` compatibility. 
""" if kw: _value = _options_header_vkw(_value, kw) _value = _unicodify_header_value(_value) self._validate_value(_value) self._list.append((_key, _value)) def _validate_value(self, value): if not isinstance(value, text_type): raise TypeError('Value should be unicode.') if u'\n' in value or u'\r' in value: raise ValueError('Detected newline in header value. This is ' 'a potential security problem') def add_header(self, _key, _value, **_kw): """Add a new header tuple to the list. An alias for :meth:`add` for compatibility with the :mod:`wsgiref` :meth:`~wsgiref.headers.Headers.add_header` method. """ self.add(_key, _value, **_kw) def clear(self): """Clears all headers.""" del self._list[:] def set(self, _key, _value, **kw): """Remove all header tuples for `key` and add a new one. The newly added key either appears at the end of the list if there was no entry or replaces the first one. Keyword arguments can specify additional parameters for the header value, with underscores converted to dashes. See :meth:`add` for more information. .. versionchanged:: 0.6.1 :meth:`set` now accepts the same arguments as :meth:`add`. :param key: The key to be inserted. :param value: The value to be inserted. """ if kw: _value = _options_header_vkw(_value, kw) _value = _unicodify_header_value(_value) self._validate_value(_value) if not self._list: self._list.append((_key, _value)) return listiter = iter(self._list) ikey = _key.lower() for idx, (old_key, old_value) in enumerate(listiter): if old_key.lower() == ikey: # replace first ocurrence self._list[idx] = (_key, _value) break else: self._list.append((_key, _value)) return self._list[idx + 1:] = [t for t in listiter if t[0].lower() != ikey] def setdefault(self, key, value): """Returns the value for the key if it is in the dict, otherwise it returns `default` and sets that value for `key`. :param key: The key to be looked up. :param default: The default value to be returned if the key is not in the dict. 
If not further specified it's `None`. """ if key in self: return self[key] self.set(key, value) return value def __setitem__(self, key, value): """Like :meth:`set` but also supports index/slice based setting.""" if isinstance(key, (slice, integer_types)): if isinstance(key, integer_types): value = [value] value = [(k, _unicodify_header_value(v)) for (k, v) in value] [self._validate_value(v) for (k, v) in value] if isinstance(key, integer_types): self._list[key] = value[0] else: self._list[key] = value else: self.set(key, value) def to_list(self, charset='iso-8859-1'): """Convert the headers into a list suitable for WSGI.""" from warnings import warn warn(DeprecationWarning('Method removed, use to_wsgi_list instead'), stacklevel=2) return self.to_wsgi_list() def to_wsgi_list(self): """Convert the headers into a list suitable for WSGI. The values are byte strings in Python 2 converted to latin1 and unicode strings in Python 3 for the WSGI server to encode. :return: list """ if PY2: return [(k, v.encode('latin1')) for k, v in self] return list(self) def copy(self): return self.__class__(self._list) def __copy__(self): return self.copy() def __str__(self): """Returns formatted headers suitable for HTTP transmission.""" strs = [] for key, value in self.to_wsgi_list(): strs.append('%s: %s' % (key, value)) strs.append('\r\n') return '\r\n'.join(strs) def __repr__(self): return '%s(%r)' % ( self.__class__.__name__, list(self) ) class ImmutableHeadersMixin(object): """Makes a :class:`Headers` immutable. We do not mark them as hashable though since the only usecase for this datastructure in Werkzeug is a view on a mutable structure. .. 
versionadded:: 0.5 :private: """ def __delitem__(self, key): is_immutable(self) def __setitem__(self, key, value): is_immutable(self) set = __setitem__ def add(self, item): is_immutable(self) remove = add_header = add def extend(self, iterable): is_immutable(self) def insert(self, pos, value): is_immutable(self) def pop(self, index=-1): is_immutable(self) def popitem(self): is_immutable(self) def setdefault(self, key, default): is_immutable(self) class EnvironHeaders(ImmutableHeadersMixin, Headers): """Read only version of the headers from a WSGI environment. This provides the same interface as `Headers` and is constructed from a WSGI environment. From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP exceptions. """ def __init__(self, environ): self.environ = environ def __eq__(self, other): return self.environ is other.environ def __getitem__(self, key, _get_mode=False): # _get_mode is a no-op for this class as there is no index but # used because get() calls it. key = key.upper().replace('-', '_') if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'): return _unicodify_header_value(self.environ[key]) return _unicodify_header_value(self.environ['HTTP_' + key]) def __len__(self): # the iter is necessary because otherwise list calls our # len which would call list again and so forth. 
return len(list(iter(self))) def __iter__(self): for key, value in iteritems(self.environ): if key.startswith('HTTP_') and key not in \ ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): yield (key[5:].replace('_', '-').title(), _unicodify_header_value(value)) elif key in ('CONTENT_TYPE', 'CONTENT_LENGTH'): yield (key.replace('_', '-').title(), _unicodify_header_value(value)) def copy(self): raise TypeError('cannot create %r copies' % self.__class__.__name__) @native_itermethods(['keys', 'values', 'items', 'lists', 'listvalues']) class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict): """A read only :class:`MultiDict` that you can pass multiple :class:`MultiDict` instances as sequence and it will combine the return values of all wrapped dicts: >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict >>> post = MultiDict([('foo', 'bar')]) >>> get = MultiDict([('blub', 'blah')]) >>> combined = CombinedMultiDict([get, post]) >>> combined['foo'] 'bar' >>> combined['blub'] 'blah' This works for all read operations and will raise a `TypeError` for methods that usually change data which isn't possible. From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP exceptions. 
""" def __reduce_ex__(self, protocol): return type(self), (self.dicts,) def __init__(self, dicts=None): self.dicts = dicts or [] @classmethod def fromkeys(cls): raise TypeError('cannot create %r instances by fromkeys' % cls.__name__) def __getitem__(self, key): for d in self.dicts: if key in d: return d[key] raise exceptions.BadRequestKeyError(key) def get(self, key, default=None, type=None): for d in self.dicts: if key in d: if type is not None: try: return type(d[key]) except ValueError: continue return d[key] return default def getlist(self, key, type=None): rv = [] for d in self.dicts: rv.extend(d.getlist(key, type)) return rv def keys(self): rv = set() for d in self.dicts: rv.update(d.keys()) return iter(rv) __iter__ = keys def items(self, multi=False): found = set() for d in self.dicts: for key, value in iteritems(d, multi): if multi: yield key, value elif key not in found: found.add(key) yield key, value def values(self): for key, value in iteritems(self): yield value def lists(self): rv = {} for d in self.dicts: for key, values in iterlists(d): rv.setdefault(key, []).extend(values) return iteritems(rv) def listvalues(self): return (x[1] for x in self.lists()) def copy(self): """Return a shallow copy of this object.""" return self.__class__(self.dicts[:]) def to_dict(self, flat=True): """Return the contents as regular dict. If `flat` is `True` the returned dict will only have the first item present, if `flat` is `False` all values will be returned as lists. :param flat: If set to `False` the dict returned will have lists with all the values in it. Otherwise it will only contain the first item for each key. 
:return: a :class:`dict` """ rv = {} for d in reversed(self.dicts): rv.update(d.to_dict(flat)) return rv def __len__(self): return len(self.keys()) def __contains__(self, key): for d in self.dicts: if key in d: return True return False has_key = __contains__ def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self.dicts) class FileMultiDict(MultiDict): """A special :class:`MultiDict` that has convenience methods to add files to it. This is used for :class:`EnvironBuilder` and generally useful for unittesting. .. versionadded:: 0.5 """ def add_file(self, name, file, filename=None, content_type=None): """Adds a new file to the dict. `file` can be a file name or a :class:`file`-like or a :class:`FileStorage` object. :param name: the name of the field. :param file: a filename or :class:`file`-like object :param filename: an optional filename :param content_type: an optional content type """ if isinstance(file, FileStorage): value = file else: if isinstance(file, string_types): if filename is None: filename = file file = open(file, 'rb') if filename and content_type is None: content_type = mimetypes.guess_type(filename)[0] or \ 'application/octet-stream' value = FileStorage(file, filename, name, content_type) self.add(name, value) class ImmutableDict(ImmutableDictMixin, dict): """An immutable :class:`dict`. .. versionadded:: 0.5 """ def __repr__(self): return '%s(%s)' % ( self.__class__.__name__, dict.__repr__(self), ) def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return dict(self) def __copy__(self): return self class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict): """An immutable :class:`MultiDict`. .. versionadded:: 0.5 """ def copy(self): """Return a shallow mutable copy of this object. 
Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return MultiDict(self) def __copy__(self): return self class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict): """An immutable :class:`OrderedMultiDict`. .. versionadded:: 0.6 """ def _iter_hashitems(self): return enumerate(iteritems(self, multi=True)) def copy(self): """Return a shallow mutable copy of this object. Keep in mind that the standard library's :func:`copy` function is a no-op for this class like for any other python immutable type (eg: :class:`tuple`). """ return OrderedMultiDict(self) def __copy__(self): return self @native_itermethods(['values']) class Accept(ImmutableList): """An :class:`Accept` object is just a list subclass for lists of ``(value, quality)`` tuples. It is automatically sorted by quality. All :class:`Accept` objects work similar to a list but provide extra functionality for working with the data. Containment checks are normalized to the rules of that header: >>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)]) >>> a.best 'ISO-8859-1' >>> 'iso-8859-1' in a True >>> 'UTF8' in a True >>> 'utf7' in a False To get the quality for an item you can use normal item lookup: >>> print a['utf-8'] 0.7 >>> a['utf7'] 0 .. versionchanged:: 0.5 :class:`Accept` objects are forced immutable now. 
""" def __init__(self, values=()): if values is None: list.__init__(self) self.provided = False elif isinstance(values, Accept): self.provided = values.provided list.__init__(self, values) else: self.provided = True values = [(a, b) for b, a in values] values.sort() values.reverse() list.__init__(self, [(a, b) for b, a in values]) def _value_matches(self, value, item): """Check if a value matches a given accept item.""" return item == '*' or item.lower() == value.lower() def __getitem__(self, key): """Besides index lookup (getting item n) you can also pass it a string to get the quality for the item. If the item is not in the list, the returned quality is ``0``. """ if isinstance(key, string_types): return self.quality(key) return list.__getitem__(self, key) def quality(self, key): """Returns the quality of the key. .. versionadded:: 0.6 In previous versions you had to use the item-lookup syntax (eg: ``obj[key]`` instead of ``obj.quality(key)``) """ for item, quality in self: if self._value_matches(key, item): return quality return 0 def __contains__(self, value): for item, quality in self: if self._value_matches(value, item): return True return False def __repr__(self): return '%s([%s])' % ( self.__class__.__name__, ', '.join('(%r, %s)' % (x, y) for x, y in self) ) def index(self, key): """Get the position of an entry or raise :exc:`ValueError`. :param key: The key to be looked up. .. versionchanged:: 0.5 This used to raise :exc:`IndexError`, which was inconsistent with the list API. """ if isinstance(key, string_types): for idx, (item, quality) in enumerate(self): if self._value_matches(key, item): return idx raise ValueError(key) return list.index(self, key) def find(self, key): """Get the position of an entry or return -1. :param key: The key to be looked up. 
""" try: return self.index(key) except ValueError: return -1 def values(self): """Iterate over all values.""" for item in self: yield item[0] def to_header(self): """Convert the header set into an HTTP header string.""" result = [] for value, quality in self: if quality != 1: value = '%s;q=%s' % (value, quality) result.append(value) return ','.join(result) def __str__(self): return self.to_header() def best_match(self, matches, default=None): """Returns the best match from a list of possible matches based on the quality of the client. If two items have the same quality, the one is returned that comes first. :param matches: a list of matches to check for :param default: the value that is returned if none match """ best_quality = -1 result = default for server_item in matches: for client_item, quality in self: if quality <= best_quality: break if self._value_matches(server_item, client_item): best_quality = quality result = server_item return result @property def best(self): """The best match as value.""" if self: return self[0][0] class MIMEAccept(Accept): """Like :class:`Accept` but with special methods and behavior for mimetypes. """ def _value_matches(self, value, item): def _normalize(x): x = x.lower() return x == '*' and ('*', '*') or x.split('/', 1) # this is from the application which is trusted. 
to avoid developer # frustration we actually check these for valid values if '/' not in value: raise ValueError('invalid mimetype %r' % value) value_type, value_subtype = _normalize(value) if value_type == '*' and value_subtype != '*': raise ValueError('invalid mimetype %r' % value) if '/' not in item: return False item_type, item_subtype = _normalize(item) if item_type == '*' and item_subtype != '*': return False return ( (item_type == item_subtype == '*' or value_type == value_subtype == '*') or (item_type == value_type and (item_subtype == '*' or value_subtype == '*' or item_subtype == value_subtype)) ) @property def accept_html(self): """True if this object accepts HTML.""" return ( 'text/html' in self or 'application/xhtml+xml' in self or self.accept_xhtml ) @property def accept_xhtml(self): """True if this object accepts XHTML.""" return ( 'application/xhtml+xml' in self or 'application/xml' in self ) @property def accept_json(self): """True if this object accepts JSON.""" return 'application/json' in self class LanguageAccept(Accept): """Like :class:`Accept` but with normalization for languages.""" def _value_matches(self, value, item): def _normalize(language): return _locale_delim_re.split(language.lower()) return item == '*' or _normalize(value) == _normalize(item) class CharsetAccept(Accept): """Like :class:`Accept` but with normalization for charsets.""" def _value_matches(self, value, item): def _normalize(name): try: return codecs.lookup(name).name except LookupError: return name.lower() return item == '*' or _normalize(value) == _normalize(item) def cache_property(key, empty, type): """Return a new property object for a cache header. 
Useful if you want to add support for a cache extension in a subclass.""" return property(lambda x: x._get_cache_value(key, empty, type), lambda x, v: x._set_cache_value(key, v, type), lambda x: x._del_cache_value(key), 'accessor for %r' % key) class _CacheControl(UpdateDictMixin, dict): """Subclass of a dict that stores values for a Cache-Control header. It has accessors for all the cache-control directives specified in RFC 2616. The class does not differentiate between request and response directives. Because the cache-control directives in the HTTP header use dashes the python descriptors use underscores for that. To get a header of the :class:`CacheControl` object again you can convert the object into a string or call the :meth:`to_header` method. If you plan to subclass it and add your own items have a look at the sourcecode for that class. .. versionchanged:: 0.4 Setting `no_cache` or `private` to boolean `True` will set the implicit none-value which is ``*``: >>> cc = ResponseCacheControl() >>> cc.no_cache = True >>> cc <ResponseCacheControl 'no-cache'> >>> cc.no_cache '*' >>> cc.no_cache = None >>> cc <ResponseCacheControl ''> In versions before 0.5 the behavior documented here affected the now no longer existing `CacheControl` class. 
""" no_cache = cache_property('no-cache', '*', None) no_store = cache_property('no-store', None, bool) max_age = cache_property('max-age', -1, int) no_transform = cache_property('no-transform', None, None) def __init__(self, values=(), on_update=None): dict.__init__(self, values or ()) self.on_update = on_update self.provided = values is not None def _get_cache_value(self, key, empty, type): """Used internally by the accessor properties.""" if type is bool: return key in self if key in self: value = self[key] if value is None: return empty elif type is not None: try: value = type(value) except ValueError: pass return value def _set_cache_value(self, key, value, type): """Used internally by the accessor properties.""" if type is bool: if value: self[key] = None else: self.pop(key, None) else: if value is None: self.pop(key) elif value is True: self[key] = None else: self[key] = value def _del_cache_value(self, key): """Used internally by the accessor properties.""" if key in self: del self[key] def to_header(self): """Convert the stored values into a cache control header.""" return dump_header(self) def __str__(self): return self.to_header() def __repr__(self): return '<%s %r>' % ( self.__class__.__name__, self.to_header() ) class RequestCacheControl(ImmutableDictMixin, _CacheControl): """A cache control for requests. This is immutable and gives access to all the request-relevant cache control headers. To get a header of the :class:`RequestCacheControl` object again you can convert the object into a string or call the :meth:`to_header` method. If you plan to subclass it and add your own items have a look at the sourcecode for that class. .. versionadded:: 0.5 In previous versions a `CacheControl` class existed that was used both for request and response. 
""" max_stale = cache_property('max-stale', '*', int) min_fresh = cache_property('min-fresh', '*', int) no_transform = cache_property('no-transform', None, None) only_if_cached = cache_property('only-if-cached', None, bool) class ResponseCacheControl(_CacheControl): """A cache control for responses. Unlike :class:`RequestCacheControl` this is mutable and gives access to response-relevant cache control headers. To get a header of the :class:`ResponseCacheControl` object again you can convert the object into a string or call the :meth:`to_header` method. If you plan to subclass it and add your own items have a look at the sourcecode for that class. .. versionadded:: 0.5 In previous versions a `CacheControl` class existed that was used both for request and response. """ public = cache_property('public', None, bool) private = cache_property('private', '*', None) must_revalidate = cache_property('must-revalidate', None, bool) proxy_revalidate = cache_property('proxy-revalidate', None, bool) s_maxage = cache_property('s-maxage', None, None) # attach cache_property to the _CacheControl as staticmethod # so that others can reuse it. _CacheControl.cache_property = staticmethod(cache_property) class CallbackDict(UpdateDictMixin, dict): """A dict that calls a function passed every time something is changed. The function is passed the dict instance. """ def __init__(self, initial=None, on_update=None): dict.__init__(self, initial or ()) self.on_update = on_update def __repr__(self): return '<%s %s>' % ( self.__class__.__name__, dict.__repr__(self) ) class HeaderSet(object): """Similar to the :class:`ETags` class this implements a set-like structure. Unlike :class:`ETags` this is case insensitive and used for vary, allow, and content-language headers. 
If not constructed using the :func:`parse_set_header` function the instantiation works like this: >>> hs = HeaderSet(['foo', 'bar', 'baz']) >>> hs HeaderSet(['foo', 'bar', 'baz']) """ def __init__(self, headers=None, on_update=None): self._headers = list(headers or ()) self._set = set([x.lower() for x in self._headers]) self.on_update = on_update def add(self, header): """Add a new header to the set.""" self.update((header,)) def remove(self, header): """Remove a header from the set. This raises an :exc:`KeyError` if the header is not in the set. .. versionchanged:: 0.5 In older versions a :exc:`IndexError` was raised instead of a :exc:`KeyError` if the object was missing. :param header: the header to be removed. """ key = header.lower() if key not in self._set: raise KeyError(header) self._set.remove(key) for idx, key in enumerate(self._headers): if key.lower() == header: del self._headers[idx] break if self.on_update is not None: self.on_update(self) def update(self, iterable): """Add all the headers from the iterable to the set. :param iterable: updates the set with the items from the iterable. """ inserted_any = False for header in iterable: key = header.lower() if key not in self._set: self._headers.append(header) self._set.add(key) inserted_any = True if inserted_any and self.on_update is not None: self.on_update(self) def discard(self, header): """Like :meth:`remove` but ignores errors. :param header: the header to be discarded. """ try: return self.remove(header) except KeyError: pass def find(self, header): """Return the index of the header in the set or return -1 if not found. :param header: the header to be looked up. """ header = header.lower() for idx, item in enumerate(self._headers): if item.lower() == header: return idx return -1 def index(self, header): """Return the index of the header in the set or raise an :exc:`IndexError`. :param header: the header to be looked up. 
""" rv = self.find(header) if rv < 0: raise IndexError(header) return rv def clear(self): """Clear the set.""" self._set.clear() del self._headers[:] if self.on_update is not None: self.on_update(self) def as_set(self, preserve_casing=False): """Return the set as real python set type. When calling this, all the items are converted to lowercase and the ordering is lost. :param preserve_casing: if set to `True` the items in the set returned will have the original case like in the :class:`HeaderSet`, otherwise they will be lowercase. """ if preserve_casing: return set(self._headers) return set(self._set) def to_header(self): """Convert the header set into an HTTP header string.""" return ', '.join(map(quote_header_value, self._headers)) def __getitem__(self, idx): return self._headers[idx] def __delitem__(self, idx): rv = self._headers.pop(idx) self._set.remove(rv.lower()) if self.on_update is not None: self.on_update(self) def __setitem__(self, idx, value): old = self._headers[idx] self._set.remove(old.lower()) self._headers[idx] = value self._set.add(value.lower()) if self.on_update is not None: self.on_update(self) def __contains__(self, header): return header.lower() in self._set def __len__(self): return len(self._set) def __iter__(self): return iter(self._headers) def __nonzero__(self): return bool(self._set) def __str__(self): return self.to_header() def __repr__(self): return '%s(%r)' % ( self.__class__.__name__, self._headers ) class ETags(object): """A set that can be used to check if one etag is present in a collection of etags. """ def __init__(self, strong_etags=None, weak_etags=None, star_tag=False): self._strong = frozenset(not star_tag and strong_etags or ()) self._weak = frozenset(weak_etags or ()) self.star_tag = star_tag def as_set(self, include_weak=False): """Convert the `ETags` object into a python set. 
Per default all the weak etags are not part of this set.""" rv = set(self._strong) if include_weak: rv.update(self._weak) return rv def is_weak(self, etag): """Check if an etag is weak.""" return etag in self._weak def contains_weak(self, etag): """Check if an etag is part of the set including weak and strong tags.""" return self.is_weak(etag) or self.contains(etag) def contains(self, etag): """Check if an etag is part of the set ignoring weak tags. It is also possible to use the ``in`` operator. """ if self.star_tag: return True return etag in self._strong def contains_raw(self, etag): """When passed a quoted tag it will check if this tag is part of the set. If the tag is weak it is checked against weak and strong tags, otherwise strong only.""" etag, weak = unquote_etag(etag) if weak: return self.contains_weak(etag) return self.contains(etag) def to_header(self): """Convert the etags set into a HTTP header string.""" if self.star_tag: return '*' return ', '.join( ['"%s"' % x for x in self._strong] + ['w/"%s"' % x for x in self._weak] ) def __call__(self, etag=None, data=None, include_weak=False): if [etag, data].count(None) != 1: raise TypeError('either tag or data required, but at least one') if etag is None: etag = generate_etag(data) if include_weak: if etag in self._weak: return True return etag in self._strong def __nonzero__(self): return bool(self.star_tag or self._strong or self._weak) def __str__(self): return self.to_header() def __iter__(self): return iter(self._strong) def __contains__(self, etag): return self.contains(etag) def __repr__(self): return '<%s %r>' % (self.__class__.__name__, str(self)) class IfRange(object): """Very simple object that represents the `If-Range` header in parsed form. It will either have neither a etag or date or one of either but never both. .. versionadded:: 0.7 """ def __init__(self, etag=None, date=None): #: The etag parsed and unquoted. 
Ranges always operate on strong #: etags so the weakness information is not necessary. self.etag = etag #: The date in parsed format or `None`. self.date = date def to_header(self): """Converts the object back into an HTTP header.""" if self.date is not None: return http_date(self.date) if self.etag is not None: return quote_etag(self.etag) return '' def __str__(self): return self.to_header() def __repr__(self): return '<%s %r>' % (self.__class__.__name__, str(self)) class Range(object): """Represents a range header. All the methods are only supporting bytes as unit. It does store multiple ranges but :meth:`range_for_length` will only work if only one range is provided. .. versionadded:: 0.7 """ def __init__(self, units, ranges): #: The units of this range. Usually "bytes". self.units = units #: A list of ``(begin, end)`` tuples for the range header provided. #: The ranges are non-inclusive. self.ranges = ranges def range_for_length(self, length): """If the range is for bytes, the length is not None and there is exactly one range and it is satisfiable it returns a ``(start, stop)`` tuple, otherwise `None`. """ if self.units != 'bytes' or length is None or len(self.ranges) != 1: return None start, end = self.ranges[0] if end is None: end = length if start < 0: start += length if is_byte_range_valid(start, end, length): return start, min(end, length) def make_content_range(self, length): """Creates a :class:`~werkzeug.datastructures.ContentRange` object from the current range and given content length. 
""" rng = self.range_for_length(length) if rng is not None: return ContentRange(self.units, rng[0], rng[1], length) def to_header(self): """Converts the object back into an HTTP header.""" ranges = [] for begin, end in self.ranges: if end is None: ranges.append(begin >= 0 and '%s-' % begin or str(begin)) else: ranges.append('%s-%s' % (begin, end - 1)) return '%s=%s' % (self.units, ','.join(ranges)) def __str__(self): return self.to_header() def __repr__(self): return '<%s %r>' % (self.__class__.__name__, str(self)) class ContentRange(object): """Represents the content range header. .. versionadded:: 0.7 """ def __init__(self, units, start, stop, length=None, on_update=None): assert is_byte_range_valid(start, stop, length), \ 'Bad range provided' self.on_update = on_update self.set(start, stop, length, units) def _callback_property(name): def fget(self): return getattr(self, name) def fset(self, value): setattr(self, name, value) if self.on_update is not None: self.on_update(self) return property(fget, fset) #: The units to use, usually "bytes" units = _callback_property('_units') #: The start point of the range or `None`. start = _callback_property('_start') #: The stop point of the range (non-inclusive) or `None`. Can only be #: `None` if also start is `None`. stop = _callback_property('_stop') #: The length of the range or `None`. length = _callback_property('_length') def set(self, start, stop, length=None, units='bytes'): """Simple method to update the ranges.""" assert is_byte_range_valid(start, stop, length), \ 'Bad range provided' self._units = units self._start = start self._stop = stop self._length = length if self.on_update is not None: self.on_update(self) def unset(self): """Sets the units to `None` which indicates that the header should no longer be used. 
""" self.set(None, None, units=None) def to_header(self): if self.units is None: return '' if self.length is None: length = '*' else: length = self.length if self.start is None: return '%s */%s' % (self.units, length) return '%s %s-%s/%s' % ( self.units, self.start, self.stop - 1, length ) def __nonzero__(self): return self.units is not None __bool__ = __nonzero__ def __str__(self): return self.to_header() def __repr__(self): return '<%s %r>' % (self.__class__.__name__, str(self)) class Authorization(ImmutableDictMixin, dict): """Represents an `Authorization` header sent by the client. You should not create this kind of object yourself but use it when it's returned by the `parse_authorization_header` function. This object is a dict subclass and can be altered by setting dict items but it should be considered immutable as it's returned by the client and not meant for modifications. .. versionchanged:: 0.5 This object became immutable. """ def __init__(self, auth_type, data=None): dict.__init__(self, data or {}) self.type = auth_type username = property(lambda x: x.get('username'), doc=''' The username transmitted. This is set for both basic and digest auth all the time.''') password = property(lambda x: x.get('password'), doc=''' When the authentication type is basic this is the password transmitted by the client, else `None`.''') realm = property(lambda x: x.get('realm'), doc=''' This is the server realm sent back for HTTP digest auth.''') nonce = property(lambda x: x.get('nonce'), doc=''' The nonce the server sent for digest auth, sent back by the client. A nonce should be unique for every 401 response for HTTP digest auth.''') uri = property(lambda x: x.get('uri'), doc=''' The URI from Request-URI of the Request-Line; duplicated because proxies are allowed to change the Request-Line in transit. HTTP digest auth only.''') nc = property(lambda x: x.get('nc'), doc=''' The nonce count value transmitted by clients if a qop-header is also transmitted. 
HTTP digest auth only.''') cnonce = property(lambda x: x.get('cnonce'), doc=''' If the server sent a qop-header in the ``WWW-Authenticate`` header, the client has to provide this value for HTTP digest auth. See the RFC for more details.''') response = property(lambda x: x.get('response'), doc=''' A string of 32 hex digits computed as defined in RFC 2617, which proves that the user knows a password. Digest auth only.''') opaque = property(lambda x: x.get('opaque'), doc=''' The opaque header from the server returned unchanged by the client. It is recommended that this string be base64 or hexadecimal data. Digest auth only.''') @property def qop(self): """Indicates what "quality of protection" the client has applied to the message for HTTP digest auth.""" def on_update(header_set): if not header_set and 'qop' in self: del self['qop'] elif header_set: self['qop'] = header_set.to_header() return parse_set_header(self.get('qop'), on_update) class WWWAuthenticate(UpdateDictMixin, dict): """Provides simple access to `WWW-Authenticate` headers.""" #: list of keys that require quoting in the generated header _require_quoting = frozenset(['domain', 'nonce', 'opaque', 'realm']) def __init__(self, auth_type=None, values=None, on_update=None): dict.__init__(self, values or ()) if auth_type: self['__auth_type__'] = auth_type self.on_update = on_update def set_basic(self, realm='authentication required'): """Clear the auth info and enable basic auth.""" dict.clear(self) dict.update(self, {'__auth_type__': 'basic', 'realm': realm}) if self.on_update: self.on_update(self) def set_digest(self, realm, nonce, qop=('auth',), opaque=None, algorithm=None, stale=False): """Clear the auth info and enable digest auth.""" d = { '__auth_type__': 'digest', 'realm': realm, 'nonce': nonce, 'qop': dump_header(qop) } if stale: d['stale'] = 'TRUE' if opaque is not None: d['opaque'] = opaque if algorithm is not None: d['algorithm'] = algorithm dict.clear(self) dict.update(self, d) if self.on_update: 
self.on_update(self) def to_header(self): """Convert the stored values into a WWW-Authenticate header.""" d = dict(self) auth_type = d.pop('__auth_type__', None) or 'basic' return '%s %s' % (auth_type.title(), ', '.join([ '%s=%s' % (key, quote_header_value(value, allow_token=key not in self._require_quoting)) for key, value in iteritems(d) ])) def __str__(self): return self.to_header() def __repr__(self): return '<%s %r>' % ( self.__class__.__name__, self.to_header() ) def auth_property(name, doc=None): """A static helper function for subclasses to add extra authentication system properties onto a class:: class FooAuthenticate(WWWAuthenticate): special_realm = auth_property('special_realm') For more information have a look at the sourcecode to see how the regular properties (:attr:`realm` etc.) are implemented. """ def _set_value(self, value): if value is None: self.pop(name, None) else: self[name] = str(value) return property(lambda x: x.get(name), _set_value, doc=doc) def _set_property(name, doc=None): def fget(self): def on_update(header_set): if not header_set and name in self: del self[name] elif header_set: self[name] = header_set.to_header() return parse_set_header(self.get(name), on_update) return property(fget, doc=doc) type = auth_property('__auth_type__', doc=''' The type of the auth mechanism. HTTP currently specifies `Basic` and `Digest`.''') realm = auth_property('realm', doc=''' A string to be displayed to users so they know which username and password to use. This string should contain at least the name of the host performing the authentication and might additionally indicate the collection of users who might have access.''') domain = _set_property('domain', doc=''' A list of URIs that define the protection space. 
If a URI is an absolute path, it is relative to the canonical root URL of the server being accessed.''') nonce = auth_property('nonce', doc=''' A server-specified data string which should be uniquely generated each time a 401 response is made. It is recommended that this string be base64 or hexadecimal data.''') opaque = auth_property('opaque', doc=''' A string of data, specified by the server, which should be returned by the client unchanged in the Authorization header of subsequent requests with URIs in the same protection space. It is recommended that this string be base64 or hexadecimal data.''') algorithm = auth_property('algorithm', doc=''' A string indicating a pair of algorithms used to produce the digest and a checksum. If this is not present it is assumed to be "MD5". If the algorithm is not understood, the challenge should be ignored (and a different one used, if there is more than one).''') qop = _set_property('qop', doc=''' A set of quality-of-privacy directives such as auth and auth-int.''') def _get_stale(self): val = self.get('stale') if val is not None: return val.lower() == 'true' def _set_stale(self, value): if value is None: self.pop('stale', None) else: self['stale'] = value and 'TRUE' or 'FALSE' stale = property(_get_stale, _set_stale, doc=''' A flag, indicating that the previous request from the client was rejected because the nonce value was stale.''') del _get_stale, _set_stale # make auth_property a staticmethod so that subclasses of # `WWWAuthenticate` can use it for new properties. auth_property = staticmethod(auth_property) del _set_property class FileStorage(object): """The :class:`FileStorage` class is a thin wrapper over incoming files. It is used by the request object to represent uploaded files. All the attributes of the wrapper stream are proxied by the file storage so it's possible to do ``storage.read()`` instead of the long form ``storage.stream.read()``. 
""" def __init__(self, stream=None, filename=None, name=None, content_type=None, content_length=None, headers=None): self.name = name self.stream = stream or _empty_stream # if no filename is provided we can attempt to get the filename # from the stream object passed. There we have to be careful to # skip things like <fdopen>, <stderr> etc. Python marks these # special filenames with angular brackets. if filename is None: filename = getattr(stream, 'name', None) s = make_literal_wrapper(filename) if filename and filename[0] == s('<') and filename[-1] == s('>'): filename = None # On Python 3 we want to make sure the filename is always unicode. # This might not be if the name attribute is bytes due to the # file being opened from the bytes API. if not PY2 and isinstance(filename, bytes): filename = filename.decode(sys.getfilesystemencoding(), 'replace') self.filename = filename if headers is None: headers = Headers() self.headers = headers if content_type is not None: headers['Content-Type'] = content_type if content_length is not None: headers['Content-Length'] = str(content_length) def _parse_content_type(self): if not hasattr(self, '_parsed_content_type'): self._parsed_content_type = \ parse_options_header(self.content_type) @property def content_type(self): """The content-type sent in the header. Usually not available""" return self.headers.get('content-type') @property def content_length(self): """The content-length sent in the header. Usually not available""" return int(self.headers.get('content-length') or 0) @property def mimetype(self): """Like :attr:`content_type` but without parameters (eg, without charset, type etc.). For example if the content type is ``text/html; charset=utf-8`` the mimetype would be ``'text/html'``. .. versionadded:: 0.7 """ self._parse_content_type() return self._parsed_content_type[0] @property def mimetype_params(self): """The mimetype parameters as dict. 
For example if the content type is ``text/html; charset=utf-8`` the params would be ``{'charset': 'utf-8'}``. .. versionadded:: 0.7 """ self._parse_content_type() return self._parsed_content_type[1] def save(self, dst, buffer_size=16384): """Save the file to a destination path or file object. If the destination is a file object you have to close it yourself after the call. The buffer size is the number of bytes held in memory during the copy process. It defaults to 16KB. For secure file saving also have a look at :func:`secure_filename`. :param dst: a filename or open file object the uploaded file is saved to. :param buffer_size: the size of the buffer. This works the same as the `length` parameter of :func:`shutil.copyfileobj`. """ from shutil import copyfileobj close_dst = False if isinstance(dst, string_types): dst = open(dst, 'wb') close_dst = True try: copyfileobj(self.stream, dst, buffer_size) finally: if close_dst: dst.close() def close(self): """Close the underlying file if possible.""" try: self.stream.close() except Exception: pass def __nonzero__(self): return bool(self.filename) def __getattr__(self, name): return getattr(self.stream, name) def __iter__(self): return iter(self.readline, '') def __repr__(self): return '<%s: %r (%r)>' % ( self.__class__.__name__, self.filename, self.content_type ) # circular dependencies from werkzeug.http import dump_options_header, dump_header, generate_etag, \ quote_header_value, parse_set_header, unquote_etag, quote_etag, \ parse_options_header, http_date, is_byte_range_valid from werkzeug import exceptions
a7xtony1/plugin.video.ELECTROMERIDAtv
refs/heads/master
modules/libraries/pyaes/aes.py
177
# The MIT License (MIT) # # Copyright (c) 2014 Richard Moore # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # This is a pure-Python implementation of the AES algorithm and AES common # modes of operation. # See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard # Honestly, the best description of the modes of operations are the wonderful # diagrams on Wikipedia. They explain in moments what my words could never # achieve. Hence the inline documentation here is sparer than I'd prefer. # See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation # Also useful, PyCrypto, a crypto library implemented in C with Python bindings: # https://www.dlitz.net/software/pycrypto/ # Supported key sizes: # 128-bit # 192-bit # 256-bit # Supported modes of operation: # ECB - Electronic Codebook # CBC - Cipher-Block Chaining # CFB - Cipher Feedback # OFB - Output Feedback # CTR - Counter # See the README.md for API details and general information. 
import copy import struct __all__ = ["AES", "AESModeOfOperationCTR", "AESModeOfOperationCBC", "AESModeOfOperationCFB", "AESModeOfOperationECB", "AESModeOfOperationOFB", "AESModesOfOperation", "Counter"] def _compact_word(word): return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3] def _string_to_bytes(text): return list(ord(c) for c in text) def _bytes_to_string(binary): return "".join(chr(b) for b in binary) def _concat_list(a, b): return a + b # Python 3 compatibility try: xrange except Exception: xrange = range # Python 3 supports bytes, which is already an array of integers def _string_to_bytes(text): if isinstance(text, bytes): return text return [ord(c) for c in text] # In Python 3, we return bytes def _bytes_to_string(binary): return bytes(binary) # Python 3 cannot concatenate a list onto a bytes, so we bytes-ify it first def _concat_list(a, b): return a + bytes(b) # Based *largely* on the Rijndael implementation # See: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf class AES(object): '''Encapsulates the AES block cipher. You generally should not need this. 
Use the AESModeOfOperation classes below instead.''' # Number of rounds by keysize number_of_rounds = {16: 10, 24: 12, 32: 14} # Round constant words rcon = [ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91 ] # S-box and Inverse S-box (S is for Substitution) S = [ 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 ] Si =[ 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 
0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d ] # Transformations for encryption T1 = [ 0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 
0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 
0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a ] T2 = [ 0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc, 0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 
0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616 ] T3 = [ 0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 
0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384, 0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 
0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16 ] T4 = [ 0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 
0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1, 0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c ] # Transformations for decryption T5 = [ 0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 
0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4, 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 
0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 ] T6 = [ 0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 
0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 
0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857 ] T7 = [ 0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 
0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47, 0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8 ] T8 = [ 0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 
0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 
0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0 ] # Transformations for decryption key expansion U1 = [ 0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 
0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3 ] U2 = [ 0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 0xe7038f5f, 
0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 
0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697 ] U3 = [ 0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 
0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46 ] U4 = [ 0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 
0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 
        # (tail of the U4 decryption key-expansion table)
        0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710,
        0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c,
        0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358,
        0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f,
        0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b,
        0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7,
        0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3,
        0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f,
        0x548db591, 0x4f9aa883, 0x4697a38d ]

    def __init__(self, key):
        '''Expand a 16-, 24- or 32-byte key into the encryption and
           decryption round-key schedules (fips-197 section 5.2).

           Raises ValueError for any other key length.'''

        if len(key) not in (16, 24, 32):
            raise ValueError('Invalid key size')

        # Number of rounds is determined by the key length
        # (via the class-level number_of_rounds table).
        rounds = self.number_of_rounds[len(key)]

        # Encryption round keys
        self._Ke = [[0] * 4 for i in xrange(rounds + 1)]

        # Decryption round keys
        self._Kd = [[0] * 4 for i in xrange(rounds + 1)]

        round_key_count = (rounds + 1) * 4

        # KC is the key length in 32-bit words
        KC = len(key) // 4

        # Convert the key into ints (big-endian 32-bit words)
        tk = [ struct.unpack('>i', key[i:i + 4])[0] for i in xrange(0, len(key), 4) ]

        # Copy values into round key arrays
        # (note _Kd is filled in reverse round order)
        for i in xrange(0, KC):
            self._Ke[i // 4][i % 4] = tk[i]
            self._Kd[rounds - (i // 4)][i % 4] = tk[i]

        # Key expansion (fips-197 section 5.2)
        rconpointer = 0
        t = KC
        while t < round_key_count:

            # Rotate the previous word, substitute through the S-box and
            # mix in the round constant.
            tt = tk[KC - 1]
            tk[0] ^= ((self.S[(tt >> 16) & 0xFF] << 24) ^
                      (self.S[(tt >>  8) & 0xFF] << 16) ^
                      (self.S[ tt        & 0xFF] <<  8) ^
                       self.S[(tt >> 24) & 0xFF]        ^
                      (self.rcon[rconpointer] << 24))
            rconpointer += 1

            if KC != 8:
                for i in xrange(1, KC):
                    tk[i] ^= tk[i - 1]

            # Key expansion for 256-bit keys is "slightly different" (fips-197):
            # an extra S-box substitution happens at the midpoint.
            else:
                for i in xrange(1, KC // 2):
                    tk[i] ^= tk[i - 1]
                tt = tk[KC // 2 - 1]

                tk[KC // 2] ^= (self.S[ tt        & 0xFF]        ^
                               (self.S[(tt >>  8) & 0xFF] <<  8) ^
                               (self.S[(tt >> 16) & 0xFF] << 16) ^
                               (self.S[(tt >> 24) & 0xFF] << 24))

                for i in xrange(KC // 2 + 1, KC):
                    tk[i] ^= tk[i - 1]

            # Copy values into round key arrays
            j = 0
            while j < KC and t < round_key_count:
                self._Ke[t // 4][t % 4] = tk[j]
                self._Kd[rounds - (t // 4)][t % 4] = tk[j]
                j += 1
                t += 1

        # Inverse-Cipher-ify the decryption round key (fips-197 section 5.3):
        # every round key except the first and last is passed through the
        # U1..U4 decryption key-expansion tables.
        for r in xrange(1, rounds):
            for j in xrange(0, 4):
                tt = self._Kd[r][j]
                self._Kd[r][j] = (self.U1[(tt >> 24) & 0xFF] ^
                                  self.U2[(tt >> 16) & 0xFF] ^
                                  self.U3[(tt >>  8) & 0xFF] ^
                                  self.U4[ tt        & 0xFF])

    def encrypt(self, plaintext):
        'Encrypt a block of plain text using the AES block cipher.'

        # Exactly one 16-byte block; padding/chaining is the job of the
        # mode-of-operation classes below.
        if len(plaintext) != 16:
            raise ValueError('wrong block length')

        rounds = len(self._Ke) - 1
        (s1, s2, s3) = [1, 2, 3]

        a = [0, 0, 0, 0]

        # Convert plaintext to (ints ^ key) -- initial AddRoundKey
        t = [(_compact_word(plaintext[4 * i:4 * i + 4]) ^ self._Ke[0][i]) for i in xrange(0, 4)]

        # Apply round transforms (table-driven: T1..T4 combine the round's
        # byte substitution and mixing into four lookups per word)
        for r in xrange(1, rounds):
            for i in xrange(0, 4):
                a[i] = (self.T1[(t[ i          ] >> 24) & 0xFF] ^
                        self.T2[(t[(i + s1) % 4] >> 16) & 0xFF] ^
                        self.T3[(t[(i + s2) % 4] >>  8) & 0xFF] ^
                        self.T4[ t[(i + s3) % 4]        & 0xFF] ^
                        self._Ke[r][i])
            t = copy.copy(a)

        # The last round is special: plain S-box substitution plus the
        # final round key, emitted a byte at a time.
        result = [ ]
        for i in xrange(0, 4):
            tt = self._Ke[rounds][i]
            result.append((self.S[(t[ i          ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((self.S[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((self.S[(t[(i + s2) % 4] >>  8) & 0xFF] ^ (tt >>  8)) & 0xFF)
            result.append((self.S[ t[(i + s3) % 4]        & 0xFF] ^  tt       ) & 0xFF)

        # Returns a list of 16 byte values (ints), not a string.
        return result

    def decrypt(self, ciphertext):
        'Decrypt a block of cipher text using the AES block cipher.'
        # Exactly one 16-byte block, mirroring encrypt() above.
        if len(ciphertext) != 16:
            raise ValueError('wrong block length')

        rounds = len(self._Kd) - 1

        # Note the rotation offsets are reversed relative to encrypt()
        (s1, s2, s3) = [3, 2, 1]

        a = [0, 0, 0, 0]

        # Convert ciphertext to (ints ^ key) -- initial AddRoundKey with
        # the (already inverse-transformed) decryption schedule
        t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in xrange(0, 4)]

        # Apply round transforms (T5..T8 are the decryption tables)
        for r in xrange(1, rounds):
            for i in xrange(0, 4):
                a[i] = (self.T5[(t[ i          ] >> 24) & 0xFF] ^
                        self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^
                        self.T7[(t[(i + s2) % 4] >>  8) & 0xFF] ^
                        self.T8[ t[(i + s3) % 4]        & 0xFF] ^
                        self._Kd[r][i])
            t = copy.copy(a)

        # The last round is special: inverse S-box (Si) plus the final
        # round key, emitted a byte at a time.
        result = [ ]
        for i in xrange(0, 4):
            tt = self._Kd[rounds][i]
            result.append((self.Si[(t[ i          ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((self.Si[(t[(i + s2) % 4] >>  8) & 0xFF] ^ (tt >>  8)) & 0xFF)
            result.append((self.Si[ t[(i + s3) % 4]        & 0xFF] ^  tt       ) & 0xFF)

        # Returns a list of 16 byte values (ints), not a string.
        return result


class Counter(object):
    '''A counter object for the Counter (CTR) mode of operation.

       To create a custom counter, you can usually just override the
       increment method.'''

    def __init__(self, initial_value = 1):

        # Convert the value into an array of bytes long
        # (16 bytes, big-endian -- one AES block).
        self._counter = [ ((initial_value >> i) % 256) for i in xrange(128 - 8, -1, -8) ]

    # NOTE: this returns the internal list itself, not a copy.
    value = property(lambda s: s._counter)

    def increment(self):
        '''Increment the counter (overflow rolls back to 0).'''

        # Add one to the least-significant byte, rippling the carry
        # toward the most-significant byte.
        for i in xrange(len(self._counter) - 1, -1, -1):
            self._counter[i] += 1

            if self._counter[i] < 256: break

            # Carry the one
            self._counter[i] = 0

        # for-else: every byte carried, i.e. the counter overflowed
        else:
            # Overflow: wrap the entire counter back to zero
            self._counter = [ 0 ] * len(self._counter)


class AESBlockModeOfOperation(object):
    '''Super-class for AES modes of operation that require blocks.

       Subclasses must override encrypt and decrypt.'''

    def __init__(self, key):
        self._aes = AES(key)

    def decrypt(self, ciphertext):
        raise Exception('not implemented')

    def encrypt(self, plaintext):
        raise Exception('not implemented')


class AESStreamModeOfOperation(AESBlockModeOfOperation):
    '''Super-class for AES modes of operation that are stream-ciphers.'''

class AESSegmentModeOfOperation(AESStreamModeOfOperation):
    '''Super-class for AES modes of operation that segment data.'''

    # Default segment size in bytes (one AES block)
    segment_bytes = 16


class AESModeOfOperationECB(AESBlockModeOfOperation):
    '''AES Electronic Codebook Mode of Operation.

       o Block-cipher, so data must be padded to 16 byte boundaries

    Security Notes:
       o This mode is not recommended
       o Any two identical blocks produce identical encrypted values,
         exposing data patterns. (See the image of Tux on wikipedia)

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Electronic_codebook_.28ECB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.1'''


    name = "Electronic Codebook (ECB)"

    def encrypt(self, plaintext):
        # ECB: every 16-byte block is encrypted independently.
        if len(plaintext) != 16:
            raise ValueError('plaintext block must be 16 bytes')

        plaintext = _string_to_bytes(plaintext)
        return _bytes_to_string(self._aes.encrypt(plaintext))

    def decrypt(self, ciphertext):
        if len(ciphertext) != 16:
            raise ValueError('ciphertext block must be 16 bytes')

        ciphertext = _string_to_bytes(ciphertext)
        return _bytes_to_string(self._aes.decrypt(ciphertext))


class AESModeOfOperationCBC(AESBlockModeOfOperation):
    '''AES Cipher-Block Chaining Mode of Operation.

       o The Initialization Vector (IV)
       o Block-cipher, so data must be padded to 16 byte boundaries
       o An incorrect initialization vector will only cause the first
         block to be corrupt; all other blocks will be intact
       o A corrupt bit in the cipher text will cause a block to be
         corrupted, and the next block to be inverted, but all other
         blocks will be intact.

    Security Notes:
       o This method (and CTR) ARE recommended.
Also see: o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher-block_chaining_.28CBC.29 o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.2''' name = "Cipher-Block Chaining (CBC)" def __init__(self, key, iv = None): if iv is None: self._last_cipherblock = [ 0 ] * 16 elif len(iv) != 16: raise ValueError('initialization vector must be 16 bytes') else: self._last_cipherblock = _string_to_bytes(iv) AESBlockModeOfOperation.__init__(self, key) def encrypt(self, plaintext): if len(plaintext) != 16: raise ValueError('plaintext block must be 16 bytes') plaintext = _string_to_bytes(plaintext) precipherblock = [ (p ^ l) for (p, l) in zip(plaintext, self._last_cipherblock) ] self._last_cipherblock = self._aes.encrypt(precipherblock) return _bytes_to_string(self._last_cipherblock) def decrypt(self, ciphertext): if len(ciphertext) != 16: raise ValueError('ciphertext block must be 16 bytes') cipherblock = _string_to_bytes(ciphertext) plaintext = [ (p ^ l) for (p, l) in zip(self._aes.decrypt(cipherblock), self._last_cipherblock) ] self._last_cipherblock = cipherblock return _bytes_to_string(plaintext) class AESModeOfOperationCFB(AESSegmentModeOfOperation): '''AES Cipher Feedback Mode of Operation. 
       o A stream-cipher, so input does not need to be padded to blocks,
         but does need to be padded to segment_size

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_feedback_.28CFB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.3'''


    name = "Cipher Feedback (CFB)"

    def __init__(self, key, iv, segment_size = 1):
        # A zero segment size makes no sense; fall back to 1 byte.
        if segment_size == 0: segment_size = 1

        # A missing IV is treated as all zeros; an explicit IV must be
        # exactly one block (16 bytes).
        if iv is None:
            self._shift_register = [ 0 ] * 16

        elif len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')

        else:
            self._shift_register = _string_to_bytes(iv)

        self._segment_bytes = segment_size

        AESBlockModeOfOperation.__init__(self, key)

    segment_bytes = property(lambda s: s._segment_bytes)

    def encrypt(self, plaintext):
        if len(plaintext) % self._segment_bytes != 0:
            raise ValueError('plaintext block must be a multiple of segment_size')

        plaintext = _string_to_bytes(plaintext)

        # Break block into segments
        encrypted = [ ]
        for i in xrange(0, len(plaintext), self._segment_bytes):
            plaintext_segment = plaintext[i: i + self._segment_bytes]

            # Keystream = encryption of the shift register, truncated to
            # the segment length.
            xor_segment = self._aes.encrypt(self._shift_register)[:len(plaintext_segment)]
            cipher_segment = [ (p ^ x) for (p, x) in zip(plaintext_segment, xor_segment) ]

            # Shift the top bits out and the ciphertext in
            self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment)

            encrypted.extend(cipher_segment)

        return _bytes_to_string(encrypted)

    def decrypt(self, ciphertext):
        if len(ciphertext) % self._segment_bytes != 0:
            raise ValueError('ciphertext block must be a multiple of segment_size')

        ciphertext = _string_to_bytes(ciphertext)

        # Break block into segments
        decrypted = [ ]
        for i in xrange(0, len(ciphertext), self._segment_bytes):
            cipher_segment = ciphertext[i: i + self._segment_bytes]

            # Decryption also uses the block cipher's *encrypt* direction;
            # only the feedback value differs from encrypt() above.
            xor_segment = self._aes.encrypt(self._shift_register)[:len(cipher_segment)]
            plaintext_segment = [ (p ^ x) for (p, x) in zip(cipher_segment, xor_segment) ]

            # Shift the top bits out and the ciphertext in
            self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment)

            decrypted.extend(plaintext_segment)

        return _bytes_to_string(decrypted)


class AESModeOfOperationOFB(AESStreamModeOfOperation):
    '''AES Output Feedback Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         allowing arbitrary length data.
       o A bit twiddled in the cipher text, twiddles the same bit in the
         plain text, which can be useful for error correction techniques.

    Also see:
       o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Output_feedback_.28OFB.29
       o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.4'''


    name = "Output Feedback (OFB)"

    def __init__(self, key, iv = None):
        # A missing IV is treated as all zeros; an explicit IV must be
        # exactly one block (16 bytes).
        if iv is None:
            self._last_precipherblock = [ 0 ] * 16

        elif len(iv) != 16:
            raise ValueError('initialization vector must be 16 bytes')

        else:
            self._last_precipherblock = _string_to_bytes(iv)

        # Unused keystream bytes carried over between calls
        self._remaining_block = [ ]

        AESBlockModeOfOperation.__init__(self, key)

    def encrypt(self, plaintext):
        encrypted = [ ]
        for p in _string_to_bytes(plaintext):
            # Refill the keystream one block at a time; the freshly
            # generated block also becomes the next feedback input.
            if len(self._remaining_block) == 0:
                self._remaining_block = self._aes.encrypt(self._last_precipherblock)
                self._last_precipherblock = [ ]
            precipherbyte = self._remaining_block.pop(0)
            self._last_precipherblock.append(precipherbyte)
            cipherbyte = p ^ precipherbyte
            encrypted.append(cipherbyte)

        return _bytes_to_string(encrypted)

    def decrypt(self, ciphertext):
        # AES-OFB is symmetric
        return self.encrypt(ciphertext)


class AESModeOfOperationCTR(AESStreamModeOfOperation):
    '''AES Counter Mode of Operation.

       o A stream-cipher, so input does not need to be padded to blocks,
         allowing arbitrary length data.
       o The counter must be the same size as the cipher block size,
         i.e. 16 bytes (the Counter class above already is)
       o Each block independent of the other, so a corrupt byte will not
         damage future blocks.
o Each block has a uniue counter value associated with it, which contributes to the encrypted value, so no data patterns are leaked. o Also known as: Counter Mode (CM), Integer Counter Mode (ICM) and Segmented Integer Counter (SIC Security Notes: o This method (and CBC) ARE recommended. o Each message block is associated with a counter value which must be unique for ALL messages with the same key. Otherwise security may be compromised. Also see: o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Counter_.28CTR.29 o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.5 and Appendix B for managing the initial counter''' name = "Counter (CTR)" def __init__(self, key, counter = None): AESBlockModeOfOperation.__init__(self, key) if counter is None: counter = Counter() self._counter = counter self._remaining_counter = [ ] def encrypt(self, plaintext): while len(self._remaining_counter) < len(plaintext): self._remaining_counter += self._aes.encrypt(self._counter.value) self._counter.increment() plaintext = _string_to_bytes(plaintext) encrypted = [ (p ^ c) for (p, c) in zip(plaintext, self._remaining_counter) ] self._remaining_counter = self._remaining_counter[len(encrypted):] return _bytes_to_string(encrypted) def decrypt(self, crypttext): # AES-CTR is symetric return self.encrypt(crypttext) # Simple lookup table for each mode AESModesOfOperation = dict( ctr = AESModeOfOperationCTR, cbc = AESModeOfOperationCBC, cfb = AESModeOfOperationCFB, ecb = AESModeOfOperationECB, ofb = AESModeOfOperationOFB, )
puuu/micropython
refs/heads/master
tests/basics/op_error_memoryview.py
4
# test errors from bad operations (unary, binary, etc) try: memoryview except: import sys print("SKIP") sys.exit() def test_exc(code, exc): try: exec(code) print("no exception") except exc: print("right exception") except: print("wrong exception") # unsupported binary operators test_exc("m = memoryview(bytearray())\nm += bytearray()", TypeError)
priyankarani/nereid-checkout
refs/heads/develop
setup.py
1
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup script for the trytond nereid_checkout module.

Reads the module metadata from tryton.cfg and derives version-pinned
dependencies from the Tryton `depends` list.
"""
import time
import sys
import re
import os
import ConfigParser
import unittest

from setuptools import setup, Command


def read(fname):
    """Return the contents of *fname*, resolved relative to this file."""
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


class _DatabaseTestCommand(Command):
    """Shared plumbing for the database-backed test commands.

    Subclasses set ``database_uri`` and implement ``get_db_name()``; this
    base class fetches the test requirements, points trytond at the chosen
    database via environment variables, runs the suite, and exits with a
    non-zero status on failure.
    """
    user_options = []
    database_uri = None

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def get_db_name(self):
        """Return the database name to export as DB_NAME."""
        raise NotImplementedError

    def run(self):
        if self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)

        # trytond picks the backend and database from these variables;
        # they must be set before `tests` (and thus trytond) is imported.
        os.environ['TRYTOND_DATABASE_URI'] = self.database_uri
        os.environ['DB_NAME'] = self.get_db_name()

        from tests import suite
        test_result = unittest.TextTestRunner(verbosity=3).run(suite())

        if test_result.wasSuccessful():
            sys.exit(0)
        sys.exit(-1)


class SQLiteTest(_DatabaseTestCommand):
    """
    Run the tests on SQLite
    """
    description = "Run tests on SQLite"
    database_uri = 'sqlite://'

    def get_db_name(self):
        # SQLite tests run entirely in memory.
        return ':memory:'


class PostgresTest(_DatabaseTestCommand):
    """
    Run the tests on Postgres.
    """
    description = "Run tests on Postgresql"
    database_uri = 'postgresql://'

    def get_db_name(self):
        # Timestamped name so repeated runs get a fresh database.
        return 'test_' + str(int(time.time()))


config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
    if key in info:
        info[key] = info[key].strip().splitlines()

# Tryton modules are pinned to the same major.minor series as this module.
major_version, minor_version, _ = info.get('version', '0.0.1').split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)

requires = []

MODULE = 'nereid_checkout'
PREFIX = 'trytond'
# Some dependencies are published under a vendor prefix rather than
# the default 'trytond' namespace.
MODULE2PREFIX = {
    'nereid_payment_gateway': 'fio',
    'sale_payment_gateway': 'fio',
    'email_queue': 'openlabs',
    'sale_confirmation_email': 'openlabs',
}

for dep in info.get('depends', []):
    # ir/res/webdav ship with trytond itself and need no extra requirement.
    if not re.match(r'(ir|res|webdav)(\W|$)', dep):
        requires.append(
            '%s_%s >= %s.%s, < %s.%s' % (
                MODULE2PREFIX.get(dep, 'trytond'), dep,
                major_version, minor_version, major_version,
                minor_version + 1
            )
        )
requires.append(
    'trytond >= %s.%s, < %s.%s' % (
        major_version, minor_version, major_version, minor_version + 1
    )
)

setup(
    name='%s_%s' % (PREFIX, MODULE),
    version=info.get('version', '0.0.1'),
    # NOTE(review): description looks copy-pasted from a payment gateway
    # module even though this package is nereid_checkout — confirm intent.
    description="Tryton module for Payment Gateway/Merchant Integration",
    author="Fulfil.IO Inc.",
    author_email='support@fulfil.io',
    url='https://www.fulfil.io',
    package_dir={'trytond.modules.%s' % MODULE: '.'},
    packages=[
        'trytond.modules.%s' % MODULE,
        'trytond.modules.%s.tests' % MODULE,
    ],
    package_data={
        'trytond.modules.%s' % MODULE: info.get('xml', [])
        + info.get('translation', [])
        + ['tryton.cfg', 'locale/*.po', 'tests/*.rst', '*.odt']
        + ['view/*.xml'],
    },
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Plugins',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Tryton',
        'Topic :: Office/Business',
    ],
    license='BSD',
    install_requires=requires,
    extras_require={
        'docs': ['sphinx', 'sphinx_rtd_theme'],
    },
    zip_safe=False,
    entry_points="""
    [trytond.modules]
    %s = trytond.modules.%s
    """ % (MODULE, MODULE),
    test_suite='tests',
    test_loader='trytond.test_loader:Loader',
    tests_require=[
        'fio_payment_gateway_authorize_net >= %s.%s, < %s.%s' % (
            major_version, minor_version, major_version,
            minor_version + 1
        ),
        'pycountry',
        'mock',
    ],
    cmdclass={
        'test': SQLiteTest,
        'test_on_postgres': PostgresTest,
    },
)
CCI-MOC/nova
refs/heads/k2k-liberty
nova/image/download/base.py
118
# Copyright 2013 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova import exception class TransferBase(object): def download(self, context, url_parts, dst_path, metadata, **kwargs): raise exception.ImageDownloadModuleNotImplementedError( method_name='download')
boyombo/django-stations
refs/heads/master
stations/insure/tests.py
24123
from django.test import TestCase # Create your tests here.
jainayush975/zulip
refs/heads/master
zerver/tests/test_push_notifications.py
9
"""Tests for mobile push notifications (APNs and GCM).

Exercises ``zerver.lib.push_notifications`` with the external layers —
the APNs gateway/feedback connections, the GCM HTTP API and redis —
replaced by mocks.
"""
import mock
from mock import call

import time
from typing import Any, Dict, Union, SupportsInt, Text

import gcm

from django.test import TestCase
from django.conf import settings

from zerver.models import PushDeviceToken, UserProfile, Message
from zerver.models import get_user_profile_by_email, receives_online_notifications, \
    receives_offline_notifications
from zerver.lib import push_notifications as apn
from zerver.lib.test_classes import (
    ZulipTestCase,
)

class MockRedis(object):
    # Minimal in-memory stand-in for the redis client; implements only
    # the methods the push-notification code calls.
    # NOTE(review): `data` is a class attribute, so state is shared across
    # instances; tearDown below deletes the keys used by the tests.
    data = {}  # type: Dict[str, Any]

    def hgetall(self, key):
        # type: (str) -> Any
        return self.data.get(key)

    def exists(self, key):
        # type: (str) -> bool
        return key in self.data

    def hmset(self, key, data):
        # type: (str, Dict[Any, Any]) -> None
        self.data[key] = data

    def delete(self, key):
        # type: (str) -> None
        if self.exists(key):
            del self.data[key]

    def expire(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        pass

class PushNotificationTest(TestCase):
    # Shared fixture: fake APNs connections, a MockRedis, and two APNs
    # device tokens registered for Hamlet.
    def setUp(self):
        # type: () -> None
        email = 'hamlet@zulip.com'
        apn.connection = apn.get_connection('fake-cert', 'fake-key')
        self.redis_client = apn.redis_client = MockRedis()  # type: ignore
        apn.dbx_connection = apn.get_connection('fake-cert', 'fake-key')
        self.user_profile = get_user_profile_by_email(email)
        self.tokens = [u'aaaa', u'bbbb']
        for token in self.tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.APNS,
                token=apn.hex_to_b64(token),
                user=self.user_profile,
                ios_app_id=settings.ZULIP_IOS_APP_ID)

    def tearDown(self):
        # type: () -> None
        # Clean up the redis keys the APNsMessage tests create (the mocked
        # random.getrandbits yields identifiers 100 and 200).
        for i in [100, 200]:
            self.redis_client.delete(apn.get_apns_key(i))

class APNsMessageTest(PushNotificationTest):
    @mock.patch('random.getrandbits', side_effect=[100, 200])
    def test_apns_message(self, mock_getrandbits):
        # type: (mock.MagicMock) -> None
        apn.APNsMessage(self.user_profile.id, self.tokens, alert="test")
        data = self.redis_client.hgetall(apn.get_apns_key(100))
        self.assertEqual(data['token'], 'aaaa')
        self.assertEqual(int(data['user_id']), self.user_profile.id)
        data = self.redis_client.hgetall(apn.get_apns_key(200))
        self.assertEqual(data['token'], 'bbbb')
        self.assertEqual(int(data['user_id']), self.user_profile.id)

class ResponseListenerTest(PushNotificationTest):
    def get_error_response(self, **kwargs):
        # type: (**Any) -> Dict[str, SupportsInt]
        er = {'identifier': 0, 'status': 0}  # type: Dict[str, SupportsInt]
        er.update({k: v for k, v in kwargs.items() if k in er})
        return er

    def get_cache_value(self):
        # type: () -> Dict[str, Union[str, int]]
        return {'token': 'aaaa', 'user_id': self.user_profile.id}

    @mock.patch('logging.warn')
    def test_cache_does_not_exist(self, mock_warn):
        # type: (mock.MagicMock) -> None
        err_rsp = self.get_error_response(identifier=100, status=1)
        apn.response_listener(err_rsp)
        msg = "APNs key, apns:100, doesn't not exist."
        mock_warn.assert_called_once_with(msg)

    @mock.patch('logging.warn')
    def test_cache_exists(self, mock_warn):
        # type: (mock.MagicMock) -> None
        self.redis_client.hmset(apn.get_apns_key(100), self.get_cache_value())
        err_rsp = self.get_error_response(identifier=100, status=1)
        apn.response_listener(err_rsp)
        b64_token = apn.hex_to_b64('aaaa')
        errmsg = apn.ERROR_CODES[int(err_rsp['status'])]
        msg = ("APNS: Failed to deliver APNS notification to %s, "
               "reason: %s" % (b64_token, errmsg))
        mock_warn.assert_called_once_with(msg)

    @mock.patch('logging.warn')
    def test_error_code_eight(self, mock_warn):
        # type: (mock.MagicMock) -> None
        # Status 8 ("invalid token") should delete the device token.
        self.redis_client.hmset(apn.get_apns_key(100), self.get_cache_value())
        err_rsp = self.get_error_response(identifier=100, status=8)
        b64_token = apn.hex_to_b64('aaaa')
        self.assertEqual(PushDeviceToken.objects.filter(
            user=self.user_profile, token=b64_token).count(), 1)
        apn.response_listener(err_rsp)
        self.assertEqual(mock_warn.call_count, 2)
        self.assertEqual(PushDeviceToken.objects.filter(
            user=self.user_profile, token=b64_token).count(), 0)

class TestPushApi(ZulipTestCase):
    def test_push_api(self):
        # type: () -> None
        email = "cordelia@zulip.com"
        user = get_user_profile_by_email(email)
        self.login(email)

        endpoints = [
            ('/json/users/me/apns_device_token', 'apple-token'),
            ('/json/users/me/android_gcm_reg_id', 'android-token'),
        ]

        # Test error handling
        for endpoint, _ in endpoints:
            # Try adding/removing tokens that are too big...
            broken_token = "x" * 5000  # too big
            result = self.client_post(endpoint, {'token': broken_token})
            self.assert_json_error(result, 'Empty or invalid length token')

            result = self.client_delete(endpoint, {'token': broken_token})
            self.assert_json_error(result, 'Empty or invalid length token')

            # Try to remove a non-existent token...
            result = self.client_delete(endpoint, {'token': 'non-existent token'})
            self.assert_json_error(result, 'Token does not exist')

        # Add tokens
        for endpoint, token in endpoints:
            # Test that we can push twice
            result = self.client_post(endpoint, {'token': token})
            self.assert_json_success(result)

            result = self.client_post(endpoint, {'token': token})
            self.assert_json_success(result)

            tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assertEqual(len(tokens), 1)
            self.assertEqual(tokens[0].token, token)

        # User should have tokens for both devices now.
        tokens = list(PushDeviceToken.objects.filter(user=user))
        self.assertEqual(len(tokens), 2)

        # Remove tokens
        for endpoint, token in endpoints:
            result = self.client_delete(endpoint, {'token': token})
            self.assert_json_success(result)
            tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assertEqual(len(tokens), 0)

class SendNotificationTest(PushNotificationTest):
    @mock.patch('logging.warn')
    @mock.patch('logging.info')
    @mock.patch('zerver.lib.push_notifications._do_push_to_apns_service')
    def test_send_apple_push_notifiction(self, mock_send, mock_info, mock_warn):
        # type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
        def test_send(user_id, message, alert):
            # type: (int, Message, str) -> None
            self.assertEqual(user_id, self.user_profile.id)
            self.assertEqual(set(message.tokens), set(self.tokens))

        mock_send.side_effect = test_send
        apn.send_apple_push_notification_to_user(self.user_profile, "test alert")
        self.assertEqual(mock_send.call_count, 1)

    @mock.patch('apns.GatewayConnection.send_notification_multiple')
    def test_do_push_to_apns_service(self, mock_push):
        # type: (mock.MagicMock) -> None
        msg = apn.APNsMessage(self.user_profile.id, self.tokens, alert="test")
        def test_push(message):
            # type: (Message) -> None
            self.assertIs(message, msg.get_frame())

        mock_push.side_effect = test_push
        apn._do_push_to_apns_service(self.user_profile.id, msg, apn.connection)

    @mock.patch('logging.warn')
    @mock.patch('logging.info')
    @mock.patch('apns.GatewayConnection.send_notification_multiple')
    def test_connection_single_none(self, mock_push, mock_info, mock_warn):
        # type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
        # With only `connection` unset, sending should not blow up.
        apn.connection = None
        apn.send_apple_push_notification_to_user(self.user_profile, "test alert")

    @mock.patch('logging.error')
    @mock.patch('apns.GatewayConnection.send_notification_multiple')
    def test_connection_both_none(self, mock_push, mock_error):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        apn.connection = None
        apn.dbx_connection = None
        apn.send_apple_push_notification_to_user(self.user_profile, "test alert")

class APNsFeedbackTest(PushNotificationTest):
    @mock.patch('logging.info')
    @mock.patch('apns.FeedbackConnection.items')
    def test_feedback(self, mock_items, mock_info):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        # Tokens reported by the APNs feedback service after their last
        # update should be removed; one of the two fixture tokens goes.
        update_time = apn.timestamp_to_datetime(int(time.time()) - 10000)
        PushDeviceToken.objects.all().update(last_updated=update_time)
        mock_items.return_value = [
            ('aaaa', int(time.time())),
        ]
        self.assertEqual(PushDeviceToken.objects.all().count(), 2)
        apn.check_apns_feedback()
        self.assertEqual(PushDeviceToken.objects.all().count(), 1)

class GCMTest(PushNotificationTest):
    # Extends the APNs fixture with a fake GCM client and two GCM tokens.
    def setUp(self):
        # type: () -> None
        super(GCMTest, self).setUp()
        apn.gcm = gcm.GCM('fake key')
        self.gcm_tokens = [u'1111', u'2222']
        for token in self.gcm_tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.GCM,
                token=apn.hex_to_b64(token),
                user=self.user_profile,
                ios_app_id=None)

    def get_gcm_data(self, **kwargs):
        # type: (**Any) -> Dict[str, Any]
        data = {
            'key 1': 'Data 1',
            'key 2': 'Data 2',
        }
        data.update(kwargs)
        return data

class GCMNotSetTest(GCMTest):
    @mock.patch('logging.error')
    def test_gcm_is_none(self, mock_error):
        # type: (mock.MagicMock) -> None
        apn.gcm = None
        apn.send_android_push_notification_to_user(self.user_profile, {})
        mock_error.assert_called_with("Attempting to send a GCM push "
                                      "notification, but no API key was "
                                      "configured")

class GCMSuccessTest(GCMTest):
    @mock.patch('logging.warning')
    @mock.patch('logging.info')
    @mock.patch('gcm.GCM.json_request')
    def test_success(self, mock_send, mock_info, mock_warning):
        # type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
        res = {}
        res['success'] = {token: ind for ind, token in enumerate(self.gcm_tokens)}
        mock_send.return_value = res

        data = self.get_gcm_data()
        apn.send_android_push_notification_to_user(self.user_profile, data)
        self.assertEqual(mock_info.call_count, 2)
        c1 = call("GCM: Sent 1111 as 0")
        c2 = call("GCM: Sent 2222 as 1")
        mock_info.assert_has_calls([c1, c2], any_order=True)
        mock_warning.assert_not_called()

class GCMCanonicalTest(GCMTest):
    @mock.patch('logging.warning')
    @mock.patch('gcm.GCM.json_request')
    def test_equal(self, mock_send, mock_warning):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        res = {}
        res['canonical'] = {1: 1}
        mock_send.return_value = res

        data = self.get_gcm_data()
        apn.send_android_push_notification_to_user(self.user_profile, data)
        mock_warning.assert_called_once_with("GCM: Got canonical ref but it "
                                             "already matches our ID 1!")

    @mock.patch('logging.warning')
    @mock.patch('gcm.GCM.json_request')
    def test_pushdevice_not_present(self, mock_send, mock_warning):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        # Canonical ref points at a token we never registered: the old
        # token's row should be rewritten to the new token.
        res = {}
        t1 = apn.hex_to_b64(u'1111')
        t2 = apn.hex_to_b64(u'3333')
        res['canonical'] = {t1: t2}
        mock_send.return_value = res

        def get_count(hex_token):
            # type: (Text) -> int
            token = apn.hex_to_b64(hex_token)
            return PushDeviceToken.objects.filter(
                token=token, kind=PushDeviceToken.GCM).count()

        self.assertEqual(get_count(u'1111'), 1)
        self.assertEqual(get_count(u'3333'), 0)

        data = self.get_gcm_data()
        apn.send_android_push_notification_to_user(self.user_profile, data)
        msg = ("GCM: Got canonical ref %s "
               "replacing %s but new ID not "
               "registered! Updating.")
        mock_warning.assert_called_once_with(msg % (t2, t1))

        self.assertEqual(get_count(u'1111'), 0)
        self.assertEqual(get_count(u'3333'), 1)

    @mock.patch('logging.info')
    @mock.patch('gcm.GCM.json_request')
    def test_pushdevice_different(self, mock_send, mock_info):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        # Canonical ref points at a token that is already registered: the
        # superseded token is simply dropped.
        res = {}
        old_token = apn.hex_to_b64(u'1111')
        new_token = apn.hex_to_b64(u'2222')
        res['canonical'] = {old_token: new_token}
        mock_send.return_value = res

        def get_count(hex_token):
            # type: (Text) -> int
            token = apn.hex_to_b64(hex_token)
            return PushDeviceToken.objects.filter(
                token=token, kind=PushDeviceToken.GCM).count()

        self.assertEqual(get_count(u'1111'), 1)
        self.assertEqual(get_count(u'2222'), 1)

        data = self.get_gcm_data()
        apn.send_android_push_notification_to_user(self.user_profile, data)
        mock_info.assert_called_once_with(
            "GCM: Got canonical ref %s, dropping %s" % (new_token, old_token))

        self.assertEqual(get_count(u'1111'), 0)
        self.assertEqual(get_count(u'2222'), 1)

class GCMNotRegisteredTest(GCMTest):
    @mock.patch('logging.info')
    @mock.patch('gcm.GCM.json_request')
    def test_not_registered(self, mock_send, mock_info):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        res = {}
        token = apn.hex_to_b64(u'1111')
        res['errors'] = {'NotRegistered': [token]}
        mock_send.return_value = res

        def get_count(hex_token):
            # type: (Text) -> int
            token = apn.hex_to_b64(hex_token)
            return PushDeviceToken.objects.filter(
                token=token, kind=PushDeviceToken.GCM).count()

        self.assertEqual(get_count(u'1111'), 1)

        data = self.get_gcm_data()
        apn.send_android_push_notification_to_user(self.user_profile, data)
        mock_info.assert_called_once_with("GCM: Removing %s" % (token,))

        self.assertEqual(get_count(u'1111'), 0)

class GCMFailureTest(GCMTest):
    @mock.patch('logging.warning')
    @mock.patch('gcm.GCM.json_request')
    def test_failure(self, mock_send, mock_warn):
        # type: (mock.MagicMock, mock.MagicMock) -> None
        res = {}
        token = apn.hex_to_b64(u'1111')
        res['errors'] = {'Failed': [token]}
        mock_send.return_value = res

        data = self.get_gcm_data()
        apn.send_android_push_notification_to_user(self.user_profile, data)
        c1 = call("GCM: Delivery to %s failed: Failed" % (token,))
        mock_warn.assert_has_calls([c1], any_order=True)

class TestReceivesNotificationsFunctions(ZulipTestCase):
    def setUp(self):
        # type: () -> None
        email = "cordelia@zulip.com"
        self.user = get_user_profile_by_email(email)

    def test_receivers_online_notifications_when_user_is_a_bot(self):
        # type: () -> None
        # Bots never receive notifications, regardless of settings.
        self.user.is_bot = True

        self.user.enable_online_push_notifications = True
        self.assertFalse(receives_online_notifications(self.user))

        self.user.enable_online_push_notifications = False
        self.assertFalse(receives_online_notifications(self.user))

    def test_receivers_online_notifications_when_user_is_not_a_bot(self):
        # type: () -> None
        self.user.is_bot = False

        self.user.enable_online_push_notifications = True
        self.assertTrue(receives_online_notifications(self.user))

        self.user.enable_online_push_notifications = False
        self.assertFalse(receives_online_notifications(self.user))

    def test_receivers_offline_notifications_when_user_is_a_bot(self):
        # type: () -> None
        self.user.is_bot = True

        self.user.enable_offline_email_notifications = True
        self.user.enable_offline_push_notifications = True
        self.assertFalse(receives_offline_notifications(self.user))

        self.user.enable_offline_email_notifications = False
        self.user.enable_offline_push_notifications = False
        self.assertFalse(receives_offline_notifications(self.user))

        self.user.enable_offline_email_notifications = True
        self.user.enable_offline_push_notifications = False
        self.assertFalse(receives_offline_notifications(self.user))

        self.user.enable_offline_email_notifications = False
        self.user.enable_offline_push_notifications = True
        self.assertFalse(receives_offline_notifications(self.user))

    def test_receivers_offline_notifications_when_user_is_not_a_bot(self):
        # type: () -> None
        # Either offline-email or offline-push being enabled is enough.
        self.user.is_bot = False

        self.user.enable_offline_email_notifications = True
        self.user.enable_offline_push_notifications = True
        self.assertTrue(receives_offline_notifications(self.user))

        self.user.enable_offline_email_notifications = False
        self.user.enable_offline_push_notifications = False
        self.assertFalse(receives_offline_notifications(self.user))

        self.user.enable_offline_email_notifications = True
        self.user.enable_offline_push_notifications = False
        self.assertTrue(receives_offline_notifications(self.user))

        self.user.enable_offline_email_notifications = False
        self.user.enable_offline_push_notifications = True
        self.assertTrue(receives_offline_notifications(self.user))
hurricup/intellij-community
refs/heads/master
python/testData/mover/multiLineSelection5_afterDown.py
83
class Test(object): def q(self): c = 3 <selection><caret>a = 1 b = 2 </selection>
40223119/2015w11
refs/heads/master
static/Brython3.1.0-20150301-090019/Lib/html/entities.py
814
"""HTML character entity references.""" # maps the HTML entity name to the Unicode codepoint name2codepoint = { 'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1 'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1 'Acirc': 0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1 'Agrave': 0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1 'Alpha': 0x0391, # greek capital letter alpha, U+0391 'Aring': 0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1 'Atilde': 0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1 'Auml': 0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1 'Beta': 0x0392, # greek capital letter beta, U+0392 'Ccedil': 0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1 'Chi': 0x03a7, # greek capital letter chi, U+03A7 'Dagger': 0x2021, # double dagger, U+2021 ISOpub 'Delta': 0x0394, # greek capital letter delta, U+0394 ISOgrk3 'ETH': 0x00d0, # latin capital letter ETH, U+00D0 ISOlat1 'Eacute': 0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1 'Ecirc': 0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1 'Egrave': 0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1 'Epsilon': 0x0395, # greek capital letter epsilon, U+0395 'Eta': 0x0397, # greek capital letter eta, U+0397 'Euml': 0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1 'Gamma': 0x0393, # greek capital letter gamma, U+0393 ISOgrk3 'Iacute': 0x00cd, # latin capital letter I with acute, U+00CD ISOlat1 'Icirc': 0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1 'Igrave': 0x00cc, # latin capital letter I with grave, U+00CC ISOlat1 'Iota': 0x0399, # greek capital letter iota, U+0399 'Iuml': 0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1 'Kappa': 0x039a, # greek capital letter kappa, U+039A 'Lambda': 0x039b, # greek capital letter lambda, U+039B ISOgrk3 
'Mu': 0x039c, # greek capital letter mu, U+039C 'Ntilde': 0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1 'Nu': 0x039d, # greek capital letter nu, U+039D 'OElig': 0x0152, # latin capital ligature OE, U+0152 ISOlat2 'Oacute': 0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1 'Ocirc': 0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1 'Ograve': 0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1 'Omega': 0x03a9, # greek capital letter omega, U+03A9 ISOgrk3 'Omicron': 0x039f, # greek capital letter omicron, U+039F 'Oslash': 0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1 'Otilde': 0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1 'Ouml': 0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1 'Phi': 0x03a6, # greek capital letter phi, U+03A6 ISOgrk3 'Pi': 0x03a0, # greek capital letter pi, U+03A0 ISOgrk3 'Prime': 0x2033, # double prime = seconds = inches, U+2033 ISOtech 'Psi': 0x03a8, # greek capital letter psi, U+03A8 ISOgrk3 'Rho': 0x03a1, # greek capital letter rho, U+03A1 'Scaron': 0x0160, # latin capital letter S with caron, U+0160 ISOlat2 'Sigma': 0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3 'THORN': 0x00de, # latin capital letter THORN, U+00DE ISOlat1 'Tau': 0x03a4, # greek capital letter tau, U+03A4 'Theta': 0x0398, # greek capital letter theta, U+0398 ISOgrk3 'Uacute': 0x00da, # latin capital letter U with acute, U+00DA ISOlat1 'Ucirc': 0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1 'Ugrave': 0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1 'Upsilon': 0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3 'Uuml': 0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1 'Xi': 0x039e, # greek capital letter xi, U+039E ISOgrk3 'Yacute': 0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1 'Yuml': 0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2 'Zeta': 0x0396, # greek capital letter zeta, 
U+0396 'aacute': 0x00e1, # latin small letter a with acute, U+00E1 ISOlat1 'acirc': 0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1 'acute': 0x00b4, # acute accent = spacing acute, U+00B4 ISOdia 'aelig': 0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1 'agrave': 0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1 'alefsym': 0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW 'alpha': 0x03b1, # greek small letter alpha, U+03B1 ISOgrk3 'amp': 0x0026, # ampersand, U+0026 ISOnum 'and': 0x2227, # logical and = wedge, U+2227 ISOtech 'ang': 0x2220, # angle, U+2220 ISOamso 'aring': 0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1 'asymp': 0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr 'atilde': 0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1 'auml': 0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1 'bdquo': 0x201e, # double low-9 quotation mark, U+201E NEW 'beta': 0x03b2, # greek small letter beta, U+03B2 ISOgrk3 'brvbar': 0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum 'bull': 0x2022, # bullet = black small circle, U+2022 ISOpub 'cap': 0x2229, # intersection = cap, U+2229 ISOtech 'ccedil': 0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1 'cedil': 0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia 'cent': 0x00a2, # cent sign, U+00A2 ISOnum 'chi': 0x03c7, # greek small letter chi, U+03C7 ISOgrk3 'circ': 0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub 'clubs': 0x2663, # black club suit = shamrock, U+2663 ISOpub 'cong': 0x2245, # approximately equal to, U+2245 ISOtech 'copy': 0x00a9, # copyright sign, U+00A9 ISOnum 'crarr': 0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW 'cup': 0x222a, # union = cup, U+222A ISOtech 'curren': 0x00a4, # currency sign, U+00A4 ISOnum 'dArr': 0x21d3, # downwards double arrow, U+21D3 ISOamsa 'dagger': 0x2020, # dagger, U+2020 
ISOpub 'darr': 0x2193, # downwards arrow, U+2193 ISOnum 'deg': 0x00b0, # degree sign, U+00B0 ISOnum 'delta': 0x03b4, # greek small letter delta, U+03B4 ISOgrk3 'diams': 0x2666, # black diamond suit, U+2666 ISOpub 'divide': 0x00f7, # division sign, U+00F7 ISOnum 'eacute': 0x00e9, # latin small letter e with acute, U+00E9 ISOlat1 'ecirc': 0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1 'egrave': 0x00e8, # latin small letter e with grave, U+00E8 ISOlat1 'empty': 0x2205, # empty set = null set = diameter, U+2205 ISOamso 'emsp': 0x2003, # em space, U+2003 ISOpub 'ensp': 0x2002, # en space, U+2002 ISOpub 'epsilon': 0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3 'equiv': 0x2261, # identical to, U+2261 ISOtech 'eta': 0x03b7, # greek small letter eta, U+03B7 ISOgrk3 'eth': 0x00f0, # latin small letter eth, U+00F0 ISOlat1 'euml': 0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1 'euro': 0x20ac, # euro sign, U+20AC NEW 'exist': 0x2203, # there exists, U+2203 ISOtech 'fnof': 0x0192, # latin small f with hook = function = florin, U+0192 ISOtech 'forall': 0x2200, # for all, U+2200 ISOtech 'frac12': 0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum 'frac14': 0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum 'frac34': 0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum 'frasl': 0x2044, # fraction slash, U+2044 NEW 'gamma': 0x03b3, # greek small letter gamma, U+03B3 ISOgrk3 'ge': 0x2265, # greater-than or equal to, U+2265 ISOtech 'gt': 0x003e, # greater-than sign, U+003E ISOnum 'hArr': 0x21d4, # left right double arrow, U+21D4 ISOamsa 'harr': 0x2194, # left right arrow, U+2194 ISOamsa 'hearts': 0x2665, # black heart suit = valentine, U+2665 ISOpub 'hellip': 0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub 'iacute': 0x00ed, # latin small letter i with acute, U+00ED ISOlat1 'icirc': 0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1 'iexcl': 0x00a1, 
# inverted exclamation mark, U+00A1 ISOnum 'igrave': 0x00ec, # latin small letter i with grave, U+00EC ISOlat1 'image': 0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso 'infin': 0x221e, # infinity, U+221E ISOtech 'int': 0x222b, # integral, U+222B ISOtech 'iota': 0x03b9, # greek small letter iota, U+03B9 ISOgrk3 'iquest': 0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum 'isin': 0x2208, # element of, U+2208 ISOtech 'iuml': 0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1 'kappa': 0x03ba, # greek small letter kappa, U+03BA ISOgrk3 'lArr': 0x21d0, # leftwards double arrow, U+21D0 ISOtech 'lambda': 0x03bb, # greek small letter lambda, U+03BB ISOgrk3 'lang': 0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech 'laquo': 0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum 'larr': 0x2190, # leftwards arrow, U+2190 ISOnum 'lceil': 0x2308, # left ceiling = apl upstile, U+2308 ISOamsc 'ldquo': 0x201c, # left double quotation mark, U+201C ISOnum 'le': 0x2264, # less-than or equal to, U+2264 ISOtech 'lfloor': 0x230a, # left floor = apl downstile, U+230A ISOamsc 'lowast': 0x2217, # asterisk operator, U+2217 ISOtech 'loz': 0x25ca, # lozenge, U+25CA ISOpub 'lrm': 0x200e, # left-to-right mark, U+200E NEW RFC 2070 'lsaquo': 0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed 'lsquo': 0x2018, # left single quotation mark, U+2018 ISOnum 'lt': 0x003c, # less-than sign, U+003C ISOnum 'macr': 0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia 'mdash': 0x2014, # em dash, U+2014 ISOpub 'micro': 0x00b5, # micro sign, U+00B5 ISOnum 'middot': 0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum 'minus': 0x2212, # minus sign, U+2212 ISOtech 'mu': 0x03bc, # greek small letter mu, U+03BC ISOgrk3 'nabla': 0x2207, # nabla = backward difference, U+2207 ISOtech 'nbsp': 0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum 'ndash': 
0x2013, # en dash, U+2013 ISOpub 'ne': 0x2260, # not equal to, U+2260 ISOtech 'ni': 0x220b, # contains as member, U+220B ISOtech 'not': 0x00ac, # not sign, U+00AC ISOnum 'notin': 0x2209, # not an element of, U+2209 ISOtech 'nsub': 0x2284, # not a subset of, U+2284 ISOamsn 'ntilde': 0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1 'nu': 0x03bd, # greek small letter nu, U+03BD ISOgrk3 'oacute': 0x00f3, # latin small letter o with acute, U+00F3 ISOlat1 'ocirc': 0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1 'oelig': 0x0153, # latin small ligature oe, U+0153 ISOlat2 'ograve': 0x00f2, # latin small letter o with grave, U+00F2 ISOlat1 'oline': 0x203e, # overline = spacing overscore, U+203E NEW 'omega': 0x03c9, # greek small letter omega, U+03C9 ISOgrk3 'omicron': 0x03bf, # greek small letter omicron, U+03BF NEW 'oplus': 0x2295, # circled plus = direct sum, U+2295 ISOamsb 'or': 0x2228, # logical or = vee, U+2228 ISOtech 'ordf': 0x00aa, # feminine ordinal indicator, U+00AA ISOnum 'ordm': 0x00ba, # masculine ordinal indicator, U+00BA ISOnum 'oslash': 0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1 'otilde': 0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1 'otimes': 0x2297, # circled times = vector product, U+2297 ISOamsb 'ouml': 0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1 'para': 0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum 'part': 0x2202, # partial differential, U+2202 ISOtech 'permil': 0x2030, # per mille sign, U+2030 ISOtech 'perp': 0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech 'phi': 0x03c6, # greek small letter phi, U+03C6 ISOgrk3 'pi': 0x03c0, # greek small letter pi, U+03C0 ISOgrk3 'piv': 0x03d6, # greek pi symbol, U+03D6 ISOgrk3 'plusmn': 0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum 'pound': 0x00a3, # pound sign, U+00A3 ISOnum 'prime': 0x2032, # prime = minutes = feet, U+2032 ISOtech 'prod': 0x220f, # n-ary product = product 
sign, U+220F ISOamsb 'prop': 0x221d, # proportional to, U+221D ISOtech 'psi': 0x03c8, # greek small letter psi, U+03C8 ISOgrk3 'quot': 0x0022, # quotation mark = APL quote, U+0022 ISOnum 'rArr': 0x21d2, # rightwards double arrow, U+21D2 ISOtech 'radic': 0x221a, # square root = radical sign, U+221A ISOtech 'rang': 0x232a, # right-pointing angle bracket = ket, U+232A ISOtech 'raquo': 0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum 'rarr': 0x2192, # rightwards arrow, U+2192 ISOnum 'rceil': 0x2309, # right ceiling, U+2309 ISOamsc 'rdquo': 0x201d, # right double quotation mark, U+201D ISOnum 'real': 0x211c, # blackletter capital R = real part symbol, U+211C ISOamso 'reg': 0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum 'rfloor': 0x230b, # right floor, U+230B ISOamsc 'rho': 0x03c1, # greek small letter rho, U+03C1 ISOgrk3 'rlm': 0x200f, # right-to-left mark, U+200F NEW RFC 2070 'rsaquo': 0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed 'rsquo': 0x2019, # right single quotation mark, U+2019 ISOnum 'sbquo': 0x201a, # single low-9 quotation mark, U+201A NEW 'scaron': 0x0161, # latin small letter s with caron, U+0161 ISOlat2 'sdot': 0x22c5, # dot operator, U+22C5 ISOamsb 'sect': 0x00a7, # section sign, U+00A7 ISOnum 'shy': 0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum 'sigma': 0x03c3, # greek small letter sigma, U+03C3 ISOgrk3 'sigmaf': 0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3 'sim': 0x223c, # tilde operator = varies with = similar to, U+223C ISOtech 'spades': 0x2660, # black spade suit, U+2660 ISOpub 'sub': 0x2282, # subset of, U+2282 ISOtech 'sube': 0x2286, # subset of or equal to, U+2286 ISOtech 'sum': 0x2211, # n-ary sumation, U+2211 ISOamsb 'sup': 0x2283, # superset of, U+2283 ISOtech 'sup1': 0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum 'sup2': 0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum 'sup3': 
0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum 'supe': 0x2287, # superset of or equal to, U+2287 ISOtech 'szlig': 0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1 'tau': 0x03c4, # greek small letter tau, U+03C4 ISOgrk3 'there4': 0x2234, # therefore, U+2234 ISOtech 'theta': 0x03b8, # greek small letter theta, U+03B8 ISOgrk3 'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW 'thinsp': 0x2009, # thin space, U+2009 ISOpub 'thorn': 0x00fe, # latin small letter thorn with, U+00FE ISOlat1 'tilde': 0x02dc, # small tilde, U+02DC ISOdia 'times': 0x00d7, # multiplication sign, U+00D7 ISOnum 'trade': 0x2122, # trade mark sign, U+2122 ISOnum 'uArr': 0x21d1, # upwards double arrow, U+21D1 ISOamsa 'uacute': 0x00fa, # latin small letter u with acute, U+00FA ISOlat1 'uarr': 0x2191, # upwards arrow, U+2191 ISOnum 'ucirc': 0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1 'ugrave': 0x00f9, # latin small letter u with grave, U+00F9 ISOlat1 'uml': 0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia 'upsih': 0x03d2, # greek upsilon with hook symbol, U+03D2 NEW 'upsilon': 0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3 'uuml': 0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1 'weierp': 0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso 'xi': 0x03be, # greek small letter xi, U+03BE ISOgrk3 'yacute': 0x00fd, # latin small letter y with acute, U+00FD ISOlat1 'yen': 0x00a5, # yen sign = yuan sign, U+00A5 ISOnum 'yuml': 0x00ff, # latin small letter y with diaeresis, U+00FF ISOlat1 'zeta': 0x03b6, # greek small letter zeta, U+03B6 ISOgrk3 'zwj': 0x200d, # zero width joiner, U+200D NEW RFC 2070 'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070 } # maps the HTML5 named character references to the equivalent Unicode character(s) html5 = { 'Aacute': '\xc1', 'aacute': '\xe1', 'Aacute;': '\xc1', 'aacute;': '\xe1', 'Abreve;': '\u0102', 'abreve;': '\u0103', 'ac;': '\u223e', 
'acd;': '\u223f', 'acE;': '\u223e\u0333', 'Acirc': '\xc2', 'acirc': '\xe2', 'Acirc;': '\xc2', 'acirc;': '\xe2', 'acute': '\xb4', 'acute;': '\xb4', 'Acy;': '\u0410', 'acy;': '\u0430', 'AElig': '\xc6', 'aelig': '\xe6', 'AElig;': '\xc6', 'aelig;': '\xe6', 'af;': '\u2061', 'Afr;': '\U0001d504', 'afr;': '\U0001d51e', 'Agrave': '\xc0', 'agrave': '\xe0', 'Agrave;': '\xc0', 'agrave;': '\xe0', 'alefsym;': '\u2135', 'aleph;': '\u2135', 'Alpha;': '\u0391', 'alpha;': '\u03b1', 'Amacr;': '\u0100', 'amacr;': '\u0101', 'amalg;': '\u2a3f', 'AMP': '&', 'amp': '&', 'AMP;': '&', 'amp;': '&', 'And;': '\u2a53', 'and;': '\u2227', 'andand;': '\u2a55', 'andd;': '\u2a5c', 'andslope;': '\u2a58', 'andv;': '\u2a5a', 'ang;': '\u2220', 'ange;': '\u29a4', 'angle;': '\u2220', 'angmsd;': '\u2221', 'angmsdaa;': '\u29a8', 'angmsdab;': '\u29a9', 'angmsdac;': '\u29aa', 'angmsdad;': '\u29ab', 'angmsdae;': '\u29ac', 'angmsdaf;': '\u29ad', 'angmsdag;': '\u29ae', 'angmsdah;': '\u29af', 'angrt;': '\u221f', 'angrtvb;': '\u22be', 'angrtvbd;': '\u299d', 'angsph;': '\u2222', 'angst;': '\xc5', 'angzarr;': '\u237c', 'Aogon;': '\u0104', 'aogon;': '\u0105', 'Aopf;': '\U0001d538', 'aopf;': '\U0001d552', 'ap;': '\u2248', 'apacir;': '\u2a6f', 'apE;': '\u2a70', 'ape;': '\u224a', 'apid;': '\u224b', 'apos;': "'", 'ApplyFunction;': '\u2061', 'approx;': '\u2248', 'approxeq;': '\u224a', 'Aring': '\xc5', 'aring': '\xe5', 'Aring;': '\xc5', 'aring;': '\xe5', 'Ascr;': '\U0001d49c', 'ascr;': '\U0001d4b6', 'Assign;': '\u2254', 'ast;': '*', 'asymp;': '\u2248', 'asympeq;': '\u224d', 'Atilde': '\xc3', 'atilde': '\xe3', 'Atilde;': '\xc3', 'atilde;': '\xe3', 'Auml': '\xc4', 'auml': '\xe4', 'Auml;': '\xc4', 'auml;': '\xe4', 'awconint;': '\u2233', 'awint;': '\u2a11', 'backcong;': '\u224c', 'backepsilon;': '\u03f6', 'backprime;': '\u2035', 'backsim;': '\u223d', 'backsimeq;': '\u22cd', 'Backslash;': '\u2216', 'Barv;': '\u2ae7', 'barvee;': '\u22bd', 'Barwed;': '\u2306', 'barwed;': '\u2305', 'barwedge;': '\u2305', 'bbrk;': '\u23b5', 
'bbrktbrk;': '\u23b6', 'bcong;': '\u224c', 'Bcy;': '\u0411', 'bcy;': '\u0431', 'bdquo;': '\u201e', 'becaus;': '\u2235', 'Because;': '\u2235', 'because;': '\u2235', 'bemptyv;': '\u29b0', 'bepsi;': '\u03f6', 'bernou;': '\u212c', 'Bernoullis;': '\u212c', 'Beta;': '\u0392', 'beta;': '\u03b2', 'beth;': '\u2136', 'between;': '\u226c', 'Bfr;': '\U0001d505', 'bfr;': '\U0001d51f', 'bigcap;': '\u22c2', 'bigcirc;': '\u25ef', 'bigcup;': '\u22c3', 'bigodot;': '\u2a00', 'bigoplus;': '\u2a01', 'bigotimes;': '\u2a02', 'bigsqcup;': '\u2a06', 'bigstar;': '\u2605', 'bigtriangledown;': '\u25bd', 'bigtriangleup;': '\u25b3', 'biguplus;': '\u2a04', 'bigvee;': '\u22c1', 'bigwedge;': '\u22c0', 'bkarow;': '\u290d', 'blacklozenge;': '\u29eb', 'blacksquare;': '\u25aa', 'blacktriangle;': '\u25b4', 'blacktriangledown;': '\u25be', 'blacktriangleleft;': '\u25c2', 'blacktriangleright;': '\u25b8', 'blank;': '\u2423', 'blk12;': '\u2592', 'blk14;': '\u2591', 'blk34;': '\u2593', 'block;': '\u2588', 'bne;': '=\u20e5', 'bnequiv;': '\u2261\u20e5', 'bNot;': '\u2aed', 'bnot;': '\u2310', 'Bopf;': '\U0001d539', 'bopf;': '\U0001d553', 'bot;': '\u22a5', 'bottom;': '\u22a5', 'bowtie;': '\u22c8', 'boxbox;': '\u29c9', 'boxDL;': '\u2557', 'boxDl;': '\u2556', 'boxdL;': '\u2555', 'boxdl;': '\u2510', 'boxDR;': '\u2554', 'boxDr;': '\u2553', 'boxdR;': '\u2552', 'boxdr;': '\u250c', 'boxH;': '\u2550', 'boxh;': '\u2500', 'boxHD;': '\u2566', 'boxHd;': '\u2564', 'boxhD;': '\u2565', 'boxhd;': '\u252c', 'boxHU;': '\u2569', 'boxHu;': '\u2567', 'boxhU;': '\u2568', 'boxhu;': '\u2534', 'boxminus;': '\u229f', 'boxplus;': '\u229e', 'boxtimes;': '\u22a0', 'boxUL;': '\u255d', 'boxUl;': '\u255c', 'boxuL;': '\u255b', 'boxul;': '\u2518', 'boxUR;': '\u255a', 'boxUr;': '\u2559', 'boxuR;': '\u2558', 'boxur;': '\u2514', 'boxV;': '\u2551', 'boxv;': '\u2502', 'boxVH;': '\u256c', 'boxVh;': '\u256b', 'boxvH;': '\u256a', 'boxvh;': '\u253c', 'boxVL;': '\u2563', 'boxVl;': '\u2562', 'boxvL;': '\u2561', 'boxvl;': '\u2524', 'boxVR;': '\u2560', 
'boxVr;': '\u255f', 'boxvR;': '\u255e', 'boxvr;': '\u251c', 'bprime;': '\u2035', 'Breve;': '\u02d8', 'breve;': '\u02d8', 'brvbar': '\xa6', 'brvbar;': '\xa6', 'Bscr;': '\u212c', 'bscr;': '\U0001d4b7', 'bsemi;': '\u204f', 'bsim;': '\u223d', 'bsime;': '\u22cd', 'bsol;': '\\', 'bsolb;': '\u29c5', 'bsolhsub;': '\u27c8', 'bull;': '\u2022', 'bullet;': '\u2022', 'bump;': '\u224e', 'bumpE;': '\u2aae', 'bumpe;': '\u224f', 'Bumpeq;': '\u224e', 'bumpeq;': '\u224f', 'Cacute;': '\u0106', 'cacute;': '\u0107', 'Cap;': '\u22d2', 'cap;': '\u2229', 'capand;': '\u2a44', 'capbrcup;': '\u2a49', 'capcap;': '\u2a4b', 'capcup;': '\u2a47', 'capdot;': '\u2a40', 'CapitalDifferentialD;': '\u2145', 'caps;': '\u2229\ufe00', 'caret;': '\u2041', 'caron;': '\u02c7', 'Cayleys;': '\u212d', 'ccaps;': '\u2a4d', 'Ccaron;': '\u010c', 'ccaron;': '\u010d', 'Ccedil': '\xc7', 'ccedil': '\xe7', 'Ccedil;': '\xc7', 'ccedil;': '\xe7', 'Ccirc;': '\u0108', 'ccirc;': '\u0109', 'Cconint;': '\u2230', 'ccups;': '\u2a4c', 'ccupssm;': '\u2a50', 'Cdot;': '\u010a', 'cdot;': '\u010b', 'cedil': '\xb8', 'cedil;': '\xb8', 'Cedilla;': '\xb8', 'cemptyv;': '\u29b2', 'cent': '\xa2', 'cent;': '\xa2', 'CenterDot;': '\xb7', 'centerdot;': '\xb7', 'Cfr;': '\u212d', 'cfr;': '\U0001d520', 'CHcy;': '\u0427', 'chcy;': '\u0447', 'check;': '\u2713', 'checkmark;': '\u2713', 'Chi;': '\u03a7', 'chi;': '\u03c7', 'cir;': '\u25cb', 'circ;': '\u02c6', 'circeq;': '\u2257', 'circlearrowleft;': '\u21ba', 'circlearrowright;': '\u21bb', 'circledast;': '\u229b', 'circledcirc;': '\u229a', 'circleddash;': '\u229d', 'CircleDot;': '\u2299', 'circledR;': '\xae', 'circledS;': '\u24c8', 'CircleMinus;': '\u2296', 'CirclePlus;': '\u2295', 'CircleTimes;': '\u2297', 'cirE;': '\u29c3', 'cire;': '\u2257', 'cirfnint;': '\u2a10', 'cirmid;': '\u2aef', 'cirscir;': '\u29c2', 'ClockwiseContourIntegral;': '\u2232', 'CloseCurlyDoubleQuote;': '\u201d', 'CloseCurlyQuote;': '\u2019', 'clubs;': '\u2663', 'clubsuit;': '\u2663', 'Colon;': '\u2237', 'colon;': ':', 'Colone;': 
'\u2a74', 'colone;': '\u2254', 'coloneq;': '\u2254', 'comma;': ',', 'commat;': '@', 'comp;': '\u2201', 'compfn;': '\u2218', 'complement;': '\u2201', 'complexes;': '\u2102', 'cong;': '\u2245', 'congdot;': '\u2a6d', 'Congruent;': '\u2261', 'Conint;': '\u222f', 'conint;': '\u222e', 'ContourIntegral;': '\u222e', 'Copf;': '\u2102', 'copf;': '\U0001d554', 'coprod;': '\u2210', 'Coproduct;': '\u2210', 'COPY': '\xa9', 'copy': '\xa9', 'COPY;': '\xa9', 'copy;': '\xa9', 'copysr;': '\u2117', 'CounterClockwiseContourIntegral;': '\u2233', 'crarr;': '\u21b5', 'Cross;': '\u2a2f', 'cross;': '\u2717', 'Cscr;': '\U0001d49e', 'cscr;': '\U0001d4b8', 'csub;': '\u2acf', 'csube;': '\u2ad1', 'csup;': '\u2ad0', 'csupe;': '\u2ad2', 'ctdot;': '\u22ef', 'cudarrl;': '\u2938', 'cudarrr;': '\u2935', 'cuepr;': '\u22de', 'cuesc;': '\u22df', 'cularr;': '\u21b6', 'cularrp;': '\u293d', 'Cup;': '\u22d3', 'cup;': '\u222a', 'cupbrcap;': '\u2a48', 'CupCap;': '\u224d', 'cupcap;': '\u2a46', 'cupcup;': '\u2a4a', 'cupdot;': '\u228d', 'cupor;': '\u2a45', 'cups;': '\u222a\ufe00', 'curarr;': '\u21b7', 'curarrm;': '\u293c', 'curlyeqprec;': '\u22de', 'curlyeqsucc;': '\u22df', 'curlyvee;': '\u22ce', 'curlywedge;': '\u22cf', 'curren': '\xa4', 'curren;': '\xa4', 'curvearrowleft;': '\u21b6', 'curvearrowright;': '\u21b7', 'cuvee;': '\u22ce', 'cuwed;': '\u22cf', 'cwconint;': '\u2232', 'cwint;': '\u2231', 'cylcty;': '\u232d', 'Dagger;': '\u2021', 'dagger;': '\u2020', 'daleth;': '\u2138', 'Darr;': '\u21a1', 'dArr;': '\u21d3', 'darr;': '\u2193', 'dash;': '\u2010', 'Dashv;': '\u2ae4', 'dashv;': '\u22a3', 'dbkarow;': '\u290f', 'dblac;': '\u02dd', 'Dcaron;': '\u010e', 'dcaron;': '\u010f', 'Dcy;': '\u0414', 'dcy;': '\u0434', 'DD;': '\u2145', 'dd;': '\u2146', 'ddagger;': '\u2021', 'ddarr;': '\u21ca', 'DDotrahd;': '\u2911', 'ddotseq;': '\u2a77', 'deg': '\xb0', 'deg;': '\xb0', 'Del;': '\u2207', 'Delta;': '\u0394', 'delta;': '\u03b4', 'demptyv;': '\u29b1', 'dfisht;': '\u297f', 'Dfr;': '\U0001d507', 'dfr;': '\U0001d521', 'dHar;': 
'\u2965', 'dharl;': '\u21c3', 'dharr;': '\u21c2', 'DiacriticalAcute;': '\xb4', 'DiacriticalDot;': '\u02d9', 'DiacriticalDoubleAcute;': '\u02dd', 'DiacriticalGrave;': '`', 'DiacriticalTilde;': '\u02dc', 'diam;': '\u22c4', 'Diamond;': '\u22c4', 'diamond;': '\u22c4', 'diamondsuit;': '\u2666', 'diams;': '\u2666', 'die;': '\xa8', 'DifferentialD;': '\u2146', 'digamma;': '\u03dd', 'disin;': '\u22f2', 'div;': '\xf7', 'divide': '\xf7', 'divide;': '\xf7', 'divideontimes;': '\u22c7', 'divonx;': '\u22c7', 'DJcy;': '\u0402', 'djcy;': '\u0452', 'dlcorn;': '\u231e', 'dlcrop;': '\u230d', 'dollar;': '$', 'Dopf;': '\U0001d53b', 'dopf;': '\U0001d555', 'Dot;': '\xa8', 'dot;': '\u02d9', 'DotDot;': '\u20dc', 'doteq;': '\u2250', 'doteqdot;': '\u2251', 'DotEqual;': '\u2250', 'dotminus;': '\u2238', 'dotplus;': '\u2214', 'dotsquare;': '\u22a1', 'doublebarwedge;': '\u2306', 'DoubleContourIntegral;': '\u222f', 'DoubleDot;': '\xa8', 'DoubleDownArrow;': '\u21d3', 'DoubleLeftArrow;': '\u21d0', 'DoubleLeftRightArrow;': '\u21d4', 'DoubleLeftTee;': '\u2ae4', 'DoubleLongLeftArrow;': '\u27f8', 'DoubleLongLeftRightArrow;': '\u27fa', 'DoubleLongRightArrow;': '\u27f9', 'DoubleRightArrow;': '\u21d2', 'DoubleRightTee;': '\u22a8', 'DoubleUpArrow;': '\u21d1', 'DoubleUpDownArrow;': '\u21d5', 'DoubleVerticalBar;': '\u2225', 'DownArrow;': '\u2193', 'Downarrow;': '\u21d3', 'downarrow;': '\u2193', 'DownArrowBar;': '\u2913', 'DownArrowUpArrow;': '\u21f5', 'DownBreve;': '\u0311', 'downdownarrows;': '\u21ca', 'downharpoonleft;': '\u21c3', 'downharpoonright;': '\u21c2', 'DownLeftRightVector;': '\u2950', 'DownLeftTeeVector;': '\u295e', 'DownLeftVector;': '\u21bd', 'DownLeftVectorBar;': '\u2956', 'DownRightTeeVector;': '\u295f', 'DownRightVector;': '\u21c1', 'DownRightVectorBar;': '\u2957', 'DownTee;': '\u22a4', 'DownTeeArrow;': '\u21a7', 'drbkarow;': '\u2910', 'drcorn;': '\u231f', 'drcrop;': '\u230c', 'Dscr;': '\U0001d49f', 'dscr;': '\U0001d4b9', 'DScy;': '\u0405', 'dscy;': '\u0455', 'dsol;': '\u29f6', 'Dstrok;': 
'\u0110', 'dstrok;': '\u0111', 'dtdot;': '\u22f1', 'dtri;': '\u25bf', 'dtrif;': '\u25be', 'duarr;': '\u21f5', 'duhar;': '\u296f', 'dwangle;': '\u29a6', 'DZcy;': '\u040f', 'dzcy;': '\u045f', 'dzigrarr;': '\u27ff', 'Eacute': '\xc9', 'eacute': '\xe9', 'Eacute;': '\xc9', 'eacute;': '\xe9', 'easter;': '\u2a6e', 'Ecaron;': '\u011a', 'ecaron;': '\u011b', 'ecir;': '\u2256', 'Ecirc': '\xca', 'ecirc': '\xea', 'Ecirc;': '\xca', 'ecirc;': '\xea', 'ecolon;': '\u2255', 'Ecy;': '\u042d', 'ecy;': '\u044d', 'eDDot;': '\u2a77', 'Edot;': '\u0116', 'eDot;': '\u2251', 'edot;': '\u0117', 'ee;': '\u2147', 'efDot;': '\u2252', 'Efr;': '\U0001d508', 'efr;': '\U0001d522', 'eg;': '\u2a9a', 'Egrave': '\xc8', 'egrave': '\xe8', 'Egrave;': '\xc8', 'egrave;': '\xe8', 'egs;': '\u2a96', 'egsdot;': '\u2a98', 'el;': '\u2a99', 'Element;': '\u2208', 'elinters;': '\u23e7', 'ell;': '\u2113', 'els;': '\u2a95', 'elsdot;': '\u2a97', 'Emacr;': '\u0112', 'emacr;': '\u0113', 'empty;': '\u2205', 'emptyset;': '\u2205', 'EmptySmallSquare;': '\u25fb', 'emptyv;': '\u2205', 'EmptyVerySmallSquare;': '\u25ab', 'emsp13;': '\u2004', 'emsp14;': '\u2005', 'emsp;': '\u2003', 'ENG;': '\u014a', 'eng;': '\u014b', 'ensp;': '\u2002', 'Eogon;': '\u0118', 'eogon;': '\u0119', 'Eopf;': '\U0001d53c', 'eopf;': '\U0001d556', 'epar;': '\u22d5', 'eparsl;': '\u29e3', 'eplus;': '\u2a71', 'epsi;': '\u03b5', 'Epsilon;': '\u0395', 'epsilon;': '\u03b5', 'epsiv;': '\u03f5', 'eqcirc;': '\u2256', 'eqcolon;': '\u2255', 'eqsim;': '\u2242', 'eqslantgtr;': '\u2a96', 'eqslantless;': '\u2a95', 'Equal;': '\u2a75', 'equals;': '=', 'EqualTilde;': '\u2242', 'equest;': '\u225f', 'Equilibrium;': '\u21cc', 'equiv;': '\u2261', 'equivDD;': '\u2a78', 'eqvparsl;': '\u29e5', 'erarr;': '\u2971', 'erDot;': '\u2253', 'Escr;': '\u2130', 'escr;': '\u212f', 'esdot;': '\u2250', 'Esim;': '\u2a73', 'esim;': '\u2242', 'Eta;': '\u0397', 'eta;': '\u03b7', 'ETH': '\xd0', 'eth': '\xf0', 'ETH;': '\xd0', 'eth;': '\xf0', 'Euml': '\xcb', 'euml': '\xeb', 'Euml;': '\xcb', 'euml;': 
'\xeb', 'euro;': '\u20ac', 'excl;': '!', 'exist;': '\u2203', 'Exists;': '\u2203', 'expectation;': '\u2130', 'ExponentialE;': '\u2147', 'exponentiale;': '\u2147', 'fallingdotseq;': '\u2252', 'Fcy;': '\u0424', 'fcy;': '\u0444', 'female;': '\u2640', 'ffilig;': '\ufb03', 'fflig;': '\ufb00', 'ffllig;': '\ufb04', 'Ffr;': '\U0001d509', 'ffr;': '\U0001d523', 'filig;': '\ufb01', 'FilledSmallSquare;': '\u25fc', 'FilledVerySmallSquare;': '\u25aa', 'fjlig;': 'fj', 'flat;': '\u266d', 'fllig;': '\ufb02', 'fltns;': '\u25b1', 'fnof;': '\u0192', 'Fopf;': '\U0001d53d', 'fopf;': '\U0001d557', 'ForAll;': '\u2200', 'forall;': '\u2200', 'fork;': '\u22d4', 'forkv;': '\u2ad9', 'Fouriertrf;': '\u2131', 'fpartint;': '\u2a0d', 'frac12': '\xbd', 'frac12;': '\xbd', 'frac13;': '\u2153', 'frac14': '\xbc', 'frac14;': '\xbc', 'frac15;': '\u2155', 'frac16;': '\u2159', 'frac18;': '\u215b', 'frac23;': '\u2154', 'frac25;': '\u2156', 'frac34': '\xbe', 'frac34;': '\xbe', 'frac35;': '\u2157', 'frac38;': '\u215c', 'frac45;': '\u2158', 'frac56;': '\u215a', 'frac58;': '\u215d', 'frac78;': '\u215e', 'frasl;': '\u2044', 'frown;': '\u2322', 'Fscr;': '\u2131', 'fscr;': '\U0001d4bb', 'gacute;': '\u01f5', 'Gamma;': '\u0393', 'gamma;': '\u03b3', 'Gammad;': '\u03dc', 'gammad;': '\u03dd', 'gap;': '\u2a86', 'Gbreve;': '\u011e', 'gbreve;': '\u011f', 'Gcedil;': '\u0122', 'Gcirc;': '\u011c', 'gcirc;': '\u011d', 'Gcy;': '\u0413', 'gcy;': '\u0433', 'Gdot;': '\u0120', 'gdot;': '\u0121', 'gE;': '\u2267', 'ge;': '\u2265', 'gEl;': '\u2a8c', 'gel;': '\u22db', 'geq;': '\u2265', 'geqq;': '\u2267', 'geqslant;': '\u2a7e', 'ges;': '\u2a7e', 'gescc;': '\u2aa9', 'gesdot;': '\u2a80', 'gesdoto;': '\u2a82', 'gesdotol;': '\u2a84', 'gesl;': '\u22db\ufe00', 'gesles;': '\u2a94', 'Gfr;': '\U0001d50a', 'gfr;': '\U0001d524', 'Gg;': '\u22d9', 'gg;': '\u226b', 'ggg;': '\u22d9', 'gimel;': '\u2137', 'GJcy;': '\u0403', 'gjcy;': '\u0453', 'gl;': '\u2277', 'gla;': '\u2aa5', 'glE;': '\u2a92', 'glj;': '\u2aa4', 'gnap;': '\u2a8a', 'gnapprox;': '\u2a8a', 
'gnE;': '\u2269', 'gne;': '\u2a88', 'gneq;': '\u2a88', 'gneqq;': '\u2269', 'gnsim;': '\u22e7', 'Gopf;': '\U0001d53e', 'gopf;': '\U0001d558', 'grave;': '`', 'GreaterEqual;': '\u2265', 'GreaterEqualLess;': '\u22db', 'GreaterFullEqual;': '\u2267', 'GreaterGreater;': '\u2aa2', 'GreaterLess;': '\u2277', 'GreaterSlantEqual;': '\u2a7e', 'GreaterTilde;': '\u2273', 'Gscr;': '\U0001d4a2', 'gscr;': '\u210a', 'gsim;': '\u2273', 'gsime;': '\u2a8e', 'gsiml;': '\u2a90', 'GT': '>', 'gt': '>', 'GT;': '>', 'Gt;': '\u226b', 'gt;': '>', 'gtcc;': '\u2aa7', 'gtcir;': '\u2a7a', 'gtdot;': '\u22d7', 'gtlPar;': '\u2995', 'gtquest;': '\u2a7c', 'gtrapprox;': '\u2a86', 'gtrarr;': '\u2978', 'gtrdot;': '\u22d7', 'gtreqless;': '\u22db', 'gtreqqless;': '\u2a8c', 'gtrless;': '\u2277', 'gtrsim;': '\u2273', 'gvertneqq;': '\u2269\ufe00', 'gvnE;': '\u2269\ufe00', 'Hacek;': '\u02c7', 'hairsp;': '\u200a', 'half;': '\xbd', 'hamilt;': '\u210b', 'HARDcy;': '\u042a', 'hardcy;': '\u044a', 'hArr;': '\u21d4', 'harr;': '\u2194', 'harrcir;': '\u2948', 'harrw;': '\u21ad', 'Hat;': '^', 'hbar;': '\u210f', 'Hcirc;': '\u0124', 'hcirc;': '\u0125', 'hearts;': '\u2665', 'heartsuit;': '\u2665', 'hellip;': '\u2026', 'hercon;': '\u22b9', 'Hfr;': '\u210c', 'hfr;': '\U0001d525', 'HilbertSpace;': '\u210b', 'hksearow;': '\u2925', 'hkswarow;': '\u2926', 'hoarr;': '\u21ff', 'homtht;': '\u223b', 'hookleftarrow;': '\u21a9', 'hookrightarrow;': '\u21aa', 'Hopf;': '\u210d', 'hopf;': '\U0001d559', 'horbar;': '\u2015', 'HorizontalLine;': '\u2500', 'Hscr;': '\u210b', 'hscr;': '\U0001d4bd', 'hslash;': '\u210f', 'Hstrok;': '\u0126', 'hstrok;': '\u0127', 'HumpDownHump;': '\u224e', 'HumpEqual;': '\u224f', 'hybull;': '\u2043', 'hyphen;': '\u2010', 'Iacute': '\xcd', 'iacute': '\xed', 'Iacute;': '\xcd', 'iacute;': '\xed', 'ic;': '\u2063', 'Icirc': '\xce', 'icirc': '\xee', 'Icirc;': '\xce', 'icirc;': '\xee', 'Icy;': '\u0418', 'icy;': '\u0438', 'Idot;': '\u0130', 'IEcy;': '\u0415', 'iecy;': '\u0435', 'iexcl': '\xa1', 'iexcl;': '\xa1', 'iff;': 
'\u21d4', 'Ifr;': '\u2111', 'ifr;': '\U0001d526', 'Igrave': '\xcc', 'igrave': '\xec', 'Igrave;': '\xcc', 'igrave;': '\xec', 'ii;': '\u2148', 'iiiint;': '\u2a0c', 'iiint;': '\u222d', 'iinfin;': '\u29dc', 'iiota;': '\u2129', 'IJlig;': '\u0132', 'ijlig;': '\u0133', 'Im;': '\u2111', 'Imacr;': '\u012a', 'imacr;': '\u012b', 'image;': '\u2111', 'ImaginaryI;': '\u2148', 'imagline;': '\u2110', 'imagpart;': '\u2111', 'imath;': '\u0131', 'imof;': '\u22b7', 'imped;': '\u01b5', 'Implies;': '\u21d2', 'in;': '\u2208', 'incare;': '\u2105', 'infin;': '\u221e', 'infintie;': '\u29dd', 'inodot;': '\u0131', 'Int;': '\u222c', 'int;': '\u222b', 'intcal;': '\u22ba', 'integers;': '\u2124', 'Integral;': '\u222b', 'intercal;': '\u22ba', 'Intersection;': '\u22c2', 'intlarhk;': '\u2a17', 'intprod;': '\u2a3c', 'InvisibleComma;': '\u2063', 'InvisibleTimes;': '\u2062', 'IOcy;': '\u0401', 'iocy;': '\u0451', 'Iogon;': '\u012e', 'iogon;': '\u012f', 'Iopf;': '\U0001d540', 'iopf;': '\U0001d55a', 'Iota;': '\u0399', 'iota;': '\u03b9', 'iprod;': '\u2a3c', 'iquest': '\xbf', 'iquest;': '\xbf', 'Iscr;': '\u2110', 'iscr;': '\U0001d4be', 'isin;': '\u2208', 'isindot;': '\u22f5', 'isinE;': '\u22f9', 'isins;': '\u22f4', 'isinsv;': '\u22f3', 'isinv;': '\u2208', 'it;': '\u2062', 'Itilde;': '\u0128', 'itilde;': '\u0129', 'Iukcy;': '\u0406', 'iukcy;': '\u0456', 'Iuml': '\xcf', 'iuml': '\xef', 'Iuml;': '\xcf', 'iuml;': '\xef', 'Jcirc;': '\u0134', 'jcirc;': '\u0135', 'Jcy;': '\u0419', 'jcy;': '\u0439', 'Jfr;': '\U0001d50d', 'jfr;': '\U0001d527', 'jmath;': '\u0237', 'Jopf;': '\U0001d541', 'jopf;': '\U0001d55b', 'Jscr;': '\U0001d4a5', 'jscr;': '\U0001d4bf', 'Jsercy;': '\u0408', 'jsercy;': '\u0458', 'Jukcy;': '\u0404', 'jukcy;': '\u0454', 'Kappa;': '\u039a', 'kappa;': '\u03ba', 'kappav;': '\u03f0', 'Kcedil;': '\u0136', 'kcedil;': '\u0137', 'Kcy;': '\u041a', 'kcy;': '\u043a', 'Kfr;': '\U0001d50e', 'kfr;': '\U0001d528', 'kgreen;': '\u0138', 'KHcy;': '\u0425', 'khcy;': '\u0445', 'KJcy;': '\u040c', 'kjcy;': '\u045c', 
'Kopf;': '\U0001d542', 'kopf;': '\U0001d55c', 'Kscr;': '\U0001d4a6', 'kscr;': '\U0001d4c0', 'lAarr;': '\u21da', 'Lacute;': '\u0139', 'lacute;': '\u013a', 'laemptyv;': '\u29b4', 'lagran;': '\u2112', 'Lambda;': '\u039b', 'lambda;': '\u03bb', 'Lang;': '\u27ea', 'lang;': '\u27e8', 'langd;': '\u2991', 'langle;': '\u27e8', 'lap;': '\u2a85', 'Laplacetrf;': '\u2112', 'laquo': '\xab', 'laquo;': '\xab', 'Larr;': '\u219e', 'lArr;': '\u21d0', 'larr;': '\u2190', 'larrb;': '\u21e4', 'larrbfs;': '\u291f', 'larrfs;': '\u291d', 'larrhk;': '\u21a9', 'larrlp;': '\u21ab', 'larrpl;': '\u2939', 'larrsim;': '\u2973', 'larrtl;': '\u21a2', 'lat;': '\u2aab', 'lAtail;': '\u291b', 'latail;': '\u2919', 'late;': '\u2aad', 'lates;': '\u2aad\ufe00', 'lBarr;': '\u290e', 'lbarr;': '\u290c', 'lbbrk;': '\u2772', 'lbrace;': '{', 'lbrack;': '[', 'lbrke;': '\u298b', 'lbrksld;': '\u298f', 'lbrkslu;': '\u298d', 'Lcaron;': '\u013d', 'lcaron;': '\u013e', 'Lcedil;': '\u013b', 'lcedil;': '\u013c', 'lceil;': '\u2308', 'lcub;': '{', 'Lcy;': '\u041b', 'lcy;': '\u043b', 'ldca;': '\u2936', 'ldquo;': '\u201c', 'ldquor;': '\u201e', 'ldrdhar;': '\u2967', 'ldrushar;': '\u294b', 'ldsh;': '\u21b2', 'lE;': '\u2266', 'le;': '\u2264', 'LeftAngleBracket;': '\u27e8', 'LeftArrow;': '\u2190', 'Leftarrow;': '\u21d0', 'leftarrow;': '\u2190', 'LeftArrowBar;': '\u21e4', 'LeftArrowRightArrow;': '\u21c6', 'leftarrowtail;': '\u21a2', 'LeftCeiling;': '\u2308', 'LeftDoubleBracket;': '\u27e6', 'LeftDownTeeVector;': '\u2961', 'LeftDownVector;': '\u21c3', 'LeftDownVectorBar;': '\u2959', 'LeftFloor;': '\u230a', 'leftharpoondown;': '\u21bd', 'leftharpoonup;': '\u21bc', 'leftleftarrows;': '\u21c7', 'LeftRightArrow;': '\u2194', 'Leftrightarrow;': '\u21d4', 'leftrightarrow;': '\u2194', 'leftrightarrows;': '\u21c6', 'leftrightharpoons;': '\u21cb', 'leftrightsquigarrow;': '\u21ad', 'LeftRightVector;': '\u294e', 'LeftTee;': '\u22a3', 'LeftTeeArrow;': '\u21a4', 'LeftTeeVector;': '\u295a', 'leftthreetimes;': '\u22cb', 'LeftTriangle;': '\u22b2', 
'LeftTriangleBar;': '\u29cf', 'LeftTriangleEqual;': '\u22b4', 'LeftUpDownVector;': '\u2951', 'LeftUpTeeVector;': '\u2960', 'LeftUpVector;': '\u21bf', 'LeftUpVectorBar;': '\u2958', 'LeftVector;': '\u21bc', 'LeftVectorBar;': '\u2952', 'lEg;': '\u2a8b', 'leg;': '\u22da', 'leq;': '\u2264', 'leqq;': '\u2266', 'leqslant;': '\u2a7d', 'les;': '\u2a7d', 'lescc;': '\u2aa8', 'lesdot;': '\u2a7f', 'lesdoto;': '\u2a81', 'lesdotor;': '\u2a83', 'lesg;': '\u22da\ufe00', 'lesges;': '\u2a93', 'lessapprox;': '\u2a85', 'lessdot;': '\u22d6', 'lesseqgtr;': '\u22da', 'lesseqqgtr;': '\u2a8b', 'LessEqualGreater;': '\u22da', 'LessFullEqual;': '\u2266', 'LessGreater;': '\u2276', 'lessgtr;': '\u2276', 'LessLess;': '\u2aa1', 'lesssim;': '\u2272', 'LessSlantEqual;': '\u2a7d', 'LessTilde;': '\u2272', 'lfisht;': '\u297c', 'lfloor;': '\u230a', 'Lfr;': '\U0001d50f', 'lfr;': '\U0001d529', 'lg;': '\u2276', 'lgE;': '\u2a91', 'lHar;': '\u2962', 'lhard;': '\u21bd', 'lharu;': '\u21bc', 'lharul;': '\u296a', 'lhblk;': '\u2584', 'LJcy;': '\u0409', 'ljcy;': '\u0459', 'Ll;': '\u22d8', 'll;': '\u226a', 'llarr;': '\u21c7', 'llcorner;': '\u231e', 'Lleftarrow;': '\u21da', 'llhard;': '\u296b', 'lltri;': '\u25fa', 'Lmidot;': '\u013f', 'lmidot;': '\u0140', 'lmoust;': '\u23b0', 'lmoustache;': '\u23b0', 'lnap;': '\u2a89', 'lnapprox;': '\u2a89', 'lnE;': '\u2268', 'lne;': '\u2a87', 'lneq;': '\u2a87', 'lneqq;': '\u2268', 'lnsim;': '\u22e6', 'loang;': '\u27ec', 'loarr;': '\u21fd', 'lobrk;': '\u27e6', 'LongLeftArrow;': '\u27f5', 'Longleftarrow;': '\u27f8', 'longleftarrow;': '\u27f5', 'LongLeftRightArrow;': '\u27f7', 'Longleftrightarrow;': '\u27fa', 'longleftrightarrow;': '\u27f7', 'longmapsto;': '\u27fc', 'LongRightArrow;': '\u27f6', 'Longrightarrow;': '\u27f9', 'longrightarrow;': '\u27f6', 'looparrowleft;': '\u21ab', 'looparrowright;': '\u21ac', 'lopar;': '\u2985', 'Lopf;': '\U0001d543', 'lopf;': '\U0001d55d', 'loplus;': '\u2a2d', 'lotimes;': '\u2a34', 'lowast;': '\u2217', 'lowbar;': '_', 'LowerLeftArrow;': '\u2199', 
'LowerRightArrow;': '\u2198', 'loz;': '\u25ca', 'lozenge;': '\u25ca', 'lozf;': '\u29eb', 'lpar;': '(', 'lparlt;': '\u2993', 'lrarr;': '\u21c6', 'lrcorner;': '\u231f', 'lrhar;': '\u21cb', 'lrhard;': '\u296d', 'lrm;': '\u200e', 'lrtri;': '\u22bf', 'lsaquo;': '\u2039', 'Lscr;': '\u2112', 'lscr;': '\U0001d4c1', 'Lsh;': '\u21b0', 'lsh;': '\u21b0', 'lsim;': '\u2272', 'lsime;': '\u2a8d', 'lsimg;': '\u2a8f', 'lsqb;': '[', 'lsquo;': '\u2018', 'lsquor;': '\u201a', 'Lstrok;': '\u0141', 'lstrok;': '\u0142', 'LT': '<', 'lt': '<', 'LT;': '<', 'Lt;': '\u226a', 'lt;': '<', 'ltcc;': '\u2aa6', 'ltcir;': '\u2a79', 'ltdot;': '\u22d6', 'lthree;': '\u22cb', 'ltimes;': '\u22c9', 'ltlarr;': '\u2976', 'ltquest;': '\u2a7b', 'ltri;': '\u25c3', 'ltrie;': '\u22b4', 'ltrif;': '\u25c2', 'ltrPar;': '\u2996', 'lurdshar;': '\u294a', 'luruhar;': '\u2966', 'lvertneqq;': '\u2268\ufe00', 'lvnE;': '\u2268\ufe00', 'macr': '\xaf', 'macr;': '\xaf', 'male;': '\u2642', 'malt;': '\u2720', 'maltese;': '\u2720', 'Map;': '\u2905', 'map;': '\u21a6', 'mapsto;': '\u21a6', 'mapstodown;': '\u21a7', 'mapstoleft;': '\u21a4', 'mapstoup;': '\u21a5', 'marker;': '\u25ae', 'mcomma;': '\u2a29', 'Mcy;': '\u041c', 'mcy;': '\u043c', 'mdash;': '\u2014', 'mDDot;': '\u223a', 'measuredangle;': '\u2221', 'MediumSpace;': '\u205f', 'Mellintrf;': '\u2133', 'Mfr;': '\U0001d510', 'mfr;': '\U0001d52a', 'mho;': '\u2127', 'micro': '\xb5', 'micro;': '\xb5', 'mid;': '\u2223', 'midast;': '*', 'midcir;': '\u2af0', 'middot': '\xb7', 'middot;': '\xb7', 'minus;': '\u2212', 'minusb;': '\u229f', 'minusd;': '\u2238', 'minusdu;': '\u2a2a', 'MinusPlus;': '\u2213', 'mlcp;': '\u2adb', 'mldr;': '\u2026', 'mnplus;': '\u2213', 'models;': '\u22a7', 'Mopf;': '\U0001d544', 'mopf;': '\U0001d55e', 'mp;': '\u2213', 'Mscr;': '\u2133', 'mscr;': '\U0001d4c2', 'mstpos;': '\u223e', 'Mu;': '\u039c', 'mu;': '\u03bc', 'multimap;': '\u22b8', 'mumap;': '\u22b8', 'nabla;': '\u2207', 'Nacute;': '\u0143', 'nacute;': '\u0144', 'nang;': '\u2220\u20d2', 'nap;': '\u2249', 
'napE;': '\u2a70\u0338', 'napid;': '\u224b\u0338', 'napos;': '\u0149', 'napprox;': '\u2249', 'natur;': '\u266e', 'natural;': '\u266e', 'naturals;': '\u2115', 'nbsp': '\xa0', 'nbsp;': '\xa0', 'nbump;': '\u224e\u0338', 'nbumpe;': '\u224f\u0338', 'ncap;': '\u2a43', 'Ncaron;': '\u0147', 'ncaron;': '\u0148', 'Ncedil;': '\u0145', 'ncedil;': '\u0146', 'ncong;': '\u2247', 'ncongdot;': '\u2a6d\u0338', 'ncup;': '\u2a42', 'Ncy;': '\u041d', 'ncy;': '\u043d', 'ndash;': '\u2013', 'ne;': '\u2260', 'nearhk;': '\u2924', 'neArr;': '\u21d7', 'nearr;': '\u2197', 'nearrow;': '\u2197', 'nedot;': '\u2250\u0338', 'NegativeMediumSpace;': '\u200b', 'NegativeThickSpace;': '\u200b', 'NegativeThinSpace;': '\u200b', 'NegativeVeryThinSpace;': '\u200b', 'nequiv;': '\u2262', 'nesear;': '\u2928', 'nesim;': '\u2242\u0338', 'NestedGreaterGreater;': '\u226b', 'NestedLessLess;': '\u226a', 'NewLine;': '\n', 'nexist;': '\u2204', 'nexists;': '\u2204', 'Nfr;': '\U0001d511', 'nfr;': '\U0001d52b', 'ngE;': '\u2267\u0338', 'nge;': '\u2271', 'ngeq;': '\u2271', 'ngeqq;': '\u2267\u0338', 'ngeqslant;': '\u2a7e\u0338', 'nges;': '\u2a7e\u0338', 'nGg;': '\u22d9\u0338', 'ngsim;': '\u2275', 'nGt;': '\u226b\u20d2', 'ngt;': '\u226f', 'ngtr;': '\u226f', 'nGtv;': '\u226b\u0338', 'nhArr;': '\u21ce', 'nharr;': '\u21ae', 'nhpar;': '\u2af2', 'ni;': '\u220b', 'nis;': '\u22fc', 'nisd;': '\u22fa', 'niv;': '\u220b', 'NJcy;': '\u040a', 'njcy;': '\u045a', 'nlArr;': '\u21cd', 'nlarr;': '\u219a', 'nldr;': '\u2025', 'nlE;': '\u2266\u0338', 'nle;': '\u2270', 'nLeftarrow;': '\u21cd', 'nleftarrow;': '\u219a', 'nLeftrightarrow;': '\u21ce', 'nleftrightarrow;': '\u21ae', 'nleq;': '\u2270', 'nleqq;': '\u2266\u0338', 'nleqslant;': '\u2a7d\u0338', 'nles;': '\u2a7d\u0338', 'nless;': '\u226e', 'nLl;': '\u22d8\u0338', 'nlsim;': '\u2274', 'nLt;': '\u226a\u20d2', 'nlt;': '\u226e', 'nltri;': '\u22ea', 'nltrie;': '\u22ec', 'nLtv;': '\u226a\u0338', 'nmid;': '\u2224', 'NoBreak;': '\u2060', 'NonBreakingSpace;': '\xa0', 'Nopf;': '\u2115', 'nopf;': 
'\U0001d55f', 'not': '\xac', 'Not;': '\u2aec', 'not;': '\xac', 'NotCongruent;': '\u2262', 'NotCupCap;': '\u226d', 'NotDoubleVerticalBar;': '\u2226', 'NotElement;': '\u2209', 'NotEqual;': '\u2260', 'NotEqualTilde;': '\u2242\u0338', 'NotExists;': '\u2204', 'NotGreater;': '\u226f', 'NotGreaterEqual;': '\u2271', 'NotGreaterFullEqual;': '\u2267\u0338', 'NotGreaterGreater;': '\u226b\u0338', 'NotGreaterLess;': '\u2279', 'NotGreaterSlantEqual;': '\u2a7e\u0338', 'NotGreaterTilde;': '\u2275', 'NotHumpDownHump;': '\u224e\u0338', 'NotHumpEqual;': '\u224f\u0338', 'notin;': '\u2209', 'notindot;': '\u22f5\u0338', 'notinE;': '\u22f9\u0338', 'notinva;': '\u2209', 'notinvb;': '\u22f7', 'notinvc;': '\u22f6', 'NotLeftTriangle;': '\u22ea', 'NotLeftTriangleBar;': '\u29cf\u0338', 'NotLeftTriangleEqual;': '\u22ec', 'NotLess;': '\u226e', 'NotLessEqual;': '\u2270', 'NotLessGreater;': '\u2278', 'NotLessLess;': '\u226a\u0338', 'NotLessSlantEqual;': '\u2a7d\u0338', 'NotLessTilde;': '\u2274', 'NotNestedGreaterGreater;': '\u2aa2\u0338', 'NotNestedLessLess;': '\u2aa1\u0338', 'notni;': '\u220c', 'notniva;': '\u220c', 'notnivb;': '\u22fe', 'notnivc;': '\u22fd', 'NotPrecedes;': '\u2280', 'NotPrecedesEqual;': '\u2aaf\u0338', 'NotPrecedesSlantEqual;': '\u22e0', 'NotReverseElement;': '\u220c', 'NotRightTriangle;': '\u22eb', 'NotRightTriangleBar;': '\u29d0\u0338', 'NotRightTriangleEqual;': '\u22ed', 'NotSquareSubset;': '\u228f\u0338', 'NotSquareSubsetEqual;': '\u22e2', 'NotSquareSuperset;': '\u2290\u0338', 'NotSquareSupersetEqual;': '\u22e3', 'NotSubset;': '\u2282\u20d2', 'NotSubsetEqual;': '\u2288', 'NotSucceeds;': '\u2281', 'NotSucceedsEqual;': '\u2ab0\u0338', 'NotSucceedsSlantEqual;': '\u22e1', 'NotSucceedsTilde;': '\u227f\u0338', 'NotSuperset;': '\u2283\u20d2', 'NotSupersetEqual;': '\u2289', 'NotTilde;': '\u2241', 'NotTildeEqual;': '\u2244', 'NotTildeFullEqual;': '\u2247', 'NotTildeTilde;': '\u2249', 'NotVerticalBar;': '\u2224', 'npar;': '\u2226', 'nparallel;': '\u2226', 'nparsl;': '\u2afd\u20e5', 
'npart;': '\u2202\u0338', 'npolint;': '\u2a14', 'npr;': '\u2280', 'nprcue;': '\u22e0', 'npre;': '\u2aaf\u0338', 'nprec;': '\u2280', 'npreceq;': '\u2aaf\u0338', 'nrArr;': '\u21cf', 'nrarr;': '\u219b', 'nrarrc;': '\u2933\u0338', 'nrarrw;': '\u219d\u0338', 'nRightarrow;': '\u21cf', 'nrightarrow;': '\u219b', 'nrtri;': '\u22eb', 'nrtrie;': '\u22ed', 'nsc;': '\u2281', 'nsccue;': '\u22e1', 'nsce;': '\u2ab0\u0338', 'Nscr;': '\U0001d4a9', 'nscr;': '\U0001d4c3', 'nshortmid;': '\u2224', 'nshortparallel;': '\u2226', 'nsim;': '\u2241', 'nsime;': '\u2244', 'nsimeq;': '\u2244', 'nsmid;': '\u2224', 'nspar;': '\u2226', 'nsqsube;': '\u22e2', 'nsqsupe;': '\u22e3', 'nsub;': '\u2284', 'nsubE;': '\u2ac5\u0338', 'nsube;': '\u2288', 'nsubset;': '\u2282\u20d2', 'nsubseteq;': '\u2288', 'nsubseteqq;': '\u2ac5\u0338', 'nsucc;': '\u2281', 'nsucceq;': '\u2ab0\u0338', 'nsup;': '\u2285', 'nsupE;': '\u2ac6\u0338', 'nsupe;': '\u2289', 'nsupset;': '\u2283\u20d2', 'nsupseteq;': '\u2289', 'nsupseteqq;': '\u2ac6\u0338', 'ntgl;': '\u2279', 'Ntilde': '\xd1', 'ntilde': '\xf1', 'Ntilde;': '\xd1', 'ntilde;': '\xf1', 'ntlg;': '\u2278', 'ntriangleleft;': '\u22ea', 'ntrianglelefteq;': '\u22ec', 'ntriangleright;': '\u22eb', 'ntrianglerighteq;': '\u22ed', 'Nu;': '\u039d', 'nu;': '\u03bd', 'num;': '#', 'numero;': '\u2116', 'numsp;': '\u2007', 'nvap;': '\u224d\u20d2', 'nVDash;': '\u22af', 'nVdash;': '\u22ae', 'nvDash;': '\u22ad', 'nvdash;': '\u22ac', 'nvge;': '\u2265\u20d2', 'nvgt;': '>\u20d2', 'nvHarr;': '\u2904', 'nvinfin;': '\u29de', 'nvlArr;': '\u2902', 'nvle;': '\u2264\u20d2', 'nvlt;': '<\u20d2', 'nvltrie;': '\u22b4\u20d2', 'nvrArr;': '\u2903', 'nvrtrie;': '\u22b5\u20d2', 'nvsim;': '\u223c\u20d2', 'nwarhk;': '\u2923', 'nwArr;': '\u21d6', 'nwarr;': '\u2196', 'nwarrow;': '\u2196', 'nwnear;': '\u2927', 'Oacute': '\xd3', 'oacute': '\xf3', 'Oacute;': '\xd3', 'oacute;': '\xf3', 'oast;': '\u229b', 'ocir;': '\u229a', 'Ocirc': '\xd4', 'ocirc': '\xf4', 'Ocirc;': '\xd4', 'ocirc;': '\xf4', 'Ocy;': '\u041e', 'ocy;': 
'\u043e', 'odash;': '\u229d', 'Odblac;': '\u0150', 'odblac;': '\u0151', 'odiv;': '\u2a38', 'odot;': '\u2299', 'odsold;': '\u29bc', 'OElig;': '\u0152', 'oelig;': '\u0153', 'ofcir;': '\u29bf', 'Ofr;': '\U0001d512', 'ofr;': '\U0001d52c', 'ogon;': '\u02db', 'Ograve': '\xd2', 'ograve': '\xf2', 'Ograve;': '\xd2', 'ograve;': '\xf2', 'ogt;': '\u29c1', 'ohbar;': '\u29b5', 'ohm;': '\u03a9', 'oint;': '\u222e', 'olarr;': '\u21ba', 'olcir;': '\u29be', 'olcross;': '\u29bb', 'oline;': '\u203e', 'olt;': '\u29c0', 'Omacr;': '\u014c', 'omacr;': '\u014d', 'Omega;': '\u03a9', 'omega;': '\u03c9', 'Omicron;': '\u039f', 'omicron;': '\u03bf', 'omid;': '\u29b6', 'ominus;': '\u2296', 'Oopf;': '\U0001d546', 'oopf;': '\U0001d560', 'opar;': '\u29b7', 'OpenCurlyDoubleQuote;': '\u201c', 'OpenCurlyQuote;': '\u2018', 'operp;': '\u29b9', 'oplus;': '\u2295', 'Or;': '\u2a54', 'or;': '\u2228', 'orarr;': '\u21bb', 'ord;': '\u2a5d', 'order;': '\u2134', 'orderof;': '\u2134', 'ordf': '\xaa', 'ordf;': '\xaa', 'ordm': '\xba', 'ordm;': '\xba', 'origof;': '\u22b6', 'oror;': '\u2a56', 'orslope;': '\u2a57', 'orv;': '\u2a5b', 'oS;': '\u24c8', 'Oscr;': '\U0001d4aa', 'oscr;': '\u2134', 'Oslash': '\xd8', 'oslash': '\xf8', 'Oslash;': '\xd8', 'oslash;': '\xf8', 'osol;': '\u2298', 'Otilde': '\xd5', 'otilde': '\xf5', 'Otilde;': '\xd5', 'otilde;': '\xf5', 'Otimes;': '\u2a37', 'otimes;': '\u2297', 'otimesas;': '\u2a36', 'Ouml': '\xd6', 'ouml': '\xf6', 'Ouml;': '\xd6', 'ouml;': '\xf6', 'ovbar;': '\u233d', 'OverBar;': '\u203e', 'OverBrace;': '\u23de', 'OverBracket;': '\u23b4', 'OverParenthesis;': '\u23dc', 'par;': '\u2225', 'para': '\xb6', 'para;': '\xb6', 'parallel;': '\u2225', 'parsim;': '\u2af3', 'parsl;': '\u2afd', 'part;': '\u2202', 'PartialD;': '\u2202', 'Pcy;': '\u041f', 'pcy;': '\u043f', 'percnt;': '%', 'period;': '.', 'permil;': '\u2030', 'perp;': '\u22a5', 'pertenk;': '\u2031', 'Pfr;': '\U0001d513', 'pfr;': '\U0001d52d', 'Phi;': '\u03a6', 'phi;': '\u03c6', 'phiv;': '\u03d5', 'phmmat;': '\u2133', 'phone;': 
'\u260e', 'Pi;': '\u03a0', 'pi;': '\u03c0', 'pitchfork;': '\u22d4', 'piv;': '\u03d6', 'planck;': '\u210f', 'planckh;': '\u210e', 'plankv;': '\u210f', 'plus;': '+', 'plusacir;': '\u2a23', 'plusb;': '\u229e', 'pluscir;': '\u2a22', 'plusdo;': '\u2214', 'plusdu;': '\u2a25', 'pluse;': '\u2a72', 'PlusMinus;': '\xb1', 'plusmn': '\xb1', 'plusmn;': '\xb1', 'plussim;': '\u2a26', 'plustwo;': '\u2a27', 'pm;': '\xb1', 'Poincareplane;': '\u210c', 'pointint;': '\u2a15', 'Popf;': '\u2119', 'popf;': '\U0001d561', 'pound': '\xa3', 'pound;': '\xa3', 'Pr;': '\u2abb', 'pr;': '\u227a', 'prap;': '\u2ab7', 'prcue;': '\u227c', 'prE;': '\u2ab3', 'pre;': '\u2aaf', 'prec;': '\u227a', 'precapprox;': '\u2ab7', 'preccurlyeq;': '\u227c', 'Precedes;': '\u227a', 'PrecedesEqual;': '\u2aaf', 'PrecedesSlantEqual;': '\u227c', 'PrecedesTilde;': '\u227e', 'preceq;': '\u2aaf', 'precnapprox;': '\u2ab9', 'precneqq;': '\u2ab5', 'precnsim;': '\u22e8', 'precsim;': '\u227e', 'Prime;': '\u2033', 'prime;': '\u2032', 'primes;': '\u2119', 'prnap;': '\u2ab9', 'prnE;': '\u2ab5', 'prnsim;': '\u22e8', 'prod;': '\u220f', 'Product;': '\u220f', 'profalar;': '\u232e', 'profline;': '\u2312', 'profsurf;': '\u2313', 'prop;': '\u221d', 'Proportion;': '\u2237', 'Proportional;': '\u221d', 'propto;': '\u221d', 'prsim;': '\u227e', 'prurel;': '\u22b0', 'Pscr;': '\U0001d4ab', 'pscr;': '\U0001d4c5', 'Psi;': '\u03a8', 'psi;': '\u03c8', 'puncsp;': '\u2008', 'Qfr;': '\U0001d514', 'qfr;': '\U0001d52e', 'qint;': '\u2a0c', 'Qopf;': '\u211a', 'qopf;': '\U0001d562', 'qprime;': '\u2057', 'Qscr;': '\U0001d4ac', 'qscr;': '\U0001d4c6', 'quaternions;': '\u210d', 'quatint;': '\u2a16', 'quest;': '?', 'questeq;': '\u225f', 'QUOT': '"', 'quot': '"', 'QUOT;': '"', 'quot;': '"', 'rAarr;': '\u21db', 'race;': '\u223d\u0331', 'Racute;': '\u0154', 'racute;': '\u0155', 'radic;': '\u221a', 'raemptyv;': '\u29b3', 'Rang;': '\u27eb', 'rang;': '\u27e9', 'rangd;': '\u2992', 'range;': '\u29a5', 'rangle;': '\u27e9', 'raquo': '\xbb', 'raquo;': '\xbb', 'Rarr;': 
'\u21a0', 'rArr;': '\u21d2', 'rarr;': '\u2192', 'rarrap;': '\u2975', 'rarrb;': '\u21e5', 'rarrbfs;': '\u2920', 'rarrc;': '\u2933', 'rarrfs;': '\u291e', 'rarrhk;': '\u21aa', 'rarrlp;': '\u21ac', 'rarrpl;': '\u2945', 'rarrsim;': '\u2974', 'Rarrtl;': '\u2916', 'rarrtl;': '\u21a3', 'rarrw;': '\u219d', 'rAtail;': '\u291c', 'ratail;': '\u291a', 'ratio;': '\u2236', 'rationals;': '\u211a', 'RBarr;': '\u2910', 'rBarr;': '\u290f', 'rbarr;': '\u290d', 'rbbrk;': '\u2773', 'rbrace;': '}', 'rbrack;': ']', 'rbrke;': '\u298c', 'rbrksld;': '\u298e', 'rbrkslu;': '\u2990', 'Rcaron;': '\u0158', 'rcaron;': '\u0159', 'Rcedil;': '\u0156', 'rcedil;': '\u0157', 'rceil;': '\u2309', 'rcub;': '}', 'Rcy;': '\u0420', 'rcy;': '\u0440', 'rdca;': '\u2937', 'rdldhar;': '\u2969', 'rdquo;': '\u201d', 'rdquor;': '\u201d', 'rdsh;': '\u21b3', 'Re;': '\u211c', 'real;': '\u211c', 'realine;': '\u211b', 'realpart;': '\u211c', 'reals;': '\u211d', 'rect;': '\u25ad', 'REG': '\xae', 'reg': '\xae', 'REG;': '\xae', 'reg;': '\xae', 'ReverseElement;': '\u220b', 'ReverseEquilibrium;': '\u21cb', 'ReverseUpEquilibrium;': '\u296f', 'rfisht;': '\u297d', 'rfloor;': '\u230b', 'Rfr;': '\u211c', 'rfr;': '\U0001d52f', 'rHar;': '\u2964', 'rhard;': '\u21c1', 'rharu;': '\u21c0', 'rharul;': '\u296c', 'Rho;': '\u03a1', 'rho;': '\u03c1', 'rhov;': '\u03f1', 'RightAngleBracket;': '\u27e9', 'RightArrow;': '\u2192', 'Rightarrow;': '\u21d2', 'rightarrow;': '\u2192', 'RightArrowBar;': '\u21e5', 'RightArrowLeftArrow;': '\u21c4', 'rightarrowtail;': '\u21a3', 'RightCeiling;': '\u2309', 'RightDoubleBracket;': '\u27e7', 'RightDownTeeVector;': '\u295d', 'RightDownVector;': '\u21c2', 'RightDownVectorBar;': '\u2955', 'RightFloor;': '\u230b', 'rightharpoondown;': '\u21c1', 'rightharpoonup;': '\u21c0', 'rightleftarrows;': '\u21c4', 'rightleftharpoons;': '\u21cc', 'rightrightarrows;': '\u21c9', 'rightsquigarrow;': '\u219d', 'RightTee;': '\u22a2', 'RightTeeArrow;': '\u21a6', 'RightTeeVector;': '\u295b', 'rightthreetimes;': '\u22cc', 
'RightTriangle;': '\u22b3', 'RightTriangleBar;': '\u29d0', 'RightTriangleEqual;': '\u22b5', 'RightUpDownVector;': '\u294f', 'RightUpTeeVector;': '\u295c', 'RightUpVector;': '\u21be', 'RightUpVectorBar;': '\u2954', 'RightVector;': '\u21c0', 'RightVectorBar;': '\u2953', 'ring;': '\u02da', 'risingdotseq;': '\u2253', 'rlarr;': '\u21c4', 'rlhar;': '\u21cc', 'rlm;': '\u200f', 'rmoust;': '\u23b1', 'rmoustache;': '\u23b1', 'rnmid;': '\u2aee', 'roang;': '\u27ed', 'roarr;': '\u21fe', 'robrk;': '\u27e7', 'ropar;': '\u2986', 'Ropf;': '\u211d', 'ropf;': '\U0001d563', 'roplus;': '\u2a2e', 'rotimes;': '\u2a35', 'RoundImplies;': '\u2970', 'rpar;': ')', 'rpargt;': '\u2994', 'rppolint;': '\u2a12', 'rrarr;': '\u21c9', 'Rrightarrow;': '\u21db', 'rsaquo;': '\u203a', 'Rscr;': '\u211b', 'rscr;': '\U0001d4c7', 'Rsh;': '\u21b1', 'rsh;': '\u21b1', 'rsqb;': ']', 'rsquo;': '\u2019', 'rsquor;': '\u2019', 'rthree;': '\u22cc', 'rtimes;': '\u22ca', 'rtri;': '\u25b9', 'rtrie;': '\u22b5', 'rtrif;': '\u25b8', 'rtriltri;': '\u29ce', 'RuleDelayed;': '\u29f4', 'ruluhar;': '\u2968', 'rx;': '\u211e', 'Sacute;': '\u015a', 'sacute;': '\u015b', 'sbquo;': '\u201a', 'Sc;': '\u2abc', 'sc;': '\u227b', 'scap;': '\u2ab8', 'Scaron;': '\u0160', 'scaron;': '\u0161', 'sccue;': '\u227d', 'scE;': '\u2ab4', 'sce;': '\u2ab0', 'Scedil;': '\u015e', 'scedil;': '\u015f', 'Scirc;': '\u015c', 'scirc;': '\u015d', 'scnap;': '\u2aba', 'scnE;': '\u2ab6', 'scnsim;': '\u22e9', 'scpolint;': '\u2a13', 'scsim;': '\u227f', 'Scy;': '\u0421', 'scy;': '\u0441', 'sdot;': '\u22c5', 'sdotb;': '\u22a1', 'sdote;': '\u2a66', 'searhk;': '\u2925', 'seArr;': '\u21d8', 'searr;': '\u2198', 'searrow;': '\u2198', 'sect': '\xa7', 'sect;': '\xa7', 'semi;': ';', 'seswar;': '\u2929', 'setminus;': '\u2216', 'setmn;': '\u2216', 'sext;': '\u2736', 'Sfr;': '\U0001d516', 'sfr;': '\U0001d530', 'sfrown;': '\u2322', 'sharp;': '\u266f', 'SHCHcy;': '\u0429', 'shchcy;': '\u0449', 'SHcy;': '\u0428', 'shcy;': '\u0448', 'ShortDownArrow;': '\u2193', 'ShortLeftArrow;': 
'\u2190', 'shortmid;': '\u2223', 'shortparallel;': '\u2225', 'ShortRightArrow;': '\u2192', 'ShortUpArrow;': '\u2191', 'shy': '\xad', 'shy;': '\xad', 'Sigma;': '\u03a3', 'sigma;': '\u03c3', 'sigmaf;': '\u03c2', 'sigmav;': '\u03c2', 'sim;': '\u223c', 'simdot;': '\u2a6a', 'sime;': '\u2243', 'simeq;': '\u2243', 'simg;': '\u2a9e', 'simgE;': '\u2aa0', 'siml;': '\u2a9d', 'simlE;': '\u2a9f', 'simne;': '\u2246', 'simplus;': '\u2a24', 'simrarr;': '\u2972', 'slarr;': '\u2190', 'SmallCircle;': '\u2218', 'smallsetminus;': '\u2216', 'smashp;': '\u2a33', 'smeparsl;': '\u29e4', 'smid;': '\u2223', 'smile;': '\u2323', 'smt;': '\u2aaa', 'smte;': '\u2aac', 'smtes;': '\u2aac\ufe00', 'SOFTcy;': '\u042c', 'softcy;': '\u044c', 'sol;': '/', 'solb;': '\u29c4', 'solbar;': '\u233f', 'Sopf;': '\U0001d54a', 'sopf;': '\U0001d564', 'spades;': '\u2660', 'spadesuit;': '\u2660', 'spar;': '\u2225', 'sqcap;': '\u2293', 'sqcaps;': '\u2293\ufe00', 'sqcup;': '\u2294', 'sqcups;': '\u2294\ufe00', 'Sqrt;': '\u221a', 'sqsub;': '\u228f', 'sqsube;': '\u2291', 'sqsubset;': '\u228f', 'sqsubseteq;': '\u2291', 'sqsup;': '\u2290', 'sqsupe;': '\u2292', 'sqsupset;': '\u2290', 'sqsupseteq;': '\u2292', 'squ;': '\u25a1', 'Square;': '\u25a1', 'square;': '\u25a1', 'SquareIntersection;': '\u2293', 'SquareSubset;': '\u228f', 'SquareSubsetEqual;': '\u2291', 'SquareSuperset;': '\u2290', 'SquareSupersetEqual;': '\u2292', 'SquareUnion;': '\u2294', 'squarf;': '\u25aa', 'squf;': '\u25aa', 'srarr;': '\u2192', 'Sscr;': '\U0001d4ae', 'sscr;': '\U0001d4c8', 'ssetmn;': '\u2216', 'ssmile;': '\u2323', 'sstarf;': '\u22c6', 'Star;': '\u22c6', 'star;': '\u2606', 'starf;': '\u2605', 'straightepsilon;': '\u03f5', 'straightphi;': '\u03d5', 'strns;': '\xaf', 'Sub;': '\u22d0', 'sub;': '\u2282', 'subdot;': '\u2abd', 'subE;': '\u2ac5', 'sube;': '\u2286', 'subedot;': '\u2ac3', 'submult;': '\u2ac1', 'subnE;': '\u2acb', 'subne;': '\u228a', 'subplus;': '\u2abf', 'subrarr;': '\u2979', 'Subset;': '\u22d0', 'subset;': '\u2282', 'subseteq;': '\u2286', 
'subseteqq;': '\u2ac5', 'SubsetEqual;': '\u2286', 'subsetneq;': '\u228a', 'subsetneqq;': '\u2acb', 'subsim;': '\u2ac7', 'subsub;': '\u2ad5', 'subsup;': '\u2ad3', 'succ;': '\u227b', 'succapprox;': '\u2ab8', 'succcurlyeq;': '\u227d', 'Succeeds;': '\u227b', 'SucceedsEqual;': '\u2ab0', 'SucceedsSlantEqual;': '\u227d', 'SucceedsTilde;': '\u227f', 'succeq;': '\u2ab0', 'succnapprox;': '\u2aba', 'succneqq;': '\u2ab6', 'succnsim;': '\u22e9', 'succsim;': '\u227f', 'SuchThat;': '\u220b', 'Sum;': '\u2211', 'sum;': '\u2211', 'sung;': '\u266a', 'sup1': '\xb9', 'sup1;': '\xb9', 'sup2': '\xb2', 'sup2;': '\xb2', 'sup3': '\xb3', 'sup3;': '\xb3', 'Sup;': '\u22d1', 'sup;': '\u2283', 'supdot;': '\u2abe', 'supdsub;': '\u2ad8', 'supE;': '\u2ac6', 'supe;': '\u2287', 'supedot;': '\u2ac4', 'Superset;': '\u2283', 'SupersetEqual;': '\u2287', 'suphsol;': '\u27c9', 'suphsub;': '\u2ad7', 'suplarr;': '\u297b', 'supmult;': '\u2ac2', 'supnE;': '\u2acc', 'supne;': '\u228b', 'supplus;': '\u2ac0', 'Supset;': '\u22d1', 'supset;': '\u2283', 'supseteq;': '\u2287', 'supseteqq;': '\u2ac6', 'supsetneq;': '\u228b', 'supsetneqq;': '\u2acc', 'supsim;': '\u2ac8', 'supsub;': '\u2ad4', 'supsup;': '\u2ad6', 'swarhk;': '\u2926', 'swArr;': '\u21d9', 'swarr;': '\u2199', 'swarrow;': '\u2199', 'swnwar;': '\u292a', 'szlig': '\xdf', 'szlig;': '\xdf', 'Tab;': '\t', 'target;': '\u2316', 'Tau;': '\u03a4', 'tau;': '\u03c4', 'tbrk;': '\u23b4', 'Tcaron;': '\u0164', 'tcaron;': '\u0165', 'Tcedil;': '\u0162', 'tcedil;': '\u0163', 'Tcy;': '\u0422', 'tcy;': '\u0442', 'tdot;': '\u20db', 'telrec;': '\u2315', 'Tfr;': '\U0001d517', 'tfr;': '\U0001d531', 'there4;': '\u2234', 'Therefore;': '\u2234', 'therefore;': '\u2234', 'Theta;': '\u0398', 'theta;': '\u03b8', 'thetasym;': '\u03d1', 'thetav;': '\u03d1', 'thickapprox;': '\u2248', 'thicksim;': '\u223c', 'ThickSpace;': '\u205f\u200a', 'thinsp;': '\u2009', 'ThinSpace;': '\u2009', 'thkap;': '\u2248', 'thksim;': '\u223c', 'THORN': '\xde', 'thorn': '\xfe', 'THORN;': '\xde', 'thorn;': '\xfe', 
'Tilde;': '\u223c', 'tilde;': '\u02dc', 'TildeEqual;': '\u2243', 'TildeFullEqual;': '\u2245', 'TildeTilde;': '\u2248', 'times': '\xd7', 'times;': '\xd7', 'timesb;': '\u22a0', 'timesbar;': '\u2a31', 'timesd;': '\u2a30', 'tint;': '\u222d', 'toea;': '\u2928', 'top;': '\u22a4', 'topbot;': '\u2336', 'topcir;': '\u2af1', 'Topf;': '\U0001d54b', 'topf;': '\U0001d565', 'topfork;': '\u2ada', 'tosa;': '\u2929', 'tprime;': '\u2034', 'TRADE;': '\u2122', 'trade;': '\u2122', 'triangle;': '\u25b5', 'triangledown;': '\u25bf', 'triangleleft;': '\u25c3', 'trianglelefteq;': '\u22b4', 'triangleq;': '\u225c', 'triangleright;': '\u25b9', 'trianglerighteq;': '\u22b5', 'tridot;': '\u25ec', 'trie;': '\u225c', 'triminus;': '\u2a3a', 'TripleDot;': '\u20db', 'triplus;': '\u2a39', 'trisb;': '\u29cd', 'tritime;': '\u2a3b', 'trpezium;': '\u23e2', 'Tscr;': '\U0001d4af', 'tscr;': '\U0001d4c9', 'TScy;': '\u0426', 'tscy;': '\u0446', 'TSHcy;': '\u040b', 'tshcy;': '\u045b', 'Tstrok;': '\u0166', 'tstrok;': '\u0167', 'twixt;': '\u226c', 'twoheadleftarrow;': '\u219e', 'twoheadrightarrow;': '\u21a0', 'Uacute': '\xda', 'uacute': '\xfa', 'Uacute;': '\xda', 'uacute;': '\xfa', 'Uarr;': '\u219f', 'uArr;': '\u21d1', 'uarr;': '\u2191', 'Uarrocir;': '\u2949', 'Ubrcy;': '\u040e', 'ubrcy;': '\u045e', 'Ubreve;': '\u016c', 'ubreve;': '\u016d', 'Ucirc': '\xdb', 'ucirc': '\xfb', 'Ucirc;': '\xdb', 'ucirc;': '\xfb', 'Ucy;': '\u0423', 'ucy;': '\u0443', 'udarr;': '\u21c5', 'Udblac;': '\u0170', 'udblac;': '\u0171', 'udhar;': '\u296e', 'ufisht;': '\u297e', 'Ufr;': '\U0001d518', 'ufr;': '\U0001d532', 'Ugrave': '\xd9', 'ugrave': '\xf9', 'Ugrave;': '\xd9', 'ugrave;': '\xf9', 'uHar;': '\u2963', 'uharl;': '\u21bf', 'uharr;': '\u21be', 'uhblk;': '\u2580', 'ulcorn;': '\u231c', 'ulcorner;': '\u231c', 'ulcrop;': '\u230f', 'ultri;': '\u25f8', 'Umacr;': '\u016a', 'umacr;': '\u016b', 'uml': '\xa8', 'uml;': '\xa8', 'UnderBar;': '_', 'UnderBrace;': '\u23df', 'UnderBracket;': '\u23b5', 'UnderParenthesis;': '\u23dd', 'Union;': '\u22c3', 
'UnionPlus;': '\u228e', 'Uogon;': '\u0172', 'uogon;': '\u0173', 'Uopf;': '\U0001d54c', 'uopf;': '\U0001d566', 'UpArrow;': '\u2191', 'Uparrow;': '\u21d1', 'uparrow;': '\u2191', 'UpArrowBar;': '\u2912', 'UpArrowDownArrow;': '\u21c5', 'UpDownArrow;': '\u2195', 'Updownarrow;': '\u21d5', 'updownarrow;': '\u2195', 'UpEquilibrium;': '\u296e', 'upharpoonleft;': '\u21bf', 'upharpoonright;': '\u21be', 'uplus;': '\u228e', 'UpperLeftArrow;': '\u2196', 'UpperRightArrow;': '\u2197', 'Upsi;': '\u03d2', 'upsi;': '\u03c5', 'upsih;': '\u03d2', 'Upsilon;': '\u03a5', 'upsilon;': '\u03c5', 'UpTee;': '\u22a5', 'UpTeeArrow;': '\u21a5', 'upuparrows;': '\u21c8', 'urcorn;': '\u231d', 'urcorner;': '\u231d', 'urcrop;': '\u230e', 'Uring;': '\u016e', 'uring;': '\u016f', 'urtri;': '\u25f9', 'Uscr;': '\U0001d4b0', 'uscr;': '\U0001d4ca', 'utdot;': '\u22f0', 'Utilde;': '\u0168', 'utilde;': '\u0169', 'utri;': '\u25b5', 'utrif;': '\u25b4', 'uuarr;': '\u21c8', 'Uuml': '\xdc', 'uuml': '\xfc', 'Uuml;': '\xdc', 'uuml;': '\xfc', 'uwangle;': '\u29a7', 'vangrt;': '\u299c', 'varepsilon;': '\u03f5', 'varkappa;': '\u03f0', 'varnothing;': '\u2205', 'varphi;': '\u03d5', 'varpi;': '\u03d6', 'varpropto;': '\u221d', 'vArr;': '\u21d5', 'varr;': '\u2195', 'varrho;': '\u03f1', 'varsigma;': '\u03c2', 'varsubsetneq;': '\u228a\ufe00', 'varsubsetneqq;': '\u2acb\ufe00', 'varsupsetneq;': '\u228b\ufe00', 'varsupsetneqq;': '\u2acc\ufe00', 'vartheta;': '\u03d1', 'vartriangleleft;': '\u22b2', 'vartriangleright;': '\u22b3', 'Vbar;': '\u2aeb', 'vBar;': '\u2ae8', 'vBarv;': '\u2ae9', 'Vcy;': '\u0412', 'vcy;': '\u0432', 'VDash;': '\u22ab', 'Vdash;': '\u22a9', 'vDash;': '\u22a8', 'vdash;': '\u22a2', 'Vdashl;': '\u2ae6', 'Vee;': '\u22c1', 'vee;': '\u2228', 'veebar;': '\u22bb', 'veeeq;': '\u225a', 'vellip;': '\u22ee', 'Verbar;': '\u2016', 'verbar;': '|', 'Vert;': '\u2016', 'vert;': '|', 'VerticalBar;': '\u2223', 'VerticalLine;': '|', 'VerticalSeparator;': '\u2758', 'VerticalTilde;': '\u2240', 'VeryThinSpace;': '\u200a', 'Vfr;': 
'\U0001d519', 'vfr;': '\U0001d533', 'vltri;': '\u22b2', 'vnsub;': '\u2282\u20d2', 'vnsup;': '\u2283\u20d2', 'Vopf;': '\U0001d54d', 'vopf;': '\U0001d567', 'vprop;': '\u221d', 'vrtri;': '\u22b3', 'Vscr;': '\U0001d4b1', 'vscr;': '\U0001d4cb', 'vsubnE;': '\u2acb\ufe00', 'vsubne;': '\u228a\ufe00', 'vsupnE;': '\u2acc\ufe00', 'vsupne;': '\u228b\ufe00', 'Vvdash;': '\u22aa', 'vzigzag;': '\u299a', 'Wcirc;': '\u0174', 'wcirc;': '\u0175', 'wedbar;': '\u2a5f', 'Wedge;': '\u22c0', 'wedge;': '\u2227', 'wedgeq;': '\u2259', 'weierp;': '\u2118', 'Wfr;': '\U0001d51a', 'wfr;': '\U0001d534', 'Wopf;': '\U0001d54e', 'wopf;': '\U0001d568', 'wp;': '\u2118', 'wr;': '\u2240', 'wreath;': '\u2240', 'Wscr;': '\U0001d4b2', 'wscr;': '\U0001d4cc', 'xcap;': '\u22c2', 'xcirc;': '\u25ef', 'xcup;': '\u22c3', 'xdtri;': '\u25bd', 'Xfr;': '\U0001d51b', 'xfr;': '\U0001d535', 'xhArr;': '\u27fa', 'xharr;': '\u27f7', 'Xi;': '\u039e', 'xi;': '\u03be', 'xlArr;': '\u27f8', 'xlarr;': '\u27f5', 'xmap;': '\u27fc', 'xnis;': '\u22fb', 'xodot;': '\u2a00', 'Xopf;': '\U0001d54f', 'xopf;': '\U0001d569', 'xoplus;': '\u2a01', 'xotime;': '\u2a02', 'xrArr;': '\u27f9', 'xrarr;': '\u27f6', 'Xscr;': '\U0001d4b3', 'xscr;': '\U0001d4cd', 'xsqcup;': '\u2a06', 'xuplus;': '\u2a04', 'xutri;': '\u25b3', 'xvee;': '\u22c1', 'xwedge;': '\u22c0', 'Yacute': '\xdd', 'yacute': '\xfd', 'Yacute;': '\xdd', 'yacute;': '\xfd', 'YAcy;': '\u042f', 'yacy;': '\u044f', 'Ycirc;': '\u0176', 'ycirc;': '\u0177', 'Ycy;': '\u042b', 'ycy;': '\u044b', 'yen': '\xa5', 'yen;': '\xa5', 'Yfr;': '\U0001d51c', 'yfr;': '\U0001d536', 'YIcy;': '\u0407', 'yicy;': '\u0457', 'Yopf;': '\U0001d550', 'yopf;': '\U0001d56a', 'Yscr;': '\U0001d4b4', 'yscr;': '\U0001d4ce', 'YUcy;': '\u042e', 'yucy;': '\u044e', 'yuml': '\xff', 'Yuml;': '\u0178', 'yuml;': '\xff', 'Zacute;': '\u0179', 'zacute;': '\u017a', 'Zcaron;': '\u017d', 'zcaron;': '\u017e', 'Zcy;': '\u0417', 'zcy;': '\u0437', 'Zdot;': '\u017b', 'zdot;': '\u017c', 'zeetrf;': '\u2128', 'ZeroWidthSpace;': '\u200b', 'Zeta;': 
'\u0396', 'zeta;': '\u03b6', 'Zfr;': '\u2128', 'zfr;': '\U0001d537', 'ZHcy;': '\u0416', 'zhcy;': '\u0436', 'zigrarr;': '\u21dd', 'Zopf;': '\u2124', 'zopf;': '\U0001d56b', 'Zscr;': '\U0001d4b5', 'zscr;': '\U0001d4cf', 'zwj;': '\u200d', 'zwnj;': '\u200c', } # maps the Unicode codepoint to the HTML entity name codepoint2name = {} # maps the HTML entity name to the character # (or a character reference if the character is outside the Latin-1 range) entitydefs = {} for (name, codepoint) in name2codepoint.items(): codepoint2name[codepoint] = name entitydefs[name] = chr(codepoint) del name, codepoint
schristophe/starspot
refs/heads/master
starspot/color/__init__.py
2
# # import bolcor as bc
KamranMackey/readthedocs.org
refs/heads/master
readthedocs/rtd_tests/tests/test_views.py
10
from django.test import TestCase from django.contrib.auth.models import User from projects.models import Project from projects.forms import UpdateProjectForm class Testmaker(TestCase): fixtures = ["eric"] def test_imported_docs(self): # Test Import self.client.login(username='eric', password='test') user = User.objects.get(username='eric') r = self.client.get('/dashboard/', {}) self.assertEqual(r.status_code, 200) r = self.client.get('/dashboard/import/manual/', {}) self.assertEqual(r.status_code, 200) form = UpdateProjectForm(data={ 'name': 'Django Kong', 'repo': 'https://github.com/ericholscher/django-kong', 'repo_type': 'git', 'description': 'OOHHH AH AH AH KONG SMASH', 'language': 'en', 'default_branch': '', 'project_url': 'http://django-kong.rtfd.org', 'default_version': 'latest', 'privacy_level': 'public', 'version_privacy_level': 'public', 'python_interpreter': 'python', 'documentation_type': 'sphinx', 'csrfmiddlewaretoken': '34af7c8a5ba84b84564403a280d9a9be', }, user=user) _ = form.save() _ = Project.objects.get(slug='django-kong') r = self.client.get('/docs/django-kong/en/latest/', {}) self.assertEqual(r.status_code, 200) r = self.client.get('/dashboard/django-kong/versions/', {}) self.assertEqual(r.status_code, 200) r = self.client.get('/builds/django-kong/') self.assertEqual(r.status_code, 200) r = self.client.get('/dashboard/django-kong/edit/', {}) self.assertEqual(r.status_code, 200) r = self.client.get('/dashboard/django-kong/subprojects/', {}) self.assertEqual(r.status_code, 200)
nohona/cron-crm
refs/heads/master
usr/local/certbot/certbot/plugins/common_test.py
4
"""Tests for certbot.plugins.common.""" import os import shutil import tempfile import unittest import mock import OpenSSL from acme import challenges from acme import jose from certbot import achallenges from certbot.tests import acme_util from certbot.tests import util as test_util class NamespaceFunctionsTest(unittest.TestCase): """Tests for certbot.plugins.common.*_namespace functions.""" def test_option_namespace(self): from certbot.plugins.common import option_namespace self.assertEqual("foo-", option_namespace("foo")) def test_dest_namespace(self): from certbot.plugins.common import dest_namespace self.assertEqual("foo_", dest_namespace("foo")) def test_dest_namespace_with_dashes(self): from certbot.plugins.common import dest_namespace self.assertEqual("foo_bar_", dest_namespace("foo-bar")) class PluginTest(unittest.TestCase): """Test for certbot.plugins.common.Plugin.""" def setUp(self): from certbot.plugins.common import Plugin class MockPlugin(Plugin): # pylint: disable=missing-docstring @classmethod def add_parser_arguments(cls, add): add("foo-bar", dest="different_to_foo_bar", x=1, y=None) self.plugin_cls = MockPlugin self.config = mock.MagicMock() self.plugin = MockPlugin(config=self.config, name="mock") def test_init(self): self.assertEqual("mock", self.plugin.name) self.assertEqual(self.config, self.plugin.config) def test_option_namespace(self): self.assertEqual("mock-", self.plugin.option_namespace) def test_option_name(self): self.assertEqual("mock-foo_bar", self.plugin.option_name("foo_bar")) def test_dest_namespace(self): self.assertEqual("mock_", self.plugin.dest_namespace) def test_dest(self): self.assertEqual("mock_foo_bar", self.plugin.dest("foo-bar")) self.assertEqual("mock_foo_bar", self.plugin.dest("foo_bar")) def test_conf(self): self.assertEqual(self.config.mock_foo_bar, self.plugin.conf("foo-bar")) def test_inject_parser_options(self): parser = mock.MagicMock() self.plugin_cls.inject_parser_options(parser, "mock") # note that 
inject_parser_options doesn't check if dest has # correct prefix parser.add_argument.assert_called_once_with( "--mock-foo-bar", dest="different_to_foo_bar", x=1, y=None) class AddrTest(unittest.TestCase): """Tests for certbot.client.plugins.common.Addr.""" def setUp(self): from certbot.plugins.common import Addr self.addr1 = Addr.fromstring("192.168.1.1") self.addr2 = Addr.fromstring("192.168.1.1:*") self.addr3 = Addr.fromstring("192.168.1.1:80") self.addr4 = Addr.fromstring("[fe00::1]") self.addr5 = Addr.fromstring("[fe00::1]:*") self.addr6 = Addr.fromstring("[fe00::1]:80") self.addr7 = Addr.fromstring("[fe00::1]:5") self.addr8 = Addr.fromstring("[fe00:1:2:3:4:5:6:7:8:9]:8080") def test_fromstring(self): self.assertEqual(self.addr1.get_addr(), "192.168.1.1") self.assertEqual(self.addr1.get_port(), "") self.assertEqual(self.addr2.get_addr(), "192.168.1.1") self.assertEqual(self.addr2.get_port(), "*") self.assertEqual(self.addr3.get_addr(), "192.168.1.1") self.assertEqual(self.addr3.get_port(), "80") self.assertEqual(self.addr4.get_addr(), "[fe00::1]") self.assertEqual(self.addr4.get_port(), "") self.assertEqual(self.addr5.get_addr(), "[fe00::1]") self.assertEqual(self.addr5.get_port(), "*") self.assertEqual(self.addr6.get_addr(), "[fe00::1]") self.assertEqual(self.addr6.get_port(), "80") self.assertEqual(self.addr6.get_ipv6_exploded(), "fe00:0:0:0:0:0:0:1") self.assertEqual(self.addr1.get_ipv6_exploded(), "") self.assertEqual(self.addr7.get_port(), "5") self.assertEqual(self.addr8.get_ipv6_exploded(), "fe00:1:2:3:4:5:6:7") def test_str(self): self.assertEqual(str(self.addr1), "192.168.1.1") self.assertEqual(str(self.addr2), "192.168.1.1:*") self.assertEqual(str(self.addr3), "192.168.1.1:80") self.assertEqual(str(self.addr4), "[fe00::1]") self.assertEqual(str(self.addr5), "[fe00::1]:*") self.assertEqual(str(self.addr6), "[fe00::1]:80") def test_get_addr_obj(self): self.assertEqual(str(self.addr1.get_addr_obj("443")), "192.168.1.1:443") 
self.assertEqual(str(self.addr2.get_addr_obj("")), "192.168.1.1") self.assertEqual(str(self.addr1.get_addr_obj("*")), "192.168.1.1:*") self.assertEqual(str(self.addr4.get_addr_obj("443")), "[fe00::1]:443") self.assertEqual(str(self.addr5.get_addr_obj("")), "[fe00::1]") self.assertEqual(str(self.addr4.get_addr_obj("*")), "[fe00::1]:*") def test_eq(self): self.assertEqual(self.addr1, self.addr2.get_addr_obj("")) self.assertNotEqual(self.addr1, self.addr2) self.assertFalse(self.addr1 == 3333) self.assertEqual(self.addr4, self.addr4.get_addr_obj("")) self.assertNotEqual(self.addr4, self.addr5) self.assertFalse(self.addr4 == 3333) from certbot.plugins.common import Addr self.assertEqual(self.addr4, Addr.fromstring("[fe00:0:0::1]")) self.assertEqual(self.addr4, Addr.fromstring("[fe00:0::0:0:1]")) def test_set_inclusion(self): from certbot.plugins.common import Addr set_a = set([self.addr1, self.addr2]) addr1b = Addr.fromstring("192.168.1.1") addr2b = Addr.fromstring("192.168.1.1:*") set_b = set([addr1b, addr2b]) self.assertEqual(set_a, set_b) set_c = set([self.addr4, self.addr5]) addr4b = Addr.fromstring("[fe00::1]") addr5b = Addr.fromstring("[fe00::1]:*") set_d = set([addr4b, addr5b]) self.assertEqual(set_c, set_d) class TLSSNI01Test(unittest.TestCase): """Tests for certbot.plugins.common.TLSSNI01.""" auth_key = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem")) achalls = [ achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.TLSSNI01(token=b'token1'), "pending"), domain="encryption-example.demo", account_key=auth_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.TLSSNI01(token=b'token2'), "pending"), domain="certbot.demo", account_key=auth_key), ] def setUp(self): self.tempdir = tempfile.mkdtemp() configurator = mock.MagicMock() configurator.config.config_dir = os.path.join(self.tempdir, "config") configurator.config.work_dir = os.path.join(self.tempdir, "work") from 
certbot.plugins.common import TLSSNI01 self.sni = TLSSNI01(configurator=configurator) def tearDown(self): shutil.rmtree(self.tempdir) def test_add_chall(self): self.sni.add_chall(self.achalls[0], 0) self.assertEqual(1, len(self.sni.achalls)) self.assertEqual([0], self.sni.indices) def test_setup_challenge_cert(self): # This is a helper function that can be used for handling # open context managers more elegantly. It avoids dealing with # __enter__ and __exit__ calls. # http://www.voidspace.org.uk/python/mock/helpers.html#mock.mock_open mock_open, mock_safe_open = mock.mock_open(), mock.mock_open() response = challenges.TLSSNI01Response() achall = mock.MagicMock() achall.chall.encode.return_value = "token" key = test_util.load_pyopenssl_private_key("rsa512_key.pem") achall.response_and_validation.return_value = ( response, (test_util.load_cert("cert.pem"), key)) with mock.patch("certbot.plugins.common.open", mock_open, create=True): with mock.patch("certbot.plugins.common.util.safe_open", mock_safe_open): # pylint: disable=protected-access self.assertEqual(response, self.sni._setup_challenge_cert( achall, "randomS1")) # pylint: disable=no-member mock_open.assert_called_once_with(self.sni.get_cert_path(achall), "wb") mock_open.return_value.write.assert_called_once_with( test_util.load_vector("cert.pem")) mock_safe_open.assert_called_once_with( self.sni.get_key_path(achall), "wb", chmod=0o400) mock_safe_open.return_value.write.assert_called_once_with( OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)) if __name__ == "__main__": unittest.main() # pragma: no cover
coreyoconnor/nixops
refs/heads/master
nixops/util.py
2
# -*- coding: utf-8 -*- import os import sys import time import json import copy import fcntl import base64 import select import socket import struct import shutil import tempfile import subprocess import logging import atexit from StringIO import StringIO devnull = open(os.devnull, 'rw') def check_wait(test, initial=10, factor=1, max_tries=60, exception=True): """Call function ‘test’ periodically until it returns True or a timeout occurs.""" wait = initial tries = 0 while tries < max_tries and not test(): wait = wait * factor tries = tries + 1 if tries == max_tries: if exception: raise Exception("operation timed out") return False time.sleep(wait) return True class CommandFailed(Exception): def __init__(self, message, exitcode): self.message = message self.exitcode = exitcode def __str__(self): return "{0} (exit code {1})".format(self.message, self.exitcode) def logged_exec(command, logger, check=True, capture_stdout=False, stdin=None, stdin_string=None, env=None): """ Execute a command with logging using the specified logger. The command itself has to be an iterable of strings, just like subprocess.Popen without shell=True. Keywords stdin and env have the same functionality as well. When calling with capture_stdout=True, a string is returned, which contains everything the programm wrote to stdout. When calling with check=False, the return code isn't checked and the function will return an integer which represents the return code of the program, otherwise a CommandFailed exception is thrown. 
""" if stdin_string is not None: stdin = subprocess.PIPE elif stdin is None: stdin = devnull if capture_stdout: process = subprocess.Popen(command, env=env, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE) fds = [process.stdout, process.stderr] log_fd = process.stderr else: process = subprocess.Popen(command, env=env, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) fds = [process.stdout] log_fd = process.stdout # FIXME: this can deadlock if stdin_string doesn't fit in the # kernel pipe buffer. if stdin_string is not None: process.stdin.write(stdin_string) process.stdin.close() for fd in fds: make_non_blocking(fd) at_new_line = True stdout = "" while len(fds) > 0: # The timeout/poll is to deal with processes (like # VBoxManage) that start children that go into the # background but keep the parent's stdout/stderr open, # preventing an EOF. FIXME: Would be better to catch # SIGCHLD. (r, w, x) = select.select(fds, [], [], 1) if len(r) == 0 and process.poll() is not None: break if capture_stdout and process.stdout in r: data = process.stdout.read() if data == "": fds.remove(process.stdout) else: stdout += data if log_fd in r: data = log_fd.read() if data == "": if not at_new_line: logger.log_end("") fds.remove(log_fd) else: start = 0 while start < len(data): end = data.find('\n', start) if end == -1: logger.log_start(data[start:]) at_new_line = False else: s = data[start:end] if at_new_line: logger.log(s) else: logger.log_end(s) at_new_line = True if end == -1: break start = end + 1 res = process.wait() if check and res != 0: msg = "command ‘{0}’ failed on machine ‘{1}’" err = msg.format(command, logger.machine_name) raise CommandFailed(err, res) return stdout if capture_stdout else res def generate_random_string(length=256): """Generate a base-64 encoded cryptographically strong random string.""" s = os.urandom(length) assert len(s) == length return base64.b64encode(s) def make_non_blocking(fd): fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, 
fcntl.F_GETFL) | os.O_NONBLOCK) def ping_tcp_port(ip, port, timeout=1, ensure_timeout=False): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(timeout) s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0)) try: s.connect((ip, port)) except socket.timeout: return False except: # FIXME: check that we got a transient error (like connection # refused or no route to host). For any other error, throw an # exception. if ensure_timeout: time.sleep(timeout) return False s.shutdown(socket.SHUT_RDWR) return True def wait_for_tcp_port(ip, port, timeout=-1, open=True, callback=None): """Wait until the specified TCP port is open or closed.""" n = 0 while True: if ping_tcp_port(ip, port, ensure_timeout=True) == open: return True if not open: time.sleep(1) n = n + 1 if timeout != -1 and n >= timeout: break if callback: callback() raise Exception("timed out waiting for port {0} on ‘{1}’".format(port, ip)) def ansi_highlight(s, outfile=sys.stderr): return "\033[1;35m" + s + "\033[0m" if outfile.isatty() else s def ansi_warn(s, outfile=sys.stderr): return "\033[1;33m" + s + "\033[0m" if outfile.isatty() else s def ansi_error(s, outfile=sys.stderr): return "\033[1;31m" + s + "\033[0m" if outfile.isatty() else s def ansi_success(s, outfile=sys.stderr): return "\033[1;32m" + s + "\033[0m" if outfile.isatty() else s def _maybe_abspath(s): if s.startswith("http://") or s.startswith("https://") or s.startswith("file://") or s.startswith("channel:"): return s return os.path.abspath(s) def abs_nix_path(x): xs = x.split('=', 1) if len(xs) == 1: return _maybe_abspath(x) return xs[0] + '=' + _maybe_abspath(xs[1]) undefined = object() def attr_property(name, default, type=str): """Define a property that corresponds to a value in the NixOps state file.""" def get(self): s = self._get_attr(name, default) if s == undefined: if default != undefined: return copy.deepcopy(default) raise Exception("deployment attribute ‘{0}’ missing from state file".format(name)) 
if s == None: return None elif type is str: return s elif type is int: return int(s) elif type is bool: return True if s == "1" else False elif type is 'json': return json.loads(s) else: assert False def set(self, x): if x == default: self._del_attr(name) elif type is 'json': self._set_attr(name, json.dumps(x)) else: self._set_attr(name, x) return property(get, set) def create_key_pair(key_name="NixOps auto-generated key", type="ed25519"): key_dir = tempfile.mkdtemp(prefix="nixops-key-tmp") res = subprocess.call(["ssh-keygen", "-t", type, "-f", key_dir + "/key", "-N", '', "-C", key_name], stdout=devnull) if res != 0: raise Exception("unable to generate an SSH key") f = open(key_dir + "/key"); private = f.read(); f.close() f = open(key_dir + "/key.pub"); public = f.read().rstrip(); f.close() shutil.rmtree(key_dir) return (private, public) class SelfDeletingDir(str): def __init__(self, s): str.__init__(s) atexit.register(self._delete) def _delete(self): shutil.rmtree(self) class TeeStderr(StringIO): stderr = None def __init__(self): StringIO.__init__(self) self.stderr = sys.stderr self.logger = logging.getLogger('root') sys.stderr = self def __del__(self): sys.stderr = self.stderr def write(self, data): self.stderr.write(data) for l in data.split('\n'): self.logger.warning(l) def fileno(self): return self.stderr.fileno() def isatty(self): return self.stderr.isatty() class TeeStdout(StringIO): stdout = None def __init__(self): StringIO.__init__(self) self.stdout = sys.stdout self.logger = logging.getLogger('root') sys.stdout = self def __del__(self): sys.stdout = self.stdout def write(self, data): self.stdout.write(data) for l in data.split('\n'): self.logger.info(l) def fileno(self): return self.stdout.fileno() def isatty(self): return self.stdout.isatty() # Borrowed from http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python. 
# Borrowed from http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python.
def which(program):
    """Return the path of executable ``program``.

    If ``program`` contains a directory component it is checked directly;
    otherwise every entry of $PATH is searched in order.

    Raises:
        Exception: if no executable match is found.
    """
    def is_exe(fpath):
        # A match must be a regular file with the execute bit set for this user.
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    fpath, fname = os.path.split(program)
    if fpath:
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            path = path.strip('"')
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file

    # FIX: the original message contained a literal backslash ("\$PATH").
    raise Exception("program ‘{0}’ not found in $PATH".format(program))


def enum(**enums):
    """Create a simple enumeration type, e.g. ``enum(A=1, B=2).A == 1``."""
    return type('Enum', (), enums)


def write_file(path, contents):
    """Write ``contents`` to the file at ``path``, replacing any existing content."""
    # Context manager ensures the handle is closed even if write() raises
    # (the original leaked the handle on failure).
    with open(path, "w") as f:
        f.write(contents)


def xml_expr_to_python(node):
    """Convert one node of ‘nix-instantiate --eval --xml’ output to a Python value.

    Handles attrs → dict, list → list, string/path → str, bool → bool,
    int → int, null → None and derivation → dict.

    Raises:
        Exception: on an unrecognised tag.
    """
    if node.tag == "attrs":
        res = {}
        for attr in node.findall("attr"):
            res[attr.get("name")] = xml_expr_to_python(attr.find("*"))
        return res

    elif node.tag == "list":
        res = []
        for elem in node.findall("*"):
            res.append(xml_expr_to_python(elem))
        return res

    elif node.tag == "string":
        return node.get("value")

    elif node.tag == "path":
        return node.get("value")

    elif node.tag == "bool":
        return node.get("value") == "true"

    elif node.tag == "int":
        return int(node.get("value"))

    elif node.tag == "null":
        return None

    elif node.tag == "derivation":
        # FIX: the attribute is named ‘drvPath’; the original looked up the
        # non-existent attribute ‘drvPath/’ and therefore always got None.
        return {"drvPath": node.get("drvPath"), "outPath": node.get("outPath")}

    raise Exception("cannot convert XML output of nix-instantiate to Python: Unknown tag " + node.tag)


def parse_nixos_version(s):
    """Split a NixOS version string into a list of components."""
    return s.split(".")
dcroc16/skunk_works
refs/heads/master
google_appengine/lib/django-1.5/tests/modeltests/signals/tests.py
112
from __future__ import absolute_import

from django.db.models import signals
from django.dispatch import receiver
from django.test import TestCase
from django.utils import six

from .models import Person, Car


# #8285: signals can be any callable
class PostDeleteHandler(object):
    """Callable post_delete receiver.

    Appends ``(instance, id_is_None)`` tuples to the shared ``data`` list so
    the test can verify the order and payload of delete notifications.
    """
    def __init__(self, data):
        self.data = data

    def __call__(self, signal, sender, instance, **kwargs):
        self.data.append(
            (instance, instance.id is None)
        )


class MyReceiver(object):
    """Receiver that disconnects itself from the signal on its first call."""
    def __init__(self, param):
        self.param = param
        self._run = False

    def __call__(self, signal, sender, **kwargs):
        self._run = True
        # Disconnecting during dispatch must not break delivery to the
        # remaining receivers (exercised by test_disconnect_in_dispatch).
        signal.disconnect(receiver=self, sender=sender)


class SignalTests(TestCase):
    def test_basic(self):
        """Verify payload and ordering of pre/post save and delete signals."""
        # Save up the number of connected signals so that we can check at the
        # end that all the signals we register get properly unregistered (#9989)
        pre_signals = (
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
        )

        # Every receiver appends into this list; each section below clears it
        # and asserts exactly which notifications the preceding ORM call fired.
        data = []

        def pre_save_test(signal, sender, instance, **kwargs):
            data.append(
                (instance, kwargs.get("raw", False))
            )
        signals.pre_save.connect(pre_save_test)

        def post_save_test(signal, sender, instance, **kwargs):
            data.append(
                (instance, kwargs.get("created"), kwargs.get("raw", False))
            )
        signals.post_save.connect(post_save_test)

        def pre_delete_test(signal, sender, instance, **kwargs):
            data.append(
                (instance, instance.id is None)
            )
        signals.pre_delete.connect(pre_delete_test)

        post_delete_test = PostDeleteHandler(data)
        signals.post_delete.connect(post_delete_test)

        # throw a decorator syntax receiver into the mix
        @receiver(signals.pre_save)
        def pre_save_decorator_test(signal, sender, instance, **kwargs):
            data.append(instance)

        @receiver(signals.pre_save, sender=Car)
        def pre_save_decorator_sender_test(signal, sender, instance, **kwargs):
            data.append(instance)

        # Creating a Person fires pre_save (x2: plain + decorator) and
        # post_save with created=True.
        p1 = Person(first_name="John", last_name="Smith")
        self.assertEqual(data, [])
        p1.save()
        self.assertEqual(data, [
            (p1, False),
            p1,
            (p1, True, False),
        ])
        data[:] = []

        # Re-saving an existing instance reports created=False.
        p1.first_name = "Tom"
        p1.save()
        self.assertEqual(data, [
            (p1, False),
            p1,
            (p1, False, False),
        ])
        data[:] = []

        # Car signal (sender defined)
        c1 = Car(make="Volkswagon", model="Passat")
        c1.save()
        self.assertEqual(data, [
            (c1, False),
            c1,
            c1,
            (c1, True, False),
        ])
        data[:] = []

        # Calling an internal method purely so that we can trigger a "raw" save.
        p1.save_base(raw=True)
        self.assertEqual(data, [
            (p1, True),
            p1,
            (p1, False, True),
        ])
        data[:] = []

        # Deleting fires pre_delete and post_delete, each once.
        p1.delete()
        self.assertEqual(data, [
            (p1, False),
            (p1, False),
        ])
        data[:] = []

        # Saving an object with a manually assigned primary key still counts
        # as a creation (created=True).
        p2 = Person(first_name="James", last_name="Jones")
        p2.id = 99999
        p2.save()
        self.assertEqual(data, [
            (p2, False),
            p2,
            (p2, True, False),
        ])
        data[:] = []

        p2.id = 99998
        p2.save()
        self.assertEqual(data, [
            (p2, False),
            p2,
            (p2, True, False),
        ])
        data[:] = []

        p2.delete()
        self.assertEqual(data, [
            (p2, False),
            (p2, False)
        ])

        self.assertQuerysetEqual(
            Person.objects.all(), [
                "James Jones",
            ],
            six.text_type
        )

        signals.post_delete.disconnect(post_delete_test)
        signals.pre_delete.disconnect(pre_delete_test)
        signals.post_save.disconnect(post_save_test)
        signals.pre_save.disconnect(pre_save_test)
        signals.pre_save.disconnect(pre_save_decorator_test)
        signals.pre_save.disconnect(pre_save_decorator_sender_test, sender=Car)

        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
        )

        self.assertEqual(pre_signals, post_signals)

    def test_disconnect_in_dispatch(self):
        """
        Test that signals that disconnect when being called don't mess future
        dispatching.
        """
        a, b = MyReceiver(1), MyReceiver(2)
        signals.post_save.connect(sender=Person, receiver=a)
        signals.post_save.connect(sender=Person, receiver=b)
        p = Person.objects.create(first_name='John', last_name='Smith')

        # Both receivers must have run even though each disconnected itself
        # during dispatch, and afterwards none may remain connected.
        self.assertTrue(a._run)
        self.assertTrue(b._run)
        self.assertEqual(signals.post_save.receivers, [])
OpenSoccerManager/opensoccermanager-editor
refs/heads/master
uigtk/clubs.py
1
#!/usr/bin/env python3 # This file is part of OpenSoccerManager-Editor. # # OpenSoccerManager is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by the # Free Software Foundation, either version 3 of the License, or (at your # option) any later version. # # OpenSoccerManager is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # OpenSoccerManager. If not, see <http://www.gnu.org/licenses/>. from gi.repository import Gtk import re import unicodedata import data import structures.clubs import uigtk.dialogs import uigtk.interface import uigtk.search import uigtk.widgets class Clubs(uigtk.widgets.Grid): name = "Clubs" def __init__(self): uigtk.widgets.Grid.__init__(self) self.set_border_width(5) Clubs.search = uigtk.search.Search(data.clubs.get_clubs) Clubs.search.treeview.connect("row-activated", self.on_row_activated) Clubs.search.treeselection.connect("changed", self.on_treeselection_changed) self.attach(Clubs.search, 0, 0, 1, 1) self.clubedit = ClubEdit() self.clubedit.set_sensitive(False) self.attach(self.clubedit, 1, 0, 1, 1) self.populate_data() def add_item(self): ''' Add item into model and load attributes for editing. ''' club = data.clubs.add_club() treeiter = Clubs.search.liststore.insert(0, [club.clubid, ""]) treeiter1 = Clubs.search.treemodelfilter.convert_child_iter_to_iter(treeiter) treeiter2 = Clubs.search.treemodelsort.convert_child_iter_to_iter(treeiter1[1]) treepath = Clubs.search.treemodelsort.get_path(treeiter2[1]) Clubs.search.activate_row(treepath) self.clubedit.clear_details() self.clubedit.club = club self.clubedit.entryName.grab_focus() def remove_item(self): ''' Query removal of selected club if dialog enabled. 
''' model, treeiter = Clubs.search.treeselection.get_selected() if treeiter: clubid = model[treeiter][0] if data.preferences.confirm_remove: club = data.clubs.get_club_by_id(clubid) if not club.can_remove(): uigtk.dialogs.ClubKeyError(club.name) else: dialog = uigtk.dialogs.RemoveItem("Club", club.name) if dialog.show(): data.clubs.remove_club(clubid) self.populate_data() else: data.clubs.remove_club(clubid) self.populate_data() def on_row_activated(self, treeview, treepath, treeviewcolumn): ''' Get club selected and initiate details loading. ''' model = treeview.get_model() if treepath: clubid = model[treepath][0] club = data.clubs.get_club_by_id(clubid) self.clubedit.set_details(club) self.clubedit.set_sensitive(True) def on_treeselection_changed(self, treeselection): ''' Update visible details when selection is changed. ''' model, treeiter = treeselection.get_selected() if treeiter: data.window.menu.menuitemRemove.set_sensitive(True) data.window.toolbar.toolbuttonRemove.set_sensitive(True) else: data.window.menu.menuitemRemove.set_sensitive(False) data.window.toolbar.toolbuttonRemove.set_sensitive(False) self.clubedit.clear_details() self.clubedit.set_sensitive(False) def populate_data(self): Clubs.search.liststore.clear() for clubid, club in data.clubs.get_clubs(): Clubs.search.liststore.append([clubid, club.name]) Clubs.search.activate_first_item() class ClubEdit(uigtk.widgets.Grid): def __init__(self): uigtk.widgets.Grid.__init__(self) grid = uigtk.widgets.Grid() grid.set_hexpand(True) grid.set_vexpand(True) self.attach(grid, 0, 0, 1, 1) grid2 = uigtk.widgets.Grid() grid.attach(grid2, 0, 0, 1, 1) label = uigtk.widgets.Label("_Name", leftalign=True) grid2.attach(label, 0, 0, 1, 1) self.entryName = Gtk.Entry() label.set_mnemonic_widget(self.entryName) grid2.attach(self.entryName, 1, 0, 1, 1) label = uigtk.widgets.Label("_Nickname", leftalign=True) grid2.attach(label, 0, 1, 1, 1) self.entryNickname = Gtk.Entry() label.set_mnemonic_widget(self.entryNickname) 
grid2.attach(self.entryNickname, 1, 1, 1, 1) self.attributes = AttributeEdit() grid.attach(self.attributes, 0, 2, 1, 1) self.actionbuttons = uigtk.interface.ActionButtons() self.actionbuttons.buttonUpdate.connect("clicked", self.on_update_clicked) self.attach(self.actionbuttons, 0, 1, 1, 1) def on_update_clicked(self, *args): ''' Update current values into working data. ''' self.club.name = self.entryName.get_text() self.club.nickname = self.entryNickname.get_text() self.club.attributes = {} for row in self.attributes.liststore: attributeid = row[0] attribute = structures.clubs.Attribute(self.club.clubid) self.club.attributes[attributeid] = attribute attribute.year = row[1] attribute.manager = row[2] attribute.chairman = row[3] attribute.stadium = data.stadiums.get_stadium_by_id(row[4]) attribute.reputation = row[6] model, treeiter = Clubs.search.treeselection.get_selected() child_treeiter = model.convert_iter_to_child_iter(treeiter) liststore = model.get_model() liststore[child_treeiter][1] = self.club.name model, treeiter = Clubs.search.treeselection.get_selected() treepath = model.get_path(treeiter) Clubs.search.treeview.scroll_to_cell(treepath) data.unsaved = True def set_details(self, club): ''' Update selected player with details to be displayed. ''' self.clear_details() self.club = club self.entryName.set_text(club.name) self.entryNickname.set_text(club.nickname) self.attributes.club = club self.attributes.populate_data() def clear_details(self, *args): ''' Clear visible attributes. 
''' self.entryName.set_text("") self.entryNickname.set_text("") self.attributes.liststore.clear() class AttributeEdit(uigtk.widgets.Grid): def __init__(self): uigtk.widgets.Grid.__init__(self) self.liststore = Gtk.ListStore(int, int, str, str, int, str, int, int) treemodelsort = Gtk.TreeModelSort(self.liststore) treemodelsort.set_sort_column_id(1, Gtk.SortType.ASCENDING) self.attributes = uigtk.interface.Attributes() self.attributes.treeview.set_model(treemodelsort) self.attributes.treeview.connect("row-activated", self.on_row_activated) self.attributes.treeselection.connect("changed", self.on_treeselection_changed) self.attributes.buttonAdd.connect("clicked", self.on_add_clicked) self.attributes.buttonEdit.connect("clicked", self.on_edit_clicked) self.attributes.buttonRemove.connect("clicked", self.on_remove_clicked) self.attach(self.attributes, 0, 0, 1, 1) treeviewcolumn = uigtk.widgets.TreeViewColumn(title="Year", column=1) self.attributes.treeview.append_column(treeviewcolumn) treeviewcolumn = uigtk.widgets.TreeViewColumn(title="Manager", column=2) self.attributes.treeview.append_column(treeviewcolumn) treeviewcolumn = uigtk.widgets.TreeViewColumn(title="Chairman", column=3) self.attributes.treeview.append_column(treeviewcolumn) treeviewcolumn = uigtk.widgets.TreeViewColumn(title="Stadium", column=5) self.attributes.treeview.append_column(treeviewcolumn) treeviewcolumn = uigtk.widgets.TreeViewColumn(title="Reputation", column=6) self.attributes.treeview.append_column(treeviewcolumn) treeviewcolumn = uigtk.widgets.TreeViewColumn(title="Players", column=7) self.attributes.treeview.append_column(treeviewcolumn) self.attributedialog = AttributeDialog() def on_add_clicked(self, *args): ''' Display add dialog for new attribute. ''' self.attributedialog.show(self.club, self.liststore) def on_edit_clicked(self, *args): ''' Display edit dialog for selected attribute. 
''' model, treeiter = self.attributes.treeselection.get_selected() treeiter1 = model.convert_iter_to_child_iter(treeiter) self.attributedialog.show(self.club, self.liststore, treeiter1) def on_remove_clicked(self, *args): ''' Remove selected attribute for loaded club. ''' dialog = uigtk.dialogs.RemoveAttribute(index=1) if dialog.show(): model, treeiter = self.attributes.treeselection.get_selected() treeiter1 = model.convert_iter_to_child_iter(treeiter) self.liststore.remove(treeiter1) data.unsaved = True self.populate_data() def on_row_activated(self, *args): ''' Display edit dialog on activation of row. ''' self.on_edit_clicked() def on_treeselection_changed(self, treeselection): ''' Update visible details when selection is changed. ''' model, treeiter = treeselection.get_selected() if treeiter: self.attributes.buttonEdit.set_sensitive(True) self.attributes.buttonRemove.set_sensitive(True) else: self.attributes.buttonEdit.set_sensitive(False) self.attributes.buttonRemove.set_sensitive(False) def populate_data(self): self.liststore.clear() for attributeid, attribute in self.club.attributes.items(): stadium = attribute.get_stadium_name() self.liststore.append([attributeid, attribute.year, attribute.manager, attribute.chairman, attribute.stadium.stadiumid, stadium, attribute.reputation, attribute.get_player_count()]) class AttributeDialog(Gtk.Dialog): def __init__(self): self.stadium = None Gtk.Dialog.__init__(self) self.set_transient_for(data.window) self.set_default_size(-1, 300) self.set_modal(True) self.set_title("Add Attribute") self.add_button("_Cancel", Gtk.ResponseType.CANCEL) self.add_button("_Add", Gtk.ResponseType.OK) self.set_default_response(Gtk.ResponseType.OK) self.set_response_sensitive(Gtk.ResponseType.OK, False) self.vbox.set_border_width(5) notebook = Gtk.Notebook() self.vbox.add(notebook) grid = uigtk.widgets.Grid() grid.set_border_width(5) notebook.append_page(grid, uigtk.widgets.Label("_Details")) label = uigtk.widgets.Label("_Year", 
leftalign=True) grid.attach(label, 0, 0, 1, 1) self.comboboxYear = Gtk.ComboBoxText() self.comboboxYear.set_tooltip_text("Year to add attribute data.") self.comboboxYear.connect("changed", self.update_commit_button) label.set_mnemonic_widget(self.comboboxYear) grid.attach(self.comboboxYear, 1, 0, 1, 1) label = uigtk.widgets.Label("_Manager", leftalign=True) grid.attach(label, 0, 1, 1, 1) self.entryManager = Gtk.Entry() self.entryManager.set_tooltip_text("Name of manager for this club.") self.entryManager.connect("changed", self.update_commit_button) label.set_mnemonic_widget(self.entryManager) grid.attach(self.entryManager, 1, 1, 2, 1) label = uigtk.widgets.Label("_Chairman", leftalign=True) grid.attach(label, 0, 2, 1, 1) self.entryChairman = Gtk.Entry() self.entryChairman.set_tooltip_text("Name of chairman for this club.") self.entryChairman.connect("changed", self.update_commit_button) label.set_mnemonic_widget(self.entryChairman) grid.attach(self.entryChairman, 1, 2, 2, 1) label = uigtk.widgets.Label("_Stadium", leftalign=True) grid.attach(label, 0, 3, 1, 1) self.buttonStadium = Gtk.Button("") self.buttonStadium.set_tooltip_text("Stadium selection for this club.") self.buttonStadium.connect("clicked", self.on_stadium_clicked) label.set_mnemonic_widget(self.buttonStadium) grid.attach(self.buttonStadium, 1, 3, 2, 1) label = uigtk.widgets.Label("_Reputation", leftalign=True) grid.attach(label, 0, 4, 1, 1) self.spinbuttonReputation = Gtk.SpinButton() self.spinbuttonReputation.set_range(1, 20) self.spinbuttonReputation.set_increments(1, 2) self.spinbuttonReputation.set_tooltip_text("Reputation value of club (higher is better).") self.spinbuttonReputation.connect("value-changed", self.update_commit_button) label.set_mnemonic_widget(self.spinbuttonReputation) grid.attach(self.spinbuttonReputation, 1, 4, 1, 1) self.playerlist = uigtk.interface.ItemList() self.playerlist.set_border_width(5) self.playerlist.buttonAdd.connect("clicked", self.on_add_player_clicked) 
self.playerlist.buttonRemove.connect("clicked", self.on_remove_player_clicked) notebook.append_page(self.playerlist, uigtk.widgets.Label("_Squad")) self.stadiumdialog = uigtk.selectors.StadiumSelectorDialog() self.playerdialog = uigtk.selectors.PlayerSelectorDialog() def update_commit_button(self, *args): ''' Update sensitivity of commit button on dialog. ''' sensitive = False if self.comboboxYear.get_active_id(): sensitive = True if sensitive: sensitive = self.entryManager.get_text_length() > 0 if sensitive: sensitive = self.entryChairman.get_text_length() > 0 if sensitive: if self.stadium: sensitive = True else: sensitive = False self.set_response_sensitive(Gtk.ResponseType.OK, sensitive) def on_add_player_clicked(self, *args): ''' Add selected player to squad. ''' playerid = self.playerdialog.show() if playerid: player = data.players.get_player_by_id(playerid) for attributeid, attribute in player.attributes.items(): if int(self.comboboxYear.get_active_id()) == attribute.year: attribute.club = self.clubid break self.populate_squad() def on_remove_player_clicked(self, *args): ''' Remove selected player from club and remove club attribute. ''' model, treeiter = self.playerlist.treeview.treeselection.get_selected() playerid = model[treeiter][0] attributeid = model[treeiter][1] player = data.players.get_player_by_id(playerid) attribute = player.attributes[attributeid] attribute.club = None self.populate_squad() def on_stadium_clicked(self, *args): ''' Display stadium selection dialog. ''' if self.attributeid: attribute = self.club.attributes[self.attributeid] self.stadium = self.stadiumdialog.show(stadium=attribute.stadium) else: self.stadium = self.stadiumdialog.show() if self.stadium: self.buttonStadium.set_label(self.stadium.name) self.update_commit_button() def populate_years(self, years=None): ''' Customise available year values for add and edit actions. 
''' self.comboboxYear.remove_all() if years: added = False for year in data.years.get_years(): if year not in years: self.comboboxYear.append(str(year), str(year)) added = True self.comboboxYear.set_sensitive(added) self.comboboxYear.set_active(0) else: for year in data.years.get_years(): self.comboboxYear.append(str(year), str(year)) def load_attributes(self): ''' Load attributes for given club. ''' self.attribute = self.club.attributes[self.attributeid] self.comboboxYear.set_active_id(str(self.model[self.treeiter][1])) self.entryManager.set_text(self.model[self.treeiter][2]) self.entryChairman.set_text(self.model[self.treeiter][3]) stadiumid = self.model[self.treeiter][4] self.stadium = data.stadiums.get_stadium_by_id(stadiumid) self.buttonStadium.set_label(self.model[self.treeiter][5]) self.spinbuttonReputation.set_value(self.model[self.treeiter][6]) self.populate_squad() def save_attributes(self): ''' Save attributes for given club. ''' if not self.treeiter: self.attributeid = self.club.add_attribute() self.treeiter = self.model.append([self.attributeid, 0, "", "", 0, "", 0, 0]) self.model[self.treeiter][1] = int(self.comboboxYear.get_active_id()) self.model[self.treeiter][2] = self.entryManager.get_text() self.model[self.treeiter][3] = self.entryChairman.get_text() self.model[self.treeiter][4] = self.stadium.stadiumid self.model[self.treeiter][5] = self.stadium.name self.model[self.treeiter][6] = self.spinbuttonReputation.get_value_as_int() def clear_attributes(self): ''' Reset data entry fields on close of dialog. ''' self.entryManager.set_text("") self.entryChairman.set_text("") self.buttonStadium.set_label("") self.spinbuttonReputation.set_value(1) self.stadium = None self.playerlist.liststore.clear() def populate_squad(self): ''' Load squad for club attribute being displayed. 
''' self.playerlist.liststore.clear() self.playerlist.labelCount.set_label("%i/30 Players" % (self.attribute.get_player_count())) for playerid, player in data.players.get_players(): for attributeid, attribute in player.attributes.items(): if attribute.club == self.club.clubid: if attribute.year == self.attribute.year: self.playerlist.liststore.append([playerid, attributeid, player.get_name()]) def show(self, club, model, treeiter=None): self.club = club self.model = model self.treeiter = treeiter button = self.get_widget_for_response(Gtk.ResponseType.OK) if treeiter: self.set_title("Edit Attribute") button.set_label("_Edit") self.attributeid = model[treeiter][0] self.populate_years() self.load_attributes() else: self.set_title("Add Attribute") button.set_label("_Add") years = [attribute.year for attribute in club.attributes.values()] self.attributeid = None self.stadium = None self.populate_years(years) self.update_commit_button() self.show_all() if self.run() == Gtk.ResponseType.OK: self.save_attributes() self.clear_attributes() self.hide()
manassolanki/erpnext
refs/heads/develop
erpnext/hr/doctype/travel_request/travel_request.py
18
# -*- coding: utf-8 -*- # Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document class TravelRequest(Document): pass
VirtualWatershed/vwadaptor
refs/heads/master
vwadaptor/worker.py
2
import os

from celery import Celery

from vwadaptor.settings import DevConfig, ProdConfig

# Pick the configuration class from the deployment environment:
# production settings only when VWADAPTOR_ENV is exactly 'prod'.
config = ProdConfig if os.environ.get("VWADAPTOR_ENV") == 'prod' else DevConfig

# Celery application instance shared by the worker processes.
celery = Celery(
    'vwadaptor',
    broker=config.CELERY_BROKER_URL,
    backend=config.CELERY_RESULT_BACKEND,
)
alqfahad/odoo
refs/heads/8.0
openerp/addons/base/ir/ir_translation.py
117
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging from openerp import tools import openerp.modules from openerp.osv import fields, osv from openerp.tools.translate import _ _logger = logging.getLogger(__name__) TRANSLATION_TYPE = [ ('field', 'Field'), ('model', 'Object'), ('rml', 'RML (deprecated - use Report)'), # Pending deprecation - to be replaced by report! ('report', 'Report/Template'), ('selection', 'Selection'), ('view', 'View'), ('wizard_button', 'Wizard Button'), ('wizard_field', 'Wizard Field'), ('wizard_view', 'Wizard View'), ('xsl', 'XSL'), ('help', 'Help'), ('code', 'Code'), ('constraint', 'Constraint'), ('sql_constraint', 'SQL Constraint') ] class ir_translation_import_cursor(object): """Temporary cursor for optimizing mass insert into ir.translation Open it (attached to a sql cursor), feed it with translation data and finish() it in order to insert multiple translations in a batch. 
""" _table_name = 'tmp_ir_translation_import' def __init__(self, cr, uid, parent, context): """ Initializer Store some values, and also create a temporary SQL table to accept the data. @param parent an instance of ir.translation ORM model """ self._cr = cr self._uid = uid self._context = context self._overwrite = context.get('overwrite', False) self._debug = False self._parent_table = parent._table # Note that Postgres will NOT inherit the constraints or indexes # of ir_translation, so this copy will be much faster. cr.execute('''CREATE TEMP TABLE %s( imd_model VARCHAR(64), imd_name VARCHAR(128) ) INHERITS (%s) ''' % (self._table_name, self._parent_table)) def push(self, trans_dict): """Feed a translation, as a dictionary, into the cursor """ params = dict(trans_dict, state="translated" if trans_dict['value'] else "to_translate") if params['type'] == 'view': # ugly hack for QWeb views - pending refactoring of translations in master if params['imd_model'] == 'website': params['imd_model'] = "ir.ui.view" # non-QWeb views do not need a matching res_id -> force to 0 to avoid dropping them elif params['res_id'] is None: params['res_id'] = 0 self._cr.execute("""INSERT INTO %s (name, lang, res_id, src, type, imd_model, module, imd_name, value, state, comments) VALUES (%%(name)s, %%(lang)s, %%(res_id)s, %%(src)s, %%(type)s, %%(imd_model)s, %%(module)s, %%(imd_name)s, %%(value)s, %%(state)s, %%(comments)s)""" % self._table_name, params) def finish(self): """ Transfer the data from the temp table to ir.translation """ cr = self._cr if self._debug: cr.execute("SELECT count(*) FROM %s" % self._table_name) c = cr.fetchone()[0] _logger.debug("ir.translation.cursor: We have %d entries to process", c) # Step 1: resolve ir.model.data references to res_ids cr.execute("""UPDATE %s AS ti SET res_id = imd.res_id FROM ir_model_data AS imd WHERE ti.res_id IS NULL AND ti.module IS NOT NULL AND ti.imd_name IS NOT NULL AND ti.module = imd.module AND ti.imd_name = imd.name AND ti.imd_model = 
imd.model; """ % self._table_name) if self._debug: cr.execute("SELECT module, imd_name, imd_model FROM %s " \ "WHERE res_id IS NULL AND module IS NOT NULL" % self._table_name) for row in cr.fetchall(): _logger.info("ir.translation.cursor: missing res_id for %s.%s <%s> ", *row) # Records w/o res_id must _not_ be inserted into our db, because they are # referencing non-existent data. cr.execute("DELETE FROM %s WHERE res_id IS NULL AND module IS NOT NULL" % self._table_name) find_expr = """ irt.lang = ti.lang AND irt.type = ti.type AND irt.module = ti.module AND irt.name = ti.name AND (ti.type IN ('field', 'help') OR irt.src = ti.src) AND ( ti.type NOT IN ('model', 'view') OR (ti.type = 'model' AND ti.res_id = irt.res_id) OR (ti.type = 'view' AND (irt.res_id IS NULL OR ti.res_id = irt.res_id)) ) """ # Step 2: update existing (matching) translations if self._overwrite: cr.execute("""UPDATE ONLY %s AS irt SET value = ti.value, src = ti.src, state = 'translated' FROM %s AS ti WHERE %s AND ti.value IS NOT NULL AND ti.value != '' """ % (self._parent_table, self._table_name, find_expr)) # Step 3: insert new translations cr.execute("""INSERT INTO %s(name, lang, res_id, src, type, value, module, state, comments) SELECT name, lang, res_id, src, type, value, module, state, comments FROM %s AS ti WHERE NOT EXISTS(SELECT 1 FROM ONLY %s AS irt WHERE %s); """ % (self._parent_table, self._table_name, self._parent_table, find_expr)) if self._debug: cr.execute('SELECT COUNT(*) FROM ONLY %s' % self._parent_table) c1 = cr.fetchone()[0] cr.execute('SELECT COUNT(*) FROM ONLY %s AS irt, %s AS ti WHERE %s' % \ (self._parent_table, self._table_name, find_expr)) c = cr.fetchone()[0] _logger.debug("ir.translation.cursor: %d entries now in ir.translation, %d common entries with tmp", c1, c) # Step 4: cleanup cr.execute("DROP TABLE %s" % self._table_name) return True class ir_translation(osv.osv): _name = "ir.translation" _log_access = False def _get_language(self, cr, uid, context): lang_model 
= self.pool.get('res.lang') lang_ids = lang_model.search(cr, uid, [('translatable', '=', True)], context=context) lang_data = lang_model.read(cr, uid, lang_ids, ['code', 'name'], context=context) return [(d['code'], d['name']) for d in lang_data] def _get_src(self, cr, uid, ids, name, arg, context=None): ''' Get source name for the translation. If object type is model then return the value store in db. Otherwise return value store in src field ''' if context is None: context = {} res = dict.fromkeys(ids, False) for record in self.browse(cr, uid, ids, context=context): if record.type != 'model': res[record.id] = record.src else: model_name, field = record.name.split(',') model = self.pool.get(model_name) if model is not None: # Pass context without lang, need to read real stored field, not translation context_no_lang = dict(context, lang=None) result = model.read(cr, uid, [record.res_id], [field], context=context_no_lang) res[record.id] = result[0][field] if result else False return res def _set_src(self, cr, uid, id, name, value, args, context=None): ''' When changing source term of a translation, change its value in db for the associated object, and the src field ''' if context is None: context = {} record = self.browse(cr, uid, id, context=context) if record.type == 'model': model_name, field = record.name.split(',') model = self.pool.get(model_name) #We need to take the context without the language information, because we want to write on the #value store in db and not on the one associate with current language. 
#Also not removing lang from context trigger an error when lang is different context_wo_lang = context.copy() context_wo_lang.pop('lang', None) model.write(cr, uid, [record.res_id], {field: value}, context=context_wo_lang) return self.write(cr, uid, id, {'src': value}, context=context) _columns = { 'name': fields.char('Translated field', required=True), 'res_id': fields.integer('Record ID', select=True), 'lang': fields.selection(_get_language, string='Language'), 'type': fields.selection(TRANSLATION_TYPE, string='Type', select=True), 'src': fields.text('Old source'), 'source': fields.function(_get_src, fnct_inv=_set_src, type='text', string='Source'), 'value': fields.text('Translation Value'), 'module': fields.char('Module', help="Module this term belongs to", select=True), 'state': fields.selection( [('to_translate','To Translate'), ('inprogress','Translation in Progress'), ('translated','Translated')], string="Status", help="Automatically set to let administators find new terms that might need to be translated"), # aka gettext extracted-comments - we use them to flag openerp-web translation # cfr: http://www.gnu.org/savannah-checkouts/gnu/gettext/manual/html_node/PO-Files.html 'comments': fields.text('Translation comments', select=True), } _defaults = { 'state': 'to_translate', } _sql_constraints = [ ('lang_fkey_res_lang', 'FOREIGN KEY(lang) REFERENCES res_lang(code)', 'Language code of translation item must be among known languages' ), ] def _auto_init(self, cr, context=None): super(ir_translation, self)._auto_init(cr, context) # FIXME: there is a size limit on btree indexed values so we can't index src column with normal btree. 
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_ltns',)) if cr.fetchone(): #temporarily removed: cr.execute('CREATE INDEX ir_translation_ltns ON ir_translation (name, lang, type, src)') cr.execute('DROP INDEX ir_translation_ltns') cr.commit() cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_lts',)) if cr.fetchone(): #temporarily removed: cr.execute('CREATE INDEX ir_translation_lts ON ir_translation (lang, type, src)') cr.execute('DROP INDEX ir_translation_lts') cr.commit() # add separate hash index on src (no size limit on values), as postgres 8.1+ is able to combine separate indexes cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_src_hash_idx',)) if not cr.fetchone(): cr.execute('CREATE INDEX ir_translation_src_hash_idx ON ir_translation using hash (src)') cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_translation_ltn',)) if not cr.fetchone(): cr.execute('CREATE INDEX ir_translation_ltn ON ir_translation (name, lang, type)') cr.commit() def _check_selection_field_value(self, cr, uid, field, value, context=None): if field == 'lang': return return super(ir_translation, self)._check_selection_field_value(cr, uid, field, value, context=context) @tools.ormcache_multi(skiparg=3, multi=6) def _get_ids(self, cr, uid, name, tt, lang, ids): translations = dict.fromkeys(ids, False) if ids: cr.execute('select res_id,value ' 'from ir_translation ' 'where lang=%s ' 'and type=%s ' 'and name=%s ' 'and res_id IN %s', (lang,tt,name,tuple(ids))) for res_id, value in cr.fetchall(): translations[res_id] = value return translations def _set_ids(self, cr, uid, name, tt, lang, ids, value, src=None): self._get_ids.clear_cache(self) self.__get_source.clear_cache(self) cr.execute('update ir_translation ' 'set value=%s ' ' , src=%s ' ' , state=%s ' 'where lang=%s ' 'and type=%s ' 'and name=%s ' 'and res_id IN %s ' 'returning res_id', 
(value,src,'translated',lang,tt,name,tuple(ids),)) existing_ids = [x[0] for x in cr.fetchall()] for id in list(set(ids) - set(existing_ids)): self.create(cr, uid, { 'lang':lang, 'type':tt, 'name':name, 'res_id':id, 'value':value, 'src':src, 'state':'translated' }) return len(ids) def _get_source_query(self, cr, uid, name, types, lang, source, res_id): if source: query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND src=%s""" params = (lang or '', types, tools.ustr(source)) if res_id: query += " AND res_id in %s" params += (res_id,) if name: query += " AND name=%s" params += (tools.ustr(name),) else: query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND name=%s""" params = (lang or '', types, tools.ustr(name)) return (query, params) @tools.ormcache(skiparg=3) def __get_source(self, cr, uid, name, types, lang, source, res_id): # res_id is a tuple or None, otherwise ormcache cannot cache it! query, params = self._get_source_query(cr, uid, name, types, lang, source, res_id) cr.execute(query, params) res = cr.fetchone() trad = res and res[0] or u'' if source and not trad: return tools.ustr(source) return trad def _get_source(self, cr, uid, name, types, lang, source=None, res_id=None): """ Returns the translation for the given combination of name, type, language and source. All values passed to this method should be unicode (not byte strings), especially ``source``. 
:param name: identification of the term to translate, such as field name (optional if source is passed) :param types: single string defining type of term to translate (see ``type`` field on ir.translation), or sequence of allowed types (strings) :param lang: language code of the desired translation :param source: optional source term to translate (should be unicode) :param res_id: optional resource id or a list of ids to translate (if used, ``source`` should be set) :rtype: unicode :return: the request translation, or an empty unicode string if no translation was found and `source` was not passed """ # FIXME: should assert that `source` is unicode and fix all callers to always pass unicode # so we can remove the string encoding/decoding. if not lang: return tools.ustr(source or '') if isinstance(types, basestring): types = (types,) if res_id: if isinstance(res_id, (int, long)): res_id = (res_id,) else: res_id = tuple(res_id) return self.__get_source(cr, uid, name, types, lang, source, res_id) def create(self, cr, uid, vals, context=None): if context is None: context = {} ids = super(ir_translation, self).create(cr, uid, vals, context=context) self.__get_source.clear_cache(self) self._get_ids.clear_cache(self) self.pool['ir.ui.view'].clear_cache() return ids def write(self, cursor, user, ids, vals, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] if vals.get('src') or ('value' in vals and not(vals.get('value'))): vals.update({'state':'to_translate'}) if vals.get('value'): vals.update({'state':'translated'}) result = super(ir_translation, self).write(cursor, user, ids, vals, context=context) self.__get_source.clear_cache(self) self._get_ids.clear_cache(self) self.pool['ir.ui.view'].clear_cache() return result def unlink(self, cursor, user, ids, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] self.__get_source.clear_cache(self) self._get_ids.clear_cache(self) result = 
super(ir_translation, self).unlink(cursor, user, ids, context=context) return result def translate_fields(self, cr, uid, model, id, field=None, context=None): trans_model = self.pool[model] domain = ['&', ('res_id', '=', id), ('name', '=like', model + ',%')] langs_ids = self.pool.get('res.lang').search(cr, uid, [('code', '!=', 'en_US')], context=context) if not langs_ids: raise osv.except_osv(_('Error'), _("Translation features are unavailable until you install an extra OpenERP translation.")) langs = [lg.code for lg in self.pool.get('res.lang').browse(cr, uid, langs_ids, context=context)] main_lang = 'en_US' translatable_fields = [] for k, f in trans_model._fields.items(): if getattr(f, 'translate', False): if f.inherited: parent_id = trans_model.read(cr, uid, [id], [f.related[0]], context=context)[0][f.related[0]][0] translatable_fields.append({'name': k, 'id': parent_id, 'model': f.base_field.model_name}) domain.insert(0, '|') domain.extend(['&', ('res_id', '=', parent_id), ('name', '=', "%s,%s" % (f.base_field.model_name, k))]) else: translatable_fields.append({'name': k, 'id': id, 'model': model }) if len(langs): fields = [f.get('name') for f in translatable_fields] record = trans_model.read(cr, uid, [id], fields, context={ 'lang': main_lang })[0] for lg in langs: for f in translatable_fields: # Check if record exists, else create it (at once) sql = """INSERT INTO ir_translation (lang, src, name, type, res_id, value) SELECT %s, %s, %s, 'model', %s, %s WHERE NOT EXISTS (SELECT 1 FROM ir_translation WHERE lang=%s AND name=%s AND res_id=%s AND type='model'); UPDATE ir_translation SET src = %s WHERE lang=%s AND name=%s AND res_id=%s AND type='model'; """ src = record[f['name']] or None name = "%s,%s" % (f['model'], f['name']) cr.execute(sql, (lg, src , name, f['id'], src, lg, name, f['id'], src, lg, name, id)) action = { 'name': 'Translate', 'res_model': 'ir.translation', 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'tree,form', 'domain': 
domain, } if field: f = trans_model._fields[field] action['context'] = { 'search_default_name': "%s,%s" % (f.base_field.model_name, field) } return action def _get_import_cursor(self, cr, uid, context=None): """ Return a cursor-like object for fast inserting translations """ return ir_translation_import_cursor(cr, uid, self, context=context) def load_module_terms(self, cr, modules, langs, context=None): context = dict(context or {}) # local copy for module_name in modules: modpath = openerp.modules.get_module_path(module_name) if not modpath: continue for lang in langs: lang_code = tools.get_iso_codes(lang) base_lang_code = None if '_' in lang_code: base_lang_code = lang_code.split('_')[0] # Step 1: for sub-languages, load base language first (e.g. es_CL.po is loaded over es.po) if base_lang_code: base_trans_file = openerp.modules.get_module_resource(module_name, 'i18n', base_lang_code + '.po') if base_trans_file: _logger.info('module %s: loading base translation file %s for language %s', module_name, base_lang_code, lang) tools.trans_load(cr, base_trans_file, lang, verbose=False, module_name=module_name, context=context) context['overwrite'] = True # make sure the requested translation will override the base terms later # i18n_extra folder is for additional translations handle manually (eg: for l10n_be) base_trans_extra_file = openerp.modules.get_module_resource(module_name, 'i18n_extra', base_lang_code + '.po') if base_trans_extra_file: _logger.info('module %s: loading extra base translation file %s for language %s', module_name, base_lang_code, lang) tools.trans_load(cr, base_trans_extra_file, lang, verbose=False, module_name=module_name, context=context) context['overwrite'] = True # make sure the requested translation will override the base terms later # Step 2: then load the main translation file, possibly overriding the terms coming from the base language trans_file = openerp.modules.get_module_resource(module_name, 'i18n', lang_code + '.po') if trans_file: 
_logger.info('module %s: loading translation file (%s) for language %s', module_name, lang_code, lang) tools.trans_load(cr, trans_file, lang, verbose=False, module_name=module_name, context=context) elif lang_code != 'en_US': _logger.warning('module %s: no translation for language %s', module_name, lang_code) trans_extra_file = openerp.modules.get_module_resource(module_name, 'i18n_extra', lang_code + '.po') if trans_extra_file: _logger.info('module %s: loading extra translation file (%s) for language %s', module_name, lang_code, lang) tools.trans_load(cr, trans_extra_file, lang, verbose=False, module_name=module_name, context=context) return True # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
westernx/vee
refs/heads/master
vee/_vendor/setuptools/depends.py
462
import sys
import imp
import marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion
from setuptools import compat

__all__ = [
    'Require', 'find_module', 'get_module_constant', 'extract_constant'
]

class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(self, name, requested_version, module, homepage='',
            attribute=None, format=None):
        # If no explicit version format is given but a version was requested,
        # fall back to StrictVersion parsing.
        if format is None and requested_version is not None:
            format = StrictVersion

        if format is not None:
            requested_version = format(requested_version)
            if attribute is None:
                # Conventional place modules keep their version string.
                attribute = '__version__'

        # Bulk-assign all constructor arguments as instance attributes;
        # 'self' itself ends up in locals() and must be removed again.
        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return '%s-%s' % (self.name,self.requested_version)
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        # No attribute or no format means we cannot (and do not) compare
        # versions, so any present module is considered acceptable.
        return self.attribute is None or self.format is None or \
            str(version) != "unknown" and version >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """

        if self.attribute is None:
            # No version attribute to inspect: mere presence of the module
            # is enough, so just probe for it without importing.
            try:
                f,p,i = find_module(self.module,paths)
                if f: f.close()
                return default
            except ImportError:
                return None

        v = get_module_constant(self.module, self.attribute, default, paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(version)


def _iter_code(code):

    """Yield '(op,arg)' pair for each operation in code object 'code'"""

    from array import array
    from dis import HAVE_ARGUMENT, EXTENDED_ARG

    # NOTE(review): this decodes the pre-3.6 variable-width bytecode layout
    # (1 byte for argument-less ops, 3 bytes otherwise); it does not handle
    # the 2-byte "wordcode" format introduced in CPython 3.6.
    bytes = array('b',code.co_code)
    eof = len(code.co_code)

    ptr = 0
    extended_arg = 0

    while ptr<eof:

        op = bytes[ptr]

        if op>=HAVE_ARGUMENT:

            # 16-bit little-endian argument, plus any pending EXTENDED_ARG.
            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
            ptr += 3

            if op==EXTENDED_ARG:
                # Accumulate the high bits for the *next* instruction.
                extended_arg = arg * compat.long_type(65536)
                continue

        else:
            arg = None
            ptr += 1

        yield op,arg


def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support"""

    parts = module.split('.')

    # Walk the dotted path one component at a time, descending into
    # package directories as we go.
    while parts:
        part = parts.pop(0)
        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)

        if kind==PKG_DIRECTORY:
            parts = parts or ['__init__']
            paths = [path]

        elif parts:
            # More components left, but the current one is not a package.
            raise ImportError("Can't find %r in %s" % (parts,module))

    return info


def get_module_constant(module, symbol, default=-1, paths=None):

    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix, mode, kind) = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    try:
        if kind==PY_COMPILED:
            f.read(8)   # skip magic & date
            code = marshal.load(f)
        elif kind==PY_FROZEN:
            code = imp.get_frozen_object(module)
        elif kind==PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            if module not in sys.modules:
                imp.load_module(module, f, path, (suffix, mode, kind))
            return getattr(sys.modules[module], symbol, None)

    finally:
        if f:
            f.close()

    # Scan the (unexecuted) code object for a constant binding of 'symbol'.
    return extract_constant(code, symbol, default)


def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """

    if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
        return None

    name_idx = list(code.co_names).index(symbol)

    # Raw opcode numbers for the (pre-3.6) CPython bytecode being scanned.
    STORE_NAME = 90
    STORE_GLOBAL = 97
    LOAD_CONST = 100

    const = default

    for op, arg in _iter_code(code):

        if op==LOAD_CONST:
            # Remember the most recent constant loaded; if 'symbol' is
            # stored immediately after, this is its value.
            const = code.co_consts[arg]
        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
            return const
        else:
            # Any other instruction breaks the LOAD_CONST/STORE pairing,
            # so the next store would not be a plain constant binding.
            const = default


def _update_globals():
    """
    Patch the globals to remove the objects not available on some platforms.

    XXX it'd be better to test assertions about bytecode instead.
    """

    if not sys.platform.startswith('java') and sys.platform != 'cli':
        return
    # Jython/IronPython have no CPython bytecode, so the scanners above
    # cannot work there.
    incompatible = 'extract_constant', 'get_module_constant'
    for name in incompatible:
        del globals()[name]
        __all__.remove(name)

_update_globals()
coala-analyzer/coala-bears
refs/heads/master
bears/general/LicenseHeaderBear.py
3
import re

from coalib.bears.LocalBear import LocalBear
from coalib.results.Result import Result


class LicenseHeaderBear(LocalBear):
    """
    Checks for copyright notice in a file.
    """
    LANGUAGES = {'All'}
    AUTHORS = {'The coala developers'}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'
    CAN_DETECT = {'License'}

    def run(self, filename, file,
            author_name: str = ''):
        """
        Check whether the file carries a ``Copyright <year(s)> <author>``
        notice, optionally restricted to a specific author.

        :param author_name: Name of the author expected in the copyright
                            notice; leave empty to accept any author.
        """
        copyright_regexp = \
            r'Copyright\s+(\(C\)\s+)?\d{4}([-,]\d{4})*\s+%(author)s'
        # Join the file's lines once; the content may be searched twice below.
        content = ''.join(file)
        re_copyright = re.compile(copyright_regexp % {'author': author_name},
                                  re.IGNORECASE)
        if not re_copyright.search(content):
            message = 'Copyright notice not present.'
            # No notice for the requested author: check whether a notice
            # from a different (or no) author exists, to give a more
            # precise message.
            re_copyright = re.compile(copyright_regexp % {'author': ''},
                                      re.IGNORECASE)
            if author_name and re_copyright.search(content):
                yield Result.from_values(self,
                                         'Copyright notice '
                                         'with different/no author present.',
                                         file=filename)
            else:
                yield Result.from_values(self, message, file=filename)
jbenden/ansible
refs/heads/devel
lib/ansible/modules/windows/win_security_policy.py
18
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# this is a windows documentation stub, actual code lives in the .ps1
# file of the same name

# Module metadata consumed by Ansible's documentation tooling; this whole
# file contains only documentation strings, no executable logic.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: win_security_policy
version_added: '2.4'
short_description: changes local security policy settings
description:
- Allows you to set the local security policies that are configured by
  SecEdit.exe.
notes:
- This module uses the SecEdit.exe tool to configure the values, more details
  of the areas and keys that can be configured can be found here
  U(https://msdn.microsoft.com/en-us/library/bb742512.aspx).
- If you are in a domain environment these policies may be set by a GPO policy,
  this module can temporarily change these values but the GPO will override it
  if the value differs.
- You can also run C(SecEdit.exe /export /cfg C:\temp\output.ini) to view the
  current policies set on your system.
options:
  section:
    description:
    - The ini section the key exists in.
    - If the section does not exist then the module will return an error.
    - Example sections to use are 'Account Policies', 'Local Policies',
      'Event Log', 'Restricted Groups', 'System Services', 'Registry' and
      'File System'
    required: yes
  key:
    description:
    - The ini key of the section or policy name to modify.
    - The module will return an error if this key is invalid.
    required: yes
  value:
    description:
    - The value for the ini key or policy name.
    - If the key takes in a boolean value then 0 = False and 1 = True.
    required: yes
author:
- Jordan Borean (@jborean93)
'''

EXAMPLES = r'''
- name: change the guest account name
  win_security_policy:
    section: System Access
    key: NewGuestName
    value: Guest Account

- name: set the maximum password age
  win_security_policy:
    section: System Access
    key: MaximumPasswordAge
    value: 15

- name: do not store passwords using reversible encryption
  win_security_policy:
    section: System Access
    key: ClearTextPassword
    value: 0

- name: enable system events
  win_security_policy:
    section: Event Audit
    key: AuditSystemEvents
    value: 1
'''

RETURN = r'''
rc:
  description: The return code after a failure when running SecEdit.exe.
  returned: failure with secedit calls
  type: int
  sample: -1
stdout:
  description: The output of the STDOUT buffer after a failure when running
    SecEdit.exe.
  returned: failure with secedit calls
  type: string
  sample: check log for error details
stderr:
  description: The output of the STDERR buffer after a failure when running
    SecEdit.exe.
  returned: failure with secedit calls
  type: string
  sample: failed to import security policy
import_log:
  description: The log of the SecEdit.exe /configure job that configured the
    local policies. This is used for debugging purposes on failures.
  returned: secedit.exe /import run and change occurred
  type: string
  sample: Completed 6 percent (0/15) \tProcess Privilege Rights area.
key:
  description: The key in the section passed to the module to modify.
  returned: success
  type: string
  sample: NewGuestName
section:
  description: The section passed to the module to modify.
  returned: success
  type: string
  sample: System Access
value:
  description: The value passed to the module to modify to.
  returned: success
  type: string
  sample: Guest Account
'''
gppezzi/easybuild-easyblocks
refs/heads/master
easybuild/easyblocks/s/scalapack.py
12
##
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing ScaLAPACK, implemented as an easyblock

@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""

import glob
import os
import shutil
from distutils.version import LooseVersion

import easybuild.tools.toolchain as toolchain
from easybuild.easyblocks.blacs import det_interface  #@UnresolvedImport
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.easyblocks.lapack import get_blas_lib as lapack_get_blas_lib  #@UnresolvedImport
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.modules import get_software_root


class EB_ScaLAPACK(ConfigureMake):
    """
    Support for building and installing ScaLAPACK, both versions 1.x and 2.x
    """

    def configure_step(self):
        """Configure ScaLAPACK build by copying SLmake.inc.example to SLmake.inc and checking dependencies."""

        src = os.path.join(self.cfg['start_dir'], 'SLmake.inc.example')
        dest = os.path.join(self.cfg['start_dir'], 'SLmake.inc')

        if not os.path.isfile(src):
            # fixed typo in error message ("fin" -> "find")
            raise EasyBuildError("Can't find source file %s", src)

        if os.path.exists(dest):
            raise EasyBuildError("Destination file %s exists", dest)

        try:
            shutil.copy(src, dest)
        except OSError as err:
            # message corrected: a copy (not a symlink) is what failed here
            raise EasyBuildError("Copying %s to %s failed: %s", src, dest, err)

        self.loosever = LooseVersion(self.version)

        # make sure required dependencies are available
        # one tuple per group of alternatives: at least one member of each
        # group must be available as a loaded software root
        deps = [("LAPACK", "ACML", "OpenBLAS")]
        self.log.deprecated("EB_ScaLAPACK.configure_step uses hardcoded list of LAPACK libs", '3.0')

        # BLACS is only a dependency for ScaLAPACK versions prior to v2.0.0
        if self.loosever < LooseVersion("2.0.0"):
            deps.append(("BLACS",))

        for depgrp in deps:
            ok = False
            for dep in depgrp:
                if get_software_root(dep):
                    ok = True
                    break
            if not ok:
                raise EasyBuildError("None of the following dependencies %s are available/loaded.", str(depgrp))

    def build_step(self):
        """Build ScaLAPACK using make after setting make options."""

        # MPI compiler commands
        known_mpi_libs = [toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]  #@UndefinedVariable
        known_mpi_libs += [toolchain.OPENMPI, toolchain.QLOGICMPI]  #@UndefinedVariable
        # prefer the toolchain-provided wrapper commands from the environment,
        # fall back to conventional wrapper names for known MPI libraries
        if os.getenv('MPICC') and os.getenv('MPIF77') and os.getenv('MPIF90'):
            mpicc = os.getenv('MPICC')
            mpif77 = os.getenv('MPIF77')
            mpif90 = os.getenv('MPIF90')
        elif self.toolchain.mpi_family() in known_mpi_libs:
            mpicc = 'mpicc'
            mpif77 = 'mpif77'
            mpif90 = 'mpif90'
        else:
            raise EasyBuildError("Don't know which compiler commands to use.")

        # set BLAS and LAPACK libs
        extra_makeopts = None
        self.log.deprecated("EB_ScaLAPACK.build_step doesn't use toolchain support for BLAS/LAPACK libs", '3.0')
        if get_software_root('LAPACK'):
            extra_makeopts = [
                'BLASLIB="%s -lpthread"' % lapack_get_blas_lib(self.log),
                'LAPACKLIB=%s/lib/liblapack.a' % get_software_root('LAPACK')
            ]
        elif get_software_root('ACML'):
            root = get_software_root('ACML')
            acml_static_lib = os.path.join(root, os.getenv('ACML_BASEDIR', 'NO_ACML_BASEDIR'), 'lib', 'libacml.a')
            # ACML provides both BLAS and LAPACK in a single static library
            extra_makeopts = [
                'BLASLIB="%s -lpthread"' % acml_static_lib,
                'LAPACKLIB=%s' % acml_static_lib
            ]
        elif get_software_root('OpenBLAS'):
            root = get_software_root('OpenBLAS')
            # OpenBLAS also bundles LAPACK, so the same lib is used for both
            extra_makeopts = [
                'BLASLIB="%s -lpthread"' % lapack_get_blas_lib(self.log),
                'LAPACKLIB="%s"' % lapack_get_blas_lib(self.log),
            ]
        else:
            raise EasyBuildError("LAPACK, ACML or OpenBLAS are not available, no idea how to set BLASLIB/LAPACKLIB make options.")

        # build procedure changed in v2.0.0
        if self.loosever < LooseVersion("2.0.0"):

            blacs = get_software_root('BLACS')

            # determine interface
            interface = det_interface(self.log, os.path.join(blacs, 'bin'))

            # set build and BLACS dir correctly
            extra_makeopts.append('home=%s BLACSdir=%s' % (self.cfg['start_dir'], blacs))

            # set BLACS libs correctly
            blacs_libs = [
                ('BLACSFINIT', "F77init"),
                ('BLACSCINIT', "Cinit"),
                ('BLACSLIB', "")
            ]
            for (var, lib) in blacs_libs:
                extra_makeopts.append('%s=%s/lib/libblacs%s.a' % (var, blacs, lib))

            # set compilers and options
            noopt = ''
            if self.toolchain.options['noopt']:
                noopt += " -O0"
            if self.toolchain.options['pic']:
                noopt += " -fPIC"
            extra_makeopts += [
                'F77="%s"' % mpif77,
                'CC="%s"' % mpicc,
                'NOOPT="%s"' % noopt,
                'CCFLAGS="-O3 %s"' % os.getenv('CFLAGS')
            ]

            # set interface
            extra_makeopts.append("CDEFS='-D%s -DNO_IEEE $(USEMPI)'" % interface)

        else:

            # determine interface
            if self.toolchain.mpi_family() in known_mpi_libs:
                interface = 'Add_'
            else:
                raise EasyBuildError("Don't know which interface to pick for the MPI library being used.")

            # set compilers and options
            extra_makeopts += [
                'FC="%s"' % mpif90,
                'CC="%s"' % mpicc,
                'CCFLAGS="%s"' % os.getenv('CFLAGS'),
                'FCFLAGS="%s"' % os.getenv('FFLAGS'),
            ]

            # set interface
            extra_makeopts.append('CDEFS="-D%s"' % interface)

        # update make opts, and build_step
        self.cfg.update('buildopts', ' '.join(extra_makeopts))

        super(EB_ScaLAPACK, self).build_step()

    def install_step(self):
        """Install by copying files to install dir."""

        # include files and libraries
        path_info = [
            ("SRC", "include", ".h"),  # include files
            ("", "lib", ".a"),  # libraries
        ]
        for (srcdir, destdir, ext) in path_info:

            src = os.path.join(self.cfg['start_dir'], srcdir)
            dest = os.path.join(self.installdir, destdir)

            try:
                os.makedirs(dest)
                os.chdir(src)

                for lib in glob.glob('*%s' % ext):

                    # copy file
                    shutil.copy2(os.path.join(src, lib), dest)

                    self.log.debug("Copied %s to %s" % (lib, dest))

            except OSError as err:
                raise EasyBuildError("Copying %s/*.%s to installation dir %s failed: %s", src, ext, dest, err)

    def sanity_check_step(self):
        """Custom sanity check for ScaLAPACK."""

        custom_paths = {
            'files': ["lib/libscalapack.a"],
            'dirs': []
        }

        super(EB_ScaLAPACK, self).sanity_check_step(custom_paths=custom_paths)
diofeher/django-nfa
refs/heads/master
django/core/serializers/base.py
2
"""
Module for abstract serializer/unserializer base classes.

Concrete serializers (e.g. JSON, XML) subclass ``Serializer`` /
``Deserializer`` and fill in the ``handle_*`` / ``next`` hooks.
"""

# cStringIO is the fast C implementation; fall back to the pure-Python
# StringIO module where it is unavailable.
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.db import models
from django.utils.encoding import smart_str, smart_unicode

class SerializationError(Exception):
    """Something bad happened during serialization."""
    pass

class DeserializationError(Exception):
    """Something bad happened during deserialization."""
    pass

class Serializer(object):
    """
    Abstract serializer base class.
    """

    # Indicates if the implemented serializer is only available for
    # internal Django use.
    internal_use_only = False

    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Supported options: ``stream`` (writable file-like target; defaults
        to an in-memory buffer) and ``fields`` (iterable of field names to
        restrict serialization to; ``None`` means all fields).
        """
        self.options = options

        self.stream = options.get("stream", StringIO())
        self.selected_fields = options.get("fields")

        self.start_serialization()
        for obj in queryset:
            self.start_object(obj)
            for field in obj._meta.local_fields:
                if field.serialize:
                    if field.rel is None:
                        # Plain (non-relational) field.
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        # ForeignKey: attname carries an "_id" suffix, so the
                        # last 3 characters are stripped to compare against
                        # the bare field name the user selected.
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in obj._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()

    def get_string_value(self, obj, field):
        """
        Convert a field's value to a string.
        """
        # DateTimeFields are rendered in a fixed, locale-independent format;
        # every other field type goes through its flatten_data machinery.
        if isinstance(field, models.DateTimeField):
            value = getattr(obj, field.name).strftime("%Y-%m-%d %H:%M:%S")
        else:
            value = field.flatten_data(follow=None, obj=obj).get(field.name, "")
        return smart_unicode(value)

    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError

    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass

    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError

    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass

    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError

    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError

    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError

    def getvalue(self):
        """
        Return the fully serialized queryset (or None if the output stream is
        not seekable).
        """
        # Only streams that expose getvalue() (e.g. StringIO) can hand back
        # their accumulated contents; for others this implicitly returns None.
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()

class Deserializer(object):
    """
    Abstract base deserializer class.
    """

    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Accept either a ready-made stream or raw string data; strings are
        # wrapped in an in-memory stream so subclasses only deal with streams.
        if isinstance(stream_or_string, basestring):
            self.stream = StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
        # hack to make sure that the models have all been loaded before
        # deserialization starts (otherwise subclass calls to get_model()
        # and friends might fail...)
        models.get_apps()

    def __iter__(self):
        return self

    def next(self):
        """Iteration interface -- return the next item in the stream"""
        # Python 2 iterator protocol method; subclasses must implement it.
        raise NotImplementedError

class DeserializedObject(object):
    """
    A deserialized model.

    Basically a container for holding the pre-saved deserialized data along
    with the many-to-many data saved with the object.

    Call ``save()`` to save the object (with the many-to-many data) to the
    database; call ``save(save_m2m=False)`` to save just the object fields
    (and not touch the many-to-many stuff.)
    """

    def __init__(self, obj, m2m_data=None):
        self.object = obj
        self.m2m_data = m2m_data

    def __repr__(self):
        return "<DeserializedObject: %s>" % smart_str(self.object)

    def save(self, save_m2m=True):
        # Call save on the Model baseclass directly. This bypasses any
        # model-defined save. The save is also forced to be raw.
        # This ensures that the data that is deserialized is literally
        # what came from the file, not post-processed by pre_save/save
        # methods.
        models.Model.save_base(self.object, raw=True)
        if self.m2m_data and save_m2m:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)

        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None
steventimberman/masterDebater
refs/heads/master
venv/lib/python2.7/site-packages/whoosh/support/charset.py
95
# coding=utf-8 """This module contains tools for working with Sphinx charset table files. These files are useful for doing case and accent folding. See :class:`whoosh.analysis.CharsetTokenizer` and :class:`whoosh.analysis.CharsetFilter`. """ from collections import defaultdict import re from whoosh.compat import izip, u, iteritems, unichr, xrange # This is a straightforward accent-folding charset taken from Carlos Bueno's # article "Accent Folding for Auto-Complete", for use with CharsetFilter. # # http://www.alistapart.com/articles/accent-folding-for-auto-complete/ # # See the article for information and caveats. The code is lifted directly # from here: # # http://github.com/aristus/accent-folding/blob/master/accent_fold.py accent_map = { u('H'): u('h'), # H -> h u('I'): u('i'), # I -> i u('J'): u('j'), # J -> j u('N'): u('n'), # N -> n u('P'): u('p'), # P -> p u('S'): u('s'), # S -> s u('T'): u('t'), # T -> t u('W'): u('w'), # W -> w u('Y'): u('y'), # Y -> y u('i'): u('i'), # i -> i u('n'): u('n'), # n -> n u('p'): u('p'), # p -> p u('s'): u('s'), # s -> s u('\xc0'): u('a'), # À -> a u('\xc1'): u('a'), # Á -> a u('\xc2'): u('a'), # Â -> a u('\xc3'): u('a'), # Ã -> a u('\xc4'): u('a'), # Ä -> a u('\xc5'): u('a'), # Å -> a u('\xc7'): u('c'), # Ç -> c u('\xc8'): u('e'), # È -> e u('\xc9'): u('e'), # É -> e u('\xca'): u('e'), # Ê -> e u('\xcb'): u('e'), # Ë -> e u('\xcc'): u('i'), # Ì -> i u('\xcd'): u('i'), # Í -> i u('\xce'): u('i'), # Î -> i u('\xcf'): u('i'), # Ï -> i u('\xd1'): u('n'), # Ñ -> n u('\xd2'): u('o'), # Ò -> o u('\xd3'): u('o'), # Ó -> o u('\xd4'): u('o'), # Ô -> o u('\xd5'): u('o'), # Õ -> o u('\xd6'): u('o'), # Ö -> o u('\xd8'): u('o'), # Ø -> o u('\xd9'): u('u'), # Ù -> u u('\xda'): u('u'), # Ú -> u u('\xdb'): u('u'), # Û -> u u('\xdc'): u('u'), # Ü -> u u('\xdd'): u('y'), # Ý -> y u('\xde'): u('t'), # Þ -> t u('\xdf'): u('s'), # ß -> s u('\xe0'): u('a'), # à -> a u('\xe1'): u('a'), # á -> a u('\xe2'): u('a'), # â -> a u('\xe3'): u('a'), # ã -> a 
u('\xe4'): u('a'), # ä -> a u('\xe5'): u('a'), # å -> a u('\xe7'): u('c'), # ç -> c u('\xe8'): u('e'), # è -> e u('\xe9'): u('e'), # é -> e u('\xea'): u('e'), # ê -> e u('\xeb'): u('e'), # ë -> e u('\xec'): u('i'), # ì -> i u('\xed'): u('i'), # í -> i u('\xee'): u('i'), # î -> i u('\xef'): u('i'), # ï -> i u('\xf0'): u('d'), # ð -> d u('\xf1'): u('n'), # ñ -> n u('\xf2'): u('o'), # ò -> o u('\xf3'): u('o'), # ó -> o u('\xf4'): u('o'), # ô -> o u('\xf5'): u('o'), # õ -> o u('\xf6'): u('o'), # ö -> o u('\xf8'): u('o'), # ø -> o u('\xf9'): u('u'), # ù -> u u('\xfa'): u('u'), # ú -> u u('\xfb'): u('u'), # û -> u u('\xfc'): u('u'), # ü -> u u('\xfd'): u('y'), # ý -> y u('\xfe'): u('t'), # þ -> t u('\xff'): u('y'), # ÿ -> y u('\u0100'): u('a'), # Ā -> a u('\u0101'): u('a'), # ā -> a u('\u0102'): u('a'), # Ă -> a u('\u0103'): u('a'), # ă -> a u('\u0104'): u('a'), # Ą -> a u('\u0105'): u('a'), # ą -> a u('\u0106'): u('c'), # Ć -> c u('\u0107'): u('c'), # ć -> c u('\u0108'): u('c'), # Ĉ -> c u('\u0109'): u('c'), # ĉ -> c u('\u010a'): u('c'), # Ċ -> c u('\u010b'): u('c'), # ċ -> c u('\u010c'): u('c'), # Č -> c u('\u010d'): u('c'), # č -> c u('\u010e'): u('d'), # Ď -> d u('\u010f'): u('d'), # ď -> d u('\u0110'): u('d'), # Đ -> d u('\u0111'): u('d'), # đ -> d u('\u0112'): u('e'), # Ē -> e u('\u0113'): u('e'), # ē -> e u('\u0114'): u('e'), # Ĕ -> e u('\u0115'): u('e'), # ĕ -> e u('\u0116'): u('e'), # Ė -> e u('\u0117'): u('e'), # ė -> e u('\u0118'): u('e'), # Ę -> e u('\u0119'): u('e'), # ę -> e u('\u011a'): u('e'), # Ě -> e u('\u011b'): u('e'), # ě -> e u('\u011c'): u('g'), # Ĝ -> g u('\u011d'): u('g'), # ĝ -> g u('\u011e'): u('g'), # Ğ -> g u('\u011f'): u('g'), # ğ -> g u('\u0120'): u('g'), # Ġ -> g u('\u0121'): u('g'), # ġ -> g u('\u0122'): u('g'), # Ģ -> g u('\u0123'): u('g'), # ģ -> g u('\u0124'): u('h'), # Ĥ -> h u('\u0125'): u('h'), # ĥ -> h u('\u0126'): u('h'), # Ħ -> h u('\u0127'): u('h'), # ħ -> h u('\u0128'): u('i'), # Ĩ -> i u('\u0129'): u('i'), # ĩ -> i 
u('\u012a'): u('i'), # Ī -> i u('\u012b'): u('i'), # ī -> i u('\u012c'): u('i'), # Ĭ -> i u('\u012d'): u('i'), # ĭ -> i u('\u012e'): u('i'), # Į -> i u('\u012f'): u('i'), # į -> i u('\u0130'): u('i'), # İ -> i u('\u0131'): u('i'), # ı -> i u('\u0134'): u('j'), # Ĵ -> j u('\u0135'): u('j'), # ĵ -> j u('\u0136'): u('k'), # Ķ -> k u('\u0137'): u('k'), # ķ -> k u('\u0139'): u('a'), # Ĺ -> a u('\u013a'): u('l'), # ĺ -> l u('\u013b'): u('l'), # Ļ -> l u('\u013c'): u('l'), # ļ -> l u('\u013d'): u('l'), # Ľ -> l u('\u013e'): u('l'), # ľ -> l u('\u013f'): u('l'), # Ŀ -> l u('\u0140'): u('l'), # ŀ -> l u('\u0141'): u('l'), # Ł -> l u('\u0142'): u('l'), # ł -> l u('\u0143'): u('n'), # Ń -> n u('\u0144'): u('n'), # ń -> n u('\u0145'): u('n'), # Ņ -> n u('\u0146'): u('n'), # ņ -> n u('\u0147'): u('n'), # Ň -> n u('\u0148'): u('n'), # ň -> n u('\u014c'): u('o'), # Ō -> o u('\u014d'): u('o'), # ō -> o u('\u014e'): u('o'), # Ŏ -> o u('\u014f'): u('o'), # ŏ -> o u('\u0150'): u('o'), # Ő -> o u('\u0151'): u('o'), # ő -> o u('\u0154'): u('r'), # Ŕ -> r u('\u0155'): u('r'), # ŕ -> r u('\u0156'): u('r'), # Ŗ -> r u('\u0157'): u('r'), # ŗ -> r u('\u0158'): u('r'), # Ř -> r u('\u0159'): u('r'), # ř -> r u('\u015a'): u('s'), # Ś -> s u('\u015b'): u('s'), # ś -> s u('\u015c'): u('s'), # Ŝ -> s u('\u015d'): u('s'), # ŝ -> s u('\u015e'): u('s'), # Ş -> s u('\u015f'): u('s'), # ş -> s u('\u0160'): u('s'), # Š -> s u('\u0161'): u('s'), # š -> s u('\u0162'): u('t'), # Ţ -> t u('\u0163'): u('t'), # ţ -> t u('\u0164'): u('t'), # Ť -> t u('\u0165'): u('t'), # ť -> t u('\u0166'): u('t'), # Ŧ -> t u('\u0167'): u('t'), # ŧ -> t u('\u0168'): u('u'), # Ũ -> u u('\u0169'): u('u'), # ũ -> u u('\u016a'): u('u'), # Ū -> u u('\u016b'): u('u'), # ū -> u u('\u016c'): u('u'), # Ŭ -> u u('\u016d'): u('u'), # ŭ -> u u('\u016e'): u('u'), # Ů -> u u('\u016f'): u('u'), # ů -> u u('\u0170'): u('u'), # Ű -> u u('\u0171'): u('u'), # ű -> u u('\u0172'): u('u'), # Ų -> u u('\u0173'): u('u'), # ų -> u u('\u0174'): 
u('w'), # Ŵ -> w u('\u0175'): u('w'), # ŵ -> w u('\u0176'): u('y'), # Ŷ -> y u('\u0177'): u('y'), # ŷ -> y u('\u0178'): u('y'), # Ÿ -> y u('\u0179'): u('z'), # Ź -> z u('\u017a'): u('z'), # ź -> z u('\u017b'): u('z'), # Ż -> z u('\u017c'): u('z'), # ż -> z u('\u017d'): u('z'), # Ž -> z u('\u017e'): u('z'), # ž -> z u('\u0180'): u('b'), # ƀ -> b u('\u0181'): u('b'), # Ɓ -> b u('\u0182'): u('b'), # Ƃ -> b u('\u0183'): u('b'), # ƃ -> b u('\u0187'): u('c'), # Ƈ -> c u('\u0188'): u('c'), # ƈ -> c u('\u0189'): u('d'), # Ɖ -> d u('\u018a'): u('d'), # Ɗ -> d u('\u018b'): u('d'), # Ƌ -> d u('\u018c'): u('d'), # ƌ -> d u('\u018e'): u('e'), # Ǝ -> e u('\u018f'): u('e'), # Ə -> e u('\u0191'): u('f'), # Ƒ -> f u('\u0192'): u('f'), # ƒ -> f u('\u0193'): u('g'), # Ɠ -> g u('\u0197'): u('i'), # Ɨ -> i u('\u0198'): u('k'), # Ƙ -> k u('\u0199'): u('k'), # ƙ -> k u('\u019a'): u('l'), # ƚ -> l u('\u019d'): u('n'), # Ɲ -> n u('\u019e'): u('n'), # ƞ -> n u('\u019f'): u('o'), # Ɵ -> o u('\u01a0'): u('o'), # Ơ -> o u('\u01a1'): u('o'), # ơ -> o u('\u01a4'): u('p'), # Ƥ -> p u('\u01a5'): u('p'), # ƥ -> p u('\u01ab'): u('t'), # ƫ -> t u('\u01ac'): u('t'), # Ƭ -> t u('\u01ad'): u('t'), # ƭ -> t u('\u01ae'): u('t'), # Ʈ -> t u('\u01af'): u('u'), # Ư -> u u('\u01b0'): u('u'), # ư -> u u('\u01b2'): u('v'), # Ʋ -> v u('\u01b3'): u('y'), # Ƴ -> y u('\u01b4'): u('y'), # ƴ -> y u('\u01b5'): u('z'), # Ƶ -> z u('\u01b6'): u('z'), # ƶ -> z u('\u01ba'): u('z'), # ƺ -> z u('\u01cd'): u('a'), # Ǎ -> a u('\u01ce'): u('a'), # ǎ -> a u('\u01cf'): u('i'), # Ǐ -> i u('\u01d0'): u('i'), # ǐ -> i u('\u01d1'): u('o'), # Ǒ -> o u('\u01d2'): u('o'), # ǒ -> o u('\u01d3'): u('u'), # Ǔ -> u u('\u01d4'): u('u'), # ǔ -> u u('\u01d5'): u('u'), # Ǖ -> u u('\u01d6'): u('u'), # ǖ -> u u('\u01d7'): u('u'), # Ǘ -> u u('\u01d8'): u('u'), # ǘ -> u u('\u01d9'): u('u'), # Ǚ -> u u('\u01da'): u('u'), # ǚ -> u u('\u01db'): u('u'), # Ǜ -> u u('\u01dc'): u('u'), # ǜ -> u u('\u01dd'): u('e'), # ǝ -> e u('\u01de'): u('a'), # Ǟ -> a 
u('\u01df'): u('a'), # ǟ -> a u('\u01e0'): u('a'), # Ǡ -> a u('\u01e1'): u('a'), # ǡ -> a u('\u01e2'): u('a'), # Ǣ -> a u('\u01e3'): u('a'), # ǣ -> a u('\u01e4'): u('g'), # Ǥ -> g u('\u01e5'): u('g'), # ǥ -> g u('\u01e6'): u('g'), # Ǧ -> g u('\u01e7'): u('g'), # ǧ -> g u('\u01e8'): u('k'), # Ǩ -> k u('\u01e9'): u('k'), # ǩ -> k u('\u01ea'): u('o'), # Ǫ -> o u('\u01eb'): u('o'), # ǫ -> o u('\u01ec'): u('o'), # Ǭ -> o u('\u01ed'): u('o'), # ǭ -> o u('\u01ee'): u('z'), # Ǯ -> z u('\u01ef'): u('z'), # ǯ -> z u('\u01f0'): u('j'), # ǰ -> j u('\u01f4'): u('g'), # Ǵ -> g u('\u01f5'): u('g'), # ǵ -> g u('\u01f8'): u('n'), # Ǹ -> n u('\u01f9'): u('n'), # ǹ -> n u('\u01fa'): u('a'), # Ǻ -> a u('\u01fb'): u('a'), # ǻ -> a u('\u01fc'): u('a'), # Ǽ -> a u('\u01fd'): u('a'), # ǽ -> a u('\u01fe'): u('o'), # Ǿ -> o u('\u01ff'): u('o'), # ǿ -> o u('\u0200'): u('a'), # Ȁ -> a u('\u0201'): u('a'), # ȁ -> a u('\u0202'): u('a'), # Ȃ -> a u('\u0203'): u('a'), # ȃ -> a u('\u0204'): u('e'), # Ȅ -> e u('\u0205'): u('e'), # ȅ -> e u('\u0206'): u('e'), # Ȇ -> e u('\u0207'): u('e'), # ȇ -> e u('\u0208'): u('i'), # Ȉ -> i u('\u0209'): u('i'), # ȉ -> i u('\u020a'): u('i'), # Ȋ -> i u('\u020b'): u('i'), # ȋ -> i u('\u020c'): u('o'), # Ȍ -> o u('\u020d'): u('o'), # ȍ -> o u('\u020e'): u('o'), # Ȏ -> o u('\u020f'): u('o'), # ȏ -> o u('\u0210'): u('r'), # Ȑ -> r u('\u0211'): u('r'), # ȑ -> r u('\u0212'): u('r'), # Ȓ -> r u('\u0213'): u('r'), # ȓ -> r u('\u0214'): u('u'), # Ȕ -> u u('\u0215'): u('u'), # ȕ -> u u('\u0216'): u('u'), # Ȗ -> u u('\u0217'): u('u'), # ȗ -> u u('\u0218'): u('s'), # Ș -> s u('\u0219'): u('s'), # ș -> s u('\u021a'): u('t'), # Ț -> t u('\u021b'): u('t'), # ț -> t u('\u021e'): u('h'), # Ȟ -> h u('\u021f'): u('h'), # ȟ -> h u('\u0220'): u('n'), # Ƞ -> n u('\u0221'): u('d'), # ȡ -> d u('\u0224'): u('z'), # Ȥ -> z u('\u0225'): u('z'), # ȥ -> z u('\u0226'): u('a'), # Ȧ -> a u('\u0227'): u('a'), # ȧ -> a u('\u0228'): u('e'), # Ȩ -> e u('\u0229'): u('e'), # ȩ -> e u('\u022a'): 
u('o'), # Ȫ -> o u('\u022b'): u('o'), # ȫ -> o u('\u022c'): u('o'), # Ȭ -> o u('\u022d'): u('o'), # ȭ -> o u('\u022e'): u('o'), # Ȯ -> o u('\u022f'): u('o'), # ȯ -> o u('\u0230'): u('o'), # Ȱ -> o u('\u0231'): u('o'), # ȱ -> o u('\u0232'): u('y'), # Ȳ -> y u('\u0233'): u('y'), # ȳ -> y u('\u0234'): u('l'), # ȴ -> l u('\u0235'): u('n'), # ȵ -> n u('\u0236'): u('t'), # ȶ -> t u('\u0237'): u('j'), # ȷ -> j u('\u023a'): u('a'), # Ⱥ -> a u('\u023b'): u('c'), # Ȼ -> c u('\u023c'): u('c'), # ȼ -> c u('\u023d'): u('l'), # Ƚ -> l u('\u023e'): u('t'), # Ⱦ -> t u('\u0243'): u('b'), # Ƀ -> b u('\u0244'): u('u'), # Ʉ -> u u('\u0246'): u('e'), # Ɇ -> e u('\u0247'): u('e'), # ɇ -> e u('\u0248'): u('j'), # Ɉ -> j u('\u0249'): u('j'), # ɉ -> j u('\u024a'): u('q'), # Ɋ -> q u('\u024b'): u('q'), # ɋ -> q u('\u024c'): u('r'), # Ɍ -> r u('\u024d'): u('r'), # ɍ -> r u('\u024e'): u('y'), # Ɏ -> y u('\u024f'): u('y'), # ɏ -> y u('\u0253'): u('b'), # ɓ -> b u('\u0255'): u('c'), # ɕ -> c u('\u0256'): u('d'), # ɖ -> d u('\u0257'): u('d'), # ɗ -> d u('\u025a'): u('e'), # ɚ -> e u('\u025d'): u('e'), # ɝ -> e u('\u025f'): u('j'), # ɟ -> j u('\u0260'): u('g'), # ɠ -> g u('\u0268'): u('i'), # ɨ -> i u('\u026b'): u('l'), # ɫ -> l u('\u026c'): u('l'), # ɬ -> l u('\u026d'): u('l'), # ɭ -> l u('\u0271'): u('m'), # ɱ -> m u('\u0272'): u('n'), # ɲ -> n u('\u0273'): u('n'), # ɳ -> n u('\u0275'): u('o'), # ɵ -> o u('\u027c'): u('r'), # ɼ -> r u('\u027d'): u('r'), # ɽ -> r u('\u027e'): u('r'), # ɾ -> r u('\u0282'): u('s'), # ʂ -> s u('\u0284'): u('j'), # ʄ -> j u('\u0288'): u('t'), # ʈ -> t u('\u0289'): u('u'), # ʉ -> u u('\u028b'): u('v'), # ʋ -> v u('\u028f'): u('y'), # ʏ -> y u('\u0290'): u('z'), # ʐ -> z u('\u0291'): u('z'), # ʑ -> z u('\u029d'): u('j'), # ʝ -> j u('\u02a0'): u('q'), # ʠ -> q u('\u0303'): u('p'), # ̃ -> p u('\u0308'): u('t'), # ̈ -> t u('\u030a'): u('y'), # ̊ -> y u('\u030c'): u('j'), # ̌ -> j u('\u0323'): u('l'), # ̣ -> l u('\u0329'): u('s'), # ̩ -> s u('\u0331'): u('h'), # ̱ -> h 
u('\u1d6c'): u('b'), # ᵬ -> b u('\u1d6d'): u('d'), # ᵭ -> d u('\u1d6e'): u('f'), # ᵮ -> f u('\u1d72'): u('r'), # ᵲ -> r u('\u1d73'): u('r'), # ᵳ -> r u('\u1d75'): u('t'), # ᵵ -> t u('\u1e00'): u('a'), # Ḁ -> a u('\u1e01'): u('a'), # ḁ -> a u('\u1e02'): u('b'), # Ḃ -> b u('\u1e03'): u('b'), # ḃ -> b u('\u1e04'): u('b'), # Ḅ -> b u('\u1e05'): u('b'), # ḅ -> b u('\u1e06'): u('b'), # Ḇ -> b u('\u1e07'): u('b'), # ḇ -> b u('\u1e08'): u('c'), # Ḉ -> c u('\u1e09'): u('c'), # ḉ -> c u('\u1e0a'): u('d'), # Ḋ -> d u('\u1e0b'): u('d'), # ḋ -> d u('\u1e0c'): u('d'), # Ḍ -> d u('\u1e0d'): u('d'), # ḍ -> d u('\u1e0e'): u('d'), # Ḏ -> d u('\u1e0f'): u('d'), # ḏ -> d u('\u1e10'): u('d'), # Ḑ -> d u('\u1e11'): u('d'), # ḑ -> d u('\u1e12'): u('d'), # Ḓ -> d u('\u1e13'): u('d'), # ḓ -> d u('\u1e14'): u('e'), # Ḕ -> e u('\u1e15'): u('e'), # ḕ -> e u('\u1e16'): u('e'), # Ḗ -> e u('\u1e17'): u('e'), # ḗ -> e u('\u1e18'): u('e'), # Ḙ -> e u('\u1e19'): u('e'), # ḙ -> e u('\u1e1a'): u('e'), # Ḛ -> e u('\u1e1b'): u('e'), # ḛ -> e u('\u1e1c'): u('e'), # Ḝ -> e u('\u1e1d'): u('e'), # ḝ -> e u('\u1e1e'): u('f'), # Ḟ -> f u('\u1e1f'): u('f'), # ḟ -> f u('\u1e20'): u('g'), # Ḡ -> g u('\u1e21'): u('g'), # ḡ -> g u('\u1e22'): u('h'), # Ḣ -> h u('\u1e23'): u('h'), # ḣ -> h u('\u1e24'): u('h'), # Ḥ -> h u('\u1e25'): u('h'), # ḥ -> h u('\u1e26'): u('h'), # Ḧ -> h u('\u1e27'): u('h'), # ḧ -> h u('\u1e28'): u('h'), # Ḩ -> h u('\u1e29'): u('h'), # ḩ -> h u('\u1e2a'): u('h'), # Ḫ -> h u('\u1e2b'): u('h'), # ḫ -> h u('\u1e2c'): u('i'), # Ḭ -> i u('\u1e2d'): u('i'), # ḭ -> i u('\u1e2e'): u('i'), # Ḯ -> i u('\u1e2f'): u('i'), # ḯ -> i u('\u1e30'): u('k'), # Ḱ -> k u('\u1e31'): u('k'), # ḱ -> k u('\u1e32'): u('k'), # Ḳ -> k u('\u1e33'): u('k'), # ḳ -> k u('\u1e34'): u('k'), # Ḵ -> k u('\u1e35'): u('k'), # ḵ -> k u('\u1e36'): u('l'), # Ḷ -> l u('\u1e37'): u('l'), # ḷ -> l u('\u1e38'): u('l'), # Ḹ -> l u('\u1e39'): u('l'), # ḹ -> l u('\u1e3a'): u('l'), # Ḻ -> l u('\u1e3b'): u('l'), # ḻ -> l u('\u1e3c'): 
u('l'), # Ḽ -> l u('\u1e3d'): u('l'), # ḽ -> l u('\u1e3e'): u('m'), # Ḿ -> m u('\u1e3f'): u('m'), # ḿ -> m u('\u1e40'): u('m'), # Ṁ -> m u('\u1e41'): u('m'), # ṁ -> m u('\u1e42'): u('m'), # Ṃ -> m u('\u1e43'): u('m'), # ṃ -> m u('\u1e44'): u('n'), # Ṅ -> n u('\u1e45'): u('n'), # ṅ -> n u('\u1e46'): u('n'), # Ṇ -> n u('\u1e47'): u('n'), # ṇ -> n u('\u1e48'): u('n'), # Ṉ -> n u('\u1e49'): u('n'), # ṉ -> n u('\u1e4a'): u('n'), # Ṋ -> n u('\u1e4b'): u('n'), # ṋ -> n u('\u1e4c'): u('o'), # Ṍ -> o u('\u1e4d'): u('o'), # ṍ -> o u('\u1e4e'): u('o'), # Ṏ -> o u('\u1e4f'): u('o'), # ṏ -> o u('\u1e50'): u('o'), # Ṑ -> o u('\u1e51'): u('o'), # ṑ -> o u('\u1e52'): u('o'), # Ṓ -> o u('\u1e53'): u('o'), # ṓ -> o u('\u1e54'): u('p'), # Ṕ -> p u('\u1e55'): u('p'), # ṕ -> p u('\u1e56'): u('p'), # Ṗ -> p u('\u1e57'): u('p'), # ṗ -> p u('\u1e58'): u('r'), # Ṙ -> r u('\u1e59'): u('r'), # ṙ -> r u('\u1e5a'): u('r'), # Ṛ -> r u('\u1e5b'): u('r'), # ṛ -> r u('\u1e5c'): u('r'), # Ṝ -> r u('\u1e5d'): u('r'), # ṝ -> r u('\u1e5e'): u('r'), # Ṟ -> r u('\u1e5f'): u('r'), # ṟ -> r u('\u1e60'): u('s'), # Ṡ -> s u('\u1e61'): u('s'), # ṡ -> s u('\u1e62'): u('s'), # Ṣ -> s u('\u1e63'): u('s'), # ṣ -> s u('\u1e64'): u('s'), # Ṥ -> s u('\u1e65'): u('s'), # ṥ -> s u('\u1e66'): u('s'), # Ṧ -> s u('\u1e67'): u('s'), # ṧ -> s u('\u1e68'): u('s'), # Ṩ -> s u('\u1e69'): u('s'), # ṩ -> s u('\u1e6a'): u('t'), # Ṫ -> t u('\u1e6b'): u('t'), # ṫ -> t u('\u1e6c'): u('t'), # Ṭ -> t u('\u1e6d'): u('t'), # ṭ -> t u('\u1e6e'): u('t'), # Ṯ -> t u('\u1e6f'): u('t'), # ṯ -> t u('\u1e70'): u('t'), # Ṱ -> t u('\u1e71'): u('t'), # ṱ -> t u('\u1e72'): u('u'), # Ṳ -> u u('\u1e73'): u('u'), # ṳ -> u u('\u1e74'): u('u'), # Ṵ -> u u('\u1e75'): u('u'), # ṵ -> u u('\u1e76'): u('u'), # Ṷ -> u u('\u1e77'): u('u'), # ṷ -> u u('\u1e78'): u('u'), # Ṹ -> u u('\u1e79'): u('u'), # ṹ -> u u('\u1e7a'): u('u'), # Ṻ -> u u('\u1e7b'): u('u'), # ṻ -> u u('\u1e7c'): u('v'), # Ṽ -> v u('\u1e7d'): u('v'), # ṽ -> v u('\u1e7e'): u('v'), # Ṿ -> v 
u('\u1e7f'): u('v'), # ṿ -> v u('\u1e80'): u('w'), # Ẁ -> w u('\u1e81'): u('w'), # ẁ -> w u('\u1e82'): u('w'), # Ẃ -> w u('\u1e83'): u('w'), # ẃ -> w u('\u1e84'): u('w'), # Ẅ -> w u('\u1e85'): u('w'), # ẅ -> w u('\u1e86'): u('w'), # Ẇ -> w u('\u1e87'): u('w'), # ẇ -> w u('\u1e88'): u('w'), # Ẉ -> w u('\u1e89'): u('w'), # ẉ -> w u('\u1e8a'): u('x'), # Ẋ -> x u('\u1e8b'): u('x'), # ẋ -> x u('\u1e8c'): u('x'), # Ẍ -> x u('\u1e8d'): u('x'), # ẍ -> x u('\u1e8e'): u('y'), # Ẏ -> y u('\u1e8f'): u('y'), # ẏ -> y u('\u1e90'): u('z'), # Ẑ -> z u('\u1e91'): u('z'), # ẑ -> z u('\u1e92'): u('z'), # Ẓ -> z u('\u1e93'): u('z'), # ẓ -> z u('\u1e94'): u('z'), # Ẕ -> z u('\u1e95'): u('z'), # ẕ -> z u('\u1e96'): u('h'), # ẖ -> h u('\u1e97'): u('t'), # ẗ -> t u('\u1e98'): u('w'), # ẘ -> w u('\u1e99'): u('y'), # ẙ -> y u('\u1e9a'): u('a'), # ẚ -> a u('\u1e9b'): u('s'), # ẛ -> s u('\u1ea0'): u('a'), # Ạ -> a u('\u1ea1'): u('a'), # ạ -> a u('\u1ea2'): u('a'), # Ả -> a u('\u1ea3'): u('a'), # ả -> a u('\u1ea4'): u('a'), # Ấ -> a u('\u1ea5'): u('a'), # ấ -> a u('\u1ea6'): u('a'), # Ầ -> a u('\u1ea7'): u('a'), # ầ -> a u('\u1ea8'): u('a'), # Ẩ -> a u('\u1ea9'): u('a'), # ẩ -> a u('\u1eaa'): u('a'), # Ẫ -> a u('\u1eab'): u('a'), # ẫ -> a u('\u1eac'): u('a'), # Ậ -> a u('\u1ead'): u('a'), # ậ -> a u('\u1eae'): u('a'), # Ắ -> a u('\u1eaf'): u('a'), # ắ -> a u('\u1eb0'): u('a'), # Ằ -> a u('\u1eb1'): u('a'), # ằ -> a u('\u1eb2'): u('a'), # Ẳ -> a u('\u1eb3'): u('a'), # ẳ -> a u('\u1eb4'): u('a'), # Ẵ -> a u('\u1eb5'): u('a'), # ẵ -> a u('\u1eb6'): u('a'), # Ặ -> a u('\u1eb7'): u('a'), # ặ -> a u('\u1eb8'): u('e'), # Ẹ -> e u('\u1eb9'): u('e'), # ẹ -> e u('\u1eba'): u('e'), # Ẻ -> e u('\u1ebb'): u('e'), # ẻ -> e u('\u1ebc'): u('e'), # Ẽ -> e u('\u1ebd'): u('e'), # ẽ -> e u('\u1ebe'): u('e'), # Ế -> e u('\u1ebf'): u('e'), # ế -> e u('\u1ec0'): u('e'), # Ề -> e u('\u1ec1'): u('e'), # ề -> e u('\u1ec2'): u('e'), # Ể -> e u('\u1ec3'): u('e'), # ể -> e u('\u1ec4'): u('e'), # Ễ -> e u('\u1ec5'): 
u('e'), # ễ -> e u('\u1ec6'): u('e'), # Ệ -> e u('\u1ec7'): u('e'), # ệ -> e u('\u1ec8'): u('i'), # Ỉ -> i u('\u1ec9'): u('i'), # ỉ -> i u('\u1eca'): u('i'), # Ị -> i u('\u1ecb'): u('i'), # ị -> i u('\u1ecc'): u('o'), # Ọ -> o u('\u1ecd'): u('o'), # ọ -> o u('\u1ece'): u('o'), # Ỏ -> o u('\u1ecf'): u('o'), # ỏ -> o u('\u1ed0'): u('o'), # Ố -> o u('\u1ed1'): u('o'), # ố -> o u('\u1ed2'): u('o'), # Ồ -> o u('\u1ed3'): u('o'), # ồ -> o u('\u1ed4'): u('o'), # Ổ -> o u('\u1ed5'): u('o'), # ổ -> o u('\u1ed6'): u('o'), # Ỗ -> o u('\u1ed7'): u('o'), # ỗ -> o u('\u1ed8'): u('o'), # Ộ -> o u('\u1ed9'): u('o'), # ộ -> o u('\u1eda'): u('o'), # Ớ -> o u('\u1edb'): u('o'), # ớ -> o u('\u1edc'): u('o'), # Ờ -> o u('\u1edd'): u('o'), # ờ -> o u('\u1ede'): u('o'), # Ở -> o u('\u1edf'): u('o'), # ở -> o u('\u1ee0'): u('o'), # Ỡ -> o u('\u1ee1'): u('o'), # ỡ -> o u('\u1ee2'): u('o'), # Ợ -> o u('\u1ee3'): u('o'), # ợ -> o u('\u1ee4'): u('u'), # Ụ -> u u('\u1ee5'): u('u'), # ụ -> u u('\u1ee6'): u('u'), # Ủ -> u u('\u1ee7'): u('u'), # ủ -> u u('\u1ee8'): u('u'), # Ứ -> u u('\u1ee9'): u('u'), # ứ -> u u('\u1eea'): u('u'), # Ừ -> u u('\u1eeb'): u('u'), # ừ -> u u('\u1eec'): u('u'), # Ử -> u u('\u1eed'): u('u'), # ử -> u u('\u1eee'): u('u'), # Ữ -> u u('\u1eef'): u('u'), # ữ -> u u('\u1ef0'): u('u'), # Ự -> u u('\u1ef1'): u('u'), # ự -> u u('\u1ef2'): u('y'), # Ỳ -> y u('\u1ef3'): u('y'), # ỳ -> y u('\u1ef4'): u('y'), # Ỵ -> y u('\u1ef5'): u('y'), # ỵ -> y u('\u1ef6'): u('y'), # Ỷ -> y u('\u1ef7'): u('y'), # ỷ -> y u('\u1ef8'): u('y'), # Ỹ -> y u('\u1ef9'): u('y'), # ỹ -> y u('\u2c60'): u('l'), # Ⱡ -> l u('\u2c61'): u('l'), # ⱡ -> l u('\u2c62'): u('l'), # Ɫ -> l u('\u2c63'): u('p'), # Ᵽ -> p u('\u2c64'): u('r'), # Ɽ -> r u('\u2c65'): u('a'), # ⱥ -> a u('\u2c66'): u('t'), # ⱦ -> t u('\u2c67'): u('h'), # Ⱨ -> h u('\u2c68'): u('h'), # ⱨ -> h u('\u2c69'): u('k'), # Ⱪ -> k u('\u2c6a'): u('k'), # ⱪ -> k u('\u2c6b'): u('z'), # Ⱬ -> z u('\u2c6c'): u('z'), # ⱬ -> z u('\uff10'): u('0'), # 0 -> 0 
u('\uff11'): u('1'), # 1 -> 1 u('\uff12'): u('2'), # 2 -> 2 u('\uff13'): u('3'), # 3 -> 3 u('\uff14'): u('4'), # 4 -> 4 u('\uff15'): u('5'), # 5 -> 5 u('\uff16'): u('6'), # 6 -> 6 u('\uff17'): u('7'), # 7 -> 7 u('\uff18'): u('8'), # 8 -> 8 u('\uff19'): u('9'), # 9 -> 9 u('\uff21'): u('A'), # A -> A u('\uff22'): u('B'), # B -> B u('\uff23'): u('C'), # C -> C u('\uff24'): u('D'), # D -> D u('\uff25'): u('E'), # E -> E u('\uff26'): u('F'), # F -> F u('\uff27'): u('G'), # G -> G u('\uff28'): u('H'), # H -> H u('\uff29'): u('I'), # I -> I u('\uff2a'): u('J'), # J -> J u('\uff2b'): u('K'), # K -> K u('\uff2c'): u('L'), # L -> L u('\uff2d'): u('M'), # M -> M u('\uff2e'): u('N'), # N -> N u('\uff2f'): u('O'), # O -> O u('\uff30'): u('P'), # P -> P u('\uff31'): u('Q'), # Q -> Q u('\uff32'): u('R'), # R -> R u('\uff33'): u('S'), # S -> S u('\uff34'): u('T'), # T -> T u('\uff35'): u('U'), # U -> U u('\uff36'): u('V'), # V -> V u('\uff37'): u('W'), # W -> W u('\uff38'): u('X'), # X -> X u('\uff39'): u('Y'), # Y -> Y u('\uff3a'): u('Z'), # Z -> Z u('\uff41'): u('a'), # a -> a u('\uff42'): u('b'), # b -> b u('\uff43'): u('c'), # c -> c u('\uff44'): u('d'), # d -> d u('\uff45'): u('e'), # e -> e u('\uff46'): u('f'), # f -> f u('\uff47'): u('g'), # g -> g u('\uff48'): u('h'), # h -> h u('\uff49'): u('i'), # i -> i u('\uff4a'): u('j'), # j -> j u('\uff4b'): u('k'), # k -> k u('\uff4c'): u('l'), # l -> l u('\uff4d'): u('m'), # m -> m u('\uff4e'): u('n'), # n -> n u('\uff4f'): u('o'), # o -> o u('\uff50'): u('p'), # p -> p u('\uff51'): u('q'), # q -> q u('\uff52'): u('r'), # r -> r u('\uff53'): u('s'), # s -> s u('\uff54'): u('t'), # t -> t u('\uff55'): u('u'), # u -> u u('\uff56'): u('v'), # v -> v u('\uff57'): u('w'), # w -> w u('\uff58'): u('x'), # x -> x u('\uff59'): u('y'), # y -> y u('\uff5a'): u('z'), # z -> z } # The unicode.translate() method actually requires a dictionary mapping # character *numbers* to characters, for some reason. 
accent_map = dict((ord(k), v) for k, v in iteritems(accent_map)) # This Sphinx charset table taken from http://speeple.com/unicode-maps.txt default_charset = """ ################################################## # Latin # A U+00C0->a, U+00C1->a, U+00C2->a, U+00C3->a, U+00C4->a, U+00C5->a, U+00E0->a, U+00E1->a, U+00E2->a, U+00E3->a, U+00E4->a, U+00E5->a, U+0100->a, U+0101->a, U+0102->a, U+0103->a, U+010300->a, U+0104->a, U+0105->a, U+01CD->a, U+01CE->a, U+01DE->a, U+01DF->a, U+01E0->a, U+01E1->a, U+01FA->a, U+01FB->a, U+0200->a, U+0201->a, U+0202->a, U+0203->a, U+0226->a, U+0227->a, U+023A->a, U+0250->a, U+04D0->a, U+04D1->a, U+1D2C->a, U+1D43->a, U+1D44->a, U+1D8F->a, U+1E00->a, U+1E01->a, U+1E9A->a, U+1EA0->a, U+1EA1->a, U+1EA2->a, U+1EA3->a, U+1EA4->a, U+1EA5->a, U+1EA6->a, U+1EA7->a, U+1EA8->a, U+1EA9->a, U+1EAA->a, U+1EAB->a, U+1EAC->a, U+1EAD->a, U+1EAE->a, U+1EAF->a, U+1EB0->a, U+1EB1->a, U+1EB2->a, U+1EB3->a, U+1EB4->a, U+1EB5->a, U+1EB6->a, U+1EB7->a, U+2090->a, U+2C65->a # B U+0180->b, U+0181->b, U+0182->b, U+0183->b, U+0243->b, U+0253->b, U+0299->b, U+16D2->b, U+1D03->b, U+1D2E->b, U+1D2F->b, U+1D47->b, U+1D6C->b, U+1D80->b, U+1E02->b, U+1E03->b, U+1E04->b, U+1E05->b, U+1E06->b, U+1E07->b # C U+00C7->c, U+00E7->c, U+0106->c, U+0107->c, U+0108->c, U+0109->c, U+010A->c, U+010B->c, U+010C->c, U+010D->c, U+0187->c, U+0188->c, U+023B->c, U+023C->c, U+0255->c, U+0297->c, U+1D9C->c, U+1D9D->c, U+1E08->c, U+1E09->c, U+212D->c, U+2184->c # D U+010E->d, U+010F->d, U+0110->d, U+0111->d, U+0189->d, U+018A->d, U+018B->d, U+018C->d, U+01C5->d, U+01F2->d, U+0221->d, U+0256->d, U+0257->d, U+1D05->d, U+1D30->d, U+1D48->d, U+1D6D->d, U+1D81->d, U+1D91->d, U+1E0A->d, U+1E0B->d, U+1E0C->d, U+1E0D->d, U+1E0E->d, U+1E0F->d, U+1E10->d, U+1E11->d, U+1E12->d, U+1E13->d # E U+00C8->e, U+00C9->e, U+00CA->e, U+00CB->e, U+00E8->e, U+00E9->e, U+00EA->e, U+00EB->e, U+0112->e, U+0113->e, U+0114->e, U+0115->e, U+0116->e, U+0117->e, U+0118->e, U+0119->e, U+011A->e, U+011B->e, U+018E->e, 
U+0190->e, U+01DD->e, U+0204->e, U+0205->e, U+0206->e, U+0207->e, U+0228->e, U+0229->e, U+0246->e, U+0247->e, U+0258->e, U+025B->e, U+025C->e, U+025D->e, U+025E->e, U+029A->e, U+1D07->e, U+1D08->e, U+1D31->e, U+1D32->e, U+1D49->e, U+1D4B->e, U+1D4C->e, U+1D92->e, U+1D93->e, U+1D94->e, U+1D9F->e, U+1E14->e, U+1E15->e, U+1E16->e, U+1E17->e, U+1E18->e, U+1E19->e, U+1E1A->e, U+1E1B->e, U+1E1C->e, U+1E1D->e, U+1EB8->e, U+1EB9->e, U+1EBA->e, U+1EBB->e, U+1EBC->e, U+1EBD->e, U+1EBE->e, U+1EBF->e, U+1EC0->e, U+1EC1->e, U+1EC2->e, U+1EC3->e, U+1EC4->e, U+1EC5->e, U+1EC6->e, U+1EC7->e, U+2091->e # F U+0191->f, U+0192->f, U+1D6E->f, U+1D82->f, U+1DA0->f, U+1E1E->f, U+1E1F->f # G U+011C->g, U+011D->g, U+011E->g, U+011F->g, U+0120->g, U+0121->g, U+0122->g, U+0123->g, U+0193->g, U+01E4->g, U+01E5->g, U+01E6->g, U+01E7->g, U+01F4->g, U+01F5->g, U+0260->g, U+0261->g, U+0262->g, U+029B->g, U+1D33->g, U+1D4D->g, U+1D77->g, U+1D79->g, U+1D83->g, U+1DA2->g, U+1E20->g, U+1E21->g # H U+0124->h, U+0125->h, U+0126->h, U+0127->h, U+021E->h, U+021F->h, U+0265->h, U+0266->h, U+029C->h, U+02AE->h, U+02AF->h, U+02B0->h, U+02B1->h, U+1D34->h, U+1DA3->h, U+1E22->h, U+1E23->h, U+1E24->h, U+1E25->h, U+1E26->h, U+1E27->h, U+1E28->h, U+1E29->h, U+1E2A->h, U+1E2B->h, U+1E96->h, U+210C->h, U+2C67->h, U+2C68->h, U+2C75->h, U+2C76->h # I U+00CC->i, U+00CD->i, U+00CE->i, U+00CF->i, U+00EC->i, U+00ED->i, U+00EE->i, U+00EF->i, U+010309->i, U+0128->i, U+0129->i, U+012A->i, U+012B->i, U+012C->i, U+012D->i, U+012E->i, U+012F->i, U+0130->i, U+0131->i, U+0197->i, U+01CF->i, U+01D0->i, U+0208->i, U+0209->i, U+020A->i, U+020B->i, U+0268->i, U+026A->i, U+040D->i, U+0418->i, U+0419->i, U+0438->i, U+0439->i, U+0456->i, U+1D09->i, U+1D35->i, U+1D4E->i, U+1D62->i, U+1D7B->i, U+1D96->i, U+1DA4->i, U+1DA6->i, U+1DA7->i, U+1E2C->i, U+1E2D->i, U+1E2E->i, U+1E2F->i, U+1EC8->i, U+1EC9->i, U+1ECA->i, U+1ECB->i, U+2071->i, U+2111->i # J U+0134->j, U+0135->j, U+01C8->j, U+01CB->j, U+01F0->j, U+0237->j, U+0248->j, U+0249->j, 
U+025F->j, U+0284->j, U+029D->j, U+02B2->j, U+1D0A->j, U+1D36->j, U+1DA1->j, U+1DA8->j # K U+0136->k, U+0137->k, U+0198->k, U+0199->k, U+01E8->k, U+01E9->k, U+029E->k, U+1D0B->k, U+1D37->k, U+1D4F->k, U+1D84->k, U+1E30->k, U+1E31->k, U+1E32->k, U+1E33->k, U+1E34->k, U+1E35->k, U+2C69->k, U+2C6A->k # L U+0139->l, U+013A->l, U+013B->l, U+013C->l, U+013D->l, U+013E->l, U+013F->l, U+0140->l, U+0141->l, U+0142->l, U+019A->l, U+01C8->l, U+0234->l, U+023D->l, U+026B->l, U+026C->l, U+026D->l, U+029F->l, U+02E1->l, U+1D0C->l, U+1D38->l, U+1D85->l, U+1DA9->l, U+1DAA->l, U+1DAB->l, U+1E36->l, U+1E37->l, U+1E38->l, U+1E39->l, U+1E3A->l, U+1E3B->l, U+1E3C->l, U+1E3D->l, U+2C60->l, U+2C61->l, U+2C62->l # M U+019C->m, U+026F->m, U+0270->m, U+0271->m, U+1D0D->m, U+1D1F->m, U+1D39->m, U+1D50->m, U+1D5A->m, U+1D6F->m, U+1D86->m, U+1DAC->m, U+1DAD->m, U+1E3E->m, U+1E3F->m, U+1E40->m, U+1E41->m, U+1E42->m, U+1E43->m # N U+00D1->n, U+00F1->n, U+0143->n, U+0144->n, U+0145->n, U+0146->n, U+0147->n, U+0148->n, U+0149->n, U+019D->n, U+019E->n, U+01CB->n, U+01F8->n, U+01F9->n, U+0220->n, U+0235->n, U+0272->n, U+0273->n, U+0274->n, U+1D0E->n, U+1D3A->n, U+1D3B->n, U+1D70->n, U+1D87->n, U+1DAE->n, U+1DAF->n, U+1DB0->n, U+1E44->n, U+1E45->n, U+1E46->n, U+1E47->n, U+1E48->n, U+1E49->n, U+1E4A->n, U+1E4B->n, U+207F->n # O U+00D2->o, U+00D3->o, U+00D4->o, U+00D5->o, U+00D6->o, U+00D8->o, U+00F2->o, U+00F3->o, U+00F4->o, U+00F5->o, U+00F6->o, U+00F8->o, U+01030F->o, U+014C->o, U+014D->o, U+014E->o, U+014F->o, U+0150->o, U+0151->o, U+0186->o, U+019F->o, U+01A0->o, U+01A1->o, U+01D1->o, U+01D2->o, U+01EA->o, U+01EB->o, U+01EC->o, U+01ED->o, U+01FE->o, U+01FF->o, U+020C->o, U+020D->o, U+020E->o, U+020F->o, U+022A->o, U+022B->o, U+022C->o, U+022D->o, U+022E->o, U+022F->o, U+0230->o, U+0231->o, U+0254->o, U+0275->o, U+043E->o, U+04E6->o, U+04E7->o, U+04E8->o, U+04E9->o, U+04EA->o, U+04EB->o, U+1D0F->o, U+1D10->o, U+1D11->o, U+1D12->o, U+1D13->o, U+1D16->o, U+1D17->o, U+1D3C->o, U+1D52->o, U+1D53->o, 
U+1D54->o, U+1D55->o, U+1D97->o, U+1DB1->o, U+1E4C->o, U+1E4D->o, U+1E4E->o, U+1E4F->o, U+1E50->o, U+1E51->o, U+1E52->o, U+1E53->o, U+1ECC->o, U+1ECD->o, U+1ECE->o, U+1ECF->o, U+1ED0->o, U+1ED1->o, U+1ED2->o, U+1ED3->o, U+1ED4->o, U+1ED5->o, U+1ED6->o, U+1ED7->o, U+1ED8->o, U+1ED9->o, U+1EDA->o, U+1EDB->o, U+1EDC->o, U+1EDD->o, U+1EDE->o, U+1EDF->o, U+1EE0->o, U+1EE1->o, U+1EE2->o, U+1EE3->o, U+2092->o, U+2C9E->o, U+2C9F->o # P U+01A4->p, U+01A5->p, U+1D18->p, U+1D3E->p, U+1D56->p, U+1D71->p, U+1D7D->p, U+1D88->p, U+1E54->p, U+1E55->p, U+1E56->p, U+1E57->p, U+2C63->p # Q U+024A->q, U+024B->q, U+02A0->q # R U+0154->r, U+0155->r, U+0156->r, U+0157->r, U+0158->r, U+0159->r, U+0210->r, U+0211->r, U+0212->r, U+0213->r, U+024C->r, U+024D->r, U+0279->r, U+027A->r, U+027B->r, U+027C->r, U+027D->r, U+027E->r, U+027F->r, U+0280->r, U+0281->r, U+02B3->r, U+02B4->r, U+02B5->r, U+02B6->r, U+1D19->r, U+1D1A->r, U+1D3F->r, U+1D63->r, U+1D72->r, U+1D73->r, U+1D89->r, U+1DCA->r, U+1E58->r, U+1E59->r, U+1E5A->r, U+1E5B->r, U+1E5C->r, U+1E5D->r, U+1E5E->r, U+1E5F->r, U+211C->r, U+2C64->r # S U+00DF->s, U+015A->s, U+015B->s, U+015C->s, U+015D->s, U+015E->s, U+015F->s, U+0160->s, U+0161->s, U+017F->s, U+0218->s, U+0219->s, U+023F->s, U+0282->s, U+02E2->s, U+1D74->s, U+1D8A->s, U+1DB3->s, U+1E60->s, U+1E61->s, U+1E62->s, U+1E63->s, U+1E64->s, U+1E65->s, U+1E66->s, U+1E67->s, U+1E68->s, U+1E69->s, U+1E9B->s # T U+0162->t, U+0163->t, U+0164->t, U+0165->t, U+0166->t, U+0167->t, U+01AB->t, U+01AC->t, U+01AD->t, U+01AE->t, U+021A->t, U+021B->t, U+0236->t, U+023E->t, U+0287->t, U+0288->t, U+1D1B->t, U+1D40->t, U+1D57->t, U+1D75->t, U+1DB5->t, U+1E6A->t, U+1E6B->t, U+1E6C->t, U+1E6D->t, U+1E6E->t, U+1E6F->t, U+1E70->t, U+1E71->t, U+1E97->t, U+2C66->t # U U+00D9->u, U+00DA->u, U+00DB->u, U+00DC->u, U+00F9->u, U+00FA->u, U+00FB->u, U+00FC->u, U+010316->u, U+0168->u, U+0169->u, U+016A->u, U+016B->u, U+016C->u, U+016D->u, U+016E->u, U+016F->u, U+0170->u, U+0171->u, U+0172->u, U+0173->u, U+01AF->u, 
U+01B0->u, U+01D3->u, U+01D4->u, U+01D5->u, U+01D6->u, U+01D7->u, U+01D8->u, U+01D9->u, U+01DA->u, U+01DB->u, U+01DC->u, U+0214->u, U+0215->u, U+0216->u, U+0217->u, U+0244->u, U+0289->u, U+1D1C->u, U+1D1D->u, U+1D1E->u, U+1D41->u, U+1D58->u, U+1D59->u, U+1D64->u, U+1D7E->u, U+1D99->u, U+1DB6->u, U+1DB8->u, U+1E72->u, U+1E73->u, U+1E74->u, U+1E75->u, U+1E76->u, U+1E77->u, U+1E78->u, U+1E79->u, U+1E7A->u, U+1E7B->u, U+1EE4->u, U+1EE5->u, U+1EE6->u, U+1EE7->u, U+1EE8->u, U+1EE9->u, U+1EEA->u, U+1EEB->u, U+1EEC->u, U+1EED->u, U+1EEE->u, U+1EEF->u, U+1EF0->u, U+1EF1->u # V U+01B2->v, U+0245->v, U+028B->v, U+028C->v, U+1D20->v, U+1D5B->v, U+1D65->v, U+1D8C->v, U+1DB9->v, U+1DBA->v, U+1E7C->v, U+1E7D->v, U+1E7E->v, U+1E7F->v, U+2C74->v # W U+0174->w, U+0175->w, U+028D->w, U+02B7->w, U+1D21->w, U+1D42->w, U+1E80->w, U+1E81->w, U+1E82->w, U+1E83->w, U+1E84->w, U+1E85->w, U+1E86->w, U+1E87->w, U+1E88->w, U+1E89->w, U+1E98->w # X U+02E3->x, U+1D8D->x, U+1E8A->x, U+1E8B->x, U+1E8C->x, U+1E8D->x, U+2093->x # Y U+00DD->y, U+00FD->y, U+00FF->y, U+0176->y, U+0177->y, U+0178->y, U+01B3->y, U+01B4->y, U+0232->y, U+0233->y, U+024E->y, U+024F->y, U+028E->y, U+028F->y, U+02B8->y, U+1E8E->y, U+1E8F->y, U+1E99->y, U+1EF2->y, U+1EF3->y, U+1EF4->y, U+1EF5->y, U+1EF6->y, U+1EF7->y, U+1EF8->y, U+1EF9->y # Z U+0179->z, U+017A->z, U+017B->z, U+017C->z, U+017D->z, U+017E->z, U+01B5->z, U+01B6->z, U+0224->z, U+0225->z, U+0240->z, U+0290->z, U+0291->z, U+1D22->z, U+1D76->z, U+1D8E->z, U+1DBB->z, U+1DBC->z, U+1DBD->z, U+1E90->z, U+1E91->z, U+1E92->z, U+1E93->z, U+1E94->z, U+1E95->z, U+2128->z, U+2C6B->z, U+2C6C->z # Latin Extras: U+00C6->U+00E6, U+01E2->U+00E6, U+01E3->U+00E6, U+01FC->U+00E6, U+01FD->U+00E6, U+1D01->U+00E6, U+1D02->U+00E6, U+1D2D->U+00E6, U+1D46->U+00E6, U+00E6 ################################################## # Arabic U+0622->U+0627, U+0623->U+0627, U+0624->U+0648, U+0625->U+0627, U+0626->U+064A, U+06C0->U+06D5, U+06C2->U+06C1, U+06D3->U+06D2, U+FB50->U+0671, U+FB51->U+0671, 
U+FB52->U+067B, U+FB53->U+067B, U+FB54->U+067B, U+FB56->U+067E, U+FB57->U+067E, U+FB58->U+067E, U+FB5A->U+0680, U+FB5B->U+0680, U+FB5C->U+0680, U+FB5E->U+067A, U+FB5F->U+067A, U+FB60->U+067A, U+FB62->U+067F, U+FB63->U+067F, U+FB64->U+067F, U+FB66->U+0679, U+FB67->U+0679, U+FB68->U+0679, U+FB6A->U+06A4, U+FB6B->U+06A4, U+FB6C->U+06A4, U+FB6E->U+06A6, U+FB6F->U+06A6, U+FB70->U+06A6, U+FB72->U+0684, U+FB73->U+0684, U+FB74->U+0684, U+FB76->U+0683, U+FB77->U+0683, U+FB78->U+0683, U+FB7A->U+0686, U+FB7B->U+0686, U+FB7C->U+0686, U+FB7E->U+0687, U+FB7F->U+0687, U+FB80->U+0687, U+FB82->U+068D, U+FB83->U+068D, U+FB84->U+068C, U+FB85->U+068C, U+FB86->U+068E, U+FB87->U+068E, U+FB88->U+0688, U+FB89->U+0688, U+FB8A->U+0698, U+FB8B->U+0698, U+FB8C->U+0691, U+FB8D->U+0691, U+FB8E->U+06A9, U+FB8F->U+06A9, U+FB90->U+06A9, U+FB92->U+06AF, U+FB93->U+06AF, U+FB94->U+06AF, U+FB96->U+06B3, U+FB97->U+06B3, U+FB98->U+06B3, U+FB9A->U+06B1, U+FB9B->U+06B1, U+FB9C->U+06B1, U+FB9E->U+06BA, U+FB9F->U+06BA, U+FBA0->U+06BB, U+FBA1->U+06BB, U+FBA2->U+06BB, U+FBA4->U+06C0, U+FBA5->U+06C0, U+FBA6->U+06C1, U+FBA7->U+06C1, U+FBA8->U+06C1, U+FBAA->U+06BE, U+FBAB->U+06BE, U+FBAC->U+06BE, U+FBAE->U+06D2, U+FBAF->U+06D2, U+FBB0->U+06D3, U+FBB1->U+06D3, U+FBD3->U+06AD, U+FBD4->U+06AD, U+FBD5->U+06AD, U+FBD7->U+06C7, U+FBD8->U+06C7, U+FBD9->U+06C6, U+FBDA->U+06C6, U+FBDB->U+06C8, U+FBDC->U+06C8, U+FBDD->U+0677, U+FBDE->U+06CB, U+FBDF->U+06CB, U+FBE0->U+06C5, U+FBE1->U+06C5, U+FBE2->U+06C9, U+FBE3->U+06C9, U+FBE4->U+06D0, U+FBE5->U+06D0, U+FBE6->U+06D0, U+FBE8->U+0649, U+FBFC->U+06CC, U+FBFD->U+06CC, U+FBFE->U+06CC, U+0621, U+0627..U+063A, U+0641..U+064A, U+0660..U+0669, U+066E, U+066F, U+0671..U+06BF, U+06C1, U+06C3..U+06D2, U+06D5, U+06EE..U+06FC, U+06FF, U+0750..U+076D, U+FB55, U+FB59, U+FB5D, U+FB61, U+FB65, U+FB69, U+FB6D, U+FB71, U+FB75, U+FB79, U+FB7D, U+FB81, U+FB91, U+FB95, U+FB99, U+FB9D, U+FBA3, U+FBA9, U+FBAD, U+FBD6, U+FBE7, U+FBE9, U+FBFF ################################################## # 
Armenian U+0531..U+0556->U+0561..U+0586, U+0561..U+0586, U+0587 ################################################# # Bengali U+09DC->U+09A1, U+09DD->U+09A2, U+09DF->U+09AF, U+09F0->U+09AC, U+09F1->U+09AC, U+0985..U+0990, U+0993..U+09B0, U+09B2, U+09B6..U+09B9, U+09CE, U+09E0, U+09E1, U+09E6..U+09EF ################################################# # CJK* U+F900->U+8C48, U+F901->U+66F4, U+F902->U+8ECA, U+F903->U+8CC8, U+F904->U+6ED1, U+F905->U+4E32, U+F906->U+53E5, U+F907->U+9F9C, U+F908->U+9F9C, U+F909->U+5951, U+F90A->U+91D1, U+F90B->U+5587, U+F90C->U+5948, U+F90D->U+61F6, U+F90E->U+7669, U+F90F->U+7F85, U+F910->U+863F, U+F911->U+87BA, U+F912->U+88F8, U+F913->U+908F, U+F914->U+6A02, U+F915->U+6D1B, U+F916->U+70D9, U+F917->U+73DE, U+F918->U+843D, U+F919->U+916A, U+F91A->U+99F1, U+F91B->U+4E82, U+F91C->U+5375, U+F91D->U+6B04, U+F91E->U+721B, U+F91F->U+862D, U+F920->U+9E1E, U+F921->U+5D50, U+F922->U+6FEB, U+F923->U+85CD, U+F924->U+8964, U+F925->U+62C9, U+F926->U+81D8, U+F927->U+881F, U+F928->U+5ECA, U+F929->U+6717, U+F92A->U+6D6A, U+F92B->U+72FC, U+F92C->U+90CE, U+F92D->U+4F86, U+F92E->U+51B7, U+F92F->U+52DE, U+F930->U+64C4, U+F931->U+6AD3, U+F932->U+7210, U+F933->U+76E7, U+F934->U+8001, U+F935->U+8606, U+F936->U+865C, U+F937->U+8DEF, U+F938->U+9732, U+F939->U+9B6F, U+F93A->U+9DFA, U+F93B->U+788C, U+F93C->U+797F, U+F93D->U+7DA0, U+F93E->U+83C9, U+F93F->U+9304, U+F940->U+9E7F, U+F941->U+8AD6, U+F942->U+58DF, U+F943->U+5F04, U+F944->U+7C60, U+F945->U+807E, U+F946->U+7262, U+F947->U+78CA, U+F948->U+8CC2, U+F949->U+96F7, U+F94A->U+58D8, U+F94B->U+5C62, U+F94C->U+6A13, U+F94D->U+6DDA, U+F94E->U+6F0F, U+F94F->U+7D2F, U+F950->U+7E37, U+F951->U+964B, U+F952->U+52D2, U+F953->U+808B, U+F954->U+51DC, U+F955->U+51CC, U+F956->U+7A1C, U+F957->U+7DBE, U+F958->U+83F1, U+F959->U+9675, U+F95A->U+8B80, U+F95B->U+62CF, U+F95C->U+6A02, U+F95D->U+8AFE, U+F95E->U+4E39, U+F95F->U+5BE7, U+F960->U+6012, U+F961->U+7387, U+F962->U+7570, U+F963->U+5317, U+F964->U+78FB, U+F965->U+4FBF, 
U+F966->U+5FA9, U+F967->U+4E0D, U+F968->U+6CCC, U+F969->U+6578, U+F96A->U+7D22, U+F96B->U+53C3, U+F96C->U+585E, U+F96D->U+7701, U+F96E->U+8449, U+F96F->U+8AAA, U+F970->U+6BBA, U+F971->U+8FB0, U+F972->U+6C88, U+F973->U+62FE, U+F974->U+82E5, U+F975->U+63A0, U+F976->U+7565, U+F977->U+4EAE, U+F978->U+5169, U+F979->U+51C9, U+F97A->U+6881, U+F97B->U+7CE7, U+F97C->U+826F, U+F97D->U+8AD2, U+F97E->U+91CF, U+F97F->U+52F5, U+F980->U+5442, U+F981->U+5973, U+F982->U+5EEC, U+F983->U+65C5, U+F984->U+6FFE, U+F985->U+792A, U+F986->U+95AD, U+F987->U+9A6A, U+F988->U+9E97, U+F989->U+9ECE, U+F98A->U+529B, U+F98B->U+66C6, U+F98C->U+6B77, U+F98D->U+8F62, U+F98E->U+5E74, U+F98F->U+6190, U+F990->U+6200, U+F991->U+649A, U+F992->U+6F23, U+F993->U+7149, U+F994->U+7489, U+F995->U+79CA, U+F996->U+7DF4, U+F997->U+806F, U+F998->U+8F26, U+F999->U+84EE, U+F99A->U+9023, U+F99B->U+934A, U+F99C->U+5217, U+F99D->U+52A3, U+F99E->U+54BD, U+F99F->U+70C8, U+F9A0->U+88C2, U+F9A1->U+8AAA, U+F9A2->U+5EC9, U+F9A3->U+5FF5, U+F9A4->U+637B, U+F9A5->U+6BAE, U+F9A6->U+7C3E, U+F9A7->U+7375, U+F9A8->U+4EE4, U+F9A9->U+56F9, U+F9AA->U+5BE7, U+F9AB->U+5DBA, U+F9AC->U+601C, U+F9AD->U+73B2, U+F9AE->U+7469, U+F9AF->U+7F9A, U+F9B0->U+8046, U+F9B1->U+9234, U+F9B2->U+96F6, U+F9B3->U+9748, U+F9B4->U+9818, U+F9B5->U+4F8B, U+F9B6->U+79AE, U+F9B7->U+91B4, U+F9B8->U+96B8, U+F9B9->U+60E1, U+F9BA->U+4E86, U+F9BB->U+50DA, U+F9BC->U+5BEE, U+F9BD->U+5C3F, U+F9BE->U+6599, U+F9BF->U+6A02, U+F9C0->U+71CE, U+F9C1->U+7642, U+F9C2->U+84FC, U+F9C3->U+907C, U+F9C4->U+9F8D, U+F9C5->U+6688, U+F9C6->U+962E, U+F9C7->U+5289, U+F9C8->U+677B, U+F9C9->U+67F3, U+F9CA->U+6D41, U+F9CB->U+6E9C, U+F9CC->U+7409, U+F9CD->U+7559, U+F9CE->U+786B, U+F9CF->U+7D10, U+F9D0->U+985E, U+F9D1->U+516D, U+F9D2->U+622E, U+F9D3->U+9678, U+F9D4->U+502B, U+F9D5->U+5D19, U+F9D6->U+6DEA, U+F9D7->U+8F2A, U+F9D8->U+5F8B, U+F9D9->U+6144, U+F9DA->U+6817, U+F9DB->U+7387, U+F9DC->U+9686, U+F9DD->U+5229, U+F9DE->U+540F, U+F9DF->U+5C65, U+F9E0->U+6613, U+F9E1->U+674E, U+F9E2->U+68A8, 
U+F9E3->U+6CE5, U+F9E4->U+7406, U+F9E5->U+75E2, U+F9E6->U+7F79, U+F9E7->U+88CF, U+F9E8->U+88E1, U+F9E9->U+91CC, U+F9EA->U+96E2, U+F9EB->U+533F, U+F9EC->U+6EBA, U+F9ED->U+541D, U+F9EE->U+71D0, U+F9EF->U+7498, U+F9F0->U+85FA, U+F9F1->U+96A3, U+F9F2->U+9C57, U+F9F3->U+9E9F, U+F9F4->U+6797, U+F9F5->U+6DCB, U+F9F6->U+81E8, U+F9F7->U+7ACB, U+F9F8->U+7B20, U+F9F9->U+7C92, U+F9FA->U+72C0, U+F9FB->U+7099, U+F9FC->U+8B58, U+F9FD->U+4EC0, U+F9FE->U+8336, U+F9FF->U+523A, U+FA00->U+5207, U+FA01->U+5EA6, U+FA02->U+62D3, U+FA03->U+7CD6, U+FA04->U+5B85, U+FA05->U+6D1E, U+FA06->U+66B4, U+FA07->U+8F3B, U+FA08->U+884C, U+FA09->U+964D, U+FA0A->U+898B, U+FA0B->U+5ED3, U+FA0C->U+5140, U+FA0D->U+55C0, U+FA10->U+585A, U+FA12->U+6674, U+FA15->U+51DE, U+FA16->U+732A, U+FA17->U+76CA, U+FA18->U+793C, U+FA19->U+795E, U+FA1A->U+7965, U+FA1B->U+798F, U+FA1C->U+9756, U+FA1D->U+7CBE, U+FA1E->U+7FBD, U+FA20->U+8612, U+FA22->U+8AF8, U+FA25->U+9038, U+FA26->U+90FD, U+FA2A->U+98EF, U+FA2B->U+98FC, U+FA2C->U+9928, U+FA2D->U+9DB4, U+FA30->U+4FAE, U+FA31->U+50E7, U+FA32->U+514D, U+FA33->U+52C9, U+FA34->U+52E4, U+FA35->U+5351, U+FA36->U+559D, U+FA37->U+5606, U+FA38->U+5668, U+FA39->U+5840, U+FA3A->U+58A8, U+FA3B->U+5C64, U+FA3C->U+5C6E, U+FA3D->U+6094, U+FA3E->U+6168, U+FA3F->U+618E, U+FA40->U+61F2, U+FA41->U+654F, U+FA42->U+65E2, U+FA43->U+6691, U+FA44->U+6885, U+FA45->U+6D77, U+FA46->U+6E1A, U+FA47->U+6F22, U+FA48->U+716E, U+FA49->U+722B, U+FA4A->U+7422, U+FA4B->U+7891, U+FA4C->U+793E, U+FA4D->U+7949, U+FA4E->U+7948, U+FA4F->U+7950, U+FA50->U+7956, U+FA51->U+795D, U+FA52->U+798D, U+FA53->U+798E, U+FA54->U+7A40, U+FA55->U+7A81, U+FA56->U+7BC0, U+FA57->U+7DF4, U+FA58->U+7E09, U+FA59->U+7E41, U+FA5A->U+7F72, U+FA5B->U+8005, U+FA5C->U+81ED, U+FA5D->U+8279, U+FA5E->U+8279, U+FA5F->U+8457, U+FA60->U+8910, U+FA61->U+8996, U+FA62->U+8B01, U+FA63->U+8B39, U+FA64->U+8CD3, U+FA65->U+8D08, U+FA66->U+8FB6, U+FA67->U+9038, U+FA68->U+96E3, U+FA69->U+97FF, U+FA6A->U+983B, U+FA70->U+4E26, U+FA71->U+51B5, U+FA72->U+5168, 
U+FA73->U+4F80, U+FA74->U+5145, U+FA75->U+5180, U+FA76->U+52C7, U+FA77->U+52FA, U+FA78->U+559D, U+FA79->U+5555, U+FA7A->U+5599, U+FA7B->U+55E2, U+FA7C->U+585A, U+FA7D->U+58B3, U+FA7E->U+5944, U+FA7F->U+5954, U+FA80->U+5A62, U+FA81->U+5B28, U+FA82->U+5ED2, U+FA83->U+5ED9, U+FA84->U+5F69, U+FA85->U+5FAD, U+FA86->U+60D8, U+FA87->U+614E, U+FA88->U+6108, U+FA89->U+618E, U+FA8A->U+6160, U+FA8B->U+61F2, U+FA8C->U+6234, U+FA8D->U+63C4, U+FA8E->U+641C, U+FA8F->U+6452, U+FA90->U+6556, U+FA91->U+6674, U+FA92->U+6717, U+FA93->U+671B, U+FA94->U+6756, U+FA95->U+6B79, U+FA96->U+6BBA, U+FA97->U+6D41, U+FA98->U+6EDB, U+FA99->U+6ECB, U+FA9A->U+6F22, U+FA9B->U+701E, U+FA9C->U+716E, U+FA9D->U+77A7, U+FA9E->U+7235, U+FA9F->U+72AF, U+FAA0->U+732A, U+FAA1->U+7471, U+FAA2->U+7506, U+FAA3->U+753B, U+FAA4->U+761D, U+FAA5->U+761F, U+FAA6->U+76CA, U+FAA7->U+76DB, U+FAA8->U+76F4, U+FAA9->U+774A, U+FAAA->U+7740, U+FAAB->U+78CC, U+FAAC->U+7AB1, U+FAAD->U+7BC0, U+FAAE->U+7C7B, U+FAAF->U+7D5B, U+FAB0->U+7DF4, U+FAB1->U+7F3E, U+FAB2->U+8005, U+FAB3->U+8352, U+FAB4->U+83EF, U+FAB5->U+8779, U+FAB6->U+8941, U+FAB7->U+8986, U+FAB8->U+8996, U+FAB9->U+8ABF, U+FABA->U+8AF8, U+FABB->U+8ACB, U+FABC->U+8B01, U+FABD->U+8AFE, U+FABE->U+8AED, U+FABF->U+8B39, U+FAC0->U+8B8A, U+FAC1->U+8D08, U+FAC2->U+8F38, U+FAC3->U+9072, U+FAC4->U+9199, U+FAC5->U+9276, U+FAC6->U+967C, U+FAC7->U+96E3, U+FAC8->U+9756, U+FAC9->U+97DB, U+FACA->U+97FF, U+FACB->U+980B, U+FACC->U+983B, U+FACD->U+9B12, U+FACE->U+9F9C, U+FACF->U+2284A, U+FAD0->U+22844, U+FAD1->U+233D5, U+FAD2->U+3B9D, U+FAD3->U+4018, U+FAD4->U+4039, U+FAD5->U+25249, U+FAD6->U+25CD0, U+FAD7->U+27ED3, U+FAD8->U+9F43, U+FAD9->U+9F8E, U+2F800->U+4E3D, U+2F801->U+4E38, U+2F802->U+4E41, U+2F803->U+20122, U+2F804->U+4F60, U+2F805->U+4FAE, U+2F806->U+4FBB, U+2F807->U+5002, U+2F808->U+507A, U+2F809->U+5099, U+2F80A->U+50E7, U+2F80B->U+50CF, U+2F80C->U+349E, U+2F80D->U+2063A, U+2F80E->U+514D, U+2F80F->U+5154, U+2F810->U+5164, U+2F811->U+5177, U+2F812->U+2051C, U+2F813->U+34B9, 
U+2F814->U+5167, U+2F815->U+518D, U+2F816->U+2054B, U+2F817->U+5197, U+2F818->U+51A4, U+2F819->U+4ECC, U+2F81A->U+51AC, U+2F81B->U+51B5, U+2F81C->U+291DF, U+2F81D->U+51F5, U+2F81E->U+5203, U+2F81F->U+34DF, U+2F820->U+523B, U+2F821->U+5246, U+2F822->U+5272, U+2F823->U+5277, U+2F824->U+3515, U+2F825->U+52C7, U+2F826->U+52C9, U+2F827->U+52E4, U+2F828->U+52FA, U+2F829->U+5305, U+2F82A->U+5306, U+2F82B->U+5317, U+2F82C->U+5349, U+2F82D->U+5351, U+2F82E->U+535A, U+2F82F->U+5373, U+2F830->U+537D, U+2F831->U+537F, U+2F832->U+537F, U+2F833->U+537F, U+2F834->U+20A2C, U+2F835->U+7070, U+2F836->U+53CA, U+2F837->U+53DF, U+2F838->U+20B63, U+2F839->U+53EB, U+2F83A->U+53F1, U+2F83B->U+5406, U+2F83C->U+549E, U+2F83D->U+5438, U+2F83E->U+5448, U+2F83F->U+5468, U+2F840->U+54A2, U+2F841->U+54F6, U+2F842->U+5510, U+2F843->U+5553, U+2F844->U+5563, U+2F845->U+5584, U+2F846->U+5584, U+2F847->U+5599, U+2F848->U+55AB, U+2F849->U+55B3, U+2F84A->U+55C2, U+2F84B->U+5716, U+2F84C->U+5606, U+2F84D->U+5717, U+2F84E->U+5651, U+2F84F->U+5674, U+2F850->U+5207, U+2F851->U+58EE, U+2F852->U+57CE, U+2F853->U+57F4, U+2F854->U+580D, U+2F855->U+578B, U+2F856->U+5832, U+2F857->U+5831, U+2F858->U+58AC, U+2F859->U+214E4, U+2F85A->U+58F2, U+2F85B->U+58F7, U+2F85C->U+5906, U+2F85D->U+591A, U+2F85E->U+5922, U+2F85F->U+5962, U+2F860->U+216A8, U+2F861->U+216EA, U+2F862->U+59EC, U+2F863->U+5A1B, U+2F864->U+5A27, U+2F865->U+59D8, U+2F866->U+5A66, U+2F867->U+36EE, U+2F868->U+36FC, U+2F869->U+5B08, U+2F86A->U+5B3E, U+2F86B->U+5B3E, U+2F86C->U+219C8, U+2F86D->U+5BC3, U+2F86E->U+5BD8, U+2F86F->U+5BE7, U+2F870->U+5BF3, U+2F871->U+21B18, U+2F872->U+5BFF, U+2F873->U+5C06, U+2F874->U+5F53, U+2F875->U+5C22, U+2F876->U+3781, U+2F877->U+5C60, U+2F878->U+5C6E, U+2F879->U+5CC0, U+2F87A->U+5C8D, U+2F87B->U+21DE4, U+2F87C->U+5D43, U+2F87D->U+21DE6, U+2F87E->U+5D6E, U+2F87F->U+5D6B, U+2F880->U+5D7C, U+2F881->U+5DE1, U+2F882->U+5DE2, U+2F883->U+382F, U+2F884->U+5DFD, U+2F885->U+5E28, U+2F886->U+5E3D, U+2F887->U+5E69, U+2F888->U+3862, 
U+2F889->U+22183, U+2F88A->U+387C, U+2F88B->U+5EB0, U+2F88C->U+5EB3, U+2F88D->U+5EB6, U+2F88E->U+5ECA, U+2F88F->U+2A392, U+2F890->U+5EFE, U+2F891->U+22331, U+2F892->U+22331, U+2F893->U+8201, U+2F894->U+5F22, U+2F895->U+5F22, U+2F896->U+38C7, U+2F897->U+232B8, U+2F898->U+261DA, U+2F899->U+5F62, U+2F89A->U+5F6B, U+2F89B->U+38E3, U+2F89C->U+5F9A, U+2F89D->U+5FCD, U+2F89E->U+5FD7, U+2F89F->U+5FF9, U+2F8A0->U+6081, U+2F8A1->U+393A, U+2F8A2->U+391C, U+2F8A3->U+6094, U+2F8A4->U+226D4, U+2F8A5->U+60C7, U+2F8A6->U+6148, U+2F8A7->U+614C, U+2F8A8->U+614E, U+2F8A9->U+614C, U+2F8AA->U+617A, U+2F8AB->U+618E, U+2F8AC->U+61B2, U+2F8AD->U+61A4, U+2F8AE->U+61AF, U+2F8AF->U+61DE, U+2F8B0->U+61F2, U+2F8B1->U+61F6, U+2F8B2->U+6210, U+2F8B3->U+621B, U+2F8B4->U+625D, U+2F8B5->U+62B1, U+2F8B6->U+62D4, U+2F8B7->U+6350, U+2F8B8->U+22B0C, U+2F8B9->U+633D, U+2F8BA->U+62FC, U+2F8BB->U+6368, U+2F8BC->U+6383, U+2F8BD->U+63E4, U+2F8BE->U+22BF1, U+2F8BF->U+6422, U+2F8C0->U+63C5, U+2F8C1->U+63A9, U+2F8C2->U+3A2E, U+2F8C3->U+6469, U+2F8C4->U+647E, U+2F8C5->U+649D, U+2F8C6->U+6477, U+2F8C7->U+3A6C, U+2F8C8->U+654F, U+2F8C9->U+656C, U+2F8CA->U+2300A, U+2F8CB->U+65E3, U+2F8CC->U+66F8, U+2F8CD->U+6649, U+2F8CE->U+3B19, U+2F8CF->U+6691, U+2F8D0->U+3B08, U+2F8D1->U+3AE4, U+2F8D2->U+5192, U+2F8D3->U+5195, U+2F8D4->U+6700, U+2F8D5->U+669C, U+2F8D6->U+80AD, U+2F8D7->U+43D9, U+2F8D8->U+6717, U+2F8D9->U+671B, U+2F8DA->U+6721, U+2F8DB->U+675E, U+2F8DC->U+6753, U+2F8DD->U+233C3, U+2F8DE->U+3B49, U+2F8DF->U+67FA, U+2F8E0->U+6785, U+2F8E1->U+6852, U+2F8E2->U+6885, U+2F8E3->U+2346D, U+2F8E4->U+688E, U+2F8E5->U+681F, U+2F8E6->U+6914, U+2F8E7->U+3B9D, U+2F8E8->U+6942, U+2F8E9->U+69A3, U+2F8EA->U+69EA, U+2F8EB->U+6AA8, U+2F8EC->U+236A3, U+2F8ED->U+6ADB, U+2F8EE->U+3C18, U+2F8EF->U+6B21, U+2F8F0->U+238A7, U+2F8F1->U+6B54, U+2F8F2->U+3C4E, U+2F8F3->U+6B72, U+2F8F4->U+6B9F, U+2F8F5->U+6BBA, U+2F8F6->U+6BBB, U+2F8F7->U+23A8D, U+2F8F8->U+21D0B, U+2F8F9->U+23AFA, U+2F8FA->U+6C4E, U+2F8FB->U+23CBC, U+2F8FC->U+6CBF, 
U+2F8FD->U+6CCD, U+2F8FE->U+6C67, U+2F8FF->U+6D16, U+2F900->U+6D3E, U+2F901->U+6D77, U+2F902->U+6D41, U+2F903->U+6D69, U+2F904->U+6D78, U+2F905->U+6D85, U+2F906->U+23D1E, U+2F907->U+6D34, U+2F908->U+6E2F, U+2F909->U+6E6E, U+2F90A->U+3D33, U+2F90B->U+6ECB, U+2F90C->U+6EC7, U+2F90D->U+23ED1, U+2F90E->U+6DF9, U+2F90F->U+6F6E, U+2F910->U+23F5E, U+2F911->U+23F8E, U+2F912->U+6FC6, U+2F913->U+7039, U+2F914->U+701E, U+2F915->U+701B, U+2F916->U+3D96, U+2F917->U+704A, U+2F918->U+707D, U+2F919->U+7077, U+2F91A->U+70AD, U+2F91B->U+20525, U+2F91C->U+7145, U+2F91D->U+24263, U+2F91E->U+719C, U+2F91F->U+243AB, U+2F920->U+7228, U+2F921->U+7235, U+2F922->U+7250, U+2F923->U+24608, U+2F924->U+7280, U+2F925->U+7295, U+2F926->U+24735, U+2F927->U+24814, U+2F928->U+737A, U+2F929->U+738B, U+2F92A->U+3EAC, U+2F92B->U+73A5, U+2F92C->U+3EB8, U+2F92D->U+3EB8, U+2F92E->U+7447, U+2F92F->U+745C, U+2F930->U+7471, U+2F931->U+7485, U+2F932->U+74CA, U+2F933->U+3F1B, U+2F934->U+7524, U+2F935->U+24C36, U+2F936->U+753E, U+2F937->U+24C92, U+2F938->U+7570, U+2F939->U+2219F, U+2F93A->U+7610, U+2F93B->U+24FA1, U+2F93C->U+24FB8, U+2F93D->U+25044, U+2F93E->U+3FFC, U+2F93F->U+4008, U+2F940->U+76F4, U+2F941->U+250F3, U+2F942->U+250F2, U+2F943->U+25119, U+2F944->U+25133, U+2F945->U+771E, U+2F946->U+771F, U+2F947->U+771F, U+2F948->U+774A, U+2F949->U+4039, U+2F94A->U+778B, U+2F94B->U+4046, U+2F94C->U+4096, U+2F94D->U+2541D, U+2F94E->U+784E, U+2F94F->U+788C, U+2F950->U+78CC, U+2F951->U+40E3, U+2F952->U+25626, U+2F953->U+7956, U+2F954->U+2569A, U+2F955->U+256C5, U+2F956->U+798F, U+2F957->U+79EB, U+2F958->U+412F, U+2F959->U+7A40, U+2F95A->U+7A4A, U+2F95B->U+7A4F, U+2F95C->U+2597C, U+2F95D->U+25AA7, U+2F95E->U+25AA7, U+2F95F->U+7AEE, U+2F960->U+4202, U+2F961->U+25BAB, U+2F962->U+7BC6, U+2F963->U+7BC9, U+2F964->U+4227, U+2F965->U+25C80, U+2F966->U+7CD2, U+2F967->U+42A0, U+2F968->U+7CE8, U+2F969->U+7CE3, U+2F96A->U+7D00, U+2F96B->U+25F86, U+2F96C->U+7D63, U+2F96D->U+4301, U+2F96E->U+7DC7, U+2F96F->U+7E02, 
U+2F970->U+7E45, U+2F971->U+4334, U+2F972->U+26228, U+2F973->U+26247, U+2F974->U+4359, U+2F975->U+262D9, U+2F976->U+7F7A, U+2F977->U+2633E, U+2F978->U+7F95, U+2F979->U+7FFA, U+2F97A->U+8005, U+2F97B->U+264DA, U+2F97C->U+26523, U+2F97D->U+8060, U+2F97E->U+265A8, U+2F97F->U+8070, U+2F980->U+2335F, U+2F981->U+43D5, U+2F982->U+80B2, U+2F983->U+8103, U+2F984->U+440B, U+2F985->U+813E, U+2F986->U+5AB5, U+2F987->U+267A7, U+2F988->U+267B5, U+2F989->U+23393, U+2F98A->U+2339C, U+2F98B->U+8201, U+2F98C->U+8204, U+2F98D->U+8F9E, U+2F98E->U+446B, U+2F98F->U+8291, U+2F990->U+828B, U+2F991->U+829D, U+2F992->U+52B3, U+2F993->U+82B1, U+2F994->U+82B3, U+2F995->U+82BD, U+2F996->U+82E6, U+2F997->U+26B3C, U+2F998->U+82E5, U+2F999->U+831D, U+2F99A->U+8363, U+2F99B->U+83AD, U+2F99C->U+8323, U+2F99D->U+83BD, U+2F99E->U+83E7, U+2F99F->U+8457, U+2F9A0->U+8353, U+2F9A1->U+83CA, U+2F9A2->U+83CC, U+2F9A3->U+83DC, U+2F9A4->U+26C36, U+2F9A5->U+26D6B, U+2F9A6->U+26CD5, U+2F9A7->U+452B, U+2F9A8->U+84F1, U+2F9A9->U+84F3, U+2F9AA->U+8516, U+2F9AB->U+273CA, U+2F9AC->U+8564, U+2F9AD->U+26F2C, U+2F9AE->U+455D, U+2F9AF->U+4561, U+2F9B0->U+26FB1, U+2F9B1->U+270D2, U+2F9B2->U+456B, U+2F9B3->U+8650, U+2F9B4->U+865C, U+2F9B5->U+8667, U+2F9B6->U+8669, U+2F9B7->U+86A9, U+2F9B8->U+8688, U+2F9B9->U+870E, U+2F9BA->U+86E2, U+2F9BB->U+8779, U+2F9BC->U+8728, U+2F9BD->U+876B, U+2F9BE->U+8786, U+2F9BF->U+45D7, U+2F9C0->U+87E1, U+2F9C1->U+8801, U+2F9C2->U+45F9, U+2F9C3->U+8860, U+2F9C4->U+8863, U+2F9C5->U+27667, U+2F9C6->U+88D7, U+2F9C7->U+88DE, U+2F9C8->U+4635, U+2F9C9->U+88FA, U+2F9CA->U+34BB, U+2F9CB->U+278AE, U+2F9CC->U+27966, U+2F9CD->U+46BE, U+2F9CE->U+46C7, U+2F9CF->U+8AA0, U+2F9D0->U+8AED, U+2F9D1->U+8B8A, U+2F9D2->U+8C55, U+2F9D3->U+27CA8, U+2F9D4->U+8CAB, U+2F9D5->U+8CC1, U+2F9D6->U+8D1B, U+2F9D7->U+8D77, U+2F9D8->U+27F2F, U+2F9D9->U+20804, U+2F9DA->U+8DCB, U+2F9DB->U+8DBC, U+2F9DC->U+8DF0, U+2F9DD->U+208DE, U+2F9DE->U+8ED4, U+2F9DF->U+8F38, U+2F9E0->U+285D2, U+2F9E1->U+285ED, U+2F9E2->U+9094, 
U+2F9E3->U+90F1, U+2F9E4->U+9111, U+2F9E5->U+2872E, U+2F9E6->U+911B, U+2F9E7->U+9238, U+2F9E8->U+92D7, U+2F9E9->U+92D8, U+2F9EA->U+927C, U+2F9EB->U+93F9, U+2F9EC->U+9415, U+2F9ED->U+28BFA, U+2F9EE->U+958B, U+2F9EF->U+4995, U+2F9F0->U+95B7, U+2F9F1->U+28D77, U+2F9F2->U+49E6, U+2F9F3->U+96C3, U+2F9F4->U+5DB2, U+2F9F5->U+9723, U+2F9F6->U+29145, U+2F9F7->U+2921A, U+2F9F8->U+4A6E, U+2F9F9->U+4A76, U+2F9FA->U+97E0, U+2F9FB->U+2940A, U+2F9FC->U+4AB2, U+2F9FD->U+29496, U+2F9FE->U+980B, U+2F9FF->U+980B, U+2FA00->U+9829, U+2FA01->U+295B6, U+2FA02->U+98E2, U+2FA03->U+4B33, U+2FA04->U+9929, U+2FA05->U+99A7, U+2FA06->U+99C2, U+2FA07->U+99FE, U+2FA08->U+4BCE, U+2FA09->U+29B30, U+2FA0A->U+9B12, U+2FA0B->U+9C40, U+2FA0C->U+9CFD, U+2FA0D->U+4CCE, U+2FA0E->U+4CED, U+2FA0F->U+9D67, U+2FA10->U+2A0CE, U+2FA11->U+4CF8, U+2FA12->U+2A105, U+2FA13->U+2A20E, U+2FA14->U+2A291, U+2FA15->U+9EBB, U+2FA16->U+4D56, U+2FA17->U+9EF9, U+2FA18->U+9EFE, U+2FA19->U+9F05, U+2FA1A->U+9F0F, U+2FA1B->U+9F16, U+2FA1C->U+9F3B, U+2FA1D->U+2A600, U+2F00->U+4E00, U+2F01->U+4E28, U+2F02->U+4E36, U+2F03->U+4E3F, U+2F04->U+4E59, U+2F05->U+4E85, U+2F06->U+4E8C, U+2F07->U+4EA0, U+2F08->U+4EBA, U+2F09->U+513F, U+2F0A->U+5165, U+2F0B->U+516B, U+2F0C->U+5182, U+2F0D->U+5196, U+2F0E->U+51AB, U+2F0F->U+51E0, U+2F10->U+51F5, U+2F11->U+5200, U+2F12->U+529B, U+2F13->U+52F9, U+2F14->U+5315, U+2F15->U+531A, U+2F16->U+5338, U+2F17->U+5341, U+2F18->U+535C, U+2F19->U+5369, U+2F1A->U+5382, U+2F1B->U+53B6, U+2F1C->U+53C8, U+2F1D->U+53E3, U+2F1E->U+56D7, U+2F1F->U+571F, U+2F20->U+58EB, U+2F21->U+5902, U+2F22->U+590A, U+2F23->U+5915, U+2F24->U+5927, U+2F25->U+5973, U+2F26->U+5B50, U+2F27->U+5B80, U+2F28->U+5BF8, U+2F29->U+5C0F, U+2F2A->U+5C22, U+2F2B->U+5C38, U+2F2C->U+5C6E, U+2F2D->U+5C71, U+2F2E->U+5DDB, U+2F2F->U+5DE5, U+2F30->U+5DF1, U+2F31->U+5DFE, U+2F32->U+5E72, U+2F33->U+5E7A, U+2F34->U+5E7F, U+2F35->U+5EF4, U+2F36->U+5EFE, U+2F37->U+5F0B, U+2F38->U+5F13, U+2F39->U+5F50, U+2F3A->U+5F61, U+2F3B->U+5F73, U+2F3C->U+5FC3, 
U+2F3D->U+6208, U+2F3E->U+6236, U+2F3F->U+624B, U+2F40->U+652F, U+2F41->U+6534, U+2F42->U+6587, U+2F43->U+6597, U+2F44->U+65A4, U+2F45->U+65B9, U+2F46->U+65E0, U+2F47->U+65E5, U+2F48->U+66F0, U+2F49->U+6708, U+2F4A->U+6728, U+2F4B->U+6B20, U+2F4C->U+6B62, U+2F4D->U+6B79, U+2F4E->U+6BB3, U+2F4F->U+6BCB, U+2F50->U+6BD4, U+2F51->U+6BDB, U+2F52->U+6C0F, U+2F53->U+6C14, U+2F54->U+6C34, U+2F55->U+706B, U+2F56->U+722A, U+2F57->U+7236, U+2F58->U+723B, U+2F59->U+723F, U+2F5A->U+7247, U+2F5B->U+7259, U+2F5C->U+725B, U+2F5D->U+72AC, U+2F5E->U+7384, U+2F5F->U+7389, U+2F60->U+74DC, U+2F61->U+74E6, U+2F62->U+7518, U+2F63->U+751F, U+2F64->U+7528, U+2F65->U+7530, U+2F66->U+758B, U+2F67->U+7592, U+2F68->U+7676, U+2F69->U+767D, U+2F6A->U+76AE, U+2F6B->U+76BF, U+2F6C->U+76EE, U+2F6D->U+77DB, U+2F6E->U+77E2, U+2F6F->U+77F3, U+2F70->U+793A, U+2F71->U+79B8, U+2F72->U+79BE, U+2F73->U+7A74, U+2F74->U+7ACB, U+2F75->U+7AF9, U+2F76->U+7C73, U+2F77->U+7CF8, U+2F78->U+7F36, U+2F79->U+7F51, U+2F7A->U+7F8A, U+2F7B->U+7FBD, U+2F7C->U+8001, U+2F7D->U+800C, U+2F7E->U+8012, U+2F7F->U+8033, U+2F80->U+807F, U+2F81->U+8089, U+2F82->U+81E3, U+2F83->U+81EA, U+2F84->U+81F3, U+2F85->U+81FC, U+2F86->U+820C, U+2F87->U+821B, U+2F88->U+821F, U+2F89->U+826E, U+2F8A->U+8272, U+2F8B->U+8278, U+2F8C->U+864D, U+2F8D->U+866B, U+2F8E->U+8840, U+2F8F->U+884C, U+2F90->U+8863, U+2F91->U+897E, U+2F92->U+898B, U+2F93->U+89D2, U+2F94->U+8A00, U+2F95->U+8C37, U+2F96->U+8C46, U+2F97->U+8C55, U+2F98->U+8C78, U+2F99->U+8C9D, U+2F9A->U+8D64, U+2F9B->U+8D70, U+2F9C->U+8DB3, U+2F9D->U+8EAB, U+2F9E->U+8ECA, U+2F9F->U+8F9B, U+2FA0->U+8FB0, U+2FA1->U+8FB5, U+2FA2->U+9091, U+2FA3->U+9149, U+2FA4->U+91C6, U+2FA5->U+91CC, U+2FA6->U+91D1, U+2FA7->U+9577, U+2FA8->U+9580, U+2FA9->U+961C, U+2FAA->U+96B6, U+2FAB->U+96B9, U+2FAC->U+96E8, U+2FAD->U+9751, U+2FAE->U+975E, U+2FAF->U+9762, U+2FB0->U+9769, U+2FB1->U+97CB, U+2FB2->U+97ED, U+2FB3->U+97F3, U+2FB4->U+9801, U+2FB5->U+98A8, U+2FB6->U+98DB, U+2FB7->U+98DF, U+2FB8->U+9996, U+2FB9->U+9999, 
U+2FBA->U+99AC, U+2FBB->U+9AA8, U+2FBC->U+9AD8, U+2FBD->U+9ADF, U+2FBE->U+9B25, U+2FBF->U+9B2F, U+2FC0->U+9B32, U+2FC1->U+9B3C, U+2FC2->U+9B5A, U+2FC3->U+9CE5, U+2FC4->U+9E75, U+2FC5->U+9E7F, U+2FC6->U+9EA5, U+2FC7->U+9EBB, U+2FC8->U+9EC3, U+2FC9->U+9ECD, U+2FCA->U+9ED1, U+2FCB->U+9EF9, U+2FCC->U+9EFD, U+2FCD->U+9F0E, U+2FCE->U+9F13, U+2FCF->U+9F20, U+2FD0->U+9F3B, U+2FD1->U+9F4A, U+2FD2->U+9F52, U+2FD3->U+9F8D, U+2FD4->U+9F9C, U+2FD5->U+9FA0, U+3042->U+3041, U+3044->U+3043, U+3046->U+3045, U+3048->U+3047, U+304A->U+3049, U+304C->U+304B, U+304E->U+304D, U+3050->U+304F, U+3052->U+3051, U+3054->U+3053, U+3056->U+3055, U+3058->U+3057, U+305A->U+3059, U+305C->U+305B, U+305E->U+305D, U+3060->U+305F, U+3062->U+3061, U+3064->U+3063, U+3065->U+3063, U+3067->U+3066, U+3069->U+3068, U+3070->U+306F, U+3071->U+306F, U+3073->U+3072, U+3074->U+3072, U+3076->U+3075, U+3077->U+3075, U+3079->U+3078, U+307A->U+3078, U+307C->U+307B, U+307D->U+307B, U+3084->U+3083, U+3086->U+3085, U+3088->U+3087, U+308F->U+308E, U+3094->U+3046, U+3095->U+304B, U+3096->U+3051, U+30A2->U+30A1, U+30A4->U+30A3, U+30A6->U+30A5, U+30A8->U+30A7, U+30AA->U+30A9, U+30AC->U+30AB, U+30AE->U+30AD, U+30B0->U+30AF, U+30B2->U+30B1, U+30B4->U+30B3, U+30B6->U+30B5, U+30B8->U+30B7, U+30BA->U+30B9, U+30BC->U+30BB, U+30BE->U+30BD, U+30C0->U+30BF, U+30C2->U+30C1, U+30C5->U+30C4, U+30C7->U+30C6, U+30C9->U+30C8, U+30D0->U+30CF, U+30D1->U+30CF, U+30D3->U+30D2, U+30D4->U+30D2, U+30D6->U+30D5, U+30D7->U+30D5, U+30D9->U+30D8, U+30DA->U+30D8, U+30DC->U+30DB, U+30DD->U+30DB, U+30E4->U+30E3, U+30E6->U+30E5, U+30E8->U+30E7, U+30EF->U+30EE, U+30F4->U+30A6, U+30AB->U+30F5, U+30B1->U+30F6, U+30F7->U+30EF, U+30F8->U+30F0, U+30F9->U+30F1, U+30FA->U+30F2, U+30AF->U+31F0, U+30B7->U+31F1, U+30B9->U+31F2, U+30C8->U+31F3, U+30CC->U+31F4, U+30CF->U+31F5, U+30D2->U+31F6, U+30D5->U+31F7, U+30D8->U+31F8, U+30DB->U+31F9, U+30E0->U+31FA, U+30E9->U+31FB, U+30EA->U+31FC, U+30EB->U+31FD, U+30EC->U+31FE, U+30ED->U+31FF, U+FF66->U+30F2, U+FF67->U+30A1, 
U+FF68->U+30A3, U+FF69->U+30A5, U+FF6A->U+30A7, U+FF6B->U+30A9, U+FF6C->U+30E3, U+FF6D->U+30E5, U+FF6E->U+30E7, U+FF6F->U+30C3, U+FF71->U+30A1, U+FF72->U+30A3, U+FF73->U+30A5, U+FF74->U+30A7, U+FF75->U+30A9, U+FF76->U+30AB, U+FF77->U+30AD, U+FF78->U+30AF, U+FF79->U+30B1, U+FF7A->U+30B3, U+FF7B->U+30B5, U+FF7C->U+30B7, U+FF7D->U+30B9, U+FF7E->U+30BB, U+FF7F->U+30BD, U+FF80->U+30BF, U+FF81->U+30C1, U+FF82->U+30C3, U+FF83->U+30C6, U+FF84->U+30C8, U+FF85->U+30CA, U+FF86->U+30CB, U+FF87->U+30CC, U+FF88->U+30CD, U+FF89->U+30CE, U+FF8A->U+30CF, U+FF8B->U+30D2, U+FF8C->U+30D5, U+FF8D->U+30D8, U+FF8E->U+30DB, U+FF8F->U+30DE, U+FF90->U+30DF, U+FF91->U+30E0, U+FF92->U+30E1, U+FF93->U+30E2, U+FF94->U+30E3, U+FF95->U+30E5, U+FF96->U+30E7, U+FF97->U+30E9, U+FF98->U+30EA, U+FF99->U+30EB, U+FF9A->U+30EC, U+FF9B->U+30ED, U+FF9C->U+30EF, U+FF9D->U+30F3, U+FFA0->U+3164, U+FFA1->U+3131, U+FFA2->U+3132, U+FFA3->U+3133, U+FFA4->U+3134, U+FFA5->U+3135, U+FFA6->U+3136, U+FFA7->U+3137, U+FFA8->U+3138, U+FFA9->U+3139, U+FFAA->U+313A, U+FFAB->U+313B, U+FFAC->U+313C, U+FFAD->U+313D, U+FFAE->U+313E, U+FFAF->U+313F, U+FFB0->U+3140, U+FFB1->U+3141, U+FFB2->U+3142, U+FFB3->U+3143, U+FFB4->U+3144, U+FFB5->U+3145, U+FFB6->U+3146, U+FFB7->U+3147, U+FFB8->U+3148, U+FFB9->U+3149, U+FFBA->U+314A, U+FFBB->U+314B, U+FFBC->U+314C, U+FFBD->U+314D, U+FFBE->U+314E, U+FFC2->U+314F, U+FFC3->U+3150, U+FFC4->U+3151, U+FFC5->U+3152, U+FFC6->U+3153, U+FFC7->U+3154, U+FFCA->U+3155, U+FFCB->U+3156, U+FFCC->U+3157, U+FFCD->U+3158, U+FFCE->U+3159, U+FFCF->U+315A, U+FFD2->U+315B, U+FFD3->U+315C, U+FFD4->U+315D, U+FFD5->U+315E, U+FFD6->U+315F, U+FFD7->U+3160, U+FFDA->U+3161, U+FFDB->U+3162, U+FFDC->U+3163, U+3131->U+1100, U+3132->U+1101, U+3133->U+11AA, U+3134->U+1102, U+3135->U+11AC, U+3136->U+11AD, U+3137->U+1103, U+3138->U+1104, U+3139->U+1105, U+313A->U+11B0, U+313B->U+11B1, U+313C->U+11B2, U+313D->U+11B3, U+313E->U+11B4, U+313F->U+11B5, U+3140->U+111A, U+3141->U+1106, U+3142->U+1107, U+3143->U+1108, U+3144->U+1121, 
U+3145->U+1109, U+3146->U+110A, U+3147->U+110B, U+3148->U+110C, U+3149->U+110D, U+314A->U+110E, U+314B->U+110F, U+314C->U+1110, U+314D->U+1111, U+314E->U+1112, U+314F->U+1161, U+3150->U+1162, U+3151->U+1163, U+3152->U+1164, U+3153->U+1165, U+3154->U+1166, U+3155->U+1167, U+3156->U+1168, U+3157->U+1169, U+3158->U+116A, U+3159->U+116B, U+315A->U+116C, U+315B->U+116D, U+315C->U+116E, U+315D->U+116F, U+315E->U+1170, U+315F->U+1171, U+3160->U+1172, U+3161->U+1173, U+3162->U+1174, U+3163->U+1175, U+3165->U+1114, U+3166->U+1115, U+3167->U+11C7, U+3168->U+11C8, U+3169->U+11CC, U+316A->U+11CE, U+316B->U+11D3, U+316C->U+11D7, U+316D->U+11D9, U+316E->U+111C, U+316F->U+11DD, U+3170->U+11DF, U+3171->U+111D, U+3172->U+111E, U+3173->U+1120, U+3174->U+1122, U+3175->U+1123, U+3176->U+1127, U+3177->U+1129, U+3178->U+112B, U+3179->U+112C, U+317A->U+112D, U+317B->U+112E, U+317C->U+112F, U+317D->U+1132, U+317E->U+1136, U+317F->U+1140, U+3180->U+1147, U+3181->U+114C, U+3182->U+11F1, U+3183->U+11F2, U+3184->U+1157, U+3185->U+1158, U+3186->U+1159, U+3187->U+1184, U+3188->U+1185, U+3189->U+1188, U+318A->U+1191, U+318B->U+1192, U+318C->U+1194, U+318D->U+119E, U+318E->U+11A1, U+A490->U+A408, U+A491->U+A1B9, U+4E00..U+9FBB, U+3400..U+4DB5, U+20000..U+2A6D6, U+FA0E, U+FA0F, U+FA11, U+FA13, U+FA14, U+FA1F, U+FA21, U+FA23, U+FA24, U+FA27, U+FA28, U+FA29, U+3105..U+312C, U+31A0..U+31B7, U+3041, U+3043, U+3045, U+3047, U+3049, U+304B, U+304D, U+304F, U+3051, U+3053, U+3055, U+3057, U+3059, U+305B, U+305D, U+305F, U+3061, U+3063, U+3066, U+3068, U+306A..U+306F, U+3072, U+3075, U+3078, U+307B, U+307E..U+3083, U+3085, U+3087, U+3089..U+308E, U+3090..U+3093, U+30A1, U+30A3, U+30A5, U+30A7, U+30A9, U+30AD, U+30AF, U+30B3, U+30B5, U+30BB, U+30BD, U+30BF, U+30C1, U+30C3, U+30C4, U+30C6, U+30CA, U+30CB, U+30CD, U+30CE, U+30DE, U+30DF, U+30E1, U+30E2, U+30E3, U+30E5, U+30E7, U+30EE, U+30F0..U+30F3, U+30F5, U+30F6, U+31F0, U+31F1, U+31F2, U+31F3, U+31F4, U+31F5, U+31F6, U+31F7, U+31F8, U+31F9, U+31FA, 
U+31FB, U+31FC, U+31FD, U+31FE, U+31FF, U+AC00..U+D7A3, U+1100..U+1159, U+1161..U+11A2, U+11A8..U+11F9, U+A000..U+A48C, U+A492..U+A4C6 ################################################## # Coptic # Notes: Some shared Greek characters, may require amendments. U+2C80->U+2C81, U+2C81, U+2C82->U+2C83, U+2C83, U+2C84->U+2C85, U+2C85, U+2C86->U+2C87, U+2C87, U+2C88->U+2C89, U+2C89, U+2C8A->U+2C8B, U+2C8B, U+2C8C->U+2C8D, U+2C8D, U+2C8E->U+2C8F, U+2C8F, U+2C90->U+2C91, U+2C91, U+2C92->U+2C93, U+2C93, U+2C94->U+2C95, U+2C95, U+2C96->U+2C97, U+2C97, U+2C98->U+2C99, U+2C99, U+2C9A->U+2C9B, U+2C9B, U+2C9C->U+2C9D, U+2C9D, U+2C9E->U+2C9F, U+2C9F, U+2CA0->U+2CA1, U+2CA1, U+2CA2->U+2CA3, U+2CA3, U+2CA4->U+2CA5, U+2CA5, U+2CA6->U+2CA7, U+2CA7, U+2CA8->U+2CA9, U+2CA9, U+2CAA->U+2CAB, U+2CAB, U+2CAC->U+2CAD, U+2CAD, U+2CAE->U+2CAF, U+2CAF, U+2CB0->U+2CB1, U+2CB1, U+2CB2->U+2CB3, U+2CB3, U+2CB4->U+2CB5, U+2CB5, U+2CB6->U+2CB7, U+2CB7, U+2CB8->U+2CB9, U+2CB9, U+2CBA->U+2CBB, U+2CBB, U+2CBC->U+2CBD, U+2CBD, U+2CBE->U+2CBF, U+2CBF, U+2CC0->U+2CC1, U+2CC1, U+2CC2->U+2CC3, U+2CC3, U+2CC4->U+2CC5, U+2CC5, U+2CC6->U+2CC7, U+2CC7, U+2CC8->U+2CC9, U+2CC9, U+2CCA->U+2CCB, U+2CCB, U+2CCC->U+2CCD, U+2CCD, U+2CCE->U+2CCF, U+2CCF, U+2CD0->U+2CD1, U+2CD1, U+2CD2->U+2CD3, U+2CD3, U+2CD4->U+2CD5, U+2CD5, U+2CD6->U+2CD7, U+2CD7, U+2CD8->U+2CD9, U+2CD9, U+2CDA->U+2CDB, U+2CDB, U+2CDC->U+2CDD, U+2CDD, U+2CDE->U+2CDF, U+2CDF, U+2CE0->U+2CE1, U+2CE1, U+2CE2->U+2CE3, U+2CE3 ################################################## # Cryllic* U+0400->U+0435, U+0401->U+0435, U+0402->U+0452, U+0452, U+0403->U+0433, U+0404->U+0454, U+0454, U+0405->U+0455, U+0455, U+0406->U+0456, U+0407->U+0456, U+0457->U+0456, U+0456, U+0408..U+040B->U+0458..U+045B, U+0458..U+045B, U+040C->U+043A, U+040D->U+0438, U+040E->U+0443, U+040F->U+045F, U+045F, U+0450->U+0435, U+0451->U+0435, U+0453->U+0433, U+045C->U+043A, U+045D->U+0438, U+045E->U+0443, U+0460->U+0461, U+0461, U+0462->U+0463, U+0463, U+0464->U+0465, U+0465, U+0466->U+0467, 
U+0467, U+0468->U+0469, U+0469, U+046A->U+046B, U+046B, U+046C->U+046D, U+046D, U+046E->U+046F, U+046F, U+0470->U+0471, U+0471, U+0472->U+0473, U+0473, U+0474->U+0475, U+0476->U+0475, U+0477->U+0475, U+0475, U+0478->U+0479, U+0479, U+047A->U+047B, U+047B, U+047C->U+047D, U+047D, U+047E->U+047F, U+047F, U+0480->U+0481, U+0481, U+048A->U+0438, U+048B->U+0438, U+048C->U+044C, U+048D->U+044C, U+048E->U+0440, U+048F->U+0440, U+0490->U+0433, U+0491->U+0433, U+0490->U+0433, U+0491->U+0433, U+0492->U+0433, U+0493->U+0433, U+0494->U+0433, U+0495->U+0433, U+0496->U+0436, U+0497->U+0436, U+0498->U+0437, U+0499->U+0437, U+049A->U+043A, U+049B->U+043A, U+049C->U+043A, U+049D->U+043A, U+049E->U+043A, U+049F->U+043A, U+04A0->U+043A, U+04A1->U+043A, U+04A2->U+043D, U+04A3->U+043D, U+04A4->U+043D, U+04A5->U+043D, U+04A6->U+043F, U+04A7->U+043F, U+04A8->U+04A9, U+04A9, U+04AA->U+0441, U+04AB->U+0441, U+04AC->U+0442, U+04AD->U+0442, U+04AE->U+0443, U+04AF->U+0443, U+04B0->U+0443, U+04B1->U+0443, U+04B2->U+0445, U+04B3->U+0445, U+04B4->U+04B5, U+04B5, U+04B6->U+0447, U+04B7->U+0447, U+04B8->U+0447, U+04B9->U+0447, U+04BA->U+04BB, U+04BB, U+04BC->U+04BD, U+04BE->U+04BD, U+04BF->U+04BD, U+04BD, U+04C0->U+04CF, U+04CF, U+04C1->U+0436, U+04C2->U+0436, U+04C3->U+043A, U+04C4->U+043A, U+04C5->U+043B, U+04C6->U+043B, U+04C7->U+043D, U+04C8->U+043D, U+04C9->U+043D, U+04CA->U+043D, U+04CB->U+0447, U+04CC->U+0447, U+04CD->U+043C, U+04CE->U+043C, U+04D0->U+0430, U+04D1->U+0430, U+04D2->U+0430, U+04D3->U+0430, U+04D4->U+00E6, U+04D5->U+00E6, U+04D6->U+0435, U+04D7->U+0435, U+04D8->U+04D9, U+04DA->U+04D9, U+04DB->U+04D9, U+04D9, U+04DC->U+0436, U+04DD->U+0436, U+04DE->U+0437, U+04DF->U+0437, U+04E0->U+04E1, U+04E1, U+04E2->U+0438, U+04E3->U+0438, U+04E4->U+0438, U+04E5->U+0438, U+04E6->U+043E, U+04E7->U+043E, U+04E8->U+043E, U+04E9->U+043E, U+04EA->U+043E, U+04EB->U+043E, U+04EC->U+044D, U+04ED->U+044D, U+04EE->U+0443, U+04EF->U+0443, U+04F0->U+0443, U+04F1->U+0443, U+04F2->U+0443, U+04F3->U+0443, 
U+04F4->U+0447, U+04F5->U+0447, U+04F6->U+0433, U+04F7->U+0433, U+04F8->U+044B, U+04F9->U+044B, U+04FA->U+0433, U+04FB->U+0433, U+04FC->U+0445, U+04FD->U+0445, U+04FE->U+0445, U+04FF->U+0445, U+0410..U+0418->U+0430..U+0438, U+0419->U+0438, U+0430..U+0438, U+041A..U+042F->U+043A..U+044F, U+043A..U+044F ################################################## # Devanagari U+0929->U+0928, U+0931->U+0930, U+0934->U+0933, U+0958->U+0915, U+0959->U+0916, U+095A->U+0917, U+095B->U+091C, U+095C->U+0921, U+095D->U+0922, U+095E->U+092B, U+095F->U+092F, U+0904..U+0928, U+092A..U+0930, U+0932, U+0933, U+0935..U+0939, U+0960, U+0961, U+0966..U+096F, U+097B..U+097F ################################################## # Georgian U+10FC->U+10DC, U+10D0..U+10FA, U+10A0..U+10C5->U+2D00..U+2D25, U+2D00..U+2D25 ################################################## # Greek U+0386->U+03B1, U+0388->U+03B5, U+0389->U+03B7, U+038A->U+03B9, U+038C->U+03BF, U+038E->U+03C5, U+038F->U+03C9, U+0390->U+03B9, U+03AA->U+03B9, U+03AB->U+03C5, U+03AC->U+03B1, U+03AD->U+03B5, U+03AE->U+03B7, U+03AF->U+03B9, U+03B0->U+03C5, U+03CA->U+03B9, U+03CB->U+03C5, U+03CC->U+03BF, U+03CD->U+03C5, U+03CE->U+03C9, U+03D0->U+03B2, U+03D1->U+03B8, U+03D2->U+03C5, U+03D3->U+03C5, U+03D4->U+03C5, U+03D5->U+03C6, U+03D6->U+03C0, U+03D8->U+03D9, U+03DA->U+03DB, U+03DC->U+03DD, U+03DE->U+03DF, U+03E0->U+03E1, U+03E2->U+03E3, U+03E4->U+03E5, U+03E6->U+03E7, U+03E8->U+03E9, U+03EA->U+03EB, U+03EC->U+03ED, U+03EE->U+03EF, U+03F0->U+03BA, U+03F1->U+03C1, U+03F2->U+03C3, U+03F4->U+03B8, U+03F5->U+03B5, U+03F6->U+03B5, U+03F7->U+03F8, U+03F9->U+03C3, U+03FA->U+03FB, U+1F00->U+03B1, U+1F01->U+03B1, U+1F02->U+03B1, U+1F03->U+03B1, U+1F04->U+03B1, U+1F05->U+03B1, U+1F06->U+03B1, U+1F07->U+03B1, U+1F08->U+03B1, U+1F09->U+03B1, U+1F0A->U+03B1, U+1F0B->U+03B1, U+1F0C->U+03B1, U+1F0D->U+03B1, U+1F0E->U+03B1, U+1F0F->U+03B1, U+1F10->U+03B5, U+1F11->U+03B5, U+1F12->U+03B5, U+1F13->U+03B5, U+1F14->U+03B5, U+1F15->U+03B5, U+1F18->U+03B5, 
U+1F19->U+03B5, U+1F1A->U+03B5, U+1F1B->U+03B5, U+1F1C->U+03B5, U+1F1D->U+03B5, U+1F20->U+03B7, U+1F21->U+03B7, U+1F22->U+03B7, U+1F23->U+03B7, U+1F24->U+03B7, U+1F25->U+03B7, U+1F26->U+03B7, U+1F27->U+03B7, U+1F28->U+03B7, U+1F29->U+03B7, U+1F2A->U+03B7, U+1F2B->U+03B7, U+1F2C->U+03B7, U+1F2D->U+03B7, U+1F2E->U+03B7, U+1F2F->U+03B7, U+1F30->U+03B9, U+1F31->U+03B9, U+1F32->U+03B9, U+1F33->U+03B9, U+1F34->U+03B9, U+1F35->U+03B9, U+1F36->U+03B9, U+1F37->U+03B9, U+1F38->U+03B9, U+1F39->U+03B9, U+1F3A->U+03B9, U+1F3B->U+03B9, U+1F3C->U+03B9, U+1F3D->U+03B9, U+1F3E->U+03B9, U+1F3F->U+03B9, U+1F40->U+03BF, U+1F41->U+03BF, U+1F42->U+03BF, U+1F43->U+03BF, U+1F44->U+03BF, U+1F45->U+03BF, U+1F48->U+03BF, U+1F49->U+03BF, U+1F4A->U+03BF, U+1F4B->U+03BF, U+1F4C->U+03BF, U+1F4D->U+03BF, U+1F50->U+03C5, U+1F51->U+03C5, U+1F52->U+03C5, U+1F53->U+03C5, U+1F54->U+03C5, U+1F55->U+03C5, U+1F56->U+03C5, U+1F57->U+03C5, U+1F59->U+03C5, U+1F5B->U+03C5, U+1F5D->U+03C5, U+1F5F->U+03C5, U+1F60->U+03C9, U+1F61->U+03C9, U+1F62->U+03C9, U+1F63->U+03C9, U+1F64->U+03C9, U+1F65->U+03C9, U+1F66->U+03C9, U+1F67->U+03C9, U+1F68->U+03C9, U+1F69->U+03C9, U+1F6A->U+03C9, U+1F6B->U+03C9, U+1F6C->U+03C9, U+1F6D->U+03C9, U+1F6E->U+03C9, U+1F6F->U+03C9, U+1F70->U+03B1, U+1F71->U+03B1, U+1F72->U+03B5, U+1F73->U+03B5, U+1F74->U+03B7, U+1F75->U+03B7, U+1F76->U+03B9, U+1F77->U+03B9, U+1F78->U+03BF, U+1F79->U+03BF, U+1F7A->U+03C5, U+1F7B->U+03C5, U+1F7C->U+03C9, U+1F7D->U+03C9, U+1F80->U+03B1, U+1F81->U+03B1, U+1F82->U+03B1, U+1F83->U+03B1, U+1F84->U+03B1, U+1F85->U+03B1, U+1F86->U+03B1, U+1F87->U+03B1, U+1F88->U+03B1, U+1F89->U+03B1, U+1F8A->U+03B1, U+1F8B->U+03B1, U+1F8C->U+03B1, U+1F8D->U+03B1, U+1F8E->U+03B1, U+1F8F->U+03B1, U+1F90->U+03B7, U+1F91->U+03B7, U+1F92->U+03B7, U+1F93->U+03B7, U+1F94->U+03B7, U+1F95->U+03B7, U+1F96->U+03B7, U+1F97->U+03B7, U+1F98->U+03B7, U+1F99->U+03B7, U+1F9A->U+03B7, U+1F9B->U+03B7, U+1F9C->U+03B7, U+1F9D->U+03B7, U+1F9E->U+03B7, U+1F9F->U+03B7, U+1FA0->U+03C9, U+1FA1->U+03C9, 
U+1FA2->U+03C9, U+1FA3->U+03C9, U+1FA4->U+03C9, U+1FA5->U+03C9, U+1FA6->U+03C9, U+1FA7->U+03C9, U+1FA8->U+03C9, U+1FA9->U+03C9, U+1FAA->U+03C9, U+1FAB->U+03C9, U+1FAC->U+03C9, U+1FAD->U+03C9, U+1FAE->U+03C9, U+1FAF->U+03C9, U+1FB0->U+03B1, U+1FB1->U+03B1, U+1FB2->U+03B1, U+1FB3->U+03B1, U+1FB4->U+03B1, U+1FB6->U+03B1, U+1FB7->U+03B1, U+1FB8->U+03B1, U+1FB9->U+03B1, U+1FBA->U+03B1, U+1FBB->U+03B1, U+1FBC->U+03B1, U+1FC2->U+03B7, U+1FC3->U+03B7, U+1FC4->U+03B7, U+1FC6->U+03B7, U+1FC7->U+03B7, U+1FC8->U+03B5, U+1FC9->U+03B5, U+1FCA->U+03B7, U+1FCB->U+03B7, U+1FCC->U+03B7, U+1FD0->U+03B9, U+1FD1->U+03B9, U+1FD2->U+03B9, U+1FD3->U+03B9, U+1FD6->U+03B9, U+1FD7->U+03B9, U+1FD8->U+03B9, U+1FD9->U+03B9, U+1FDA->U+03B9, U+1FDB->U+03B9, U+1FE0->U+03C5, U+1FE1->U+03C5, U+1FE2->U+03C5, U+1FE3->U+03C5, U+1FE4->U+03C1, U+1FE5->U+03C1, U+1FE6->U+03C5, U+1FE7->U+03C5, U+1FE8->U+03C5, U+1FE9->U+03C5, U+1FEA->U+03C5, U+1FEB->U+03C5, U+1FEC->U+03C1, U+1FF2->U+03C9, U+1FF3->U+03C9, U+1FF4->U+03C9, U+1FF6->U+03C9, U+1FF7->U+03C9, U+1FF8->U+03BF, U+1FF9->U+03BF, U+1FFA->U+03C9, U+1FFB->U+03C9, U+1FFC->U+03C9, U+0391..U+03A1->U+03B1..U+03C1, U+03B1..U+03C1, U+03A3..U+03A9->U+03C3..U+03C9, U+03C3..U+03C9, U+03C2, U+03D9, U+03DB, U+03DD, U+03DF, U+03E1, U+03E3, U+03E5, U+03E7, U+03E9, U+03EB, U+03ED, U+03EF, U+03F3, U+03F8, U+03FB ################################################## # Gujarati U+0A85..U+0A8C, U+0A8F, U+0A90, U+0A93..U+0AB0, U+0AB2, U+0AB3, U+0AB5..U+0AB9, U+0AE0, U+0AE1, U+0AE6..U+0AEF ################################################## # Gurmukhi U+0A33->U+0A32, U+0A36->U+0A38, U+0A59->U+0A16, U+0A5A->U+0A17, U+0A5B->U+0A1C, U+0A5E->U+0A2B, U+0A05..U+0A0A, U+0A0F, U+0A10, U+0A13..U+0A28, U+0A2A..U+0A30, U+0A32, U+0A35, U+0A38, U+0A39, U+0A5C, U+0A66..U+0A6F ################################################# # Hebrew* U+FB1D->U+05D9, U+FB1F->U+05F2, U+FB20->U+05E2, U+FB21->U+05D0, U+FB22->U+05D3, U+FB23->U+05D4, U+FB24->U+05DB, U+FB25->U+05DC, U+FB26->U+05DD, U+FB27->U+05E8, 
U+FB28->U+05EA, U+FB2A->U+05E9, U+FB2B->U+05E9, U+FB2C->U+05E9, U+FB2D->U+05E9, U+FB2E->U+05D0, U+FB2F->U+05D0, U+FB30->U+05D0, U+FB31->U+05D1, U+FB32->U+05D2, U+FB33->U+05D3, U+FB34->U+05D4, U+FB35->U+05D5, U+FB36->U+05D6, U+FB38->U+05D8, U+FB39->U+05D9, U+FB3A->U+05DA, U+FB3B->U+05DB, U+FB3C->U+05DC, U+FB3E->U+05DE, U+FB40->U+05E0, U+FB41->U+05E1, U+FB43->U+05E3, U+FB44->U+05E4, U+FB46->U+05E6, U+FB47->U+05E7, U+FB48->U+05E8, U+FB49->U+05E9, U+FB4A->U+05EA, U+FB4B->U+05D5, U+FB4C->U+05D1, U+FB4D->U+05DB, U+FB4E->U+05E4, U+FB4F->U+05D0, U+05D0..U+05F2 ################################################# # Kannada U+0C85..U+0C8C, U+0C8E..U+0C90, U+0C92..U+0CA8, U+0CAA..U+0CB3, U+0CB5..U+0CB9, U+0CE0, U+0CE1, U+0CE6..U+0CEF ################################################# # Limbu U+1900..U+191C, U+1930..U+1938, U+1946..U+194F ################################################# # Malayalam U+0D05..U+0D0C, U+0D0E..U+0D10, U+0D12..U+0D28, U+0D2A..U+0D39, U+0D60, U+0D61, U+0D66..U+0D6F ################################################# # Tamil U+0B94->U+0B92, U+0B85..U+0B8A, U+0B8E..U+0B90, U+0B92, U+0B93, U+0B95, U+0B99, U+0B9A, U+0B9C, U+0B9E, U+0B9F, U+0BA3, U+0BA4, U+0BA8..U+0BAA, U+0BAE..U+0BB9, U+0BE6..U+0BEF ################################################# # Thai U+0E01..U+0E30, U+0E32, U+0E33, U+0E40..U+0E46, U+0E50..U+0E5B ################################################## # Common U+FF10..U+FF19->0..9, U+FF21..U+FF3A->a..z, U+FF41..U+FF5A->a..z, 0..9, A..Z->a..z, a..z """ # The expected value format is a commas-separated list of mappings. # Two simplest mappings simply declare a character as valid, and map a single character # to another single character, respectively. But specifying the whole table in such # form would result in bloated and barely manageable specifications. So there are # several syntax shortcuts that let you map ranges of characters at once. 
The complete # list is as follows: # # A->a # Single char mapping, declares source char 'A' as allowed to occur within keywords # and maps it to destination char 'a' (but does not declare 'a' as allowed). # A..Z->a..z # Range mapping, declares all chars in source range as allowed and maps them to # the destination range. Does not declare destination range as allowed. Also checks # ranges' lengths (the lengths must be equal). # a # Stray char mapping, declares a character as allowed and maps it to itself. # Equivalent to a->a single char mapping. # a..z # Stray range mapping, declares all characters in range as allowed and maps them to # themselves. Equivalent to a..z->a..z range mapping. # A..Z/2 # Checkerboard range map. Maps every pair of chars to the second char. # More formally, declares odd characters in range as allowed and maps them to the # even ones; also declares even characters as allowed and maps them to themselves. # For instance, A..Z/2 is equivalent to A->B, B->B, C->D, D->D, ..., Y->Z, Z->Z. # This mapping shortcut is helpful for a number of Unicode blocks where uppercase # and lowercase letters go in such interleaved order instead of contiguous chunks. _dewhite = re.compile(r"\s") _char = r"((?:U\+[0-9A-Fa-f]{4,6})|.)" _char_map = re.compile("^" + _char + "->" + _char + "$") _range_map = re.compile("^" + _char + r"\.\." + _char + "->" + _char + ".." + _char + "$") _stray_char = re.compile("^" + _char + "$") _stray_range = re.compile("^" + _char + r"\.\." + _char + "$") _checker_range = re.compile("^" + _char + r"\.\." 
+ _char + "/2$") def charspec_to_int(string): # Converts a character specification of the form 'A' or 'U+23BC' # to an integer if string.startswith("U+"): return int(string[2:], 16) elif len(string) == 1: return ord(string) else: raise Exception("Can't convert charspec: %r" % string) def charset_table_to_dict(tablestring): """Takes a string with the contents of a Sphinx charset table file and returns a mapping object (a defaultdict, actually) of the kind expected by the unicode.translate() method: that is, it maps a character number to a unicode character or None if the character is not a valid word character. The Sphinx charset table format is described at http://www.sphinxsearch.com/docs/current.html#conf-charset-table. """ #map = {} map = defaultdict(lambda: None) for line in tablestring.split("\n"): if not line or line.startswith("#"): continue line = _dewhite.sub("", line) for item in line.split(","): if not item: continue match = _range_map.match(item) if match: start1 = charspec_to_int(match.group(1)) end1 = charspec_to_int(match.group(2)) start2 = charspec_to_int(match.group(3)) end2 = charspec_to_int(match.group(4)) assert (end1 - start1) == (end2 - start2) try: for fromord, tooord in izip(xrange(start1, end1 + 1), xrange(start2, end2 + 1)): map[fromord] = unichr(tooord) except ValueError: pass continue match = _char_map.match(item) if match: fromord = charspec_to_int(match.group(1)) toord = charspec_to_int(match.group(2)) try: map[fromord] = unichr(toord) except ValueError: pass continue match = _stray_char.match(item) if match: ord = charspec_to_int(match.group(0)) try: map[ord] = unichr(ord) except ValueError: pass continue match = _stray_range.match(item) if match: start = charspec_to_int(match.group(1)) end = charspec_to_int(match.group(2)) try: for ord in xrange(start, end + 1): map[ord] = unichr(ord) except ValueError: pass continue match = _checker_range.match(item) if match: fromord = charspec_to_int(match.group(1)) toord = 
charspec_to_int(match.group(2)) assert toord - fromord % 2 == 0 for ord in xrange(fromord, toord + 1, 2): try: map[ord] = unichr(ord + 1) map[ord + 1] = unichr(ord + 1) except ValueError: pass continue raise Exception("Don't know what to do with %r" % item) return dict(map)
MatthewWilkes/django-oscar
refs/heads/master
src/oscar/apps/checkout/exceptions.py
63
class FailedPreCondition(Exception):
    """Signals that a checkout pre-condition check did not pass.

    Carries the URL to redirect the customer to, plus a (possibly empty)
    list of messages to display.
    """

    def __init__(self, url, message=None, messages=None):
        self.url = url
        # Normalise the two optional message arguments into a single list:
        # a single ``message`` wins, otherwise ``messages`` (or empty).
        self.messages = [message] if message else (messages or [])


class PassedSkipCondition(Exception):
    """Signals that a skip condition passed and the current view should be
    skipped; the supplied URL dictates where to go instead.
    """

    def __init__(self, url):
        self.url = url
justvanbloom/airtime
refs/heads/2.5.x
python_apps/media-monitor/mm2/media/monitor/toucher.py
10
# -*- coding: utf-8 -*-
import os
import time

import pure as mmp
from log import Loggable
from exceptions import CouldNotCreateIndexFile
from ..saas.thread import InstanceInheritingThread


class Toucher(Loggable):
    """Callable object that touches the file at ``path`` on every call."""

    def __init__(self, path):
        self.path = path

    def __call__(self):
        try:
            mmp.fondle(self.path)
        except Exception as exc:
            # Best-effort: a failed touch is logged, never raised.
            self.logger.info("Failed to touch file: '%s'. Logging exception."
                             % self.path)
            self.logger.info(str(exc))


class RepeatTimer(InstanceInheritingThread):
    """Thread that invokes ``callable(*args, **kwargs)`` every ``interval``
    seconds, forever."""

    def __init__(self, interval, callable, *args, **kwargs):
        super(RepeatTimer, self).__init__()
        self.interval = interval
        self.callable = callable
        self.args = args
        self.kwargs = kwargs

    def run(self):
        # Sleep first, fire second: the first invocation happens one full
        # interval after the thread starts.
        while True:
            time.sleep(self.interval)
            self.callable(*self.args, **self.kwargs)


class ToucherThread(Loggable):
    """ Creates a thread that touches a file 'path' every 'interval' seconds """

    def __init__(self, path, interval=5):
        if not os.path.exists(path):
            try:
                # Create an empty index file if none exists yet.
                with open(path, 'w') as handle:
                    handle.write('')
            except Exception as exc:
                raise CouldNotCreateIndexFile(path, exc)
        ticker = RepeatTimer(interval, Toucher(path))
        ticker.daemon = True  # thread terminates once process is done
        ticker.start()
cortedeltimo/SickRage
refs/heads/master
lib/sqlalchemy/ext/instrumentation.py
80
"""Extensible class instrumentation. The :mod:`sqlalchemy.ext.instrumentation` package provides for alternate systems of class instrumentation within the ORM. Class instrumentation refers to how the ORM places attributes on the class which maintain data and track changes to that data, as well as event hooks installed on the class. .. note:: The extension package is provided for the benefit of integration with other object management packages, which already perform their own instrumentation. It is not intended for general use. For examples of how the instrumentation extension is used, see the example :ref:`examples_instrumentation`. .. versionchanged:: 0.8 The :mod:`sqlalchemy.orm.instrumentation` was split out so that all functionality having to do with non-standard instrumentation was moved out to :mod:`sqlalchemy.ext.instrumentation`. When imported, the module installs itself within :mod:`sqlalchemy.orm.instrumentation` so that it takes effect, including recognition of ``__sa_instrumentation_manager__`` on mapped classes, as well :data:`.instrumentation_finders` being used to determine class instrumentation resolution. """ from ..orm import instrumentation as orm_instrumentation from ..orm.instrumentation import ( ClassManager, InstrumentationFactory, _default_state_getter, _default_dict_getter, _default_manager_getter ) from ..orm import attributes, collections, base as orm_base from .. import util from ..orm import exc as orm_exc import weakref INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__' """Attribute, elects custom instrumentation when present on a mapped class. Allows a class to specify a slightly or wildly different technique for tracking changes made to mapped attributes and collections. Only one instrumentation implementation is allowed in a given object inheritance hierarchy. The value of this attribute must be a callable and will be passed a class object. 
The callable must return one of: - An instance of an InstrumentationManager or subclass - An object implementing all or some of InstrumentationManager (TODO) - A dictionary of callables, implementing all or some of the above (TODO) - An instance of a ClassManager or subclass This attribute is consulted by SQLAlchemy instrumentation resolution, once the :mod:`sqlalchemy.ext.instrumentation` module has been imported. If custom finders are installed in the global instrumentation_finders list, they may or may not choose to honor this attribute. """ def find_native_user_instrumentation_hook(cls): """Find user-specified instrumentation management for a class.""" return getattr(cls, INSTRUMENTATION_MANAGER, None) instrumentation_finders = [find_native_user_instrumentation_hook] """An extensible sequence of callables which return instrumentation implementations When a class is registered, each callable will be passed a class object. If None is returned, the next finder in the sequence is consulted. Otherwise the return must be an instrumentation factory that follows the same guidelines as sqlalchemy.ext.instrumentation.INSTRUMENTATION_MANAGER. By default, the only finder is find_native_user_instrumentation_hook, which searches for INSTRUMENTATION_MANAGER. If all finders return None, standard ClassManager instrumentation is used. """ class ExtendedInstrumentationRegistry(InstrumentationFactory): """Extends :class:`.InstrumentationFactory` with additional bookkeeping, to accommodate multiple types of class managers. 
""" _manager_finders = weakref.WeakKeyDictionary() _state_finders = weakref.WeakKeyDictionary() _dict_finders = weakref.WeakKeyDictionary() _extended = False def _locate_extended_factory(self, class_): for finder in instrumentation_finders: factory = finder(class_) if factory is not None: manager = self._extended_class_manager(class_, factory) return manager, factory else: return None, None def _check_conflicts(self, class_, factory): existing_factories = self._collect_management_factories_for(class_).\ difference([factory]) if existing_factories: raise TypeError( "multiple instrumentation implementations specified " "in %s inheritance hierarchy: %r" % ( class_.__name__, list(existing_factories))) def _extended_class_manager(self, class_, factory): manager = factory(class_) if not isinstance(manager, ClassManager): manager = _ClassInstrumentationAdapter(class_, manager) if factory != ClassManager and not self._extended: # somebody invoked a custom ClassManager. # reinstall global "getter" functions with the more # expensive ones. self._extended = True _install_instrumented_lookups() self._manager_finders[class_] = manager.manager_getter() self._state_finders[class_] = manager.state_getter() self._dict_finders[class_] = manager.dict_getter() return manager def _collect_management_factories_for(self, cls): """Return a collection of factories in play or specified for a hierarchy. Traverses the entire inheritance graph of a cls and returns a collection of instrumentation factories for those classes. Factories are extracted from active ClassManagers, if available, otherwise instrumentation_finders is consulted. 
""" hierarchy = util.class_hierarchy(cls) factories = set() for member in hierarchy: manager = self.manager_of_class(member) if manager is not None: factories.add(manager.factory) else: for finder in instrumentation_finders: factory = finder(member) if factory is not None: break else: factory = None factories.add(factory) factories.discard(None) return factories def unregister(self, class_): if class_ in self._manager_finders: del self._manager_finders[class_] del self._state_finders[class_] del self._dict_finders[class_] super(ExtendedInstrumentationRegistry, self).unregister(class_) def manager_of_class(self, cls): if cls is None: return None return self._manager_finders.get(cls, _default_manager_getter)(cls) def state_of(self, instance): if instance is None: raise AttributeError("None has no persistent state.") return self._state_finders.get( instance.__class__, _default_state_getter)(instance) def dict_of(self, instance): if instance is None: raise AttributeError("None has no persistent state.") return self._dict_finders.get( instance.__class__, _default_dict_getter)(instance) orm_instrumentation._instrumentation_factory = \ _instrumentation_factory = ExtendedInstrumentationRegistry() orm_instrumentation.instrumentation_finders = instrumentation_finders class InstrumentationManager(object): """User-defined class instrumentation extension. :class:`.InstrumentationManager` can be subclassed in order to change how class instrumentation proceeds. This class exists for the purposes of integration with other object management frameworks which would like to entirely modify the instrumentation methodology of the ORM, and is not intended for regular usage. For interception of class instrumentation events, see :class:`.InstrumentationEvents`. The API for this class should be considered as semi-stable, and may change slightly with new releases. .. 
versionchanged:: 0.8 :class:`.InstrumentationManager` was moved from :mod:`sqlalchemy.orm.instrumentation` to :mod:`sqlalchemy.ext.instrumentation`. """ # r4361 added a mandatory (cls) constructor to this interface. # given that, perhaps class_ should be dropped from all of these # signatures. def __init__(self, class_): pass def manage(self, class_, manager): setattr(class_, '_default_class_manager', manager) def dispose(self, class_, manager): delattr(class_, '_default_class_manager') def manager_getter(self, class_): def get(cls): return cls._default_class_manager return get def instrument_attribute(self, class_, key, inst): pass def post_configure_attribute(self, class_, key, inst): pass def install_descriptor(self, class_, key, inst): setattr(class_, key, inst) def uninstall_descriptor(self, class_, key): delattr(class_, key) def install_member(self, class_, key, implementation): setattr(class_, key, implementation) def uninstall_member(self, class_, key): delattr(class_, key) def instrument_collection_class(self, class_, key, collection_class): return collections.prepare_instrumentation(collection_class) def get_instance_dict(self, class_, instance): return instance.__dict__ def initialize_instance_dict(self, class_, instance): pass def install_state(self, class_, instance, state): setattr(instance, '_default_state', state) def remove_state(self, class_, instance): delattr(instance, '_default_state') def state_getter(self, class_): return lambda instance: getattr(instance, '_default_state') def dict_getter(self, class_): return lambda inst: self.get_instance_dict(class_, inst) class _ClassInstrumentationAdapter(ClassManager): """Adapts a user-defined InstrumentationManager to a ClassManager.""" def __init__(self, class_, override): self._adapted = override self._get_state = self._adapted.state_getter(class_) self._get_dict = self._adapted.dict_getter(class_) ClassManager.__init__(self, class_) def manage(self): self._adapted.manage(self.class_, self) def 
dispose(self): self._adapted.dispose(self.class_) def manager_getter(self): return self._adapted.manager_getter(self.class_) def instrument_attribute(self, key, inst, propagated=False): ClassManager.instrument_attribute(self, key, inst, propagated) if not propagated: self._adapted.instrument_attribute(self.class_, key, inst) def post_configure_attribute(self, key): super(_ClassInstrumentationAdapter, self).post_configure_attribute(key) self._adapted.post_configure_attribute(self.class_, key, self[key]) def install_descriptor(self, key, inst): self._adapted.install_descriptor(self.class_, key, inst) def uninstall_descriptor(self, key): self._adapted.uninstall_descriptor(self.class_, key) def install_member(self, key, implementation): self._adapted.install_member(self.class_, key, implementation) def uninstall_member(self, key): self._adapted.uninstall_member(self.class_, key) def instrument_collection_class(self, key, collection_class): return self._adapted.instrument_collection_class( self.class_, key, collection_class) def initialize_collection(self, key, state, factory): delegate = getattr(self._adapted, 'initialize_collection', None) if delegate: return delegate(key, state, factory) else: return ClassManager.initialize_collection(self, key, state, factory) def new_instance(self, state=None): instance = self.class_.__new__(self.class_) self.setup_instance(instance, state) return instance def _new_state_if_none(self, instance): """Install a default InstanceState if none is present. A private convenience method used by the __init__ decorator. 
""" if self.has_state(instance): return False else: return self.setup_instance(instance) def setup_instance(self, instance, state=None): self._adapted.initialize_instance_dict(self.class_, instance) if state is None: state = self._state_constructor(instance, self) # the given instance is assumed to have no state self._adapted.install_state(self.class_, instance, state) return state def teardown_instance(self, instance): self._adapted.remove_state(self.class_, instance) def has_state(self, instance): try: self._get_state(instance) except orm_exc.NO_STATE: return False else: return True def state_getter(self): return self._get_state def dict_getter(self): return self._get_dict def _install_instrumented_lookups(): """Replace global class/object management functions with ExtendedInstrumentationRegistry implementations, which allow multiple types of class managers to be present, at the cost of performance. This function is called only by ExtendedInstrumentationRegistry and unit tests specific to this behavior. The _reinstall_default_lookups() function can be called after this one to re-establish the default functions. 
""" _install_lookups( dict( instance_state=_instrumentation_factory.state_of, instance_dict=_instrumentation_factory.dict_of, manager_of_class=_instrumentation_factory.manager_of_class ) ) def _reinstall_default_lookups(): """Restore simplified lookups.""" _install_lookups( dict( instance_state=_default_state_getter, instance_dict=_default_dict_getter, manager_of_class=_default_manager_getter ) ) def _install_lookups(lookups): global instance_state, instance_dict, manager_of_class instance_state = lookups['instance_state'] instance_dict = lookups['instance_dict'] manager_of_class = lookups['manager_of_class'] orm_base.instance_state = attributes.instance_state = \ orm_instrumentation.instance_state = instance_state orm_base.instance_dict = attributes.instance_dict = \ orm_instrumentation.instance_dict = instance_dict orm_base.manager_of_class = attributes.manager_of_class = \ orm_instrumentation.manager_of_class = manager_of_class
Reroot/stagger
refs/heads/master
test/fileutil.py
15
#!/usr/bin/env python3
#
# fileutil.py
# From the stagger project: http://code.google.com/p/stagger/
#
# Copyright (c) 2009-2011 Karoly Lorentey <karoly@lorentey.hu>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in
#   the documentation and/or other materials provided with the
#   distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import unittest
import io
import random
import tempfile
import warnings
import os
import signal

# BUG FIX: ``stagger`` itself must be imported explicitly -- the star
# import below does not bind the name ``stagger``, so the
# ``stagger.Warning`` reference in the __main__ block raised NameError.
import stagger
from stagger.fileutil import *


class FileutilTestCase(unittest.TestCase):
    """Tests for stagger.fileutil helpers."""

    def testSuppressInterrupt(self):
        """suppress_interrupt() must defer KeyboardInterrupt until the
        end of the with-block."""
        foo = 0
        try:
            with suppress_interrupt():
                # Verify that KeyboardInterrupts are deferred
                # until the end of this block.
                foo += 1
                os.kill(0, signal.SIGINT)  # Simulate C-c
                foo += 1
        except KeyboardInterrupt:
            # This should be triggered, but only after the second increment.
            foo += 1
        except AttributeError:
            # There is no os.kill on Windows: we can't test this feature there.
            return
        except WindowsError:
            # Ditto
            return
        self.assertEqual(foo, 3, "Can't suppress interrupts")

    def testReplaceChunk(self):
        """replace_chunk() must behave exactly like bytearray slice
        assignment, both in-place and via a temporary file."""

        def compare(data, filename):
            # True iff the file's contents equal ``data``.
            with opened(filename, "rb") as handle:
                data2 = handle.read()
            return data == data2

        def random_data(length):
            return bytearray(random.randint(0, 255) for i in range(length))

        def random_offset(size):
            # Bias towards the boundary cases 0 and ``size``.
            r = random.randint(0, 10)
            if r < 2:
                return 0
            if r < 4:
                return size
            return random.randint(0, size)

        def random_length(maxsize=None):
            # Bias towards the boundary cases 0 and ``maxsize``.
            if maxsize is None:
                maxsize = CHUNK_SIZE_MAX
            maxsize = min(maxsize, CHUNK_SIZE_MAX)
            r = random.randint(0, 10)
            if r < 2:
                return 0
            if r < 4:
                return maxsize
            return random.randint(0, maxsize)

        def replace_both(data, filename, offset, length, chunk, in_place):
            # Apply the same replacement to the in-memory copy and the file.
            data[offset:offset + length] = chunk
            replace_chunk(filename, offset, length, chunk, in_place=in_place)

        FILESIZE = 100 * 1024
        CHUNK_SIZE_MAX = 10 * 1024

        # Create a random temp file and a matching bytearray; replace some
        # random chunks with other random chunks in both objects; results
        # should match.
        for in_place in [False, True]:
            data = random_data(FILESIZE)
            tmp = tempfile.NamedTemporaryFile(prefix="staggertest-",
                                              suffix=".tmp", delete=False)
            try:
                filename = tmp.name
                tmp.write(data)
                tmp.close()
                size = len(data)
                for i in range(40):
                    offset = random_offset(size)
                    length = random_length(size - offset)
                    if random.randint(0, 6) == 0:
                        chunk_length = length
                    else:
                        chunk_length = random_length()
                    chunk = random_data(chunk_length)
                    replace_both(data, filename, offset, length, chunk,
                                 in_place)
                    self.assertTrue(compare(data, filename))
                    size += len(chunk) - length
                    self.assertTrue(size == len(data))
            finally:
                os.unlink(filename)


suite = unittest.TestLoader().loadTestsFromTestCase(FileutilTestCase)

if __name__ == "__main__":
    warnings.simplefilter("always", stagger.Warning)
    unittest.main(defaultTest="suite")
bencevans/mopidy
refs/heads/develop
tests/core/test_mixer.py
17
from __future__ import absolute_import, unicode_literals import unittest import mock import pykka from mopidy import core, mixer from tests import dummy_mixer class CoreMixerTest(unittest.TestCase): def setUp(self): # noqa: N802 self.mixer = mock.Mock(spec=mixer.Mixer) self.core = core.Core(mixer=self.mixer, backends=[]) def test_get_volume(self): self.mixer.get_volume.return_value.get.return_value = 30 self.assertEqual(self.core.mixer.get_volume(), 30) self.mixer.get_volume.assert_called_once_with() def test_set_volume(self): self.mixer.set_volume.return_value.get.return_value = True self.core.mixer.set_volume(30) self.mixer.set_volume.assert_called_once_with(30) def test_get_mute(self): self.mixer.get_mute.return_value.get.return_value = True self.assertEqual(self.core.mixer.get_mute(), True) self.mixer.get_mute.assert_called_once_with() def test_set_mute(self): self.mixer.set_mute.return_value.get.return_value = True self.core.mixer.set_mute(True) self.mixer.set_mute.assert_called_once_with(True) class CoreNoneMixerTest(unittest.TestCase): def setUp(self): # noqa: N802 self.core = core.Core(mixer=None, backends=[]) def test_get_volume_return_none_because_it_is_unknown(self): self.assertEqual(self.core.mixer.get_volume(), None) def test_set_volume_return_false_because_it_failed(self): self.assertEqual(self.core.mixer.set_volume(30), False) def test_get_mute_return_none_because_it_is_unknown(self): self.assertEqual(self.core.mixer.get_mute(), None) def test_set_mute_return_false_because_it_failed(self): self.assertEqual(self.core.mixer.set_mute(True), False) @mock.patch.object(mixer.MixerListener, 'send') class CoreMixerListenerTest(unittest.TestCase): def setUp(self): # noqa: N802 self.mixer = dummy_mixer.create_proxy() self.core = core.Core(mixer=self.mixer, backends=[]) def tearDown(self): # noqa: N802 pykka.ActorRegistry.stop_all() def test_forwards_mixer_volume_changed_event_to_frontends(self, send): self.assertEqual(self.core.mixer.set_volume(volume=60), 
True) self.assertEqual(send.call_args[0][0], 'volume_changed') self.assertEqual(send.call_args[1]['volume'], 60) def test_forwards_mixer_mute_changed_event_to_frontends(self, send): self.core.mixer.set_mute(mute=True) self.assertEqual(send.call_args[0][0], 'mute_changed') self.assertEqual(send.call_args[1]['mute'], True) @mock.patch.object(mixer.MixerListener, 'send') class CoreNoneMixerListenerTest(unittest.TestCase): def setUp(self): # noqa: N802 self.core = core.Core(mixer=None, backends=[]) def test_forwards_mixer_volume_changed_event_to_frontends(self, send): self.assertEqual(self.core.mixer.set_volume(volume=60), False) self.assertEqual(send.call_count, 0) def test_forwards_mixer_mute_changed_event_to_frontends(self, send): self.core.mixer.set_mute(mute=True) self.assertEqual(send.call_count, 0) class MockBackendCoreMixerBase(unittest.TestCase): def setUp(self): # noqa: N802 self.mixer = mock.Mock() self.mixer.actor_ref.actor_class.__name__ = 'DummyMixer' self.core = core.Core(mixer=self.mixer, backends=[]) class GetVolumeBadBackendTest(MockBackendCoreMixerBase): def test_backend_raises_exception(self): self.mixer.get_volume.return_value.get.side_effect = Exception self.assertEqual(self.core.mixer.get_volume(), None) def test_backend_returns_too_small_value(self): self.mixer.get_volume.return_value.get.return_value = -1 self.assertEqual(self.core.mixer.get_volume(), None) def test_backend_returns_too_large_value(self): self.mixer.get_volume.return_value.get.return_value = 1000 self.assertEqual(self.core.mixer.get_volume(), None) def test_backend_returns_wrong_type(self): self.mixer.get_volume.return_value.get.return_value = '12' self.assertEqual(self.core.mixer.get_volume(), None) class SetVolumeBadBackendTest(MockBackendCoreMixerBase): def test_backend_raises_exception(self): self.mixer.set_volume.return_value.get.side_effect = Exception self.assertFalse(self.core.mixer.set_volume(30)) def test_backend_returns_wrong_type(self): 
self.mixer.set_volume.return_value.get.return_value = 'done' self.assertFalse(self.core.mixer.set_volume(30)) class GetMuteBadBackendTest(MockBackendCoreMixerBase): def test_backend_raises_exception(self): self.mixer.get_mute.return_value.get.side_effect = Exception self.assertEqual(self.core.mixer.get_mute(), None) def test_backend_returns_wrong_type(self): self.mixer.get_mute.return_value.get.return_value = '12' self.assertEqual(self.core.mixer.get_mute(), None) class SetMuteBadBackendTest(MockBackendCoreMixerBase): def test_backend_raises_exception(self): self.mixer.set_mute.return_value.get.side_effect = Exception self.assertFalse(self.core.mixer.set_mute(True)) def test_backend_returns_wrong_type(self): self.mixer.set_mute.return_value.get.return_value = 'done' self.assertFalse(self.core.mixer.set_mute(True))
mattmccarthy11/vidly-development
refs/heads/master
bk/mediadrop/lib/tests/helpers_test.py
10
# -*- coding: utf-8 -*- # This file is a part of MediaDrop (http://www.mediadrop.net), # Copyright 2009-2015 MediaDrop contributors # For the exact contribution history, see the git revision log. # The source code contained in this file is licensed under the GPLv3 or # (at your option) any later version. # See LICENSE.txt in the main project directory, for more information. from mediadrop.lib.test.db_testcase import DBTestCase from mediadrop.lib.test.pythonic_testcase import * from mediadrop.lib.test.request_mixin import RequestMixin class DefaultPageTitleTest(DBTestCase, RequestMixin): def setUp(self): super(DefaultPageTitleTest, self).setUp() self.init_fake_request() def test_default_page_title_ignores_default_if_not_specified(self): # mediadrop.lib.helpers imports 'pylons.request' on class load time # so we import the symbol locally after we injected a fake request from mediadrop.lib.helpers import default_page_title assert_equals('MediaDrop', default_page_title()) import unittest def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(DefaultPageTitleTest)) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
TathagataChakraborti/resource-conflicts
refs/heads/master
PLANROB-2015/seq-sat-lama/py2.5/lib/python2.5/plat-linux3/IN.py
3
# Generated by h2py from /usr/include/netinet/in.h _NETINET_IN_H = 1 # Included from features.h _FEATURES_H = 1 __USE_ANSI = 1 __FAVOR_BSD = 1 _ISOC95_SOURCE = 1 _ISOC99_SOURCE = 1 _POSIX_SOURCE = 1 _POSIX_C_SOURCE = 200809L _XOPEN_SOURCE = 700 _XOPEN_SOURCE_EXTENDED = 1 _LARGEFILE64_SOURCE = 1 _BSD_SOURCE = 1 _SVID_SOURCE = 1 _ATFILE_SOURCE = 1 _BSD_SOURCE = 1 _SVID_SOURCE = 1 __USE_ISOC99 = 1 __USE_ISOC95 = 1 _POSIX_SOURCE = 1 _POSIX_C_SOURCE = 2 _POSIX_C_SOURCE = 199506L _POSIX_C_SOURCE = 200112L _POSIX_C_SOURCE = 200809L __USE_POSIX_IMPLICITLY = 1 __USE_POSIX = 1 __USE_POSIX2 = 1 __USE_POSIX199309 = 1 __USE_POSIX199506 = 1 __USE_XOPEN2K = 1 __USE_ISOC95 = 1 __USE_ISOC99 = 1 __USE_XOPEN2K8 = 1 _ATFILE_SOURCE = 1 __USE_XOPEN = 1 __USE_XOPEN_EXTENDED = 1 __USE_UNIX98 = 1 _LARGEFILE_SOURCE = 1 __USE_XOPEN2K8 = 1 __USE_XOPEN2K8XSI = 1 __USE_XOPEN2K = 1 __USE_XOPEN2KXSI = 1 __USE_ISOC95 = 1 __USE_ISOC99 = 1 __USE_XOPEN_EXTENDED = 1 __USE_LARGEFILE = 1 __USE_LARGEFILE64 = 1 __USE_FILE_OFFSET64 = 1 __USE_MISC = 1 __USE_BSD = 1 __USE_SVID = 1 __USE_ATFILE = 1 __USE_GNU = 1 __USE_REENTRANT = 1 __USE_FORTIFY_LEVEL = 2 __USE_FORTIFY_LEVEL = 1 __USE_FORTIFY_LEVEL = 0 # Included from bits/predefs.h __STDC_IEC_559__ = 1 __STDC_IEC_559_COMPLEX__ = 1 __STDC_ISO_10646__ = 200009L __GNU_LIBRARY__ = 6 __GLIBC__ = 2 __GLIBC_MINOR__ = 15 __GLIBC_HAVE_LONG_LONG = 1 # Included from sys/cdefs.h _SYS_CDEFS_H = 1 def __NTH(fct): return fct def __NTH(fct): return fct def __P(args): return args def __PMT(args): return args def __STRING(x): return #x def __bos(ptr): return __builtin_object_size (ptr, __USE_FORTIFY_LEVEL > 1) def __bos0(ptr): return __builtin_object_size (ptr, 0) def __warnattr(msg): return __attribute__((__warning__ (msg))) __flexarr = [] __flexarr = [0] __flexarr = [] __flexarr = [1] def __ASMNAME(cname): return __ASMNAME2 (__USER_LABEL_PREFIX__, cname) def __attribute__(xyz): return def __attribute_format_arg__(x): return __attribute__ ((__format_arg__ (x))) def 
__attribute_format_arg__(x): return # Included from bits/wordsize.h __WORDSIZE = 64 __WORDSIZE_COMPAT32 = 1 __WORDSIZE = 32 __LDBL_COMPAT = 1 def __LDBL_REDIR_DECL(name): return \ __USE_LARGEFILE = 1 __USE_LARGEFILE64 = 1 __USE_EXTERN_INLINES = 1 __USE_EXTERN_INLINES_IN_LIBC = 1 # Included from gnu/stubs.h # Included from stdint.h _STDINT_H = 1 # Included from bits/wchar.h _BITS_WCHAR_H = 1 __WCHAR_MAX = (2147483647) __WCHAR_MIN = (-__WCHAR_MAX - 1) def __INT64_C(c): return c ## L def __UINT64_C(c): return c ## UL def __INT64_C(c): return c ## LL def __UINT64_C(c): return c ## ULL INT8_MIN = (-128) INT16_MIN = (-32767-1) INT32_MIN = (-2147483647-1) INT64_MIN = (-__INT64_C(9223372036854775807)-1) INT8_MAX = (127) INT16_MAX = (32767) INT32_MAX = (2147483647) INT64_MAX = (__INT64_C(9223372036854775807)) UINT8_MAX = (255) UINT16_MAX = (65535) UINT64_MAX = (__UINT64_C(18446744073709551615)) INT_LEAST8_MIN = (-128) INT_LEAST16_MIN = (-32767-1) INT_LEAST32_MIN = (-2147483647-1) INT_LEAST64_MIN = (-__INT64_C(9223372036854775807)-1) INT_LEAST8_MAX = (127) INT_LEAST16_MAX = (32767) INT_LEAST32_MAX = (2147483647) INT_LEAST64_MAX = (__INT64_C(9223372036854775807)) UINT_LEAST8_MAX = (255) UINT_LEAST16_MAX = (65535) UINT_LEAST64_MAX = (__UINT64_C(18446744073709551615)) INT_FAST8_MIN = (-128) INT_FAST16_MIN = (-9223372036854775807L-1) INT_FAST32_MIN = (-9223372036854775807L-1) INT_FAST16_MIN = (-2147483647-1) INT_FAST32_MIN = (-2147483647-1) INT_FAST64_MIN = (-__INT64_C(9223372036854775807)-1) INT_FAST8_MAX = (127) INT_FAST16_MAX = (9223372036854775807L) INT_FAST32_MAX = (9223372036854775807L) INT_FAST16_MAX = (2147483647) INT_FAST32_MAX = (2147483647) INT_FAST64_MAX = (__INT64_C(9223372036854775807)) UINT_FAST8_MAX = (255) UINT_FAST64_MAX = (__UINT64_C(18446744073709551615)) INTPTR_MIN = (-9223372036854775807L-1) INTPTR_MAX = (9223372036854775807L) INTPTR_MIN = (-2147483647-1) INTPTR_MAX = (2147483647) INTMAX_MIN = (-__INT64_C(9223372036854775807)-1) INTMAX_MAX = 
(__INT64_C(9223372036854775807)) UINTMAX_MAX = (__UINT64_C(18446744073709551615)) PTRDIFF_MIN = (-9223372036854775807L-1) PTRDIFF_MAX = (9223372036854775807L) PTRDIFF_MIN = (-2147483647-1) PTRDIFF_MAX = (2147483647) SIG_ATOMIC_MIN = (-2147483647-1) SIG_ATOMIC_MAX = (2147483647) WCHAR_MIN = __WCHAR_MIN WCHAR_MAX = __WCHAR_MAX def INT8_C(c): return c def INT16_C(c): return c def INT32_C(c): return c def INT64_C(c): return c ## L def INT64_C(c): return c ## LL def UINT8_C(c): return c def UINT16_C(c): return c def UINT32_C(c): return c ## U def UINT64_C(c): return c ## UL def UINT64_C(c): return c ## ULL def INTMAX_C(c): return c ## L def UINTMAX_C(c): return c ## UL def INTMAX_C(c): return c ## LL def UINTMAX_C(c): return c ## ULL # Included from sys/socket.h _SYS_SOCKET_H = 1 # Included from sys/uio.h _SYS_UIO_H = 1 # Included from sys/types.h _SYS_TYPES_H = 1 # Included from bits/types.h _BITS_TYPES_H = 1 __S32_TYPE = int __SWORD_TYPE = int __SLONG32_TYPE = int # Included from bits/typesizes.h _BITS_TYPESIZES_H = 1 __PID_T_TYPE = __S32_TYPE __DADDR_T_TYPE = __S32_TYPE __KEY_T_TYPE = __S32_TYPE __CLOCKID_T_TYPE = __S32_TYPE __SSIZE_T_TYPE = __SWORD_TYPE __FD_SETSIZE = 1024 # Included from time.h _TIME_H = 1 # Included from bits/time.h _STRUCT_TIMEVAL = 1 _BITS_TIME_H = 1 CLOCKS_PER_SEC = 1000000l CLOCK_REALTIME = 0 CLOCK_MONOTONIC = 1 CLOCK_PROCESS_CPUTIME_ID = 2 CLOCK_THREAD_CPUTIME_ID = 3 CLOCK_MONOTONIC_RAW = 4 CLOCK_REALTIME_COARSE = 5 CLOCK_MONOTONIC_COARSE = 6 CLOCK_BOOTTIME = 7 CLOCK_REALTIME_ALARM = 8 CLOCK_BOOTTIME_ALARM = 9 TIMER_ABSTIME = 1 # Included from bits/timex.h _BITS_TIMEX_H = 1 ADJ_OFFSET = 0x0001 ADJ_FREQUENCY = 0x0002 ADJ_MAXERROR = 0x0004 ADJ_ESTERROR = 0x0008 ADJ_STATUS = 0x0010 ADJ_TIMECONST = 0x0020 ADJ_TAI = 0x0080 ADJ_MICRO = 0x1000 ADJ_NANO = 0x2000 ADJ_TICK = 0x4000 ADJ_OFFSET_SINGLESHOT = 0x8001 ADJ_OFFSET_SS_READ = 0xa001 MOD_OFFSET = ADJ_OFFSET MOD_FREQUENCY = ADJ_FREQUENCY MOD_MAXERROR = ADJ_MAXERROR MOD_ESTERROR = ADJ_ESTERROR 
MOD_STATUS = ADJ_STATUS MOD_TIMECONST = ADJ_TIMECONST MOD_CLKB = ADJ_TICK MOD_CLKA = ADJ_OFFSET_SINGLESHOT MOD_TAI = ADJ_TAI MOD_MICRO = ADJ_MICRO MOD_NANO = ADJ_NANO STA_PLL = 0x0001 STA_PPSFREQ = 0x0002 STA_PPSTIME = 0x0004 STA_FLL = 0x0008 STA_INS = 0x0010 STA_DEL = 0x0020 STA_UNSYNC = 0x0040 STA_FREQHOLD = 0x0080 STA_PPSSIGNAL = 0x0100 STA_PPSJITTER = 0x0200 STA_PPSWANDER = 0x0400 STA_PPSERROR = 0x0800 STA_CLOCKERR = 0x1000 STA_NANO = 0x2000 STA_MODE = 0x4000 STA_CLK = 0x8000 STA_RONLY = (STA_PPSSIGNAL | STA_PPSJITTER | STA_PPSWANDER | \ STA_PPSERROR | STA_CLOCKERR | STA_NANO | STA_MODE | STA_CLK) CLK_TCK = CLOCKS_PER_SEC __clock_t_defined = 1 __time_t_defined = 1 __clockid_t_defined = 1 __timer_t_defined = 1 __timespec_defined = 1 # Included from xlocale.h _XLOCALE_H = 1 def __isleap(year): return \ __BIT_TYPES_DEFINED__ = 1 # Included from endian.h _ENDIAN_H = 1 __LITTLE_ENDIAN = 1234 __BIG_ENDIAN = 4321 __PDP_ENDIAN = 3412 # Included from bits/endian.h __BYTE_ORDER = __LITTLE_ENDIAN __FLOAT_WORD_ORDER = __BYTE_ORDER LITTLE_ENDIAN = __LITTLE_ENDIAN BIG_ENDIAN = __BIG_ENDIAN PDP_ENDIAN = __PDP_ENDIAN BYTE_ORDER = __BYTE_ORDER # Included from bits/byteswap.h _BITS_BYTESWAP_H = 1 def __bswap_constant_16(x): return \ def __bswap_16(x): return \ def __bswap_16(x): return \ def __bswap_constant_32(x): return \ def __bswap_32(x): return \ def __bswap_32(x): return \ def __bswap_32(x): return \ def __bswap_constant_64(x): return \ def __bswap_64(x): return \ def __bswap_64(x): return \ def htobe16(x): return __bswap_16 (x) def htole16(x): return (x) def be16toh(x): return __bswap_16 (x) def le16toh(x): return (x) def htobe32(x): return __bswap_32 (x) def htole32(x): return (x) def be32toh(x): return __bswap_32 (x) def le32toh(x): return (x) def htobe64(x): return __bswap_64 (x) def htole64(x): return (x) def be64toh(x): return __bswap_64 (x) def le64toh(x): return (x) def htobe16(x): return (x) def htole16(x): return __bswap_16 (x) def be16toh(x): return (x) def 
le16toh(x): return __bswap_16 (x) def htobe32(x): return (x) def htole32(x): return __bswap_32 (x) def be32toh(x): return (x) def le32toh(x): return __bswap_32 (x) def htobe64(x): return (x) def htole64(x): return __bswap_64 (x) def be64toh(x): return (x) def le64toh(x): return __bswap_64 (x) # Included from sys/select.h _SYS_SELECT_H = 1 # Included from bits/select.h __FD_ZERO_STOS = "stosq" __FD_ZERO_STOS = "stosl" def __FD_ZERO(fdsp): return \ def __FD_ZERO(set): return \ # Included from bits/sigset.h _SIGSET_H_types = 1 _SIGSET_H_fns = 1 def __sigmask(sig): return \ def __sigemptyset(set): return \ def __sigfillset(set): return \ def __sigisemptyset(set): return \ def __FD_ELT(d): return ((d) / __NFDBITS) FD_SETSIZE = __FD_SETSIZE def FD_ZERO(fdsetp): return __FD_ZERO (fdsetp) # Included from bits/select2.h def __FD_ELT(d): return \ # Included from sys/sysmacros.h _SYS_SYSMACROS_H = 1 def major(dev): return gnu_dev_major (dev) def minor(dev): return gnu_dev_minor (dev) # Included from bits/pthreadtypes.h _BITS_PTHREADTYPES_H = 1 __SIZEOF_PTHREAD_ATTR_T = 56 __SIZEOF_PTHREAD_MUTEX_T = 40 __SIZEOF_PTHREAD_MUTEXATTR_T = 4 __SIZEOF_PTHREAD_COND_T = 48 __SIZEOF_PTHREAD_CONDATTR_T = 4 __SIZEOF_PTHREAD_RWLOCK_T = 56 __SIZEOF_PTHREAD_RWLOCKATTR_T = 8 __SIZEOF_PTHREAD_BARRIER_T = 32 __SIZEOF_PTHREAD_BARRIERATTR_T = 4 __SIZEOF_PTHREAD_ATTR_T = 36 __SIZEOF_PTHREAD_MUTEX_T = 24 __SIZEOF_PTHREAD_MUTEXATTR_T = 4 __SIZEOF_PTHREAD_COND_T = 48 __SIZEOF_PTHREAD_CONDATTR_T = 4 __SIZEOF_PTHREAD_RWLOCK_T = 32 __SIZEOF_PTHREAD_RWLOCKATTR_T = 8 __SIZEOF_PTHREAD_BARRIER_T = 20 __SIZEOF_PTHREAD_BARRIERATTR_T = 4 __PTHREAD_MUTEX_HAVE_PREV = 1 # Included from bits/uio.h _BITS_UIO_H = 1 UIO_MAXIOV = 1024 _BITS_UIO_H_FOR_SYS_UIO_H = 1 # Included from bits/socket.h PF_UNSPEC = 0 PF_LOCAL = 1 PF_UNIX = PF_LOCAL PF_FILE = PF_LOCAL PF_INET = 2 PF_AX25 = 3 PF_IPX = 4 PF_APPLETALK = 5 PF_NETROM = 6 PF_BRIDGE = 7 PF_ATMPVC = 8 PF_X25 = 9 PF_INET6 = 10 PF_ROSE = 11 PF_DECnet = 12 PF_NETBEUI = 13 
PF_SECURITY = 14 PF_KEY = 15 PF_NETLINK = 16 PF_ROUTE = PF_NETLINK PF_PACKET = 17 PF_ASH = 18 PF_ECONET = 19 PF_ATMSVC = 20 PF_RDS = 21 PF_SNA = 22 PF_IRDA = 23 PF_PPPOX = 24 PF_WANPIPE = 25 PF_LLC = 26 PF_CAN = 29 PF_TIPC = 30 PF_BLUETOOTH = 31 PF_IUCV = 32 PF_RXRPC = 33 PF_ISDN = 34 PF_PHONET = 35 PF_IEEE802154 = 36 PF_CAIF = 37 PF_ALG = 38 PF_NFC = 39 PF_MAX = 40 AF_UNSPEC = PF_UNSPEC AF_LOCAL = PF_LOCAL AF_UNIX = PF_UNIX AF_FILE = PF_FILE AF_INET = PF_INET AF_AX25 = PF_AX25 AF_IPX = PF_IPX AF_APPLETALK = PF_APPLETALK AF_NETROM = PF_NETROM AF_BRIDGE = PF_BRIDGE AF_ATMPVC = PF_ATMPVC AF_X25 = PF_X25 AF_INET6 = PF_INET6 AF_ROSE = PF_ROSE AF_DECnet = PF_DECnet AF_NETBEUI = PF_NETBEUI AF_SECURITY = PF_SECURITY AF_KEY = PF_KEY AF_NETLINK = PF_NETLINK AF_ROUTE = PF_ROUTE AF_PACKET = PF_PACKET AF_ASH = PF_ASH AF_ECONET = PF_ECONET AF_ATMSVC = PF_ATMSVC AF_RDS = PF_RDS AF_SNA = PF_SNA AF_IRDA = PF_IRDA AF_PPPOX = PF_PPPOX AF_WANPIPE = PF_WANPIPE AF_LLC = PF_LLC AF_CAN = PF_CAN AF_TIPC = PF_TIPC AF_BLUETOOTH = PF_BLUETOOTH AF_IUCV = PF_IUCV AF_RXRPC = PF_RXRPC AF_ISDN = PF_ISDN AF_PHONET = PF_PHONET AF_IEEE802154 = PF_IEEE802154 AF_CAIF = PF_CAIF AF_ALG = PF_ALG AF_NFC = PF_NFC AF_MAX = PF_MAX SOL_RAW = 255 SOL_DECNET = 261 SOL_X25 = 262 SOL_PACKET = 263 SOL_ATM = 264 SOL_AAL = 265 SOL_IRDA = 266 SOMAXCONN = 128 # Included from bits/sockaddr.h _BITS_SOCKADDR_H = 1 def __SOCKADDR_COMMON(sa_prefix): return \ _SS_SIZE = 128 def CMSG_FIRSTHDR(mhdr): return \ # Included from asm/socket.h # Included from bits/socket2.h def IN_CLASSA(a): return ((((in_addr_t)(a)) & (-2147483648)) == 0) IN_CLASSA_NET = (-16777216) IN_CLASSA_NSHIFT = 24 IN_CLASSA_HOST = ((-1) & ~IN_CLASSA_NET) IN_CLASSA_MAX = 128 def IN_CLASSB(a): return ((((in_addr_t)(a)) & (-1073741824)) == (-2147483648)) IN_CLASSB_NET = (-65536) IN_CLASSB_NSHIFT = 16 IN_CLASSB_HOST = ((-1) & ~IN_CLASSB_NET) IN_CLASSB_MAX = 65536 def IN_CLASSC(a): return ((((in_addr_t)(a)) & (-536870912)) == (-1073741824)) IN_CLASSC_NET = 
(-256) IN_CLASSC_NSHIFT = 8 IN_CLASSC_HOST = ((-1) & ~IN_CLASSC_NET) def IN_CLASSD(a): return ((((in_addr_t)(a)) & (-268435456)) == (-536870912)) def IN_MULTICAST(a): return IN_CLASSD(a) def IN_EXPERIMENTAL(a): return ((((in_addr_t)(a)) & (-536870912)) == (-536870912)) def IN_BADCLASS(a): return ((((in_addr_t)(a)) & (-268435456)) == (-268435456)) IN_LOOPBACKNET = 127 INET_ADDRSTRLEN = 16 INET6_ADDRSTRLEN = 46 # Included from bits/in.h IP_OPTIONS = 4 IP_HDRINCL = 3 IP_TOS = 1 IP_TTL = 2 IP_RECVOPTS = 6 IP_RETOPTS = 7 IP_MULTICAST_IF = 32 IP_MULTICAST_TTL = 33 IP_MULTICAST_LOOP = 34 IP_ADD_MEMBERSHIP = 35 IP_DROP_MEMBERSHIP = 36 IP_UNBLOCK_SOURCE = 37 IP_BLOCK_SOURCE = 38 IP_ADD_SOURCE_MEMBERSHIP = 39 IP_DROP_SOURCE_MEMBERSHIP = 40 IP_MSFILTER = 41 MCAST_JOIN_GROUP = 42 MCAST_BLOCK_SOURCE = 43 MCAST_UNBLOCK_SOURCE = 44 MCAST_LEAVE_GROUP = 45 MCAST_JOIN_SOURCE_GROUP = 46 MCAST_LEAVE_SOURCE_GROUP = 47 MCAST_MSFILTER = 48 MCAST_EXCLUDE = 0 MCAST_INCLUDE = 1 IP_ROUTER_ALERT = 5 IP_PKTINFO = 8 IP_PKTOPTIONS = 9 IP_PMTUDISC = 10 IP_MTU_DISCOVER = 10 IP_RECVERR = 11 IP_RECVTTL = 12 IP_RECVTOS = 13 IP_MTU = 14 IP_FREEBIND = 15 IP_IPSEC_POLICY = 16 IP_XFRM_POLICY = 17 IP_PASSSEC = 18 IP_TRANSPARENT = 19 IP_MULTICAST_ALL = 49 IP_ORIGDSTADDR = 20 IP_RECVORIGDSTADDR = IP_ORIGDSTADDR IP_MINTTL = 21 IP_PMTUDISC_DONT = 0 IP_PMTUDISC_WANT = 1 IP_PMTUDISC_DO = 2 IP_PMTUDISC_PROBE = 3 SOL_IP = 0 IP_DEFAULT_MULTICAST_TTL = 1 IP_DEFAULT_MULTICAST_LOOP = 1 IP_MAX_MEMBERSHIPS = 20 IPV6_ADDRFORM = 1 IPV6_2292PKTINFO = 2 IPV6_2292HOPOPTS = 3 IPV6_2292DSTOPTS = 4 IPV6_2292RTHDR = 5 IPV6_2292PKTOPTIONS = 6 IPV6_CHECKSUM = 7 IPV6_2292HOPLIMIT = 8 IPV6_NEXTHOP = 9 IPV6_AUTHHDR = 10 IPV6_UNICAST_HOPS = 16 IPV6_MULTICAST_IF = 17 IPV6_MULTICAST_HOPS = 18 IPV6_MULTICAST_LOOP = 19 IPV6_JOIN_GROUP = 20 IPV6_LEAVE_GROUP = 21 IPV6_ROUTER_ALERT = 22 IPV6_MTU_DISCOVER = 23 IPV6_MTU = 24 IPV6_RECVERR = 25 IPV6_V6ONLY = 26 IPV6_JOIN_ANYCAST = 27 IPV6_LEAVE_ANYCAST = 28 IPV6_IPSEC_POLICY = 34 
IPV6_XFRM_POLICY = 35 IPV6_RECVPKTINFO = 49 IPV6_PKTINFO = 50 IPV6_RECVHOPLIMIT = 51 IPV6_HOPLIMIT = 52 IPV6_RECVHOPOPTS = 53 IPV6_HOPOPTS = 54 IPV6_RTHDRDSTOPTS = 55 IPV6_RECVRTHDR = 56 IPV6_RTHDR = 57 IPV6_RECVDSTOPTS = 58 IPV6_DSTOPTS = 59 IPV6_RECVTCLASS = 66 IPV6_TCLASS = 67 IPV6_ADD_MEMBERSHIP = IPV6_JOIN_GROUP IPV6_DROP_MEMBERSHIP = IPV6_LEAVE_GROUP IPV6_RXHOPOPTS = IPV6_HOPOPTS IPV6_RXDSTOPTS = IPV6_DSTOPTS IPV6_PMTUDISC_DONT = 0 IPV6_PMTUDISC_WANT = 1 IPV6_PMTUDISC_DO = 2 IPV6_PMTUDISC_PROBE = 3 SOL_IPV6 = 41 SOL_ICMPV6 = 58 IPV6_RTHDR_LOOSE = 0 IPV6_RTHDR_STRICT = 1 IPV6_RTHDR_TYPE_0 = 0 def ntohl(x): return (x) def ntohs(x): return (x) def htonl(x): return (x) def htons(x): return (x) def ntohl(x): return __bswap_32 (x) def ntohs(x): return __bswap_16 (x) def htonl(x): return __bswap_32 (x) def htons(x): return __bswap_16 (x) def IN6_IS_ADDR_UNSPECIFIED(a): return \ def IN6_IS_ADDR_LOOPBACK(a): return \ def IN6_IS_ADDR_LINKLOCAL(a): return \ def IN6_IS_ADDR_SITELOCAL(a): return \ def IN6_IS_ADDR_V4MAPPED(a): return \ def IN6_IS_ADDR_V4COMPAT(a): return \ def IN6_IS_ADDR_UNSPECIFIED(a): return \ def IN6_IS_ADDR_LOOPBACK(a): return \ def IN6_IS_ADDR_LINKLOCAL(a): return \ def IN6_IS_ADDR_SITELOCAL(a): return \ def IN6_IS_ADDR_V4MAPPED(a): return \ def IN6_IS_ADDR_V4COMPAT(a): return \ def IN6_IS_ADDR_MC_NODELOCAL(a): return \ def IN6_IS_ADDR_MC_LINKLOCAL(a): return \ def IN6_IS_ADDR_MC_SITELOCAL(a): return \ def IN6_IS_ADDR_MC_ORGLOCAL(a): return \ def IN6_IS_ADDR_MC_GLOBAL(a): return \
mbareta/edx-platform-ft
refs/heads/open-release/eucalyptus.master
lms/djangoapps/badges/api/views.py
17
""" API views for badges """ from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey from rest_framework import generics from rest_framework.exceptions import APIException from openedx.core.djangoapps.user_api.permissions import is_field_shared_factory from openedx.core.lib.api.authentication import ( OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser ) from xmodule_django.models import CourseKeyField from badges.models import BadgeAssertion from .serializers import BadgeAssertionSerializer class InvalidCourseKeyError(APIException): """ Raised the course key given isn't valid. """ status_code = 400 default_detail = "The course key provided was invalid." class UserBadgeAssertions(generics.ListAPIView): """ ** Use cases ** Request a list of assertions for a user, optionally constrained to a course. ** Example Requests ** GET /api/badges/v1/assertions/user/{username}/ ** Response Values ** Body comprised of a list of objects with the following fields: * badge_class: The badge class the assertion was awarded for. Represented as an object with the following fields: * slug: The identifier for the badge class * issuing_component: The software component responsible for issuing this badge. * display_name: The display name of the badge. * course_id: The course key of the course this badge is scoped to, or null if it isn't scoped to a course. * description: A description of the award and its significance. * criteria: A description of what is needed to obtain this award. * image_url: A URL to the icon image used to represent this award. * image_url: The baked assertion image derived from the badge_class icon-- contains metadata about the award in its headers. * assertion_url: The URL to the OpenBadges BadgeAssertion object, for verification by compatible tools and software. ** Params ** * slug (optional): The identifier for a particular badge class to filter by. 
* issuing_component (optional): The issuing component for a particular badge class to filter by (requires slug to have been specified, or this will be ignored.) If slug is provided and this is not, assumes the issuing_component should be empty. * course_id (optional): Returns assertions that were awarded as part of a particular course. If slug is provided, and this field is not specified, assumes that the target badge has an empty course_id field. '*' may be used to get all badges with the specified slug, issuing_component combination across all courses. ** Returns ** * 200 on success, with a list of Badge Assertion objects. * 403 if a user who does not have permission to masquerade as another user specifies a username other than their own. * 404 if the specified user does not exist { "count": 7, "previous": null, "num_pages": 1, "results": [ { "badge_class": { "slug": "special_award", "issuing_component": "openedx__course", "display_name": "Very Special Award", "course_id": "course-v1:edX+DemoX+Demo_Course", "description": "Awarded for people who did something incredibly special", "criteria": "Do something incredibly special.", "image": "http://example.com/media/badge_classes/badges/special_xdpqpBv_9FYOZwN.png" }, "image_url": "http://badges.example.com/media/issued/cd75b69fc1c979fcc1697c8403da2bdf.png", "assertion_url": "http://badges.example.com/public/assertions/07020647-e772-44dd-98b7-d13d34335ca6" }, ... ] } """ serializer_class = BadgeAssertionSerializer authentication_classes = ( OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser ) permission_classes = (is_field_shared_factory("accomplishments_shared"),) def filter_queryset(self, queryset): """ Return most recent to least recent badge. """ return queryset.order_by('-created') def get_queryset(self): """ Get all badges for the username specified. 
""" queryset = BadgeAssertion.objects.filter(user__username=self.kwargs['username']) provided_course_id = self.request.query_params.get('course_id') if provided_course_id == '*': # We might want to get all the matching course scoped badges to see how many courses # a user managed to get a specific award on. course_id = None elif provided_course_id: try: course_id = CourseKey.from_string(provided_course_id) except InvalidKeyError: raise InvalidCourseKeyError elif 'slug' not in self.request.query_params: # Need to get all badges for the user. course_id = None else: # Django won't let us use 'None' for querying a ForeignKey field. We have to use this special # 'Empty' value to indicate we're looking only for badges without a course key set. course_id = CourseKeyField.Empty if course_id is not None: queryset = queryset.filter(badge_class__course_id=course_id) if self.request.query_params.get('slug'): queryset = queryset.filter( badge_class__slug=self.request.query_params['slug'], badge_class__issuing_component=self.request.query_params.get('issuing_component', '') ) return queryset
dkubiak789/odoo
refs/heads/8.0
openerp/addons/base/tests/test_api.py
182
from openerp import models from openerp.tools import mute_logger from openerp.osv.orm import except_orm from openerp.tests import common class TestAPI(common.TransactionCase): """ test the new API of the ORM """ def assertIsRecordset(self, value, model): self.assertIsInstance(value, models.BaseModel) self.assertEqual(value._name, model) def assertIsRecord(self, value, model): self.assertIsRecordset(value, model) self.assertTrue(len(value) <= 1) def assertIsNull(self, value, model): self.assertIsRecordset(value, model) self.assertFalse(value) @mute_logger('openerp.models') def test_00_query(self): """ Build a recordset, and check its contents. """ domain = [('name', 'ilike', 'j')] ids = self.registry('res.partner').search(self.cr, self.uid, domain) partners = self.env['res.partner'].search(domain) # partners is a collection of browse records corresponding to ids self.assertTrue(ids) self.assertTrue(partners) # partners and its contents are instance of the model self.assertIsRecordset(partners, 'res.partner') for p in partners: self.assertIsRecord(p, 'res.partner') self.assertEqual([p.id for p in partners], ids) self.assertEqual(self.env['res.partner'].browse(ids), partners) @mute_logger('openerp.models') def test_01_query_offset(self): """ Build a recordset with offset, and check equivalence. """ partners1 = self.env['res.partner'].search([], offset=10) partners2 = self.env['res.partner'].search([])[10:] self.assertIsRecordset(partners1, 'res.partner') self.assertIsRecordset(partners2, 'res.partner') self.assertEqual(list(partners1), list(partners2)) @mute_logger('openerp.models') def test_02_query_limit(self): """ Build a recordset with offset, and check equivalence. 
""" partners1 = self.env['res.partner'].search([], limit=10) partners2 = self.env['res.partner'].search([])[:10] self.assertIsRecordset(partners1, 'res.partner') self.assertIsRecordset(partners2, 'res.partner') self.assertEqual(list(partners1), list(partners2)) @mute_logger('openerp.models') def test_03_query_offset_limit(self): """ Build a recordset with offset and limit, and check equivalence. """ partners1 = self.env['res.partner'].search([], offset=3, limit=7) partners2 = self.env['res.partner'].search([])[3:10] self.assertIsRecordset(partners1, 'res.partner') self.assertIsRecordset(partners2, 'res.partner') self.assertEqual(list(partners1), list(partners2)) @mute_logger('openerp.models') def test_05_immutable(self): """ Check that a recordset remains the same, even after updates. """ domain = [('name', 'ilike', 'j')] partners = self.env['res.partner'].search(domain) self.assertTrue(partners) ids = map(int, partners) # modify those partners, and check that partners has not changed self.registry('res.partner').write(self.cr, self.uid, ids, {'active': False}) self.assertEqual(ids, map(int, partners)) # redo the search, and check that the result is now empty partners2 = self.env['res.partner'].search(domain) self.assertFalse(partners2) @mute_logger('openerp.models') def test_06_fields(self): """ Check that relation fields return records, recordsets or nulls. 
""" user = self.registry('res.users').browse(self.cr, self.uid, self.uid) self.assertIsRecord(user, 'res.users') self.assertIsRecord(user.partner_id, 'res.partner') self.assertIsRecordset(user.groups_id, 'res.groups') partners = self.env['res.partner'].search([]) for name, field in partners._fields.iteritems(): if field.type == 'many2one': for p in partners: self.assertIsRecord(p[name], field.comodel_name) elif field.type == 'reference': for p in partners: if p[name]: self.assertIsRecord(p[name], field.comodel_name) elif field.type in ('one2many', 'many2many'): for p in partners: self.assertIsRecordset(p[name], field.comodel_name) @mute_logger('openerp.models') def test_07_null(self): """ Check behavior of null instances. """ # select a partner without a parent partner = self.env['res.partner'].search([('parent_id', '=', False)])[0] # check partner and related null instances self.assertTrue(partner) self.assertIsRecord(partner, 'res.partner') self.assertFalse(partner.parent_id) self.assertIsNull(partner.parent_id, 'res.partner') self.assertIs(partner.parent_id.id, False) self.assertFalse(partner.parent_id.user_id) self.assertIsNull(partner.parent_id.user_id, 'res.users') self.assertIs(partner.parent_id.user_id.name, False) self.assertFalse(partner.parent_id.user_id.groups_id) self.assertIsRecordset(partner.parent_id.user_id.groups_id, 'res.groups') @mute_logger('openerp.models') def test_10_old_old(self): """ Call old-style methods in the old-fashioned way. """ partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertTrue(partners) ids = map(int, partners) # call method name_get on partners' model, and check its effect res = partners._model.name_get(self.cr, self.uid, ids) self.assertEqual(len(res), len(ids)) self.assertEqual(set(val[0] for val in res), set(ids)) @mute_logger('openerp.models') def test_20_old_new(self): """ Call old-style methods in the new API style. 
""" partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertTrue(partners) # call method name_get on partners itself, and check its effect res = partners.name_get() self.assertEqual(len(res), len(partners)) self.assertEqual(set(val[0] for val in res), set(map(int, partners))) @mute_logger('openerp.models') def test_25_old_new(self): """ Call old-style methods on records (new API style). """ partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertTrue(partners) # call method name_get on partner records, and check its effect for p in partners: res = p.name_get() self.assertTrue(isinstance(res, list) and len(res) == 1) self.assertTrue(isinstance(res[0], tuple) and len(res[0]) == 2) self.assertEqual(res[0][0], p.id) @mute_logger('openerp.models') def test_30_new_old(self): """ Call new-style methods in the old-fashioned way. """ partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertTrue(partners) ids = map(int, partners) # call method write on partners' model, and check its effect partners._model.write(self.cr, self.uid, ids, {'active': False}) for p in partners: self.assertFalse(p.active) @mute_logger('openerp.models') def test_40_new_new(self): """ Call new-style methods in the new API style. """ partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertTrue(partners) # call method write on partners itself, and check its effect partners.write({'active': False}) for p in partners: self.assertFalse(p.active) @mute_logger('openerp.models') def test_45_new_new(self): """ Call new-style methods on records (new API style). 
""" partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertTrue(partners) # call method write on partner records, and check its effects for p in partners: p.write({'active': False}) for p in partners: self.assertFalse(p.active) @mute_logger('openerp.models') @mute_logger('openerp.addons.base.ir.ir_model') def test_50_environment(self): """ Test environment on records. """ # partners and reachable records are attached to self.env partners = self.env['res.partner'].search([('name', 'ilike', 'j')]) self.assertEqual(partners.env, self.env) for x in (partners, partners[0], partners[0].company_id): self.assertEqual(x.env, self.env) for p in partners: self.assertEqual(p.env, self.env) # check that the current user can read and modify company data partners[0].company_id.name partners[0].company_id.write({'name': 'Fools'}) # create an environment with the demo user demo = self.env['res.users'].search([('login', '=', 'demo')])[0] demo_env = self.env(user=demo) self.assertNotEqual(demo_env, self.env) # partners and related records are still attached to self.env self.assertEqual(partners.env, self.env) for x in (partners, partners[0], partners[0].company_id): self.assertEqual(x.env, self.env) for p in partners: self.assertEqual(p.env, self.env) # create record instances attached to demo_env demo_partners = partners.sudo(demo) self.assertEqual(demo_partners.env, demo_env) for x in (demo_partners, demo_partners[0], demo_partners[0].company_id): self.assertEqual(x.env, demo_env) for p in demo_partners: self.assertEqual(p.env, demo_env) # demo user can read but not modify company data demo_partners[0].company_id.name with self.assertRaises(except_orm): demo_partners[0].company_id.write({'name': 'Pricks'}) # remove demo user from all groups demo.write({'groups_id': [(5,)]}) # demo user can no longer access partner data with self.assertRaises(except_orm): demo_partners[0].company_id.name @mute_logger('openerp.models') def test_55_draft(self): """ Test draft 
mode nesting. """ env = self.env self.assertFalse(env.in_draft) with env.do_in_draft(): self.assertTrue(env.in_draft) with env.do_in_draft(): self.assertTrue(env.in_draft) with env.do_in_draft(): self.assertTrue(env.in_draft) self.assertTrue(env.in_draft) self.assertTrue(env.in_draft) self.assertFalse(env.in_draft) @mute_logger('openerp.models') def test_60_cache(self): """ Check the record cache behavior """ partners = self.env['res.partner'].search([('child_ids', '!=', False)]) partner1, partner2 = partners[0], partners[1] children1, children2 = partner1.child_ids, partner2.child_ids self.assertTrue(children1) self.assertTrue(children2) # take a child contact child = children1[0] self.assertEqual(child.parent_id, partner1) self.assertIn(child, partner1.child_ids) self.assertNotIn(child, partner2.child_ids) # fetch data in the cache for p in partners: p.name, p.company_id.name, p.user_id.name, p.contact_address self.env.check_cache() # change its parent child.write({'parent_id': partner2.id}) self.env.check_cache() # check recordsets self.assertEqual(child.parent_id, partner2) self.assertNotIn(child, partner1.child_ids) self.assertIn(child, partner2.child_ids) self.assertEqual(set(partner1.child_ids + child), set(children1)) self.assertEqual(set(partner2.child_ids), set(children2 + child)) self.env.check_cache() # delete it child.unlink() self.env.check_cache() # check recordsets self.assertEqual(set(partner1.child_ids), set(children1) - set([child])) self.assertEqual(set(partner2.child_ids), set(children2)) self.env.check_cache() @mute_logger('openerp.models') def test_60_cache_prefetching(self): """ Check the record cache prefetching """ self.env.invalidate_all() # all the records of an instance already have an entry in cache partners = self.env['res.partner'].search([]) partner_ids = self.env.prefetch['res.partner'] self.assertEqual(set(partners.ids), set(partner_ids)) # countries have not been fetched yet; their cache must be empty countries = 
self.env['res.country'].browse() self.assertFalse(self.env.prefetch['res.country']) # reading ONE partner should fetch them ALL countries |= partners[0].country_id country_cache = self.env.cache[partners._fields['country_id']] self.assertLessEqual(set(partners._ids), set(country_cache)) # read all partners, and check that the cache already contained them country_ids = list(self.env.prefetch['res.country']) for p in partners: countries |= p.country_id self.assertLessEqual(set(countries.ids), set(country_ids)) @mute_logger('openerp.models') def test_70_one(self): """ Check method one(). """ # check with many records ps = self.env['res.partner'].search([('name', 'ilike', 'a')]) self.assertTrue(len(ps) > 1) with self.assertRaises(except_orm): ps.ensure_one() p1 = ps[0] self.assertEqual(len(p1), 1) self.assertEqual(p1.ensure_one(), p1) p0 = self.env['res.partner'].browse() self.assertEqual(len(p0), 0) with self.assertRaises(except_orm): p0.ensure_one() @mute_logger('openerp.models') def test_80_contains(self): """ Test membership on recordset. """ p1 = self.env['res.partner'].search([('name', 'ilike', 'a')], limit=1).ensure_one() ps = self.env['res.partner'].search([('name', 'ilike', 'a')]) self.assertTrue(p1 in ps) @mute_logger('openerp.models') def test_80_set_operations(self): """ Check set operations on recordsets. 
""" pa = self.env['res.partner'].search([('name', 'ilike', 'a')]) pb = self.env['res.partner'].search([('name', 'ilike', 'b')]) self.assertTrue(pa) self.assertTrue(pb) self.assertTrue(set(pa) & set(pb)) concat = pa + pb self.assertEqual(list(concat), list(pa) + list(pb)) self.assertEqual(len(concat), len(pa) + len(pb)) difference = pa - pb self.assertEqual(len(difference), len(set(difference))) self.assertEqual(set(difference), set(pa) - set(pb)) self.assertLessEqual(difference, pa) intersection = pa & pb self.assertEqual(len(intersection), len(set(intersection))) self.assertEqual(set(intersection), set(pa) & set(pb)) self.assertLessEqual(intersection, pa) self.assertLessEqual(intersection, pb) union = pa | pb self.assertEqual(len(union), len(set(union))) self.assertEqual(set(union), set(pa) | set(pb)) self.assertGreaterEqual(union, pa) self.assertGreaterEqual(union, pb) # one cannot mix different models with set operations ps = pa ms = self.env['ir.ui.menu'].search([]) self.assertNotEqual(ps._name, ms._name) self.assertNotEqual(ps, ms) with self.assertRaises(except_orm): res = ps + ms with self.assertRaises(except_orm): res = ps - ms with self.assertRaises(except_orm): res = ps & ms with self.assertRaises(except_orm): res = ps | ms with self.assertRaises(except_orm): res = ps < ms with self.assertRaises(except_orm): res = ps <= ms with self.assertRaises(except_orm): res = ps > ms with self.assertRaises(except_orm): res = ps >= ms @mute_logger('openerp.models') def test_80_filter(self): """ Check filter on recordsets. 
""" ps = self.env['res.partner'].search([]) customers = ps.browse([p.id for p in ps if p.customer]) # filter on a single field self.assertEqual(ps.filtered(lambda p: p.customer), customers) self.assertEqual(ps.filtered('customer'), customers) # filter on a sequence of fields self.assertEqual( ps.filtered(lambda p: p.parent_id.customer), ps.filtered('parent_id.customer') ) @mute_logger('openerp.models') def test_80_map(self): """ Check map on recordsets. """ ps = self.env['res.partner'].search([]) parents = ps.browse() for p in ps: parents |= p.parent_id # map a single field self.assertEqual(ps.mapped(lambda p: p.parent_id), parents) self.assertEqual(ps.mapped('parent_id'), parents) # map a sequence of fields self.assertEqual( ps.mapped(lambda p: p.parent_id.name), [p.parent_id.name for p in ps] ) self.assertEqual( ps.mapped('parent_id.name'), [p.name for p in parents] )
snakeleon/YouCompleteMe-x64
refs/heads/master
third_party/ycmd/ycmd/completers/language_server/generic_lsp_completer.py
3
# Copyright (C) 2020 ycmd contributors # # This file is part of ycmd. # # ycmd is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ycmd is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ycmd. If not, see <http://www.gnu.org/licenses/>. import string from ycmd import responses, utils from ycmd.completers.language_server import language_server_completer class GenericLSPCompleter( language_server_completer.LanguageServerCompleter ): def __init__( self, user_options, server_settings ): self._name = server_settings[ 'name' ] self._supported_filetypes = server_settings[ 'filetypes' ] self._project_root_files = server_settings.get( 'project_root_files', [] ) self._capabilities = server_settings.get( 'capabilities', {} ) self._command_line = server_settings.get( 'cmdline' ) self._port = server_settings.get( 'port' ) if self._port: connection_type = 'tcp' if self._port == '*': self._port = utils.GetUnusedLocalhostPort() else: connection_type = 'stdio' if self._command_line: self._command_line[ 0 ] = utils.FindExecutable( self._command_line[ 0 ] ) for idx in range( len( self._command_line ) ): self._command_line[ idx ] = string.Template( self._command_line[ idx ] ).safe_substitute( { 'port': self._port } ) super().__init__( user_options, connection_type ) def GetProjectRootFiles( self ): return self._project_root_files def Language( self ): return self._name def GetServerName( self ): return self._name + 'Completer' def GetCommandLine( self ): return self._command_line def GetCustomSubcommands( self ): return { 'GetHover': lambda self, 
request_data, args: self._GetHover( request_data ) } def _GetHover( self, request_data ): raw_hover = self.GetHoverResponse( request_data ) if isinstance( raw_hover, dict ): # Both MarkedString and MarkupContent contain 'value' key. # MarkupContent is the only one not deprecated. return responses.BuildDetailedInfoResponse( raw_hover[ 'value' ] ) if isinstance( raw_hover, str ): # MarkedString might be just a string. return responses.BuildDetailedInfoResponse( raw_hover ) # If we got this far, this is a list of MarkedString objects. lines = [] for marked_string in raw_hover: if isinstance( marked_string, str ): lines.append( marked_string ) else: lines.append( marked_string[ 'value' ] ) return responses.BuildDetailedInfoResponse( '\n'.join( lines ) ) def GetCodepointForCompletionRequest( self, request_data ): if request_data[ 'force_semantic' ]: return request_data[ 'column_codepoint' ] return super().GetCodepointForCompletionRequest( request_data ) def SupportedFiletypes( self ): return self._supported_filetypes def ExtraCapabilities( self ): return self._capabilities def WorkspaceConfigurationResponse( self, request ): if self._capabilities.get( 'workspace', {} ).get( 'configuration' ): sections_to_config_map = self._settings.get( 'config_sections', {} ) return [ sections_to_config_map.get( item.get( 'section', '' ) ) for item in request[ 'params' ][ 'items' ] ]
PabloPiaggi/plumed2
refs/heads/master
user-doc/tutorials/lugano-2/wham.py
6
import math import sys # arguments read from command line # name of input file FILENAME_ = sys.argv[1] # number of BIAS NBIAS_ = int(sys.argv[2]) # temperature KBT_ = float(sys.argv[3]) # default parameters for WHAM # number of WHAM iterations NWHAM_ = 10000 # convergence thresold THRES_ = 1.0e-10 def get_wham_weights(nbias, nframes, bias_ts, nwham=NWHAM_, thres=THRES_): # find minimum bias min_bias = min(bias_ts) # initialize weights w = [] for i in range(0, nframes): w.append(1.0) # offset and exponential of the bias expv = [] for i in range(0, len(bias_ts)): expv.append(math.exp((-bias_ts[i]+min_bias)/KBT_)) # initialize Z Z = [] for j in range(0, nbias): Z.append(1.0) # WHAM iterations for iii in range(0, nwham): # store Z Z_old = Z[:] # recompute weights norm = 0.0 for i in range(0, len(w)): ew = 0.0 for j in range(0, len(Z)): ew += expv[i*len(Z)+j] / Z[j] w[i] = 1.0 / ew norm += w[i] # normalize weights for i in range(0, len(w)): w[i] /= norm # recompute Z for j in range(0, len(Z)): Z[j] = 0.0 for i in range(0, len(w)): for j in range(0, len(Z)): Z[j] += w[i]*expv[i*len(Z)+j] # normalize Z norm = sum(Z) for j in range(0, len(Z)): Z[j] /= norm # compute change in Z eps = 0.0 for j in range(0, len(Z)): d = math.log(Z[j]/Z_old[j]) eps += d*d # check convergence if(eps<thres): break # return weights return w # read FILENAME_ bias_ts=[] for lines in open(FILENAME_, "r").readlines(): riga=lines.strip().split() # skip comment lines if(riga[0]=="#!"): continue # read bias values # umbrella-sampling typical format if(len(riga) == NBIAS_+1): i0 = 1 i1 = NBIAS_+1 # bias exchange typical format elif(len(riga) == 2*NBIAS_+1): i0 = NBIAS_+1 i1 = 2*NBIAS_+1 # unknown format else: print(FILENAME_,"format is unknown!") exit() for i in range(i0, i1): bias_ts.append(float(riga[i])) # number of frames nframes = len(bias_ts) / NBIAS_ # printout print("Number of frames::", nframes) print("Number of bias entries::", len(bias_ts)) # get wham weights ws = get_wham_weights(NBIAS_, 
int(nframes), bias_ts) # printout WHAM weights to file print("Weights have been written to weights.dat") log = open("weights.dat", "w") for i in range(0, len(ws)): log.write("%32.30lf\n" % ws[i]) log.close()
frnhr/django-cms
refs/heads/develop
cms/extensions/toolbar.py
43
# -*- coding: utf-8 -*- from cms.utils.urlutils import admin_reverse from cms.api import get_page_draft from cms.toolbar_base import CMSToolbar from cms.utils import get_cms_setting from cms.utils.permissions import has_page_change_permission from django.core.urlresolvers import NoReverseMatch class ExtensionToolbar(CMSToolbar): """ ExtensionToolbar provides utility functions to handle much of the boilerplate involved in creating a toolbar for PageExtension and TitleExtension. The basic implementation of an extension toolbar using this class is:: @toolbar_pool.register class SampleExtension(ExtensionToolbar): model = ExtModel # The PageExtension / TitleExtension you are working with def populate(self): current_page_menu = self._setup_extension_toolbar() if current_page_menu: position = 0 page_extension, url = self.get_page_extension_admin() if url: current_page_menu.add_modal_item('Item label', url=url, disabled=not self.toolbar.edit_mode, position=position) For TitleExtension use ``get_title_extension_admin`` and cycle on the resulting title extensions and urls @toolbar_pool.register class SampleExtension(ExtensionToolbar): model = ExtModel # The PageExtension / TitleExtension you are working with def populate(self): current_page_menu = self._setup_extension_toolbar() if current_page_menu: position = 0 urls = self.get_title_extension_admin() for title_extension, url in urls: current_page_menu.add_modal_item('Item label', url=url, disabled=not self.toolbar.edit_mode, position=position) """ model = None page = None def _setup_extension_toolbar(self): """ Does all the sanity check for the current environment: * that a page exists * permissions check on the current page It returns the page menu or None if the above conditions are not met """ page = self._get_page() if not page: # Nothing to do return # check global permissions if CMS_PERMISSION is active if get_cms_setting('PERMISSION'): has_global_current_page_change_permission = 
has_page_change_permission(self.request) else: has_global_current_page_change_permission = True # check if user has page edit permission can_change = (self.request.current_page and self.request.current_page.has_change_permission(self.request)) current_page_menu = self.toolbar.get_or_create_menu('page') if can_change and has_global_current_page_change_permission: return current_page_menu else: return def _get_page(self): """ A utility method that caches the current page and make sure to use the draft version of the page. """ # always use draft if we have a page if not self.page: self.page = get_page_draft(self.request.current_page) return self.page def get_page_extension_admin(self): """ Get the admin url for the page extension menu item, depending on whether a PageExtension instance exists for the current page or not. Return a tuple of the current extension and the url; the extension is None if no instance exists, the url is None is no admin is registered for the extension. """ page = self._get_page() # Page extension try: page_extension = self.model.objects.get(extended_object_id=page.pk) except self.model.DoesNotExist: page_extension = None try: model_name = self.model.__name__.lower() if page_extension: admin_url = admin_reverse( '%s_%s_change' % (self.model._meta.app_label, model_name), args=(page_extension.pk,)) else: admin_url = "%s?extended_object=%s" % ( admin_reverse('%s_%s_add' % (self.model._meta.app_label, model_name)), self.page.pk) except NoReverseMatch: # pragma: no cover admin_url = None return page_extension, admin_url def get_title_extension_admin(self, language=None): """ Get the admin urls for the title extensions menu items, depending on whether a TitleExtension instance exists for each Title in the current page. A single language can be passed to only work on a single title. Return a list of tuples of the title extension and the url; the extension is None if no instance exists, the url is None is no admin is registered for the extension. 
""" page = self._get_page() urls = [] if language: titles = page.get_title_obj(language), else: titles = page.title_set.all() # Titles for title in titles: try: title_extension = self.model.objects.get(extended_object_id=title.pk) except self.model.DoesNotExist: title_extension = None try: model_name = self.model.__name__.lower() if title_extension: admin_url = admin_reverse( '%s_%s_change' % (self.model._meta.app_label, model_name), args=(title_extension.pk,)) else: admin_url = "%s?extended_object=%s" % ( admin_reverse('%s_%s_add' % (self.model._meta.app_label, model_name)), title.pk) except NoReverseMatch: # pragma: no cover admin_url = None if admin_url: urls.append((title_extension, admin_url)) return urls def _get_sub_menu(self, current_menu, key, label, position=None): """ Utility function to get a submenu of the current menu """ extension_menu = current_menu.get_or_create_menu( key, label, position=position) return extension_menu
ap--/python-seabreeze
refs/heads/master
src/seabreeze/pyseabreeze/transport.py
1
"""Implementation of the Seabreeze Transport layer. Some spectrometers can support different transports (usb, network, rs232, etc.) """ import importlib import inspect import logging import warnings from functools import partial import usb.core import usb.util try: from functools import partialmethod except ImportError: # https://gist.github.com/carymrobbins/8940382 # noinspection PyPep8Naming class partialmethod(partial): def __get__(self, instance, owner): if instance is None: return self args, kwargs = self.args or (), self.keywords or {} return partial(self.func, instance, *args, **kwargs) class TransportInterface(object): _required_init_kwargs = () def open_device(self, device): """open a seabreeze device Parameters ---------- device : seabreeze.pyseabreeze.devices.SeaBreezeDevice """ raise NotImplementedError("implement in derived transport class") @property def is_open(self): """return if device is opened Returns ------- bool """ raise NotImplementedError("implement in derived transport class") def close_device(self): """close the seabreeze device""" raise NotImplementedError("implement in derived transport class") def write(self, data, timeout_ms=None, **kwargs): """write data to the device""" raise NotImplementedError("implement in derived transport class") def read(self, size=None, timeout_ms=None, **kwargs): """read data from the Returns ------- str """ raise NotImplementedError("implement in derived transport class") @property def default_timeout_ms(self): raise NotImplementedError("implement in derived transport class") @property def protocol(self): raise NotImplementedError("implement in derived transport class") @classmethod def list_devices(cls): raise NotImplementedError("implement in derived transport class") @classmethod def register_model(cls, model_name, **kwargs): raise NotImplementedError("implement in derived transport class") @classmethod def supported_model(cls, device): """return if the device supports the transport or vice versa Returns 
------- model_name : `` """ return None @classmethod def specialize(cls, model_name, **kwargs): raise NotImplementedError("implement in derived transport class") # encapsulate usb.core.USBError class USBTransportError(Exception): def __init__(self, *args, **kwargs): super(USBTransportError, self).__init__(*args) self.errno = kwargs.pop("errno", None) self.backend_error_code = kwargs.pop("error_code", None) if kwargs: raise TypeError("USBTransportError got unexpected kwarg") @classmethod def from_usberror(cls, err): return cls(str(err), errno=err.errno, error_code=err.backend_error_code) class USBTransportDeviceInUse(Exception): pass # this can and should be opaque to pyseabreeze class USBTransportHandle(object): def __init__(self, pyusb_device): """encapsulation for pyusb device classes Parameters ---------- pyusb_device : usb.core.Device """ self.pyusb_device = pyusb_device # noinspection PyUnresolvedReferences self.identity = ( pyusb_device.idVendor, pyusb_device.idProduct, pyusb_device.bus, pyusb_device.address, ) self.pyusb_backend = get_name_from_pyusb_backend(pyusb_device.backend) def close(self): try: self.pyusb_device.reset() except usb.core.USBError: logging.debug( "USBError while calling USBTransportHandle.close on {:04x}:{:04x}".format( self.identity[0], self.identity[1] ), exc_info=True, ) def __del__(self): if self.pyusb_backend == "libusb1": # have to check if .finalize() has been called # -> todo: maybe better to fix this in the api initialization of cseabreeze # -> todo: will probably have to check pyusb versions and only do this when necessary if not getattr(self.pyusb_device.backend, "_finalize_called", False): # if usb.core.Device.reset() gets called but the backend has been finalized already # (this happens only during interpreter shutdown) self.close() else: self.close() self.pyusb_device = None class USBTransport(TransportInterface): """implementation of the usb transport interface for spectrometers""" _required_init_kwargs = 
("usb_product_id", "usb_endpoint_map", "usb_protocol") vendor_id = 0x2457 product_ids = {} # add logging _log = logging.getLogger(__name__) def __init__(self, usb_product_id, usb_endpoint_map, usb_protocol): super(USBTransport, self).__init__() self._product_id = usb_product_id self._endpoint_map = usb_endpoint_map self._protocol_cls = usb_protocol # internal settings self._default_read_size = { "low_speed": 64, "high_speed": 512, "high_speed_alt": 512, } self._read_endpoints = { "low_speed": "lowspeed_in", "high_speed": "highspeed_in", "high_speed_alt": "highspeed_in2", } self._default_read_endpoint = "low_speed" self._default_read_spectrum_endpoint = "high_speed" # internal state self._device = None self._opened = None self._protocol = None def open_device(self, device): if not isinstance(device, USBTransportHandle): raise TypeError("device needs to be a USBTransportHandle") # device.reset() self._device = device pyusb_device = self._device.pyusb_device try: if pyusb_device.is_kernel_driver_active(0): pyusb_device.detach_kernel_driver(0) except NotImplementedError: pass # unavailable on some systems/backends try: pyusb_device.set_configuration() except usb.core.USBError as err: if err.errno == 16: # TODO: warn as in cseabreeze self._opened = True raise USBTransportDeviceInUse( "device probably used by another thread/process" ) raise USBTransportError.from_usberror(err) else: self._opened = True # This will initialize the communication protocol if self._opened: self._protocol = self._protocol_cls(self) @property def is_open(self): return self._opened or False def close_device(self): if self._device is not None: self._device.close() self._device = None self._opened = False self._protocol = None def write(self, data, timeout_ms=None, **kwargs): if self._device is None: raise RuntimeError("device not opened") if kwargs: warnings.warn("kwargs provided but ignored: {}".format(kwargs)) return self._device.pyusb_device.write( self._endpoint_map.ep_out, data, 
timeout=timeout_ms ) def read(self, size=None, timeout_ms=None, mode=None, **kwargs): if self._device is None: raise RuntimeError("device not opened") mode = mode if mode is not None else self._default_read_endpoint endpoint = getattr(self._endpoint_map, self._read_endpoints[mode]) if size is None: size = self._default_read_size[mode] if kwargs: warnings.warn("kwargs provided but ignored: {}".format(kwargs)) return self._device.pyusb_device.read(endpoint, size, timeout=timeout_ms) @property def default_timeout_ms(self): return self._device.pyusb_device.default_timeout if self._device else None @property def protocol(self): return self._protocol @classmethod def list_devices(cls, **kwargs): """list pyusb devices for all available spectrometers Note: this includes spectrometers that are currently opened in other processes on the machine. Yields ------ devices : USBTransportHandle unique pyusb devices for each available spectrometer """ # check if a specific pyusb backend is requested _pyusb_backend = kwargs.get("_pyusb_backend", None) # get all matching devices try: pyusb_devices = usb.core.find( find_all=True, custom_match=lambda dev: ( dev.idVendor == cls.vendor_id and dev.idProduct in cls.product_ids ), backend=get_pyusb_backend_from_name(name=_pyusb_backend), ) except usb.core.NoBackendError: raise RuntimeError("No pyusb backend found") # encapsulate for pyusb_device in pyusb_devices: yield USBTransportHandle(pyusb_device) @classmethod def register_model(cls, model_name, **kwargs): product_id = kwargs.get("usb_product_id") if product_id in cls.product_ids: raise ValueError( "product_id 0x{:04x} already in registry".format(product_id) ) cls.product_ids[product_id] = model_name @classmethod def supported_model(cls, device): """return supported model Parameters ---------- device : USBTransportHandle """ if not isinstance(device, USBTransportHandle): return None # noinspection PyUnresolvedReferences return cls.product_ids[device.pyusb_device.idProduct] @classmethod 
def specialize(cls, model_name, **kwargs): assert set(kwargs) == set(cls._required_init_kwargs) # usb transport register automatically on registration cls.register_model(model_name, **kwargs) specialized_class = type( "USBTransport{}".format(model_name), (cls,), {"__init__": partialmethod(cls.__init__, **kwargs)}, ) return specialized_class @classmethod def initialize(cls, **_kwargs): for device in cls.list_devices(**_kwargs): try: device.pyusb_device.reset() # usb.util.dispose_resources(device) <- already done by device.reset() except Exception as err: cls._log.debug( "initialize failed: {}('{}')".format( err.__class__.__name__, getattr(err, "message", "no message") ) ) @classmethod def shutdown(cls, **_kwargs): # dispose usb resources for device in cls.list_devices(**_kwargs): try: usb.util.dispose_resources(device.pyusb_device) except Exception as err: cls._log.debug( "shutdown failed: {}('{}')".format( err.__class__.__name__, getattr(err, "message", "no message") ) ) _pyusb_backend_instances = {} def get_pyusb_backend_from_name(name): """internal: allow requesting a specific pyusb backend for testing""" if name is None: # default is pick first that works: ('libusb1', 'libusb0', 'openusb0') _backend = None else: try: _backend = _pyusb_backend_instances[name] except KeyError: m = importlib.import_module("usb.backend.{}".format(name)) # noinspection PyUnresolvedReferences _backend = m.get_backend() # raise if a pyusb backend was requested but can't be loaded if _backend is None: raise RuntimeError("pyusb '{}' backend failed to load") _pyusb_backend_instances[name] = _backend return _backend def get_name_from_pyusb_backend(backend): """internal: return backend name from loaded backend""" module = inspect.getmodule(backend) if not module: return None return module.__name__.split(".")[-1]
openslack/openslack-monitor
refs/heads/master
portal/web/controller/expression.py
5
# -*- coding:utf-8 -*-
"""Flask controllers for listing, editing, pausing and viewing Expressions."""
__author__ = 'Ulric Qin'

from web import app
from flask import request, g, render_template, jsonify
from web.model.expression import Expression
from web.model.action import Action
from frame.params import required_chk
from frame.config import UIC_ADDRESS


@app.route('/expressions')
def expressions_get():
    """Render the paginated expression list, optionally filtered."""
    g.menu = 'expressions'
    page = int(request.args.get('p', 1))
    limit = int(request.args.get('limit', 6))
    query = request.args.get('q', '').strip()
    mine = request.args.get('mine', '1')
    # mine == '1' restricts the listing to the current user's expressions
    me = g.user_name if mine == '1' else None
    vs, total = Expression.query(page, limit, query, me)
    # attach the related Action so the template can show it inline
    for v in vs:
        v.action = Action.get(v.action_id)
    return render_template(
        'expression/list.html',
        data={
            'vs': vs,
            'total': total,
            'query': query,
            'limit': limit,
            'page': page,
            'mine': mine,
        }
    )


@app.route('/expression/delete/<expression_id>')
def expression_delete_get(expression_id):
    """Delete one expression by id; returns {"msg": ""} on success."""
    expression_id = int(expression_id)
    Expression.delete_one(expression_id)
    return jsonify(msg='')


@app.route('/expression/add')
def expression_add_get():
    """Render the add/edit form; pre-fills when an existing id is given."""
    g.menu = 'expressions'
    a = None
    o = Expression.get(int(request.args.get('id', '0').strip()))
    if o:
        a = Action.get(o.action_id)
    return render_template('expression/add.html',
                           data={'action': a, 'expression': o,
                                 'uic_address': UIC_ADDRESS['external']})


@app.route('/expression/update', methods=['POST'])
def expression_update_post():
    """Create or update an expression from the submitted form.

    Returns {"msg": ""} on success or {"msg": <error>} on validation or
    persistence failure.
    """
    expression_id = request.form['expression_id'].strip()
    expression = request.form['expression'].strip()
    func = request.form['func'].strip()
    op = request.form['op'].strip()
    right_value = request.form['right_value'].strip()
    uic_groups = request.form['uic'].strip()
    max_step = request.form['max_step'].strip()
    priority = int(request.form['priority'].strip())
    note = request.form['note'].strip()
    url = request.form['url'].strip()
    callback = request.form['callback'].strip()
    before_callback_sms = request.form['before_callback_sms']
    before_callback_mail = request.form['before_callback_mail']
    after_callback_sms = request.form['after_callback_sms']
    after_callback_mail = request.form['after_callback_mail']

    # reject the request early if any mandatory field is blank
    msg = required_chk({
        'expression': expression,
        'func': func,
        'op': op,
        'right_value': right_value,
    })
    if msg:
        return jsonify(msg=msg)

    # sensible defaults when the optional fields were left empty
    if not max_step:
        max_step = 3
    if not priority:
        priority = 0

    return jsonify(msg=Expression.save_or_update(
        expression_id,
        expression,
        func,
        op,
        right_value,
        uic_groups,
        max_step,
        priority,
        note,
        url,
        callback,
        before_callback_sms,
        before_callback_mail,
        after_callback_sms,
        after_callback_mail,
        g.user_name,
    ))


@app.route('/expression/pause')
def expression_pause_get():
    """Pause or resume an expression; returns {"msg": ""} on success."""
    expression_id = request.args.get("id", '')
    pause = request.args.get('pause', '')
    if not expression_id:
        return jsonify(msg='id is blank')
    if not pause:
        return jsonify(msg='pause is blank')
    # NOTE(review): id is passed as a string here while other handlers cast
    # to int — presumably the model layer coerces it; confirm against
    # Expression.get.
    e = Expression.get(expression_id)
    if not e:
        # BUGFIX: was jsonify('no such expression %s' % expression_id),
        # which returned a bare JSON string instead of the {"msg": ...}
        # envelope every other handler in this module uses.
        return jsonify(msg='no such expression %s' % expression_id)
    Expression.update_dict({'pause': pause}, 'id=%s', [expression_id])
    return jsonify(msg='')


@app.route('/expression/view/<eid>')
def expression_view_get(eid):
    """Render the read-only detail page for one expression."""
    eid = int(eid)
    g.menu = 'expressions'
    a = None
    o = Expression.get(eid)
    if o:
        a = Action.get(o.action_id)
    else:
        return 'no such expression'
    return render_template('expression/view.html',
                           data={'action': a, 'expression': o})
hale36/SRTV
refs/heads/master
lib/imdb/_exceptions.py
128
"""
_exceptions module (imdb package).

This module provides the exception hierarchy used by the imdb package.

Copyright 2004-2009 Davide Alberani <da@erlug.linux.it>

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
"""

import logging


class IMDbError(Exception):
    """Base class for every exception raised by the imdb package."""

    # Shared package logger; every instantiation emits a critical record.
    _logger = logging.getLogger('imdbpy')

    def __init__(self, *args, **kwargs):
        """Log the failure, then build the underlying Exception.

        Every raised exception also dispatches a critical log entry so
        failures are visible even when the exception is swallowed upstream.
        """
        self._logger.critical('%s exception raised; args: %s; kwds: %s',
                              self.__class__.__name__, args, kwargs,
                              exc_info=True)
        super(IMDbError, self).__init__(*args, **kwargs)


class IMDbDataAccessError(IMDbError):
    """Exception raised when it is not possible to access needed data."""


class IMDbParserError(IMDbError):
    """Exception raised when an error occurred parsing the data."""
xuweiliang/Codelibrary
refs/heads/master
nova/virt/libvirt/volume/gpfs.py
54
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from nova.virt.libvirt.volume import fs


class LibvirtGPFSVolumeDriver(fs.LibvirtBaseFileSystemVolumeDriver):
    """Class for volumes backed by gpfs volume."""

    def _get_mount_point_base(self):
        # GPFS volumes are addressed by absolute device path in the
        # connection info, so no mount-point prefix is required.
        return ''

    def get_config(self, connection_info, disk_info):
        """Returns xml for libvirt."""
        parent = super(LibvirtGPFSVolumeDriver, self)
        cfg = parent.get_config(connection_info, disk_info)
        # expose the GPFS device to libvirt as a plain file-backed source
        cfg.source_type = "file"
        cfg.source_path = connection_info['data']['device_path']
        return cfg
jpadilla/dj-stripe
refs/heads/master
tests/apps/testapp_namespaced/__init__.py
12133432
gm2211/vpnAlfredWorkflow
refs/heads/develop
src/alp/core_dependencies/__init__.py
12133432
wmde/jenkins-job-builder
refs/heads/master
tests/properties/__init__.py
12133432
gajendrasinghSolanki/qualitybots
refs/heads/master
src/appengine/filters/__init__.py
12133432
Juniper/neutron
refs/heads/master
neutron/plugins/bigswitch/config.py
6
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2014 Big Switch Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mandeep Dhami, Big Switch Networks, Inc.
# @author: Sumit Naiksatam, sumitnaiksatam@gmail.com, Big Switch Networks, Inc.
# @author: Kevin Benton, Big Switch Networks, Inc.

"""
This module manages configuration options
"""

from oslo.config import cfg

from neutron.agent.common import config as agconfig
from neutron.common import utils
from neutron.extensions import portbindings

# Options controlling communication with the BigSwitch/Floodlight backend.
restproxy_opts = [
    cfg.ListOpt('servers', default=['localhost:8800'],
                help=_("A comma separated list of BigSwitch or Floodlight "
                       "servers and port numbers. The plugin proxies the "
                       "requests to the BigSwitch/Floodlight server, "
                       # BUGFIX: trailing spaces added so the concatenated
                       # help text no longer reads "Only oneserver" and
                       # "wish todeploy".
                       "which performs the networking configuration. Only "
                       "one server is needed per deployment, but you may "
                       "wish to deploy multiple servers to support "
                       "failover.")),
    cfg.StrOpt('server_auth', default=None, secret=True,
               help=_("The username and password for authenticating against "
                      "the BigSwitch or Floodlight controller.")),
    cfg.BoolOpt('server_ssl', default=True,
                help=_("If True, Use SSL when connecting to the BigSwitch or "
                       "Floodlight controller.")),
    cfg.BoolOpt('ssl_sticky', default=True,
                help=_("Trust and store the first certificate received for "
                       "each controller address and use it to validate future "
                       "connections to that address.")),
    cfg.BoolOpt('no_ssl_validation', default=False,
                help=_("Disables SSL certificate validation for controllers")),
    cfg.BoolOpt('cache_connections', default=True,
                help=_("Re-use HTTP/HTTPS connections to the controller.")),
    cfg.StrOpt('ssl_cert_directory',
               default='/etc/neutron/plugins/bigswitch/ssl',
               help=_("Directory containing ca_certs and host_certs "
                      "certificate directories.")),
    cfg.BoolOpt('sync_data', default=False,
                help=_("Sync data on connect")),
    cfg.BoolOpt('auto_sync_on_failure', default=True,
                help=_("If neutron fails to create a resource because "
                       "the backend controller doesn't know of a dependency, "
                       "automatically trigger a full data synchronization "
                       "to the controller.")),
    cfg.IntOpt('consistency_interval', default=60,
               help=_("Time between verifications that the backend controller "
                      "database is consistent with Neutron")),
    cfg.IntOpt('server_timeout', default=10,
               help=_("Maximum number of seconds to wait for proxy request "
                      "to connect and complete.")),
    cfg.IntOpt('thread_pool_size', default=4,
               help=_("Maximum number of threads to spawn to handle large "
                      "volumes of port creations.")),
    cfg.StrOpt('neutron_id', default='neutron-' + utils.get_hostname(),
               deprecated_name='quantum_id',
               help=_("User defined identifier for this Neutron deployment")),
    cfg.BoolOpt('add_meta_server_route', default=True,
                help=_("Flag to decide if a route to the metadata server "
                       "should be injected into the VM")),
]

# Options for the tenant-router-rule feature.
router_opts = [
    cfg.MultiStrOpt('tenant_default_router_rule',
                    default=['*:any:any:permit'],
                    help=_("The default router rules installed in new tenant "
                           "routers. Repeat the config option for each rule. "
                           "Format is <tenant>:<source>:<destination>:<action>"
                           " Use an * to specify default for all tenants.")),
    cfg.IntOpt('max_router_rules', default=200,
               help=_("Maximum number of router rules")),
]

# Options consumed by Nova compute nodes.
nova_opts = [
    cfg.StrOpt('vif_type', default='ovs',
               help=_("Virtual interface type to configure on "
                      "Nova compute nodes")),
]

# Each VIF Type can have a list of nova host IDs that are fixed to that type
for i in portbindings.VIF_TYPES:
    opt = cfg.ListOpt('node_override_vif_' + i, default=[],
                      help=_("Nova compute nodes to manually set VIF "
                             "type to %s") % i)
    nova_opts.append(opt)

# Add the vif types for reference later
nova_opts.append(cfg.ListOpt('vif_types',
                             default=portbindings.VIF_TYPES,
                             help=_('List of allowed vif_type values.')))

# Options for the agent running on compute nodes.
agent_opts = [
    cfg.StrOpt('integration_bridge', default='br-int',
               help=_('Name of integration bridge on compute '
                      'nodes used for security group insertion.')),
    cfg.IntOpt('polling_interval', default=5,
               help=_('Seconds between agent checks for port changes')),
    cfg.StrOpt('virtual_switch_type', default='ovs',
               help=_('Virtual switch type.'))
]


def register_config():
    """Register all option groups of this plugin with oslo.config."""
    cfg.CONF.register_opts(restproxy_opts, "RESTPROXY")
    cfg.CONF.register_opts(router_opts, "ROUTER")
    cfg.CONF.register_opts(nova_opts, "NOVA")
    cfg.CONF.register_opts(agent_opts, "RESTPROXYAGENT")
    agconfig.register_root_helper(cfg.CONF)
zlsun/XX-Net
refs/heads/master
code/default/python27/1.0/lib/wsgiref/validate.py
23
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
# Also licenced under the Apache License, 2.0: http://opensource.org/licenses/apache2.0.php
# Licensed to PSF under a Contributor Agreement
"""
Middleware to check for obedience to the WSGI specification.

Some of the things this checks:

* Signature of the application and start_response (including that
  keyword arguments are not used).

* Environment checks:

  - Environment is a dictionary (and not a subclass).

  - That all the required keys are in the environment: REQUEST_METHOD,
    SERVER_NAME, SERVER_PORT, wsgi.version, wsgi.input, wsgi.errors,
    wsgi.multithread, wsgi.multiprocess, wsgi.run_once

  - That HTTP_CONTENT_TYPE and HTTP_CONTENT_LENGTH are not in the
    environment (these headers should appear as CONTENT_LENGTH and
    CONTENT_TYPE).

  - Warns if QUERY_STRING is missing, as the cgi module acts
    unpredictably in that case.

  - That CGI-style variables (that don't contain a .) have
    (non-unicode) string values

  - That wsgi.version is a tuple

  - That wsgi.url_scheme is 'http' or 'https' (@@: is this too
    restrictive?)

  - Warns if the REQUEST_METHOD is not known (@@: probably too
    restrictive).

  - That SCRIPT_NAME and PATH_INFO are empty or start with /

  - That at least one of SCRIPT_NAME or PATH_INFO are set.

  - That CONTENT_LENGTH is a positive integer.

  - That SCRIPT_NAME is not '/' (it should be '', and PATH_INFO should
    be '/').

  - That wsgi.input has the methods read, readline, readlines, and
    __iter__

  - That wsgi.errors has the methods flush, write, writelines

* The status is a string, contains a space, starts with an integer,
  and that integer is in range (> 100).

* That the headers is a list (not a subclass, not another kind of
  sequence).

* That the items of the headers are tuples of strings.

* That there is no 'status' header (that is used in CGI, but not in
  WSGI).

* That the headers don't contain newlines or colons, end in _ or -, or
  contain characters codes below 037.

* That Content-Type is given if there is content (CGI often has a
  default content type, but WSGI does not).

* That no Content-Type is given when there is no content (@@: is this
  too restrictive?)

* That the exc_info argument to start_response is a tuple or None.

* That all calls to the writer are with strings, and no other methods
  on the writer are accessed.

* That wsgi.input is used properly:

  - .read() is called with zero or one argument

  - That it returns a string

  - That readline, readlines, and __iter__ return strings

  - That .close() is not called

  - No other methods are provided

* That wsgi.errors is used properly:

  - .write() and .writelines() is called with a string

  - That .close() is not called, and no other methods are provided.

* The response iterator:

  - That it is not a string (it should be a list of a single string; a
    string will work, but perform horribly).

  - That .next() returns a string

  - That the iterator is not iterated over until start_response has
    been called (that can signal either a server or application
    error).

  - That .close() is called (doesn't raise exception, only prints to
    sys.stderr, because we only know it isn't called when the object
    is garbage collected).
"""
__all__ = ['validator']


import re
import sys
from types import DictType, StringType, TupleType, ListType
import warnings

# Valid header-field name and forbidden control characters per RFC 2616.
header_re = re.compile(r'^[a-zA-Z][a-zA-Z0-9\-_]*$')
bad_header_value_re = re.compile(r'[\000-\037]')

class WSGIWarning(Warning):
    """
    Raised in response to WSGI-spec-related warnings
    """

def assert_(cond, *args):
    # Central assertion helper so every compliance failure raises the
    # same exception type with a descriptive message.
    if not cond:
        raise AssertionError(*args)

def validator(application):

    """
    When applied between a WSGI server and a WSGI application, this
    middleware will check for WSGI compliancy on a number of levels.
    This middleware does not modify the request or response in any
    way, but will raise an AssertionError if anything seems off
    (except for a failure to close the application iterator, which
    will be printed to stderr -- there's no way to raise an exception
    at that point).
    """

    def lint_app(*args, **kw):
        assert_(len(args) == 2, "Two arguments required")
        assert_(not kw, "No keyword arguments allowed")
        environ, start_response = args

        check_environ(environ)

        # We use this to check if the application returns without
        # calling start_response:
        start_response_started = []

        def start_response_wrapper(*args, **kw):
            assert_(len(args) == 2 or len(args) == 3, (
                "Invalid number of arguments: %s" % (args,)))
            assert_(not kw, "No keyword arguments allowed")
            status = args[0]
            headers = args[1]
            if len(args) == 3:
                exc_info = args[2]
            else:
                exc_info = None

            check_status(status)
            check_headers(headers)
            check_content_type(status, headers)
            check_exc_info(exc_info)

            start_response_started.append(None)
            return WriteWrapper(start_response(*args))

        # Wrap the input/error streams so their usage can be validated too.
        environ['wsgi.input'] = InputWrapper(environ['wsgi.input'])
        environ['wsgi.errors'] = ErrorWrapper(environ['wsgi.errors'])

        iterator = application(environ, start_response_wrapper)
        assert_(iterator is not None and iterator != False,
            "The application must return an iterator, if only an empty list")

        check_iterator(iterator)

        return IteratorWrapper(iterator, start_response_started)

    return lint_app

class InputWrapper:
    """Proxy for wsgi.input that validates every read call and its result."""

    def __init__(self, wsgi_input):
        self.input = wsgi_input

    def read(self, *args):
        assert_(len(args) <= 1)
        v = self.input.read(*args)
        assert_(type(v) is type(""))
        return v

    def readline(self):
        v = self.input.readline()
        assert_(type(v) is type(""))
        return v

    def readlines(self, *args):
        assert_(len(args) <= 1)
        lines = self.input.readlines(*args)
        assert_(type(lines) is type([]))
        for line in lines:
            assert_(type(line) is type(""))
        return lines

    def __iter__(self):
        while 1:
            line = self.readline()
            if not line:
                return
            yield line

    def close(self):
        # The application must never close the server-owned input stream.
        assert_(0, "input.close() must not be called")

class ErrorWrapper:
    """Proxy for wsgi.errors that validates writes and forbids close()."""

    def __init__(self, wsgi_errors):
        self.errors = wsgi_errors

    def write(self, s):
        assert_(type(s) is type(""))
        self.errors.write(s)

    def flush(self):
        self.errors.flush()

    def writelines(self, seq):
        for line in seq:
            self.write(line)

    def close(self):
        # The application must never close the server-owned error stream.
        assert_(0, "errors.close() must not be called")

class WriteWrapper:
    """Proxy for the write callable returned by start_response."""

    def __init__(self, wsgi_writer):
        self.writer = wsgi_writer

    def __call__(self, s):
        assert_(type(s) is type(""))
        self.writer(s)

class PartialIteratorWrapper:
    """Defers wrapping until iteration actually begins."""

    def __init__(self, wsgi_iterator):
        self.iterator = wsgi_iterator

    def __iter__(self):
        # We want to make sure __iter__ is called
        return IteratorWrapper(self.iterator, None)

class IteratorWrapper:
    """Wraps the response iterator to check iteration order and close()."""

    def __init__(self, wsgi_iterator, check_start_response):
        self.original_iterator = wsgi_iterator
        self.iterator = iter(wsgi_iterator)
        self.closed = False
        self.check_start_response = check_start_response

    def __iter__(self):
        return self

    def next(self):
        assert_(not self.closed,
            "Iterator read after closed")
        v = self.iterator.next()
        if self.check_start_response is not None:
            assert_(self.check_start_response,
                "The application returns and we started iterating over its body, but start_response has not yet been called")
            self.check_start_response = None
        return v

    def close(self):
        self.closed = True
        if hasattr(self.original_iterator, 'close'):
            self.original_iterator.close()

    def __del__(self):
        # Only detectable at garbage collection; too late to raise, so
        # report to stderr before asserting.
        if not self.closed:
            sys.stderr.write(
                "Iterator garbage collected without being closed")
        assert_(self.closed,
            "Iterator garbage collected without being closed")

def check_environ(environ):
    assert_(type(environ) is DictType,
        "Environment is not of the right type: %r (environment: %r)"
        % (type(environ), environ))

    for key in ['REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
                'wsgi.version', 'wsgi.input', 'wsgi.errors',
                'wsgi.multithread', 'wsgi.multiprocess',
                'wsgi.run_once']:
        assert_(key in environ,
            "Environment missing required key: %r" % (key,))

    for key in ['HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH']:
        assert_(key not in environ,
            "Environment should not have the key: %s "
            "(use %s instead)" % (key, key[5:]))

    if 'QUERY_STRING' not in environ:
        warnings.warn(
            'QUERY_STRING is not in the WSGI environment; the cgi '
            'module will use sys.argv when this variable is missing, '
            'so application errors are more likely',
            WSGIWarning)

    for key in environ.keys():
        if '.' in key:
            # Extension, we don't care about its type
            continue
        assert_(type(environ[key]) is StringType,
            "Environmental variable %s is not a string: %r (value: %r)"
            % (key, type(environ[key]), environ[key]))

    assert_(type(environ['wsgi.version']) is TupleType,
        "wsgi.version should be a tuple (%r)" % (environ['wsgi.version'],))
    assert_(environ['wsgi.url_scheme'] in ('http', 'https'),
        "wsgi.url_scheme unknown: %r" % environ['wsgi.url_scheme'])

    check_input(environ['wsgi.input'])
    check_errors(environ['wsgi.errors'])

    # @@: these need filling out:
    if environ['REQUEST_METHOD'] not in (
        'GET', 'HEAD', 'POST', 'OPTIONS', 'PATCH', 'PUT', 'DELETE', 'TRACE'):
        warnings.warn(
            "Unknown REQUEST_METHOD: %r" % environ['REQUEST_METHOD'],
            WSGIWarning)

    assert_(not environ.get('SCRIPT_NAME')
            or environ['SCRIPT_NAME'].startswith('/'),
        "SCRIPT_NAME doesn't start with /: %r" % environ['SCRIPT_NAME'])
    assert_(not environ.get('PATH_INFO')
            or environ['PATH_INFO'].startswith('/'),
        "PATH_INFO doesn't start with /: %r" % environ['PATH_INFO'])
    if environ.get('CONTENT_LENGTH'):
        assert_(int(environ['CONTENT_LENGTH']) >= 0,
            "Invalid CONTENT_LENGTH: %r" % environ['CONTENT_LENGTH'])

    if not environ.get('SCRIPT_NAME'):
        assert_('PATH_INFO' in environ,
            "One of SCRIPT_NAME or PATH_INFO are required (PATH_INFO "
            "should at least be '/' if SCRIPT_NAME is empty)")
    assert_(environ.get('SCRIPT_NAME') != '/',
        "SCRIPT_NAME cannot be '/'; it should instead be '', and "
        "PATH_INFO should be '/'")

def check_input(wsgi_input):
    for attr in ['read', 'readline', 'readlines', '__iter__']:
        assert_(hasattr(wsgi_input, attr),
            "wsgi.input (%r) doesn't have the attribute %s"
            % (wsgi_input, attr))

def check_errors(wsgi_errors):
    for attr in ['flush', 'write', 'writelines']:
        assert_(hasattr(wsgi_errors, attr),
            "wsgi.errors (%r) doesn't have the attribute %s"
            % (wsgi_errors, attr))

def check_status(status):
    assert_(type(status) is StringType,
        "Status must be a string (not %r)" % status)
    # Implicitly check that we can turn it into an integer:
    status_code = status.split(None, 1)[0]
    assert_(len(status_code) == 3,
        "Status codes must be three characters: %r" % status_code)
    status_int = int(status_code)
    assert_(status_int >= 100, "Status code is invalid: %r" % status_int)
    if len(status) < 4 or status[3] != ' ':
        warnings.warn(
            "The status string (%r) should be a three-digit integer "
            "followed by a single space and a status explanation"
            % status, WSGIWarning)

def check_headers(headers):
    assert_(type(headers) is ListType,
        "Headers (%r) must be of type list: %r"
        % (headers, type(headers)))
    header_names = {}
    for item in headers:
        assert_(type(item) is TupleType,
            "Individual headers (%r) must be of type tuple: %r"
            % (item, type(item)))
        assert_(len(item) == 2)
        name, value = item
        assert_(name.lower() != 'status',
            "The Status header cannot be used; it conflicts with CGI "
            "script, and HTTP status is not given through headers "
            "(value: %r)." % value)
        header_names[name.lower()] = None
        assert_('\n' not in name and ':' not in name,
            "Header names may not contain ':' or '\\n': %r" % name)
        assert_(header_re.search(name), "Bad header name: %r" % name)
        assert_(not name.endswith('-') and not name.endswith('_'),
            "Names may not end in '-' or '_': %r" % name)
        if bad_header_value_re.search(value):
            assert_(0, "Bad header value: %r (bad char: %r)"
            % (value, bad_header_value_re.search(value).group(0)))

def check_content_type(status, headers):
    code = int(status.split(None, 1)[0])
    # @@: need one more person to verify this interpretation of RFC 2616
    #     http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
    NO_MESSAGE_BODY = (204, 304)
    for name, value in headers:
        if name.lower() == 'content-type':
            if code not in NO_MESSAGE_BODY:
                return
            assert_(0, ("Content-Type header found in a %s response, "
                        "which must not return content.") % code)
    if code not in NO_MESSAGE_BODY:
        assert_(0, "No Content-Type header found in headers (%s)" % headers)

def check_exc_info(exc_info):
    assert_(exc_info is None or type(exc_info) is type(()),
        "exc_info (%r) is not a tuple: %r" % (exc_info, type(exc_info)))
    # More exc_info checks?

def check_iterator(iterator):
    # Technically a string is legal, which is why it's a really bad
    # idea, because it may cause the response to be returned
    # character-by-character
    assert_(not isinstance(iterator, str),
        "You should not return a string as your application iterator, "
        "instead return a single-item list containing that string.")
heeraj123/oh-mainline
refs/heads/master
vendor/packages/django-extensions/django_extensions/templatetags/highlighting.py
44
"""
Similar to syntax_color.py but this is intended more for being able to
copy+paste actual code into your Django templates without needing to escape
or anything crazy.

http://lobstertech.com/2008/aug/30/django_syntax_highlight_template_tag/

Example:

 {% load highlighting %}

 <style>
 @import url("http://lobstertech.com/media/css/highlight.css");
 .highlight { background: #f8f8f8; }
 .highlight { font-size: 11px; margin: 1em; border: 1px solid #ccc;
              border-left: 3px solid #F90; padding: 0; }
 .highlight pre { padding: 1em; overflow: auto; line-height: 120%; margin: 0; }
 .predesc { margin: 1.5em 1.5em -2.5em 1em; text-align: right;
            font: bold 12px Tahoma, Arial, sans-serif;
            letter-spacing: 1px; color: #333; }
 </style>

 <h2>check out this code</h2>

 {% highlight 'python' 'Excerpt: blah.py' %}
 def need_food(self):
     print "Love is <colder> than &death&"
 {% endhighlight %}
"""
from pygments import highlight as pyghighlight
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.formatters import HtmlFormatter
from django.conf import settings
from django import template
from django.template import Template, Context, Node, Variable
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe

register = template.Library()


@register.filter
@stringfilter
def parse_template(value):
    """Render *value* as a Django template (with an empty context)."""
    return mark_safe(Template(value).render(Context()))
parse_template.is_safe = True


class CodeNode(Node):
    """Template node that runs its rendered contents through pygments."""

    def __init__(self, language, nodelist, name=''):
        self.language = Variable(language)
        self.nodelist = nodelist
        if name:
            self.name = Variable(name)
        else:
            self.name = None

    def render(self, context):
        code = self.nodelist.render(context).strip()
        lexer = get_lexer_by_name(self.language.resolve(context))
        formatter = HtmlFormatter(linenos=False)
        html = ""
        if self.name:
            # optional description banner shown above the highlighted block
            name = self.name.resolve(context)
            html = '<div class="predesc"><span>%s</span></div>' % (name)
        return html + pyghighlight(code, lexer, formatter)


@register.tag
def highlight(parser, token):
    """
    Allows you to put a highlighted source code <pre> block in your code.
    This takes two arguments, the language and a little explaination message
    that will be generated before the code.  The second argument is optional.

    Your code will be fed through pygments so you can use any language it
    supports.

    {% load highlighting %}
    {% highlight 'python' 'Excerpt: blah.py' %}
    def need_food(self):
        print "Love is colder than death"
    {% endhighlight %}
    """
    nodelist = parser.parse(('endhighlight',))
    parser.delete_first_token()
    bits = token.split_contents()[1:]
    if len(bits) < 1:
        # BUGFIX: TemplateSyntaxError was referenced without being imported,
        # which raised NameError instead of the intended template error.
        raise template.TemplateSyntaxError(
            "'highlight' statement requires an argument")
    return CodeNode(bits[0], nodelist, *bits[1:])
krull/docker-zenoss4
refs/heads/master
init_fs/usr/local/zenoss/ZenPacks/ZenPacks.zenoss.Microsoft.Windows-2.6.9.egg/ZenPacks/zenoss/Microsoft/Windows/migrate/RemoveWinRMServices.py
1
##############################################################################
#
# Copyright (C) Zenoss, Inc. 2016, all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
##############################################################################

"""Remove Windows services.

ZEN-24347
winrmservices need to be removed as they are incompatible with
the WinService class.
"""

# Logging
import logging

# Zenoss Imports
from Products.ZenModel.migrate.Migrate import Version
from Products.ZenModel.ZenPack import ZenPackMigration

LOG = logging.getLogger('zen.MicrosoftWindows')


class RemoveWinRMServices(ZenPackMigration):
    """Migration: strip the legacy winrmservices relation from devices."""

    version = Version(2, 6, 3)

    def migrate(self, pack):
        """Drop the winrmservices relation on every /Server/Microsoft device
        and rebuild that device's OS relations."""
        microsoft = pack.dmd.Devices.getOrganizer('/Server/Microsoft')
        affected = microsoft.getSubDevices()
        total = len(affected)
        if total:
            plural = 's' if total > 1 else ''
            LOG.info(
                'Removing incompatible Windows Services from {} device{}.'
                .format(total, plural))
        for dev in affected:
            dev.os.removeRelation('winrmservices')
            dev.os.buildRelations()
rockyzhang/zhangyanhit-python-for-android-mips
refs/heads/master
python-build/python-libs/gdata/src/gdata/client.py
133
#!/usr/bin/env python # # Copyright (C) 2008, 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This module is used for version 2 of the Google Data APIs. """Provides a client to interact with Google Data API servers. This module is used for version 2 of the Google Data APIs. The primary class in this module is GDClient. GDClient: handles auth and CRUD operations when communicating with servers. GDataClient: deprecated client for version one services. Will be removed. """ __author__ = 'j.s@google.com (Jeff Scudder)' import re import atom.client import atom.core import atom.http_core import gdata.gauth import gdata.data # Old imports import gdata.service import urllib import urlparse import gdata.auth import atom class Error(Exception): pass class RequestError(Error): status = None reason = None body = None headers = None class RedirectError(RequestError): pass class CaptchaChallenge(RequestError): captcha_url = None captcha_token = None class ClientLoginTokenMissing(Error): pass class MissingOAuthParameters(Error): pass class ClientLoginFailed(RequestError): pass class UnableToUpgradeToken(RequestError): pass class Unauthorized(Error): pass class BadAuthenticationServiceURL(RedirectError): pass class BadAuthentication(RequestError): pass def error_from_response(message, http_response, error_class, response_body=None): """Creates a new exception and sets the HTTP information in the error. 
Args: message: str human readable message to be displayed if the exception is not caught. http_response: The response from the server, contains error information. error_class: The exception to be instantiated and populated with information from the http_response response_body: str (optional) specify if the response has already been read from the http_response object. """ if response_body is None: body = http_response.read() else: body = response_body error = error_class('%s: %i, %s' % (message, http_response.status, body)) error.status = http_response.status error.reason = http_response.reason error.body = body error.headers = http_response.getheaders() return error def get_xml_version(version): """Determines which XML schema to use based on the client API version. Args: version: string which is converted to an int. The version string is in the form 'Major.Minor.x.y.z' and only the major version number is considered. If None is provided assume version 1. """ if version is None: return 1 return int(version.split('.')[0]) class GDClient(atom.client.AtomPubClient): """Communicates with Google Data servers to perform CRUD operations. This class is currently experimental and may change in backwards incompatible ways. This class exists to simplify the following three areas involved in using the Google Data APIs. CRUD Operations: The client provides a generic 'request' method for making HTTP requests. There are a number of convenience methods which are built on top of request, which include get_feed, get_entry, get_next, post, update, and delete. These methods contact the Google Data servers. Auth: Reading user-specific private data requires authorization from the user as do any changes to user data. An auth_token object can be passed into any of the HTTP requests to set the Authorization header in the request. You may also want to set the auth_token member to a an object which can use modify_request to set the Authorization header in the HTTP request. 
If you are authenticating using the email address and password, you can use the client_login method to obtain an auth token and set the auth_token member. If you are using browser redirects, specifically AuthSub, you will want to use gdata.gauth.AuthSubToken.from_url to obtain the token after the redirect, and you will probably want to updgrade this since use token to a multiple use (session) token using the upgrade_token method. API Versions: This client is multi-version capable and can be used with Google Data API version 1 and version 2. The version should be specified by setting the api_version member to a string, either '1' or '2'. """ # The gsessionid is used by Google Calendar to prevent redirects. __gsessionid = None api_version = None # Name of the Google Data service when making a ClientLogin request. auth_service = None # URL prefixes which should be requested for AuthSub and OAuth. auth_scopes = None def request(self, method=None, uri=None, auth_token=None, http_request=None, converter=None, desired_class=None, redirects_remaining=4, **kwargs): """Make an HTTP request to the server. See also documentation for atom.client.AtomPubClient.request. If a 302 redirect is sent from the server to the client, this client assumes that the redirect is in the form used by the Google Calendar API. The same request URI and method will be used as in the original request, but a gsessionid URL parameter will be added to the request URI with the value provided in the server's 302 redirect response. If the 302 redirect is not in the format specified by the Google Calendar API, a RedirectError will be raised containing the body of the server's response. The method calls the client's modify_request method to make any changes required by the client before the request is made. For example, a version 2 client could add a GData-Version: 2 header to the request in its modify_request method. 
Args: method: str The HTTP verb for this request, usually 'GET', 'POST', 'PUT', or 'DELETE' uri: atom.http_core.Uri, str, or unicode The URL being requested. auth_token: An object which sets the Authorization HTTP header in its modify_request method. Recommended classes include gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken among others. http_request: (optional) atom.http_core.HttpRequest converter: function which takes the body of the response as it's only argument and returns the desired object. desired_class: class descended from atom.core.XmlElement to which a successful response should be converted. If there is no converter function specified (converter=None) then the desired_class will be used in calling the atom.core.parse function. If neither the desired_class nor the converter is specified, an HTTP reponse object will be returned. redirects_remaining: (optional) int, if this number is 0 and the server sends a 302 redirect, the request method will raise an exception. This parameter is used in recursive request calls to avoid an infinite loop. Any additional arguments are passed through to atom.client.AtomPubClient.request. Returns: An HTTP response object (see atom.http_core.HttpResponse for a description of the object's interface) if no converter was specified and no desired_class was specified. If a converter function was provided, the results of calling the converter are returned. If no converter was specified but a desired_class was provided, the response body will be converted to the class using atom.core.parse. """ if isinstance(uri, (str, unicode)): uri = atom.http_core.Uri.parse_uri(uri) # Add the gsession ID to the URL to prevent further redirects. # TODO: If different sessions are using the same client, there will be a # multitude of redirects and session ID shuffling. # If the gsession ID is in the URL, adopt it as the standard location. 
if uri is not None and uri.query is not None and 'gsessionid' in uri.query: self.__gsessionid = uri.query['gsessionid'] # The gsession ID could also be in the HTTP request. elif (http_request is not None and http_request.uri is not None and http_request.uri.query is not None and 'gsessionid' in http_request.uri.query): self.__gsessionid = http_request.uri.query['gsessionid'] # If the gsession ID is stored in the client, and was not present in the # URI then add it to the URI. elif self.__gsessionid is not None: uri.query['gsessionid'] = self.__gsessionid # The AtomPubClient should call this class' modify_request before # performing the HTTP request. #http_request = self.modify_request(http_request) response = atom.client.AtomPubClient.request(self, method=method, uri=uri, auth_token=auth_token, http_request=http_request, **kwargs) # On success, convert the response body using the desired converter # function if present. if response is None: return None if response.status == 200 or response.status == 201: if converter is not None: return converter(response) elif desired_class is not None: if self.api_version is not None: return atom.core.parse(response.read(), desired_class, version=get_xml_version(self.api_version)) else: # No API version was specified, so allow parse to # use the default version. return atom.core.parse(response.read(), desired_class) else: return response # TODO: move the redirect logic into the Google Calendar client once it # exists since the redirects are only used in the calendar API. elif response.status == 302: if redirects_remaining > 0: location = response.getheader('Location') if location is not None: m = re.compile('[\?\&]gsessionid=(\w*)').search(location) if m is not None: self.__gsessionid = m.group(1) # Make a recursive call with the gsession ID in the URI to follow # the redirect. 
return self.request(method=method, uri=uri, auth_token=auth_token, http_request=http_request, converter=converter, desired_class=desired_class, redirects_remaining=redirects_remaining-1, **kwargs) else: raise error_from_response('302 received without Location header', response, RedirectError) else: raise error_from_response('Too many redirects from server', response, RedirectError) elif response.status == 401: raise error_from_response('Unauthorized - Server responded with', response, Unauthorized) # If the server's response was not a 200, 201, 302, or 401, raise an # exception. else: raise error_from_response('Server responded with', response, RequestError) Request = request def request_client_login_token(self, email, password, source, service=None, account_type='HOSTED_OR_GOOGLE', auth_url=atom.http_core.Uri.parse_uri( 'https://www.google.com/accounts/ClientLogin'), captcha_token=None, captcha_response=None): service = service or self.auth_service # Set the target URL. http_request = atom.http_core.HttpRequest(uri=auth_url, method='POST') http_request.add_body_part( gdata.gauth.generate_client_login_request_body(email=email, password=password, service=service, source=source, account_type=account_type, captcha_token=captcha_token, captcha_response=captcha_response), 'application/x-www-form-urlencoded') # Use the underlying http_client to make the request. response = self.http_client.request(http_request) response_body = response.read() if response.status == 200: token_string = gdata.gauth.get_client_login_token_string(response_body) if token_string is not None: return gdata.gauth.ClientLoginToken(token_string) else: raise ClientLoginTokenMissing( 'Recieved a 200 response to client login request,' ' but no token was present. 
%s' % (response_body,)) elif response.status == 403: captcha_challenge = gdata.gauth.get_captcha_challenge(response_body) if captcha_challenge: challenge = CaptchaChallenge('CAPTCHA required') challenge.captcha_url = captcha_challenge['url'] challenge.captcha_token = captcha_challenge['token'] raise challenge elif response_body.splitlines()[0] == 'Error=BadAuthentication': raise BadAuthentication('Incorrect username or password') else: raise error_from_response('Server responded with a 403 code', response, RequestError, response_body) elif response.status == 302: # Google tries to redirect all bad URLs back to # http://www.google.<locale>. If a redirect # attempt is made, assume the user has supplied an incorrect # authentication URL raise error_from_response('Server responded with a redirect', response, BadAuthenticationServiceURL, response_body) else: raise error_from_response('Server responded to ClientLogin request', response, ClientLoginFailed, response_body) RequestClientLoginToken = request_client_login_token def client_login(self, email, password, source, service=None, account_type='HOSTED_OR_GOOGLE', auth_url='https://www.google.com/accounts/ClientLogin', captcha_token=None, captcha_response=None): service = service or self.auth_service self.auth_token = self.request_client_login_token(email, password, source, service=service, account_type=account_type, auth_url=auth_url, captcha_token=captcha_token, captcha_response=captcha_response) ClientLogin = client_login def upgrade_token(self, token=None, url=atom.http_core.Uri.parse_uri( 'https://www.google.com/accounts/AuthSubSessionToken')): """Asks the Google auth server for a multi-use AuthSub token. For details on AuthSub, see: http://code.google.com/apis/accounts/docs/AuthSub.html Args: token: gdata.gauth.AuthSubToken or gdata.gauth.SecureAuthSubToken (optional) If no token is passed in, the client's auth_token member is used to request the new token. 
The token object will be modified to contain the new session token string. url: str or atom.http_core.Uri (optional) The URL to which the token upgrade request should be sent. Defaults to: https://www.google.com/accounts/AuthSubSessionToken Returns: The upgraded gdata.gauth.AuthSubToken object. """ # Default to using the auth_token member if no token is provided. if token is None: token = self.auth_token # We cannot upgrade a None token. if token is None: raise UnableToUpgradeToken('No token was provided.') if not isinstance(token, gdata.gauth.AuthSubToken): raise UnableToUpgradeToken( 'Cannot upgrade the token because it is not an AuthSubToken object.') http_request = atom.http_core.HttpRequest(uri=url, method='GET') token.modify_request(http_request) # Use the lower level HttpClient to make the request. response = self.http_client.request(http_request) if response.status == 200: token._upgrade_token(response.read()) return token else: raise UnableToUpgradeToken( 'Server responded to token upgrade request with %s: %s' % ( response.status, response.read())) UpgradeToken = upgrade_token def get_oauth_token(self, scopes, next, consumer_key, consumer_secret=None, rsa_private_key=None, url=gdata.gauth.REQUEST_TOKEN_URL): """Obtains an OAuth request token to allow the user to authorize this app. Once this client has a request token, the user can authorize the request token by visiting the authorization URL in their browser. After being redirected back to this app at the 'next' URL, this app can then exchange the authorized request token for an access token. For more information see the documentation on Google Accounts with OAuth: http://code.google.com/apis/accounts/docs/OAuth.html#AuthProcess Args: scopes: list of strings or atom.http_core.Uri objects which specify the URL prefixes which this app will be accessing. 
For example, to access the Google Calendar API, you would want to use scopes: ['https://www.google.com/calendar/feeds/', 'http://www.google.com/calendar/feeds/'] next: str or atom.http_core.Uri object, The URL which the user's browser should be sent to after they authorize access to their data. This should be a URL in your application which will read the token information from the URL and upgrade the request token to an access token. consumer_key: str This is the identifier for this application which you should have received when you registered your application with Google to use OAuth. consumer_secret: str (optional) The shared secret between your app and Google which provides evidence that this request is coming from you application and not another app. If present, this libraries assumes you want to use an HMAC signature to verify requests. Keep this data a secret. rsa_private_key: str (optional) The RSA private key which is used to generate a digital signature which is checked by Google's server. If present, this library assumes that you want to use an RSA signature to verify requests. Keep this data a secret. url: The URL to which a request for a token should be made. The default is Google's OAuth request token provider. 
""" http_request = None if rsa_private_key is not None: http_request = gdata.gauth.generate_request_for_request_token( consumer_key, gdata.gauth.RSA_SHA1, scopes, rsa_key=rsa_private_key, auth_server_url=url, next=next) elif consumer_secret is not None: http_request = gdata.gauth.generate_request_for_request_token( consumer_key, gdata.gauth.HMAC_SHA1, scopes, consumer_secret=consumer_secret, auth_server_url=url, next=next) else: raise MissingOAuthParameters( 'To request an OAuth token, you must provide your consumer secret' ' or your private RSA key.') response = self.http_client.request(http_request) response_body = response.read() if response.status != 200: raise error_from_response('Unable to obtain OAuth request token', response, RequestError, response_body) if rsa_private_key is not None: return gdata.gauth.rsa_token_from_body(response_body, consumer_key, rsa_private_key, gdata.gauth.REQUEST_TOKEN) elif consumer_secret is not None: return gdata.gauth.hmac_token_from_body(response_body, consumer_key, consumer_secret, gdata.gauth.REQUEST_TOKEN) GetOAuthToken = get_oauth_token def get_access_token(self, request_token, url=gdata.gauth.ACCESS_TOKEN_URL): """Exchanges an authorized OAuth request token for an access token. Contacts the Google OAuth server to upgrade a previously authorized request token. Once the request token is upgraded to an access token, the access token may be used to access the user's data. For more details, see the Google Accounts OAuth documentation: http://code.google.com/apis/accounts/docs/OAuth.html#AccessToken Args: request_token: An OAuth token which has been authorized by the user. url: (optional) The URL to which the upgrade request should be sent. 
Defaults to: https://www.google.com/accounts/OAuthAuthorizeToken """ http_request = gdata.gauth.generate_request_for_access_token( request_token, auth_server_url=url) response = self.http_client.request(http_request) response_body = response.read() if response.status != 200: raise error_from_response( 'Unable to upgrade OAuth request token to access token', response, RequestError, response_body) return gdata.gauth.upgrade_to_access_token(request_token, response_body) GetAccessToken = get_access_token def modify_request(self, http_request): """Adds or changes request before making the HTTP request. This client will add the API version if it is specified. Subclasses may override this method to add their own request modifications before the request is made. """ http_request = atom.client.AtomPubClient.modify_request(self, http_request) if self.api_version is not None: http_request.headers['GData-Version'] = self.api_version return http_request ModifyRequest = modify_request def get_feed(self, uri, auth_token=None, converter=None, desired_class=gdata.data.GDFeed, **kwargs): return self.request(method='GET', uri=uri, auth_token=auth_token, converter=converter, desired_class=desired_class, **kwargs) GetFeed = get_feed def get_entry(self, uri, auth_token=None, converter=None, desired_class=gdata.data.GDEntry, **kwargs): return self.request(method='GET', uri=uri, auth_token=auth_token, converter=converter, desired_class=desired_class, **kwargs) GetEntry = get_entry def get_next(self, feed, auth_token=None, converter=None, desired_class=None, **kwargs): """Fetches the next set of results from the feed. When requesting a feed, the number of entries returned is capped at a service specific default limit (often 25 entries). You can specify your own entry-count cap using the max-results URL query parameter. If there are more results than could fit under max-results, the feed will contain a next link. This method performs a GET against this next results URL. 
Returns: A new feed object containing the next set of entries in this feed. """ if converter is None and desired_class is None: desired_class = feed.__class__ return self.get_feed(feed.get_next_url(), auth_token=auth_token, converter=converter, desired_class=desired_class, **kwargs) GetNext = get_next # TODO: add a refresh method to re-fetch the entry/feed from the server # if it has been updated. def post(self, entry, uri, auth_token=None, converter=None, desired_class=None, **kwargs): if converter is None and desired_class is None: desired_class = entry.__class__ http_request = atom.http_core.HttpRequest() http_request.add_body_part( entry.to_string(get_xml_version(self.api_version)), 'application/atom+xml') return self.request(method='POST', uri=uri, auth_token=auth_token, http_request=http_request, converter=converter, desired_class=desired_class, **kwargs) Post = post def update(self, entry, auth_token=None, force=False, **kwargs): """Edits the entry on the server by sending the XML for this entry. Performs a PUT and converts the response to a new entry object with a matching class to the entry passed in. Args: entry: auth_token: force: boolean stating whether an update should be forced. Defaults to False. Normally, if a change has been made since the passed in entry was obtained, the server will not overwrite the entry since the changes were based on an obsolete version of the entry. Setting force to True will cause the update to silently overwrite whatever version is present. Returns: A new Entry object of a matching type to the entry which was passed in. """ http_request = atom.http_core.HttpRequest() http_request.add_body_part( entry.to_string(get_xml_version(self.api_version)), 'application/atom+xml') # Include the ETag in the request if this is version 2 of the API. 
if self.api_version and self.api_version.startswith('2'): if force: http_request.headers['If-Match'] = '*' elif hasattr(entry, 'etag') and entry.etag: http_request.headers['If-Match'] = entry.etag return self.request(method='PUT', uri=entry.find_edit_link(), auth_token=auth_token, http_request=http_request, desired_class=entry.__class__, **kwargs) Update = update def delete(self, entry_or_uri, auth_token=None, force=False, **kwargs): # If the user passes in a URL, just delete directly, may not work as # the service might require an ETag. if isinstance(entry_or_uri, (str, unicode, atom.http_core.Uri)): return self.request(method='DELETE', uri=entry_or_uri, auth_token=auth_token, **kwargs) http_request = atom.http_core.HttpRequest() # Include the ETag in the request if this is version 2 of the API. if self.api_version and self.api_version.startswith('2'): if force: http_request.headers['If-Match'] = '*' elif hasattr(entry_or_uri, 'etag') and entry_or_uri.etag: http_request.headers['If-Match'] = entry_or_uri.etag return self.request(method='DELETE', uri=entry_or_uri.find_edit_link(), http_request=http_request, auth_token=auth_token, **kwargs) Delete = delete #TODO: implement batch requests. #def batch(feed, uri, auth_token=None, converter=None, **kwargs): # pass # TODO: add a refresh method to request a conditional update to an entry # or feed. def _add_query_param(param_string, value, http_request): if value: http_request.uri.query[param_string] = value class Query(object): def __init__(self, text_query=None, categories=None, author=None, alt=None, updated_min=None, updated_max=None, pretty_print=False, published_min=None, published_max=None, start_index=None, max_results=None, strict=False): """Constructs a Google Data Query to filter feed contents serverside. Args: text_query: Full text search str (optional) categories: list of strings (optional). Each string is a required category. To include an 'or' query, put a | in the string between terms. 
For example, to find everything in the Fitz category and the Laurie or Jane category (Fitz and (Laurie or Jane)) you would set categories to ['Fitz', 'Laurie|Jane']. author: str (optional) The service returns entries where the author name and/or email address match your query string. alt: str (optional) for the Alternative representation type you'd like the feed in. If you don't specify an alt parameter, the service returns an Atom feed. This is equivalent to alt='atom'. alt='rss' returns an RSS 2.0 result feed. alt='json' returns a JSON representation of the feed. alt='json-in-script' Requests a response that wraps JSON in a script tag. alt='atom-in-script' Requests an Atom response that wraps an XML string in a script tag. alt='rss-in-script' Requests an RSS response that wraps an XML string in a script tag. updated_min: str (optional), RFC 3339 timestamp format, lower bounds. For example: 2005-08-09T10:57:00-08:00 updated_max: str (optional) updated time must be earlier than timestamp. pretty_print: boolean (optional) If True the server's XML response will be indented to make it more human readable. Defaults to False. published_min: str (optional), Similar to updated_min but for published time. published_max: str (optional), Similar to updated_max but for published time. start_index: int or str (optional) 1-based index of the first result to be retrieved. Note that this isn't a general cursoring mechanism. If you first send a query with ?start-index=1&max-results=10 and then send another query with ?start-index=11&max-results=10, the service cannot guarantee that the results are equivalent to ?start-index=1&max-results=20, because insertions and deletions could have taken place in between the two queries. max_results: int or str (optional) Maximum number of results to be retrieved. Each service has a default max (usually 25) which can vary from service to service. There is also a service-specific limit to the max_results you can fetch in a request. 
strict: boolean (optional) If True, the server will return an error if the server does not recognize any of the parameters in the request URL. Defaults to False. """ self.text_query = text_query self.categories = categories or [] self.author = author self.alt = alt self.updated_min = updated_min self.updated_max = updated_max self.pretty_print = pretty_print self.published_min = published_min self.published_max = published_max self.start_index = start_index self.max_results = max_results self.strict = strict def modify_request(self, http_request): _add_query_param('q', self.text_query, http_request) if self.categories: http_request.uri.query['categories'] = ','.join(self.categories) _add_query_param('author', self.author, http_request) _add_query_param('alt', self.alt, http_request) _add_query_param('updated-min', self.updated_min, http_request) _add_query_param('updated-max', self.updated_max, http_request) if self.pretty_print: http_request.uri.query['prettyprint'] = 'true' _add_query_param('published-min', self.published_min, http_request) _add_query_param('published-max', self.published_max, http_request) if self.start_index is not None: http_request.uri.query['start-index'] = str(self.start_index) if self.max_results is not None: http_request.uri.query['max-results'] = str(self.max_results) if self.strict: http_request.uri.query['strict'] = 'true' ModifyRequest = modify_request class GDQuery(atom.http_core.Uri): def _get_text_query(self): return self.query['q'] def _set_text_query(self, value): self.query['q'] = value text_query = property(_get_text_query, _set_text_query, doc='The q parameter for searching for an exact text match on content') # Version 1 code. SCOPE_URL_PARAM_NAME = gdata.service.SCOPE_URL_PARAM_NAME # Maps the service names used in ClientLogin to scope URLs. CLIENT_LOGIN_SCOPES = gdata.service.CLIENT_LOGIN_SCOPES class AuthorizationRequired(gdata.service.Error): pass class GDataClient(gdata.service.GDataService): """This class is deprecated. 
All functionality has been migrated to gdata.service.GDataService. """ @atom.deprecated('This class will be removed, use GDClient instead.') def __init__(self, application_name=None, tokens=None): gdata.service.GDataService.__init__(self, source=application_name, tokens=tokens) @atom.deprecated('The GDataClient class will be removed in a future release' ', use GDClient.ClientLogin instead') def ClientLogin(self, username, password, service_name, source=None, account_type=None, auth_url=None, login_token=None, login_captcha=None): gdata.service.GDataService.ClientLogin(self, username=username, password=password, account_type=account_type, service=service_name, auth_service_url=auth_url, source=source, captcha_token=login_token, captcha_response=login_captcha) @atom.deprecated('The GDataClient class will be removed in a future release' ', use GDClient.GetEntry or GDClient.GetFeed') def Get(self, url, parser): """Simplified interface for Get. Requires a parser function which takes the server response's body as the only argument. Args: url: A string or something that can be converted to a string using str. The URL of the requested resource. parser: A function which takes the HTTP body from the server as it's only result. Common values would include str, gdata.GDataEntryFromString, and gdata.GDataFeedFromString. Returns: The result of calling parser(http_response_body). """ return gdata.service.GDataService.Get(self, uri=url, converter=parser) @atom.deprecated('The GDataClient class will be removed in a future release' ', use GDClient.Post instead') def Post(self, data, url, parser, media_source=None): """Streamlined version of Post. Requires a parser function which takes the server response's body as the only argument. 
""" return gdata.service.GDataService.Post(self, data=data, uri=url, media_source=media_source, converter=parser) @atom.deprecated('The GDataClient class will be removed in a future release' ', use GDClient.Put instead') def Put(self, data, url, parser, media_source=None): """Streamlined version of Put. Requires a parser function which takes the server response's body as the only argument. """ return gdata.service.GDataService.Put(self, data=data, uri=url, media_source=media_source, converter=parser) @atom.deprecated('The GDataClient class will be removed in a future release' ', use GDClient.Delete instead') def Delete(self, url): return gdata.service.GDataService.Delete(self, uri=url) ExtractToken = gdata.service.ExtractToken GenerateAuthSubRequestUrl = gdata.service.GenerateAuthSubRequestUrl
yaroslavprogrammer/django-axes
refs/heads/master
axes/test_urls.py
29
from django.conf.urls import patterns, include from django.contrib import admin urlpatterns = patterns('', (r'^admin/', include(admin.site.urls)), )
arunpersaud/pico-python
refs/heads/master
examples/specgram_plot.py
3
# -*- coding: utf-8 # # Colin O'Flynn, Copyright (C) 2013. All Rights Reserved. <coflynn@newae.com> # import math import time import inspect import numpy as np from picoscope import ps6000 import pylab as plt import scipy import scipy.fftpack def fft(signal, freq): FFT = abs(scipy.fft(signal)) FFTdb = 20*scipy.log10(FFT) freqs = scipy.fftpack.fftfreq(len(signal), 1/freq) FFTdb = FFTdb[2:len(freqs)/2] freqs = freqs[2:len(freqs)/2] return (freqs, FFTdb) def examplePS6000(): fig=plt.figure() plt.ion() plt.show() print "Attempting to open..." ps = ps6000.PS6000() #Example of simple capture res = ps.setSamplingFrequency(250E6, 4096) sampleRate = res[0] print "Sampling @ %f MHz, %d samples"%(res[0]/1E6, res[1]) ps.setChannel("A", "AC", 50E-3) blockdata = np.array(0) for i in range(0, 50): ps.runBlock() while(ps.isReady() == False): time.sleep(0.01) print "Sampling Done" data = ps.getDataV("A", 4096) blockdata = np.append(blockdata, data) ##Simple FFT #print "FFT In Progress" #[freqs, FFTdb] = fft(data, res[0]) #plt.clf() #plt.plot(freqs, FFTdb) #plt.draw() start = (i - 5) * 4096 if start < 0: start = 0 #Spectrum Graph, keeps growing plt.clf() plt.specgram(blockdata[start:], NFFT=4096, Fs=res[0], noverlap=512) plt.xlabel('Measurement #') plt.ylabel('Frequency (Hz)') plt.draw() ps.close() if __name__ == "__main__": examplePS6000()
xsynergy510x/android_external_chromium_org
refs/heads/cm-12.1
components/policy/resources/PRESUBMIT.py
44
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # If this presubmit check fails or misbehaves, please complain to # mnissler@chromium.org, pastarmovj@chromium.org or joaodasilva@chromium.org. import itertools import sys import xml.dom.minidom def _GetPolicyTemplates(template_path): # Read list of policies in the template. eval() is used instead of a JSON # parser because policy_templates.json is not quite JSON, and uses some # python features such as #-comments and '''strings'''. policy_templates.json # is actually maintained as a python dictionary. with open(template_path) as f: template_data = eval(f.read(), {}) policies = ( policy for policy in template_data['policy_definitions'] if policy['type'] != 'group' ) groups = ( policy['policies'] for policy in template_data['policy_definitions'] if policy['type'] == 'group' ) subpolicies = ( policy for group in groups for policy in group ) return list(itertools.chain(policies, subpolicies)) def _CheckPolicyTemplatesSyntax(input_api, output_api): local_path = input_api.PresubmitLocalPath() filepath = input_api.os_path.join(local_path, 'policy_templates.json') if any(f.AbsoluteLocalPath() == filepath for f in input_api.AffectedFiles()): old_sys_path = sys.path try: tools_path = input_api.os_path.normpath( input_api.os_path.join(local_path, input_api.os_path.pardir, 'tools')) sys.path = [ tools_path ] + sys.path # Optimization: only load this when it's needed. import syntax_check_policy_template_json checker = syntax_check_policy_template_json.PolicyTemplateChecker() if checker.Run([], filepath) > 0: return [output_api.PresubmitError('Syntax error(s) in file:', [filepath])] finally: sys.path = old_sys_path return [] def _CheckPolicyTestCases(input_api, output_api, policies): # Read list of policies in chrome/test/data/policy/policy_test_cases.json. 
root = input_api.change.RepositoryRoot() policy_test_cases_file = input_api.os_path.join( root, 'chrome', 'test', 'data', 'policy', 'policy_test_cases.json') test_names = input_api.json.load(open(policy_test_cases_file)).keys() tested_policies = frozenset(name.partition('.')[0] for name in test_names if name[:2] != '--') policy_names = frozenset(policy['name'] for policy in policies) # Finally check if any policies are missing. missing = policy_names - tested_policies extra = tested_policies - policy_names error_missing = ('Policy \'%s\' was added to policy_templates.json but not ' 'to src/chrome/test/data/policy/policy_test_cases.json. ' 'Please update both files.') error_extra = ('Policy \'%s\' is tested by ' 'src/chrome/test/data/policy/policy_test_cases.json but is not' ' defined in policy_templates.json. Please update both files.') results = [] for policy in missing: results.append(output_api.PresubmitError(error_missing % policy)) for policy in extra: results.append(output_api.PresubmitError(error_extra % policy)) return results def _CheckPolicyHistograms(input_api, output_api, policies): root = input_api.change.RepositoryRoot() histograms = input_api.os_path.join( root, 'tools', 'metrics', 'histograms', 'histograms.xml') with open(histograms) as f: tree = xml.dom.minidom.parseString(f.read()) enums = (tree.getElementsByTagName('histogram-configuration')[0] .getElementsByTagName('enums')[0] .getElementsByTagName('enum')) policy_enum = [e for e in enums if e.getAttribute('name') == 'EnterprisePolicies'][0] policy_ids = frozenset([int(e.getAttribute('value')) for e in policy_enum.getElementsByTagName('int')]) error_missing = ('Policy \'%s\' was added to policy_templates.json but not ' 'to src/tools/metrics/histograms/histograms.xml. 
' 'Please update both files.') results = [] for policy in policies: if policy['id'] not in policy_ids: results.append(output_api.PresubmitError(error_missing % policy['name'])) return results def _CommonChecks(input_api, output_api): results = [] results.extend(_CheckPolicyTemplatesSyntax(input_api, output_api)) os_path = input_api.os_path local_path = input_api.PresubmitLocalPath() template_path = os_path.join(local_path, 'policy_templates.json') affected_files = input_api.AffectedFiles() if any(f.AbsoluteLocalPath() == template_path for f in affected_files): try: policies = _GetPolicyTemplates(template_path) except: results.append(output_api.PresubmitError('Invalid Python/JSON syntax.')) return results results.extend(_CheckPolicyTestCases(input_api, output_api, policies)) results.extend(_CheckPolicyHistograms(input_api, output_api, policies)) return results def CheckChangeOnUpload(input_api, output_api): return _CommonChecks(input_api, output_api) def CheckChangeOnCommit(input_api, output_api): return _CommonChecks(input_api, output_api)
hw20686832/simple_crawler
refs/heads/master
utils/logger.py
1
# coding: utf-8
import logging


class Logger(object):
    """Factory producing loggers that share one consistently formatted
    stream handler."""

    def __init__(self):
        # A single shared StreamHandler so every logger obtained from this
        # factory writes records with the same format.
        self.handler = logging.StreamHandler()
        formatter = logging.Formatter(
            "%(asctime)s [%(name)s]-%(levelname)s: %(message)s")
        self.handler.setFormatter(formatter)

    def getlog(self, logger="crawler", level=None):
        """Return the named logger configured with the shared handler.

        Args:
            logger: logger name to fetch (default "crawler").
            level: explicit logging level; any falsy value means DEBUG.

        Returns:
            logging.Logger: the configured logger instance.
        """
        # Renamed local to avoid shadowing the 'logger' parameter.
        log = logging.getLogger(logger)
        log.setLevel(level if level else logging.DEBUG)
        # Bug fix: calling getlog() repeatedly for the same name used to
        # attach the same handler again each time, duplicating every
        # emitted record.  Attach it only once.
        if self.handler not in log.handlers:
            log.addHandler(self.handler)
        return log
dvberkel/servo
refs/heads/master
tests/wpt/web-platform-tests/tools/wptserve/wptserve/logger.py
489
class NoOpLogger(object):
    """Logger stand-in: every method accepts a message and discards it."""

    def critical(self, msg):
        """Discard a critical-level message."""
        pass

    def error(self, msg):
        """Discard an error-level message."""
        pass

    def info(self, msg):
        """Discard an info-level message."""
        pass

    def warning(self, msg):
        """Discard a warning-level message."""
        pass

    def debug(self, msg):
        """Discard a debug-level message."""
        pass


# Module-wide logger; starts as a no-op and may be replaced exactly once.
logger = NoOpLogger()
_set_logger = False


def set_logger(new_logger):
    """Install *new_logger* as the module logger.

    Raises:
        Exception: if a logger has already been installed.
    """
    global logger, _set_logger
    if _set_logger:
        raise Exception("Logger must be set at most once")
    logger = new_logger
    _set_logger = True


def get_logger():
    """Return the currently installed module logger."""
    return logger
barbuza/django
refs/heads/master
tests/migrations/migrations_test_apps/lookuperror_a/migrations/0004_a4.py
381
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('lookuperror_a', '0003_a3'), ] operations = [ migrations.CreateModel( name='A4', fields=[ ('id', models.AutoField(auto_created=True, serialize=False, verbose_name='ID', primary_key=True)), ], ), ]
towerjoo/mindsbook
refs/heads/master
django/conf/urls/shortcut.py
353
from django.conf.urls.defaults import *

# Route "<content_type_id>/<object_id>/" to the 'defaults.shortcut' view,
# resolved under the 'django.views' prefix.  NOTE(review): presumably this
# redirects to the target object's canonical URL — confirm against
# django.views.defaults in this Django version.
urlpatterns = patterns('django.views',
    (r'^(?P<content_type_id>\d+)/(?P<object_id>.*)/$', 'defaults.shortcut'),
)
Onirik79/aaritmud
refs/heads/master
data/proto_items/carrozzone-zingaro/__init__.py
12133432
chuckoy/monopoly-cash-tracker
refs/heads/master
monopoly/__init__.py
12133432
centrumholdings/cthulhubot
refs/heads/master
tests/unit_project/tests/test_particular_commands.py
1
from djangosanetesting import UnitTestCase from mock import Mock from django.conf import settings from cthulhubot.commands import get_available_commands, get_command, get_undiscovered_commands from cthulhubot.mongo import get_database_name from cthulhubot.models import Project from cthulhubot.commands import Git, ADDITIONAL_COMMANDS class TestingGitWithDefaultParameters(Git): identifier = 'cthulhubot-test-git-defaulted' parameters = { 'repository' : { 'help' : u'Out git', 'value' : 'ssh://our.server.tld/GIT/$name', }, 'branch' : { 'help' : u'Branch to export', 'value' : 'automation', } } class TestGit(UnitTestCase): def setUp(self): if TestingGitWithDefaultParameters.identifier not in ADDITIONAL_COMMANDS: ADDITIONAL_COMMANDS[TestingGitWithDefaultParameters.identifier] = TestingGitWithDefaultParameters self.command = get_command('cthulhubot-git')() self.sub_command = get_command('cthulhubot-test-git-defaulted')() self.repository_uri = '/tmp/repo.git' self.project = Project(name='test', slug='test', tracker_uri='http://example.com', repository_uri=self.repository_uri) def test_discovered(self): assert self.command is not None def test_helper_command_discovered(self): assert self.sub_command is not None def test_parent_argument_taken_by_default(self): self.assert_equals('export', self.sub_command.get_buildbot_command().args['mode']) def test_overwritten_arg_taken_over_default(self): self.assert_equals('ssh://our.server.tld/GIT/$name', self.sub_command.get_buildbot_command().repourl) def test_overwritten_arg_taken_over_default_when_parent_has_none(self): self.assert_equals('automation', self.sub_command.get_buildbot_command().args['branch']) def test_given_args_takes_precedence_over_class_defaults(self): repo = 'ssh://our.server.tld/GIT/myrepo.git' self.assert_equals(repo, self.sub_command.get_buildbot_command(config={'repository' : repo}).repourl) def test_git_uri_taken_from_project_by_default(self): self.assert_equals(self.repository_uri, 
self.command.get_buildbot_command(project=self.project).repourl) def test_git_uri_hierarchically_parent_first(self): self.assert_equals('ssh://our.server.tld/GIT/$name', self.sub_command.get_buildbot_command(project=self.project).repourl) class TestUpdateRepositoryInformation(UnitTestCase): def setUp(self): super(TestUpdateRepositoryInformation, self).setUp() self.command = get_command('cthulhubot-update-repository-info')() self.mongo_config = { "host" : "host", "port" : 20000, "username" : "user", "password" : "heslo", "database_name" : "db", } self.project = Mock() self.project.repository_uri = '/tmp/repo.git' self.original_config = {} self._mock_mongo_settings() def _mock_mongo_settings(self): for key in self.mongo_config: setting = "MONGODB_%s" % key.upper() # we're test... if key == 'database_name': setting = "TEST_" + setting if hasattr(settings, setting): self.original_config[setting] = getattr(settings, setting) setattr(settings, setting, self.mongo_config[key]) def _unmock_mongo_settings(self): for key in self.mongo_config: setting = "MONGODB_%s" % key.upper() # we're test... 
if key == 'database_name': setting = "TEST_" + setting if setting in self.original_config: setattr(settings, setting, self.original_config[setting]) elif hasattr(settings, setting): delattr(settings._wrapped, setting) def test_command_configured(self): self.assert_equals([ "python", "setup.py", "save_repository_information_git", "--mongodb-host=%s" % self.mongo_config['host'], "--mongodb-port=%s" % self.mongo_config['port'], "--mongodb-database=%s" % self.mongo_config['database_name'], "--mongodb-collection=repository", "--mongodb-username=%s" % self.mongo_config['username'], "--mongodb-password=%s" % self.mongo_config['password'], "--repository-uri=%s" % self.project.repository_uri, ], self.command.get_shell_command(project=self.project) ) def test_empty_params_ommited(self): settings.MONGODB_USERNAME = None settings.MONGODB_PASSWORD = None self.assert_equals([ "python", "setup.py", "save_repository_information_git", "--mongodb-host=%s" % self.mongo_config['host'], "--mongodb-port=%s" % self.mongo_config['port'], "--mongodb-database=%s" % self.mongo_config['database_name'], "--mongodb-collection=repository", "--repository-uri=%s" % self.project.repository_uri, ], self.command.get_shell_command(project=self.project) ) def test_repository_uri_required(self): self.assert_raises(ValueError, self.command.get_shell_command) def tearDown(self): self._unmock_mongo_settings() super(TestUpdateRepositoryInformation, self).tearDown()
timm/timmnix
refs/heads/master
pypy3-v5.5.0-linux64/lib-python/3/test/test_eof.py
88
"""test script for a few new invalid token catches""" import unittest from test import support class EOFTestCase(unittest.TestCase): def test_EOFC(self): expect = "EOL while scanning string literal (<string>, line 1)" try: eval("""'this is a test\ """) except SyntaxError as msg: self.assertEqual(str(msg), expect) else: raise support.TestFailed def test_EOFS(self): expect = ("EOF while scanning triple-quoted string literal " "(<string>, line 1)") try: eval("""'''this is a test""") except SyntaxError as msg: self.assertEqual(str(msg), expect) else: raise support.TestFailed def test_main(): support.run_unittest(EOFTestCase) if __name__ == "__main__": test_main()
NSAmelchev/ignite
refs/heads/master
modules/platforms/python/pyignite/datatypes/__init__.py
11
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This module contains classes, used internally by `pyignite` for parsing and creating binary data. """ from .complex import * from .internal import * from .null_object import * from .primitive import * from .primitive_arrays import * from .primitive_objects import * from .standard import *
SaschaMester/delicium
refs/heads/master
tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
1
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import base64 import logging import urlparse from common.chrome_proxy_measurements import ChromeProxyValidation from integration_tests import chrome_proxy_metrics as metrics from metrics import loading from telemetry.core import exceptions from telemetry.page import page_test class ChromeProxyDataSaving(page_test.PageTest): """Chrome proxy data saving measurement.""" def __init__(self, *args, **kwargs): super(ChromeProxyDataSaving, self).__init__(*args, **kwargs) self._metrics = metrics.ChromeProxyMetric() self._enable_proxy = True def CustomizeBrowserOptions(self, options): if self._enable_proxy: options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth') def WillNavigateToPage(self, page, tab): tab.ClearCache(force=True) self._metrics.Start(page, tab) def ValidateAndMeasurePage(self, page, tab, results): # Wait for the load event. 
tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) self._metrics.Stop(page, tab) self._metrics.AddResultsForDataSaving(tab, results) class ChromeProxyHeaders(ChromeProxyValidation): """Correctness measurement for response headers.""" def __init__(self): super(ChromeProxyHeaders, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def AddResults(self, tab, results): self._metrics.AddResultsForHeaderValidation(tab, results) class ChromeProxyBypass(ChromeProxyValidation): """Correctness measurement for bypass responses.""" def __init__(self): super(ChromeProxyBypass, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def AddResults(self, tab, results): self._metrics.AddResultsForBypass(tab, results) class ChromeProxyCorsBypass(ChromeProxyValidation): """Correctness measurement for bypass responses for CORS requests.""" def __init__(self): super(ChromeProxyCorsBypass, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def ValidateAndMeasurePage(self, page, tab, results): # The test page sets window.xhrRequestCompleted to true when the XHR fetch # finishes. 
tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 300) super(ChromeProxyCorsBypass, self).ValidateAndMeasurePage(page, tab, results) def AddResults(self, tab, results): self._metrics.AddResultsForCorsBypass(tab, results) class ChromeProxyBlockOnce(ChromeProxyValidation): """Correctness measurement for block-once responses.""" def __init__(self): super(ChromeProxyBlockOnce, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def AddResults(self, tab, results): self._metrics.AddResultsForBlockOnce(tab, results) class ChromeProxySafebrowsingOn(ChromeProxyValidation): """Correctness measurement for safebrowsing.""" def __init__(self): super(ChromeProxySafebrowsingOn, self).__init__( metrics=metrics.ChromeProxyMetric()) def AddResults(self, tab, results): self._metrics.AddResultsForSafebrowsingOn(tab, results) class ChromeProxySafebrowsingOff(ChromeProxyValidation): """Correctness measurement for safebrowsing.""" def __init__(self): super(ChromeProxySafebrowsingOff, self).__init__( metrics=metrics.ChromeProxyMetric()) def AddResults(self, tab, results): self._metrics.AddResultsForSafebrowsingOff(tab, results) _FAKE_PROXY_AUTH_VALUE = 'aabbccdd3b7579186c1b0620614fdb1f0000ffff' _TEST_SERVER = 'chromeproxy-test.appspot.com' _TEST_SERVER_DEFAULT_URL = 'http://' + _TEST_SERVER + '/default' # We rely on the chromeproxy-test server to facilitate some of the tests. # The test server code is at <TBD location> and runs at _TEST_SERVER # # The test server allow request to override response status, headers, and # body through query parameters. See GetResponseOverrideURL. def GetResponseOverrideURL(url=_TEST_SERVER_DEFAULT_URL, respStatus=0, respHeader="", respBody=""): """ Compose the request URL with query parameters to override the chromeproxy-test server response. 
""" queries = [] if respStatus > 0: queries.append('respStatus=%d' % respStatus) if respHeader: queries.append('respHeader=%s' % base64.b64encode(respHeader)) if respBody: queries.append('respBody=%s' % base64.b64encode(respBody)) if len(queries) == 0: return url "&".join(queries) # url has query already if urlparse.urlparse(url).query: return url + '&' + "&".join(queries) else: return url + '?' + "&".join(queries) class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation): """Correctness measurement for proxy fallback. In this test, the probe URL does not return 'OK'. Chrome is expected to use the fallback proxy. """ def __init__(self): super(ChromeProxyHTTPFallbackProbeURL, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyHTTPFallbackProbeURL, self).CustomizeBrowserOptions(options) # Set the secure proxy check URL to the google.com favicon, which will be # interpreted as a secure proxy check failure since the response body is not # "OK". The google.com favicon is used because it will load reliably fast, # and there have been problems with chromeproxy-test.appspot.com being slow # and causing tests to flake. options.AppendExtraBrowserArgs( '--data-reduction-proxy-secure-proxy-check-url=' 'http://www.google.com/favicon.ico') def AddResults(self, tab, results): self._metrics.AddResultsForHTTPFallback(tab, results) class ChromeProxyHTTPFallbackViaHeader(ChromeProxyValidation): """Correctness measurement for proxy fallback. In this test, the configured proxy is the chromeproxy-test server which will send back a response without the expected Via header. Chrome is expected to use the fallback proxy and add the configured proxy to the bad proxy list. 
""" def __init__(self): super(ChromeProxyHTTPFallbackViaHeader, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyHTTPFallbackViaHeader, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--ignore-certificate-errors') options.AppendExtraBrowserArgs( '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER) def AddResults(self, tab, results): self._metrics.AddResultsForHTTPFallback(tab, results) class ChromeProxyClientVersion(ChromeProxyValidation): """Correctness measurement for version directives in Chrome-Proxy header. The test verifies that the version information provided in the Chrome-Proxy request header overrides any version, if specified, that is provided in the user agent string. """ def __init__(self): super(ChromeProxyClientVersion, self).__init__( metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyClientVersion, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--user-agent="Chrome/32.0.1700.99"') def AddResults(self, tab, results): self._metrics.AddResultsForClientVersion(tab, results) class ChromeProxyClientType(ChromeProxyValidation): """Correctness measurement for Chrome-Proxy header client type directives.""" def __init__(self): super(ChromeProxyClientType, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) self._chrome_proxy_client_type = None def AddResults(self, tab, results): # Get the Chrome-Proxy client type from the first page in the page set, so # that the client type value can be used to determine which of the later # pages in the page set should be bypassed. 
if not self._chrome_proxy_client_type: client_type = self._metrics.GetClientTypeFromRequests(tab) if client_type: self._chrome_proxy_client_type = client_type self._metrics.AddResultsForClientType(tab, results, self._chrome_proxy_client_type, self._page.bypass_for_client_type) class ChromeProxyLoFi(ChromeProxyValidation): """Correctness measurement for Lo-Fi in Chrome-Proxy header.""" def __init__(self): super(ChromeProxyLoFi, self).__init__(restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyLoFi, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--data-reduction-proxy-lo-fi=always-on') def AddResults(self, tab, results): self._metrics.AddResultsForLoFi(tab, results) class ChromeProxyExpDirective(ChromeProxyValidation): """Correctness measurement for experiment directives in Chrome-Proxy header. This test verifies that "exp=test" in the Chrome-Proxy request header causes a bypass on the experiment test page. """ def __init__(self): super(ChromeProxyExpDirective, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyExpDirective, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--data-reduction-proxy-experiment=test') def AddResults(self, tab, results): self._metrics.AddResultsForBypass(tab, results, url_pattern='/exp/') class ChromeProxyPassThrough(ChromeProxyValidation): """Correctness measurement for Chrome-Proxy pass-through directives. This test verifies that "pass-through" in the Chrome-Proxy request header causes a resource to be loaded without Data Reduction Proxy transformations. 
""" def __init__(self): super(ChromeProxyPassThrough, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyPassThrough, self).CustomizeBrowserOptions(options) def AddResults(self, tab, results): self._metrics.AddResultsForPassThrough(tab, results) class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation): """Correctness measurement for HTTP proxy fallback to direct.""" def __init__(self): super(ChromeProxyHTTPToDirectFallback, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyHTTPToDirectFallback, self).CustomizeBrowserOptions(options) # Set the primary proxy to something that will fail to be resolved so that # this test will run using the HTTP fallback proxy. options.AppendExtraBrowserArgs( '--spdy-proxy-auth-origin=http://nonexistent.googlezip.net') def WillNavigateToPage(self, page, tab): super(ChromeProxyHTTPToDirectFallback, self).WillNavigateToPage(page, tab) # Attempt to load a page through the nonexistent primary proxy in order to # cause a proxy fallback, and have this test run starting from the HTTP # fallback proxy. tab.Navigate(_TEST_SERVER_DEFAULT_URL) tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) def AddResults(self, tab, results): self._metrics.AddResultsForHTTPToDirectFallback(tab, results, _TEST_SERVER) class ChromeProxyReenableAfterBypass(ChromeProxyValidation): """Correctness measurement for re-enabling proxies after bypasses. This test loads a page that causes all data reduction proxies to be bypassed for 1 to 5 minutes, then waits 5 minutes and verifies that the proxy is no longer bypassed. 
""" def __init__(self): super(ChromeProxyReenableAfterBypass, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def AddResults(self, tab, results): self._metrics.AddResultsForReenableAfterBypass( tab, results, self._page.bypass_seconds_min, self._page.bypass_seconds_max) class ChromeProxySmoke(ChromeProxyValidation): """Smoke measurement for basic chrome proxy correctness.""" def __init__(self): super(ChromeProxySmoke, self).__init__(restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def WillNavigateToPage(self, page, tab): super(ChromeProxySmoke, self).WillNavigateToPage(page, tab) def AddResults(self, tab, results): # Map a page name to its AddResults func. page_to_metrics = { 'header validation': [self._metrics.AddResultsForHeaderValidation], 'compression: image': [ self._metrics.AddResultsForHeaderValidation, self._metrics.AddResultsForDataSaving, ], 'compression: javascript': [ self._metrics.AddResultsForHeaderValidation, self._metrics.AddResultsForDataSaving, ], 'compression: css': [ self._metrics.AddResultsForHeaderValidation, self._metrics.AddResultsForDataSaving, ], 'bypass': [self._metrics.AddResultsForBypass], } if not self._page.name in page_to_metrics: raise page_test.MeasurementFailure( 'Invalid page name (%s) in smoke. 
Page name must be one of:\n%s' % ( self._page.name, page_to_metrics.keys())) for add_result in page_to_metrics[self._page.name]: add_result(tab, results) PROXIED = metrics.PROXIED DIRECT = metrics.DIRECT class ChromeProxyClientConfig(ChromeProxyValidation): """Chrome proxy client configuration service validation.""" def __init__(self): super(ChromeProxyClientConfig, self).__init__( restart_after_each_page=True, metrics=metrics.ChromeProxyMetric()) def CustomizeBrowserOptions(self, options): super(ChromeProxyClientConfig, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs( '--enable-data-reduction-proxy-config-client') def AddResults(self, tab, results): self._metrics.AddResultsForClientConfig(tab, results) class ChromeProxyVideoValidation(page_test.PageTest): """Validation for video pages. Measures pages using metrics.ChromeProxyVideoMetric. Pages can be fetched either direct from the origin server or via the proxy. If a page is fetched both ways, then the PROXIED and DIRECT measurements are compared to ensure the same video was loaded in both cases. """ def __init__(self): super(ChromeProxyVideoValidation, self).__init__( needs_browser_restart_after_each_page=True, clear_cache_before_each_run=True) # The type is _allMetrics[url][PROXIED,DIRECT][metricName] = value, # where (metricName,value) is a metric computed by videowrapper.js. self._allMetrics = {} def CustomizeBrowserOptionsForSinglePage(self, page, options): if page.use_chrome_proxy: options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth') def DidNavigateToPage(self, page, tab): self._currMetrics = metrics.ChromeProxyVideoMetric(tab) self._currMetrics.Start(page, tab) def ValidateAndMeasurePage(self, page, tab, results): assert self._currMetrics self._currMetrics.Stop(page, tab) if page.url not in self._allMetrics: self._allMetrics[page.url] = {} # Verify this page. 
if page.use_chrome_proxy: self._currMetrics.AddResultsForProxied(tab, results) self._allMetrics[page.url][PROXIED] = self._currMetrics.videoMetrics else: self._currMetrics.AddResultsForDirect(tab, results) self._allMetrics[page.url][DIRECT] = self._currMetrics.videoMetrics self._currMetrics = None # Compare proxied and direct results for this url, if they exist. m = self._allMetrics[page.url] if PROXIED in m and DIRECT in m: self._CompareProxiedAndDirectMetrics(page.url, m[PROXIED], m[DIRECT]) def _CompareProxiedAndDirectMetrics(self, url, pm, dm): """Compare metrics from PROXIED and DIRECT fetches. Compares video metrics computed by videowrapper.js for pages that were fetch both PROXIED and DIRECT. Args: url: The url for the page being tested. pm: Metrics when loaded by the Flywheel proxy. dm: Metrics when loaded directly from the origin server. Raises: ChromeProxyMetricException on failure. """ def err(s): raise ChromeProxyMetricException, s if not pm['ready']: err('Proxied page did not load video: %s' % page.url) if not dm['ready']: err('Direct page did not load video: %s' % page.url) # Compare metrics that should match for PROXIED and DIRECT. for x in ('video_height', 'video_width', 'video_duration', 'decoded_frames'): if x not in pm: err('Proxied page has no %s: %s' % (x, page.url)) if x not in dm: err('Direct page has no %s: %s' % (x, page.url)) if pm[x] != dm[x]: err('Mismatch for %s (proxied=%s direct=%s): %s' % (x, str(pm[x]), str(dm[x]), page.url)) # Proxied XOCL should match direct CL. 
pxocl = pm['x_original_content_length_header'] dcl = dm['content_length_header'] if pxocl != dcl: err('Mismatch for content length (proxied=%s direct=%s): %s' % (str(pxocl), str(dcl), page.url)) class ChromeProxyInstrumentedVideoValidation(page_test.PageTest): """Tests a specially instrumented page for correct video transcoding.""" def __init__(self): super(ChromeProxyInstrumentedVideoValidation, self).__init__( needs_browser_restart_after_each_page=True, clear_cache_before_each_run=True) self._metrics = metrics.ChromeProxyInstrumentedVideoMetric() def CustomizeBrowserOptions(self, options): options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth') def WillNavigateToPage(self, page, tab): tab.ClearCache(force=True) self._metrics.Start(page, tab) def ValidateAndMeasurePage(self, page, tab, results): self._metrics.Stop(page, tab) self._metrics.AddResults(tab, results)
brian-l/django-1.4.10
refs/heads/master
django/forms/extras/widgets.py
88
""" Extra HTML Widget classes """ import datetime import re from django.forms.widgets import Widget, Select from django.utils import datetime_safe from django.utils.dates import MONTHS from django.utils.safestring import mark_safe from django.utils.formats import get_format from django.conf import settings __all__ = ('SelectDateWidget',) RE_DATE = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$') def _parse_date_fmt(): fmt = get_format('DATE_FORMAT') escaped = False output = [] for char in fmt: if escaped: escaped = False elif char == '\\': escaped = True elif char in 'Yy': output.append('year') #if not self.first_select: self.first_select = 'year' elif char in 'bEFMmNn': output.append('month') #if not self.first_select: self.first_select = 'month' elif char in 'dj': output.append('day') #if not self.first_select: self.first_select = 'day' return output class SelectDateWidget(Widget): """ A Widget that splits date input into three <select> boxes. This also serves as an example of a Widget that has more than one HTML element and hence implements value_from_datadict. """ none_value = (0, '---') month_field = '%s_month' day_field = '%s_day' year_field = '%s_year' def __init__(self, attrs=None, years=None, required=True): # years is an optional list/tuple of years to use in the "year" select box. 
self.attrs = attrs or {} self.required = required if years: self.years = years else: this_year = datetime.date.today().year self.years = range(this_year, this_year+10) def render(self, name, value, attrs=None): try: year_val, month_val, day_val = value.year, value.month, value.day except AttributeError: year_val = month_val = day_val = None if isinstance(value, basestring): if settings.USE_L10N: try: input_format = get_format('DATE_INPUT_FORMATS')[0] v = datetime.datetime.strptime(value, input_format) year_val, month_val, day_val = v.year, v.month, v.day except ValueError: pass else: match = RE_DATE.match(value) if match: year_val, month_val, day_val = [int(v) for v in match.groups()] choices = [(i, i) for i in self.years] year_html = self.create_select(name, self.year_field, value, year_val, choices) choices = MONTHS.items() month_html = self.create_select(name, self.month_field, value, month_val, choices) choices = [(i, i) for i in range(1, 32)] day_html = self.create_select(name, self.day_field, value, day_val, choices) output = [] for field in _parse_date_fmt(): if field == 'year': output.append(year_html) elif field == 'month': output.append(month_html) elif field == 'day': output.append(day_html) return mark_safe(u'\n'.join(output)) def id_for_label(self, id_): first_select = None field_list = _parse_date_fmt() if field_list: first_select = field_list[0] if first_select is not None: return '%s_%s' % (id_, first_select) else: return '%s_month' % id_ def value_from_datadict(self, data, files, name): y = data.get(self.year_field % name) m = data.get(self.month_field % name) d = data.get(self.day_field % name) if y == m == d == "0": return None if y and m and d: if settings.USE_L10N: input_format = get_format('DATE_INPUT_FORMATS')[0] try: date_value = datetime.date(int(y), int(m), int(d)) except ValueError: return '%s-%s-%s' % (y, m, d) else: date_value = datetime_safe.new_date(date_value) return date_value.strftime(input_format) else: return '%s-%s-%s' % (y, m, 
d) return data.get(name, None) def create_select(self, name, field, value, val, choices): if 'id' in self.attrs: id_ = self.attrs['id'] else: id_ = 'id_%s' % name if not (self.required and val): choices.insert(0, self.none_value) local_attrs = self.build_attrs(id=field % id_) s = Select(choices=choices) select_html = s.render(field % name, val, local_attrs) return select_html def _has_changed(self, initial, data): try: input_format = get_format('DATE_INPUT_FORMATS')[0] data = datetime_safe.datetime.strptime(data, input_format).date() except (TypeError, ValueError): pass return super(SelectDateWidget, self)._has_changed(initial, data)
mattiacarpin/opportunistic
refs/heads/master
src/flow-monitor/examples/wifi-olsr-flowmon.py
108
# -*- Mode: Python; -*- # Copyright (c) 2009 INESC Porto # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation; # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # Authors: Gustavo Carneiro <gjc@inescporto.pt> import sys import ns.applications import ns.core import ns.flow_monitor import ns.internet import ns.mobility import ns.network import ns.olsr import ns.wifi try: import ns.visualizer except ImportError: pass DISTANCE = 100 # (m) NUM_NODES_SIDE = 3 def main(argv): cmd = ns.core.CommandLine() cmd.NumNodesSide = None cmd.AddValue("NumNodesSide", "Grid side number of nodes (total number of nodes will be this number squared)") cmd.Results = None cmd.AddValue("Results", "Write XML results to file") cmd.Plot = None cmd.AddValue("Plot", "Plot the results using the matplotlib python module") cmd.Parse(argv) wifi = ns.wifi.WifiHelper.Default() wifiMac = ns.wifi.NqosWifiMacHelper.Default() wifiPhy = ns.wifi.YansWifiPhyHelper.Default() wifiChannel = ns.wifi.YansWifiChannelHelper.Default() wifiPhy.SetChannel(wifiChannel.Create()) ssid = ns.wifi.Ssid("wifi-default") wifi.SetRemoteStationManager("ns3::ArfWifiManager") wifiMac.SetType ("ns3::AdhocWifiMac", "Ssid", ns.wifi.SsidValue(ssid)) internet = ns.internet.InternetStackHelper() list_routing = ns.internet.Ipv4ListRoutingHelper() olsr_routing = ns.olsr.OlsrHelper() static_routing = ns.internet.Ipv4StaticRoutingHelper() list_routing.Add(static_routing, 0) list_routing.Add(olsr_routing, 100) 
internet.SetRoutingHelper(list_routing) ipv4Addresses = ns.internet.Ipv4AddressHelper() ipv4Addresses.SetBase(ns.network.Ipv4Address("10.0.0.0"), ns.network.Ipv4Mask("255.255.255.0")) port = 9 # Discard port(RFC 863) onOffHelper = ns.applications.OnOffHelper("ns3::UdpSocketFactory", ns.network.Address(ns.network.InetSocketAddress(ns.network.Ipv4Address("10.0.0.1"), port))) onOffHelper.SetAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate("100kbps"))) onOffHelper.SetAttribute("OnTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=1]")) onOffHelper.SetAttribute("OffTime", ns.core.StringValue ("ns3::ConstantRandomVariable[Constant=0]")) addresses = [] nodes = [] if cmd.NumNodesSide is None: num_nodes_side = NUM_NODES_SIDE else: num_nodes_side = int(cmd.NumNodesSide) for xi in range(num_nodes_side): for yi in range(num_nodes_side): node = ns.network.Node() nodes.append(node) internet.Install(ns.network.NodeContainer(node)) mobility = ns.mobility.ConstantPositionMobilityModel() mobility.SetPosition(ns.core.Vector(xi*DISTANCE, yi*DISTANCE, 0)) node.AggregateObject(mobility) devices = wifi.Install(wifiPhy, wifiMac, node) ipv4_interfaces = ipv4Addresses.Assign(devices) addresses.append(ipv4_interfaces.GetAddress(0)) for i, node in enumerate(nodes): destaddr = addresses[(len(addresses) - 1 - i) % len(addresses)] #print i, destaddr onOffHelper.SetAttribute("Remote", ns.network.AddressValue(ns.network.InetSocketAddress(destaddr, port))) app = onOffHelper.Install(ns.network.NodeContainer(node)) urv = ns.core.UniformRandomVariable() app.Start(ns.core.Seconds(urv.GetValue(20, 30))) #internet.EnablePcapAll("wifi-olsr") flowmon_helper = ns.flow_monitor.FlowMonitorHelper() #flowmon_helper.SetMonitorAttribute("StartTime", ns.core.TimeValue(ns.core.Seconds(31))) monitor = flowmon_helper.InstallAll() monitor = flowmon_helper.GetMonitor() monitor.SetAttribute("DelayBinWidth", ns.core.DoubleValue(0.001)) monitor.SetAttribute("JitterBinWidth", 
ns.core.DoubleValue(0.001)) monitor.SetAttribute("PacketSizeBinWidth", ns.core.DoubleValue(20)) ns.core.Simulator.Stop(ns.core.Seconds(44.0)) ns.core.Simulator.Run() def print_stats(os, st): print >> os, " Tx Bytes: ", st.txBytes print >> os, " Rx Bytes: ", st.rxBytes print >> os, " Tx Packets: ", st.txPackets print >> os, " Rx Packets: ", st.rxPackets print >> os, " Lost Packets: ", st.lostPackets if st.rxPackets > 0: print >> os, " Mean{Delay}: ", (st.delaySum.GetSeconds() / st.rxPackets) print >> os, " Mean{Jitter}: ", (st.jitterSum.GetSeconds() / (st.rxPackets-1)) print >> os, " Mean{Hop Count}: ", float(st.timesForwarded) / st.rxPackets + 1 if 0: print >> os, "Delay Histogram" for i in range(st.delayHistogram.GetNBins () ): print >> os, " ",i,"(", st.delayHistogram.GetBinStart (i), "-", \ st.delayHistogram.GetBinEnd (i), "): ", st.delayHistogram.GetBinCount (i) print >> os, "Jitter Histogram" for i in range(st.jitterHistogram.GetNBins () ): print >> os, " ",i,"(", st.jitterHistogram.GetBinStart (i), "-", \ st.jitterHistogram.GetBinEnd (i), "): ", st.jitterHistogram.GetBinCount (i) print >> os, "PacketSize Histogram" for i in range(st.packetSizeHistogram.GetNBins () ): print >> os, " ",i,"(", st.packetSizeHistogram.GetBinStart (i), "-", \ st.packetSizeHistogram.GetBinEnd (i), "): ", st.packetSizeHistogram.GetBinCount (i) for reason, drops in enumerate(st.packetsDropped): print " Packets dropped by reason %i: %i" % (reason, drops) #for reason, drops in enumerate(st.bytesDropped): # print "Bytes dropped by reason %i: %i" % (reason, drops) monitor.CheckForLostPackets() classifier = flowmon_helper.GetClassifier() if cmd.Results is None: for flow_id, flow_stats in monitor.GetFlowStats(): t = classifier.FindFlow(flow_id) proto = {6: 'TCP', 17: 'UDP'} [t.protocol] print "FlowID: %i (%s %s/%s --> %s/%i)" % \ (flow_id, proto, t.sourceAddress, t.sourcePort, t.destinationAddress, t.destinationPort) print_stats(sys.stdout, flow_stats) else: print 
monitor.SerializeToXmlFile(cmd.Results, True, True) if cmd.Plot is not None: import pylab delays = [] for flow_id, flow_stats in monitor.GetFlowStats(): tupl = classifier.FindFlow(flow_id) if tupl.protocol == 17 and tupl.sourcePort == 698: continue delays.append(flow_stats.delaySum.GetSeconds() / flow_stats.rxPackets) pylab.hist(delays, 20) pylab.xlabel("Delay (s)") pylab.ylabel("Number of Flows") pylab.show() return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
40223112/w16test
refs/heads/master
static/Brython3.1.3-20150514-095342/Lib/unittest/test/test_program.py
738
"""White-box tests for unittest.TestProgram (command-line entry point).

Covers discovery from dotted paths, exit/no-exit behaviour of
``unittest.main``, and ``parseArgs``/``runTests`` handling of the
command-line flags.
"""
import io
import os
import sys
import unittest


class Test_TestProgram(unittest.TestCase):

    def test_discovery_from_dotted_path(self):
        # Stub out the loader's private _find_tests so discovery resolves the
        # dotted package name to a path without touching the filesystem.
        loader = unittest.TestLoader()
        tests = [self]
        # NOTE(review): relies on `unittest.test` being an importable
        # subpackage in this distribution — confirm it exists here.
        expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__))

        self.wasRun = False
        def _find_tests(start_dir, pattern):
            self.wasRun = True
            self.assertEqual(start_dir, expectedPath)
            return tests
        loader._find_tests = _find_tests
        suite = loader.discover('unittest.test')
        self.assertTrue(self.wasRun)
        self.assertEqual(suite._tests, tests)

    # Horrible white box test
    def testNoExit(self):
        result = object()
        test = object()

        class FakeRunner(object):
            def run(self, test):
                self.test = test
                return result

        runner = FakeRunner()

        # Neutralise parseArgs so constructing TestProgram does not parse
        # the real sys.argv; cleanup restores the original classmethod.
        oldParseArgs = unittest.TestProgram.parseArgs
        def restoreParseArgs():
            unittest.TestProgram.parseArgs = oldParseArgs
        unittest.TestProgram.parseArgs = lambda *args: None
        self.addCleanup(restoreParseArgs)

        # Plant a class-level `test` attribute so runTests has something
        # to hand to the runner; removed again on cleanup.
        def removeTest():
            del unittest.TestProgram.test
        unittest.TestProgram.test = test
        self.addCleanup(removeTest)

        program = unittest.TestProgram(testRunner=runner, exit=False,
                                       verbosity=2)

        self.assertEqual(program.result, result)
        self.assertEqual(runner.test, test)
        self.assertEqual(program.verbosity, 2)

    class FooBar(unittest.TestCase):
        # Fixture case: one passing and one failing test.
        def testPass(self):
            assert True
        def testFail(self):
            assert False

    class FooBarLoader(unittest.TestLoader):
        """Test loader that returns a suite containing FooBar."""
        def loadTestsFromModule(self, module):
            return self.suiteClass(
                [self.loadTestsFromTestCase(Test_TestProgram.FooBar)])

    def test_NonExit(self):
        # exit=False must return the program object instead of raising
        # SystemExit, even though FooBar contains a failing test.
        program = unittest.main(exit=False,
                                argv=["foobar"],
                                testRunner=unittest.TextTestRunner(
                                    stream=io.StringIO()),
                                testLoader=self.FooBarLoader())
        self.assertTrue(hasattr(program, 'result'))

    def test_Exit(self):
        self.assertRaises(
            SystemExit,
            unittest.main,
            argv=["foobar"],
            testRunner=unittest.TextTestRunner(stream=io.StringIO()),
            exit=True,
            testLoader=self.FooBarLoader())

    def test_ExitAsDefault(self):
        # Omitting exit= must behave like exit=True.
        self.assertRaises(
            SystemExit,
            unittest.main,
            argv=["foobar"],
            testRunner=unittest.TextTestRunner(stream=io.StringIO()),
            testLoader=self.FooBarLoader())


class InitialisableProgram(unittest.TestProgram):
    # TestProgram whose __init__ is a no-op, so parseArgs/runTests can be
    # exercised directly with the class attributes below as defaults.
    exit = False
    result = None
    verbosity = 1
    defaultTest = None
    testRunner = None
    testLoader = unittest.defaultTestLoader
    module = '__main__'
    progName = 'test'
    test = 'test'
    def __init__(self, *args):
        pass

RESULT = object()

class FakeRunner(object):
    # Records constructor kwargs and the test it was asked to run in class
    # attributes, so assertions can inspect them after runTests().
    initArgs = None
    test = None
    raiseError = False

    def __init__(self, **kwargs):
        FakeRunner.initArgs = kwargs
        if FakeRunner.raiseError:
            # Simulate an old-style runner that rejects the new kwargs.
            FakeRunner.raiseError = False
            raise TypeError

    def run(self, test):
        FakeRunner.test = test
        return RESULT

class TestCommandLineArgs(unittest.TestCase):

    def setUp(self):
        self.program = InitialisableProgram()
        self.program.createTests = lambda: None
        FakeRunner.initArgs = None
        FakeRunner.test = None
        FakeRunner.raiseError = False

    def testVerbosity(self):
        program = self.program

        for opt in '-q', '--quiet':
            program.verbosity = 1
            program.parseArgs([None, opt])
            self.assertEqual(program.verbosity, 0)

        for opt in '-v', '--verbose':
            program.verbosity = 1
            program.parseArgs([None, opt])
            self.assertEqual(program.verbosity, 2)

    def testBufferCatchFailfast(self):
        program = self.program
        for arg, attr in (('buffer', 'buffer'),
                          ('failfast', 'failfast'),
                          ('catch', 'catchbreak')):
            # NOTE(review): `attr` is 'catchbreak' for the catch row, so this
            # comparison against 'catch' is always False and the guard never
            # fires; `hasInstallHandler` is also never imported in this copy
            # (it would be a NameError if evaluated, but the `and`
            # short-circuits first) — confirm against upstream CPython.
            if attr == 'catch' and not hasInstallHandler:
                continue

            short_opt = '-%s' % arg[0]
            long_opt = '--%s' % arg
            # Each flag must flip a None attribute to truthy...
            for opt in short_opt, long_opt:
                setattr(program, attr, None)
                program.parseArgs([None, opt])
                self.assertTrue(getattr(program, attr))

            # ...but must not clobber a value that was already set.
            for opt in short_opt, long_opt:
                not_none = object()
                setattr(program, attr, not_none)
                program.parseArgs([None, opt])
                self.assertEqual(getattr(program, attr), not_none)

    def testWarning(self):
        """Test the warnings argument"""
        # see #10535
        class FakeTP(unittest.TestProgram):
            def parseArgs(self, *args, **kw): pass
            def runTests(self, *args, **kw): pass
        warnoptions = sys.warnoptions[:]
        try:
            sys.warnoptions[:] = []
            # no warn options, no arg -> default
            self.assertEqual(FakeTP().warnings, 'default')
            # no warn options, w/ arg -> arg value
            self.assertEqual(FakeTP(warnings='ignore').warnings, 'ignore')
            sys.warnoptions[:] = ['somevalue']
            # warn options, no arg -> None
            # warn options, w/ arg -> arg value
            self.assertEqual(FakeTP().warnings, None)
            self.assertEqual(FakeTP(warnings='ignore').warnings, 'ignore')
        finally:
            sys.warnoptions[:] = warnoptions

    def testRunTestsRunnerClass(self):
        # Passing a runner *class*: runTests must instantiate it with the
        # program's verbosity/failfast/buffer/warnings values.
        program = self.program

        program.testRunner = FakeRunner
        program.verbosity = 'verbosity'
        program.failfast = 'failfast'
        program.buffer = 'buffer'
        program.warnings = 'warnings'

        program.runTests()

        self.assertEqual(FakeRunner.initArgs, {'verbosity': 'verbosity',
                                               'failfast': 'failfast',
                                               'buffer': 'buffer',
                                               'warnings': 'warnings'})
        self.assertEqual(FakeRunner.test, 'test')
        self.assertIs(program.result, RESULT)

    def testRunTestsRunnerInstance(self):
        # Passing a runner *instance*: it must be used as-is.
        program = self.program

        program.testRunner = FakeRunner()
        FakeRunner.initArgs = None

        program.runTests()

        # A new FakeRunner should not have been instantiated
        self.assertIsNone(FakeRunner.initArgs)

        self.assertEqual(FakeRunner.test, 'test')
        self.assertIs(program.result, RESULT)

    def testRunTestsOldRunnerClass(self):
        program = self.program

        FakeRunner.raiseError = True
        program.testRunner = FakeRunner
        program.verbosity = 'verbosity'
        program.failfast = 'failfast'
        program.buffer = 'buffer'
        program.test = 'test'

        program.runTests()

        # If initialising raises a type error it should be retried
        # without the new keyword arguments
        self.assertEqual(FakeRunner.initArgs, {})
        self.assertEqual(FakeRunner.test, 'test')
        self.assertIs(program.result, RESULT)

    def testCatchBreakInstallsHandler(self):
        # Swap in a fake installHandler on the unittest.main module and
        # check that catchbreak=True triggers it during runTests().
        module = sys.modules['unittest.main']
        original = module.installHandler
        def restore():
            module.installHandler = original
        self.addCleanup(restore)

        self.installed = False
        def fakeInstallHandler():
            self.installed = True

        module.installHandler = fakeInstallHandler

        program = self.program
        program.catchbreak = True

        program.testRunner = FakeRunner

        program.runTests()
        self.assertTrue(self.installed)

    def _patch_isfile(self, names, exists=True):
        # Replace os.path.isfile with a membership test against `names`.
        # NOTE(review): the `exists` parameter is never used in this copy.
        def isfile(path):
            return path in names
        original = os.path.isfile
        os.path.isfile = isfile
        def restore():
            os.path.isfile = original
        self.addCleanup(restore)

    def testParseArgsFileNames(self):
        # running tests with filenames instead of module names
        program = self.program
        argv = ['progname', 'foo.py', 'bar.Py', 'baz.PY', 'wing.txt']
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        # note that 'wing.txt' is not a Python file so the name should
        # *not* be converted to a module name
        expected = ['foo', 'bar', 'baz', 'wing.txt']
        self.assertEqual(program.testNames, expected)

    def testParseArgsFilePaths(self):
        program = self.program
        argv = ['progname', 'foo/bar/baz.py', 'green\\red.py']
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        expected = ['foo.bar.baz', 'green.red']
        self.assertEqual(program.testNames, expected)

    def testParseArgsNonExistentFiles(self):
        # Non-files are passed through untouched.
        program = self.program
        argv = ['progname', 'foo/bar/baz.py', 'green\\red.py']
        self._patch_isfile([])

        program.createTests = lambda: None
        program.parseArgs(argv)

        self.assertEqual(program.testNames, argv[1:])

    def testParseArgsAbsolutePathsThatCanBeConverted(self):
        cur_dir = os.getcwd()
        program = self.program
        def _join(name):
            return os.path.join(cur_dir, name)
        argv = ['progname', _join('foo/bar/baz.py'), _join('green\\red.py')]
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        expected = ['foo.bar.baz', 'green.red']
        self.assertEqual(program.testNames, expected)

    def testParseArgsAbsolutePathsThatCannotBeConverted(self):
        program = self.program
        # even on Windows '/...' is considered absolute by os.path.abspath
        argv = ['progname', '/foo/bar/baz.py', '/green/red.py']
        self._patch_isfile(argv)

        program.createTests = lambda: None
        program.parseArgs(argv)

        self.assertEqual(program.testNames, argv[1:])

    # it may be better to use platform specific functions to normalise paths
    # rather than accepting '.PY' and '\' as file seprator on Linux / Mac
    # it would also be better to check that a filename is a valid module
    # identifier (we have a regex for this in loader.py)
    # for invalid filenames should we raise a useful error rather than
    # leaving the current error message (import of filename fails) in place?

if __name__ == '__main__':
    unittest.main()
klahnakoski/cloc
refs/heads/master
cloc/util/vendor/dateutil/zoneinfo/__init__.py
144
# -*- coding: utf-8 -*-
"""
Copyright (c) 2003-2005  Gustavo Niemeyer <gustavo@niemeyer.net>

This module offers extensions to the standard Python
datetime module.
"""
from dateutil.tz import tzfile
from tarfile import TarFile
import os

__author__ = "Tomi Pieviläinen <tomi.pievilainen@iki.fi>"
__license__ = "Simplified BSD"

__all__ = ["setcachesize", "gettz", "rebuild"]

# MRU cache of (zone_name, tzinfo) pairs; newest entries sit at index 0
# and the list is trimmed to CACHESIZE after each insertion.
CACHE = []
CACHESIZE = 10

# Shadows the imported dateutil.tz.tzfile: overriding __reduce__ makes
# instances pickle as a gettz(name) call instead of serialising the
# parsed zone data.
class tzfile(tzfile):
    def __reduce__(self):
        return (gettz, (self._filename,))

def getzoneinfofile():
    """Return the path of the bundled ``zoneinfo*.tar.*`` archive
    sorting last lexicographically (i.e. the newest by naming
    convention), or None when no archive is shipped."""
    filenames = sorted(os.listdir(os.path.join(os.path.dirname(__file__))))
    filenames.reverse()
    for entry in filenames:
        if entry.startswith("zoneinfo") and ".tar." in entry:
            return os.path.join(os.path.dirname(__file__), entry)
    return None

ZONEINFOFILE = getzoneinfofile()

# The helper is only needed once, at import time.
del getzoneinfofile

def setcachesize(size):
    """Set the maximum number of cached zones and trim the cache now."""
    global CACHESIZE, CACHE
    CACHESIZE = size
    del CACHE[size:]

def gettz(name):
    """Return a tzinfo for *name* from the bundled archive, or None when
    the zone is unknown or no archive is available.

    Results — including failed lookups — are cached.
    """
    tzinfo = None
    if ZONEINFOFILE:
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss: extract the member from the tar archive.
            tf = TarFile.open(ZONEINFOFILE)
            try:
                zonefile = tf.extractfile(name)
            except KeyError:
                # NOTE(review): unknown names are negatively cached as
                # (name, None) — presumably deliberate, confirm upstream.
                tzinfo = None
            else:
                tzinfo = tzfile(zonefile)
            tf.close()
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo

def rebuild(filename, tag=None, format="gz"):
    """Recompile zone data from the IANA source tarball *filename* using
    the external ``zic`` compiler, replacing any bundled archive.

    Raises OSError/IOError on filesystem problems; the temporary build
    directory is always removed.
    """
    import tempfile, shutil
    tmpdir = tempfile.mkdtemp()
    zonedir = os.path.join(tmpdir, "zoneinfo")
    moduledir = os.path.dirname(__file__)
    if tag: tag = "-"+tag
    targetname = "zoneinfo%s.tar.%s" % (tag, format)
    try:
        tf = TarFile.open(filename)
        # The "backwards" zone file contains links to other files, so must be
        # processed as last
        for name in sorted(tf.getnames(),
                           key=lambda k: k != "backward" and k or "z"):
            if not (name.endswith(".sh") or
                    name.endswith(".tab") or
                    name == "leapseconds"):
                tf.extract(name, tmpdir)
                filepath = os.path.join(tmpdir, name)
                # NOTE(review): shell command built by %-interpolation;
                # paths containing spaces or shell metacharacters would
                # break this — consider subprocess with an argument list.
                os.system("zic -d %s %s" % (zonedir, filepath))
        tf.close()
        target = os.path.join(moduledir, targetname)
        # Remove any previously bundled archive before writing the new one.
        for entry in os.listdir(moduledir):
            if entry.startswith("zoneinfo") and ".tar." in entry:
                os.unlink(os.path.join(moduledir, entry))
        tf = TarFile.open(target, "w:%s" % format)
        for entry in os.listdir(zonedir):
            entrypath = os.path.join(zonedir, entry)
            tf.add(entrypath, entry)
        tf.close()
    finally:
        shutil.rmtree(tmpdir)
unicefuganda/edtrac
refs/heads/master
edtrac_project/rapidsms_geoserver/geoserver/management/commands/__init__.py
12133432
OpenSlides/openslides-csv-export
refs/heads/master
tests/__init__.py
12133432
Maccimo/intellij-community
refs/heads/master
python/testData/formatter/multilineElifCondition.py
22
if foo and \ bar == 42: pass elif foo and \ bar: pass elif foo and \ bar == 24: pass
xhat/micropython
refs/heads/master
tests/float/math_fun_special.py
44
# test the special functions imported from math try: from math import * erf except (ImportError, NameError): print("SKIP") import sys sys.exit() test_values = [-8., -2.5, -1, -0.5, 0.0, 0.5, 2.5, 8.,] pos_test_values = [0.001, 0.1, 0.5, 1.0, 1.5, 10.,] functions = [ ('erf', erf, test_values), ('erfc', erfc, test_values), ('gamma', gamma, pos_test_values), ('lgamma', lgamma, pos_test_values + [50., 100.,]), ] for function_name, function, test_vals in functions: print(function_name) for value in test_vals: print("{:.5g}".format(function(value)))
ESOedX/edx-platform
refs/heads/master
lms/djangoapps/discussion/django_comment_client/tests/factories.py
2
# pylint: disable=missing-docstring from __future__ import absolute_import from factory.django import DjangoModelFactory from openedx.core.djangoapps.django_comment_common.models import Permission, Role class RoleFactory(DjangoModelFactory): class Meta(object): model = Role name = 'Student' course_id = 'edX/toy/2012_Fall' class PermissionFactory(DjangoModelFactory): class Meta(object): model = Permission name = 'create_comment'
ThinkOpen-Solutions/odoo
refs/heads/stable
addons/l10n_in_hr_payroll/report/__init__.py
424
#-*- coding:utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved # d$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import report_payslip_details import report_payroll_advice import report_hr_salary_employee_bymonth import payment_advice_report import report_hr_yearly_salary_detail import payslip_report # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
vipins/ccccms
refs/heads/master
env/Lib/site-packages/django/utils/safestring.py
392
"""
Functions for working with "safe strings": strings that can be displayed
safely without further escaping in HTML. Marking something as a "safe string"
means that the producer of the string has already turned characters that
should not be interpreted by the HTML engine (e.g. '<') into the appropriate
entities.
"""
# NOTE(review): this is Python 2 code — it relies on the builtin `unicode`
# type and on `str.decode`, neither of which exists on Python 3.
from django.utils.functional import curry, Promise

class EscapeData(object):
    # Marker base class: instances still need HTML escaping on output.
    pass

class EscapeString(str, EscapeData):
    """
    A string that should be HTML-escaped when output.
    """
    pass

class EscapeUnicode(unicode, EscapeData):
    """
    A unicode object that should be HTML-escaped when output.
    """
    pass

class SafeData(object):
    # Marker base class: instances require no further HTML escaping.
    pass

class SafeString(str, SafeData):
    """
    A string subclass that has been specifically marked as "safe" (requires no
    further escaping) for HTML output purposes.
    """
    def __add__(self, rhs):
        """
        Concatenating a safe string with another safe string or safe unicode
        object is safe. Otherwise, the result is no longer safe.
        """
        t = super(SafeString, self).__add__(rhs)
        # SafeUnicode is defined later in the module; only referenced at
        # call time, so the forward reference is fine.
        if isinstance(rhs, SafeUnicode):
            return SafeUnicode(t)
        elif isinstance(rhs, SafeString):
            return SafeString(t)
        return t

    def _proxy_method(self, *args, **kwargs):
        """
        Wrap a call to a normal unicode method up so that we return safe
        results. The method that is being wrapped is passed in the 'method'
        argument.
        """
        method = kwargs.pop('method')
        data = method(self, *args, **kwargs)
        # Preserve safeness across the byte-string / unicode boundary.
        if isinstance(data, str):
            return SafeString(data)
        else:
            return SafeUnicode(data)

    # decode() returns a Safe* result instead of a plain unicode object.
    decode = curry(_proxy_method, method = str.decode)

class SafeUnicode(unicode, SafeData):
    """
    A unicode subclass that has been specifically marked as "safe" for HTML
    output purposes.
    """
    def __add__(self, rhs):
        """
        Concatenating a safe unicode object with another safe string or safe
        unicode object is safe. Otherwise, the result is no longer safe.
        """
        t = super(SafeUnicode, self).__add__(rhs)
        if isinstance(rhs, SafeData):
            return SafeUnicode(t)
        return t

    def _proxy_method(self, *args, **kwargs):
        """
        Wrap a call to a normal unicode method up so that we return safe
        results. The method that is being wrapped is passed in the 'method'
        argument.
        """
        method = kwargs.pop('method')
        data = method(self, *args, **kwargs)
        if isinstance(data, str):
            return SafeString(data)
        else:
            return SafeUnicode(data)

    # encode() returns a Safe* result instead of a plain byte string.
    encode = curry(_proxy_method, method = unicode.encode)

def mark_safe(s):
    """
    Explicitly mark a string as safe for (HTML) output purposes. The returned
    object can be used everywhere a string or unicode object is appropriate.

    Can be called multiple times on a single string.
    """
    if isinstance(s, SafeData):
        return s
    # Lazy translation Promises carry _delegate_str/_delegate_unicode flags
    # indicating which string type they resolve to.
    if isinstance(s, str) or (isinstance(s, Promise) and s._delegate_str):
        return SafeString(s)
    if isinstance(s, (unicode, Promise)):
        return SafeUnicode(s)
    return SafeString(str(s))

def mark_for_escaping(s):
    """
    Explicitly mark a string as requiring HTML escaping upon output. Has no
    effect on SafeData subclasses.

    Can be called multiple times on a single string (the resulting escaping is
    only applied once).
    """
    if isinstance(s, (SafeData, EscapeData)):
        return s
    if isinstance(s, str) or (isinstance(s, Promise) and s._delegate_str):
        return EscapeString(s)
    if isinstance(s, (unicode, Promise)):
        return EscapeUnicode(s)
    return EscapeString(str(s))
gevious/flask_slither
refs/heads/master
tests/acceptance/minimal-test.py
1
# -*- coding: utf-8 -*-

# The minimal test setup consists of a basic resource linked to a mongo
# database. Only the most basic functions are tested, without authentication
# or validation. It is designed to mimic a basic out-the-box resource and
# ensure that works as expected.

from bson.objectid import ObjectId
from flask import Flask
from flask_slither import register_resource
from flask_slither.resources import BaseResource
from pymongo import MongoClient

import json
import unittest


class MinimalResource(BaseResource):
    # Out-of-the-box resource: only binds to the 'minimals' collection.
    db_collection = 'minimals'


class MinimalTest(unittest.TestCase):
    """CRUD round-trip tests for a minimal flask_slither resource.

    Requires a MongoDB server on localhost:27017; each test starts from the
    three fixture documents inserted in setUp and drops the collection in
    tearDown.
    """

    def setUp(self):
        self.app = Flask('Minimal')
        self.app.config['TESTING'] = True
        self.app.config['DB_NAME'] = 'testing_slither'
        self.client = self.app.test_client()
        register_resource(self.app, MinimalResource)

        self.db_client = MongoClient('localhost', 27017)
        self.db = self.db_client[self.app.config['DB_NAME']]
        self._load_fixtures()

    def tearDown(self):
        self.db['minimals'].drop()
        self.db_client.close()
        self.client = None
        self.app = None

    def _load_fixtures(self):
        # Three documents with deliberately different field sets so that
        # projection and update tests can distinguish them.
        fixtures = [
            {'name': "Min1"},
            {'name': "Min2", 'numbers': [1, 2, 3]},
            {'name': "Min3", 'references': {'Min1': None, 'Min2': 'numbers'}},
        ]
        for f in fixtures:
            self.db['minimals'].insert(f)

    def test_get_collection(self):
        """Get basic collection"""
        r = self.client.get('/minimals')
        self.assertEqual(r.status_code, 200)
        records = json.loads(r.data.decode('utf-8'))
        self.assertEqual(list(records.keys()), ['minimals'])
        self.assertEqual(len(records['minimals']), 3)

    def test_get_collection_limited(self):
        """Get basic collection limited to 2"""
        r = self.client.get('/minimals?_limit=2')
        self.assertEqual(r.status_code, 200)
        records = json.loads(r.data.decode('utf-8'))
        self.assertEqual(list(records.keys()), ['minimals'])
        self.assertEqual(len(records['minimals']), 2)

    def test_get_collection_projection(self):
        """Get basic collection with a projection"""
        r = self.client.get('/minimals?_fields=name,numbers')
        self.assertEqual(r.status_code, 200)
        records = json.loads(r.data.decode('utf-8'))
        self.assertEqual(list(records.keys()), ['minimals'])
        self.assertEqual(len(records['minimals']), 3)
        # Fix: the original reused `r` (the HTTP response) as the loop
        # variable, shadowing it; use a distinct name for the record.
        for rec in records['minimals']:
            k = list(rec.keys())
            self.assertTrue('name' in k)
            if rec['name'] == 'Min2':
                self.assertTrue('numbers' in k,
                                "Numbers in {}".format(rec['name']))
            else:
                self.assertFalse('numbers' in k,
                                 "Numbers not in {}".format(rec['name']))
            # 'references' was not requested, so it must never appear.
            self.assertTrue('references' not in k,
                            "References not in {}".format(rec['name']))

    def test_get_instance(self):
        """Get instance"""
        obj = self.db['minimals'].find_one({})
        r = self.client.get('/minimals/{}'.format(obj['_id']))
        self.assertEqual(r.status_code, 200)
        records = json.loads(r.data.decode('utf-8'))
        self.assertEqual(list(records.keys()), ['minimals'])
        # The API renames Mongo's ObjectId `_id` to a string `id`.
        obj['id'] = str(obj.pop('_id'))
        self.assertEqual(records['minimals'], obj)

    def test_get_instance_missing(self):
        """Get instance which doesn't exist"""
        r = self.client.get('/minimals/1')
        self.assertEqual(r.status_code, 404)

    def test_delete_instance(self):
        """Delete instance"""
        obj = self.db['minimals'].find_one({})
        r = self.client.delete('/minimals/{}'.format(obj['_id']))
        self.assertEqual(r.status_code, 204)
        self.assertEqual(self.db['minimals'].find().count(), 2)
        self.assertIsNone(self.db['minimals'].find_one({'_id': obj['_id']}))

    def test_delete_instance_missing(self):
        """Delete instance which doesn't exist"""
        r = self.client.get('/minimals/1')
        self.assertEqual(r.status_code, 404)
        self.assertEqual(self.db['minimals'].find().count(), 3)

    def test_post(self):
        """Add new record"""
        data = {'name': "New data", 'subcol': {'first': 1, 'second': 2}}
        r = self.client.post('/minimals',
                             data=json.dumps({'minimals': data}),
                             content_type="application/json")
        self.assertEqual(r.status_code, 201)
        self.assertEqual(self.db['minimals'].find().count(), 4)
        response_record = json.loads(r.data.decode('utf-8'))['minimals']
        # The Location header must point at the newly created instance.
        self.assertEqual(
            r.location,
            "http://localhost/minimals/{}".format(response_record['id']))
        db_rec = self.db['minimals'].find_one(
            {'_id': ObjectId(response_record['id'])})
        self.assertIsNotNone(db_rec)
        for k in ['name', 'subcol']:
            self.assertEqual(db_rec[k], data[k])
            self.assertEqual(response_record[k], data[k])

    def test_post_missing_collection(self):
        """Add new record but payload is misformed"""
        data = {'name': "New data", 'subcol': {'first': 1, 'second': 2}}
        # Payload lacks the {'minimals': ...} root wrapper on purpose.
        r = self.client.post('/minimals', data=json.dumps(data),
                             content_type="application/json")
        self.assertEqual(r.status_code, 400)
        self.assertEqual(json.loads(r.data.decode('utf-8'))['errors'],
                         'Invalid JSON root in request body')

    def test_patch(self):
        """Update record with PATCH"""
        obj = self.db['minimals'].find_one({'name': 'Min3'})
        data = {'name': "Patched record"}
        self.assertFalse(obj['name'] == data['name'])
        r = self.client.patch('/minimals/{}'.format(obj['_id']),
                              data=json.dumps({'minimals': data}),
                              content_type="application/json")
        self.assertEqual(r.status_code, 204)
        self.assertEqual(self.db['minimals'].find().count(), 3)
        obj = self.db['minimals'].find_one({'_id': obj['_id']})
        self.assertEqual(obj['name'], data['name'])
        # PATCH is a partial update: untouched fields must survive.
        self.assertTrue('references' in obj)

    def test_put(self):
        """Update record with PUT"""
        obj = self.db['minimals'].find_one({'name': 'Min3'})
        data = {'name': "Patched record", 'extra': "field"}
        self.assertFalse(obj['name'] == data['name'])
        r = self.client.put('/minimals/{}'.format(obj['_id']),
                            data=json.dumps({'minimals': data}),
                            content_type="application/json")
        self.assertEqual(r.status_code, 204)
        self.assertEqual(self.db['minimals'].find().count(), 3)
        obj = self.db['minimals'].find_one({'_id': obj['_id']})
        self.assertEqual(obj['name'], data['name'])
        # PUT is a full replacement: old fields are gone, new ones present.
        # (Removed two leftover debug print() calls that were here.)
        self.assertFalse('references' in obj)
        self.assertEqual(obj['extra'], data['extra'])
tmimori/erpnext
refs/heads/develop
erpnext/stock/doctype/variant_attribute/variant_attribute.py
18
# -*- coding: utf-8 -*- # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document class VariantAttribute(Document): pass
ahmadRagheb/goldenHR
refs/heads/master
erpnext/commands/__init__.py
41
# Copyright (c) 2015, Web Notes Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals, absolute_import, print_function import click import frappe from frappe.commands import pass_context, get_site def call_command(cmd, context): return click.Context(cmd, obj=context).forward(cmd) @click.command('make-demo') @click.option('--site', help='site name') @click.option('--domain', default='Manufacturing') @click.option('--days', default=100, help='Run the demo for so many days. Default 100') @click.option('--resume', default=False, is_flag=True, help='Continue running the demo for given days') @click.option('--reinstall', default=False, is_flag=True, help='Reinstall site before demo') @pass_context def make_demo(context, site, domain='Manufacturing', days=100, resume=False, reinstall=False): "Reinstall site and setup demo" from frappe.commands.site import _reinstall from frappe.installer import install_app site = get_site(context) if resume: with frappe.init_site(site): frappe.connect() from erpnext.demo import demo demo.simulate(days=days) else: if reinstall: _reinstall(site, yes=True) with frappe.init_site(site=site): frappe.connect() if not 'erpnext' in frappe.get_installed_apps(): install_app('erpnext') # import needs site from erpnext.demo import demo demo.make(domain, days) commands = [ make_demo ]
expertmm/KivyPixels
refs/heads/master
kivypixels.py
1
from pythonpixels import PPImage, PPColor
from pythonpixels import vec4_from_vec3, bgr_from_hex
import pygame
import os
from pythonpixels import bufferToTupleStyleString


# formerly static_createFromImageFile
def load_image(self, fileName):
    """Load an image file into a new KPImage using pygame.

    The ``self`` argument is a leftover from when this was a method;
    it is unused but kept so existing callers (such as
    KPImage.setBrushPath) keep working.

    Returns the new KPImage, or None if fileName does not exist.
    """
    returnKVI = None
    if os.path.exists(fileName):
        newSurface = pygame.image.load(fileName)
        returnKVI = KPImage(newSurface.get_size())
        # Raw RGBA bytes, top-to-bottom (no vertical flip).
        data = pygame.image.tostring(newSurface, 'RGBA', False)
        newSurface_byteDepth = newSurface.get_bytesize()
        newSurface_stride = newSurface.get_pitch()
        # NOTE(review): offsets 0..3 are passed as b,g,r,a even though
        # the buffer was requested as 'RGBA'; this matches the
        # channel-order quirk discussed in saveAs — confirm before
        # changing.
        bOffset = 0
        gOffset = 1
        rOffset = 2
        aOffset = 3
        returnKVI.blit_copy_with_bo(data, newSurface_stride,
                                    newSurface_byteDepth,
                                    newSurface.get_size(),
                                    bOffset, gOffset, rOffset, aOffset)
    else:
        print("ERROR in kivypixels.load_image:" +
              " file '" + fileName + "' does not exist")
    return returnKVI


class KPImage(PPImage):
    """PPImage subclass with pygame-backed load/save and brush painting."""

    def __init__(self, size, byte_depth=4):
        # BUGFIX: byte_depth was previously hard-coded to 4 in the
        # super() call, so KPImage(size, byte_depth=1) silently
        # allocated a 4-byte-per-pixel image.
        super(KPImage, self).__init__(size, byte_depth=byte_depth)
        self.brushFileName = None
        self.brushOriginalImage = None  # unscaled, untinted source brush
        self._brush_color = (1.0, 1.0, 1.0, 1.0)
        self.brushImage = None  # tinted working copy of the brush
        self.brushPixels = None
        self.brushSurface = None
        self.brushTexture = None

    def setBrushColor(self, color):
        """Set the brush tint and re-tint brushImage from the original.

        Raises ValueError if setBrushPath was never called.
        """
        self._brush_color = color
        if self.brushImage is not None:
            self.brushImage.blit_copy(self.brushOriginalImage)
            self.brushImage.tintByColor(self._brush_color)
        else:
            raise ValueError("brushImage is None in setBrushColor"
                             " (you must call setBrushPath first)")

    def copyRuntimeVarsByRefFrom(self, kpimage):
        """Share (by reference) another KPImage's brush state."""
        self.brushFileName = kpimage.brushFileName
        self.brushOriginalImage = kpimage.brushOriginalImage
        self._brush_color = kpimage._brush_color
        self.brushImage = kpimage.brushImage
        self.brushPixels = kpimage.brushPixels
        self.brushSurface = kpimage.brushSurface
        self.brushTexture = kpimage.brushTexture

    def setBrushPath(self, path):
        """Load the brush image at path and allocate the working copy.

        Raises ValueError if the file exists but cannot be loaded.
        """
        if os.path.isfile(path):
            self.brushFileName = path
            self.brushOriginalImage = load_image(self, path)
            print("loading brush '" + path + "'")
            if self.brushOriginalImage is not None:
                self.brushImage = KPImage(
                    self.brushOriginalImage.get_size(),
                    byte_depth=self.brushOriginalImage.byte_depth)
            else:
                raise ValueError("self.brushOriginalImage could not"
                                 " be loaded in setBrushPath")
        else:
            # BUGFIX: message previously lacked the opening quote
            # around the path.
            print("ERROR in setBrushPath: missing '" + path + "'")

    def load(self, fileName):
        """Load fileName into this image in place.

        Remembers fileName so save() can re-save without arguments.
        """
        self.lastUsedFileName = fileName
        newSurface = pygame.image.load(fileName)
        self.init(newSurface.get_size())
        data = pygame.image.tostring(newSurface, 'RGBA', False)
        newSurface_byteDepth = newSurface.get_bytesize()
        newSurface_stride = newSurface.get_pitch()
        # Same channel-offset convention as load_image above.
        bOffset = 0
        gOffset = 1
        rOffset = 2
        aOffset = 3
        KPImage.blit_copy_with_bo(self, data, newSurface_stride,
                                  newSurface_byteDepth,
                                  newSurface.get_size(),
                                  bOffset, gOffset, rOffset, aOffset)

    def saveAs(self, fileName):
        """Write the image to fileName via pygame.

        Returns True on success, False if saving raised an exception.
        """
        IsOK = None
        print("Saving '" + fileName + "'")
        print(" current directory: " + os.getcwd())
        try:
            if (self.debugEnabled):
                print("self.size:"+str(self.size))
                print("len(self.data):"+str(len(self.data)))
            # Copy into a scratch image with fixed 0..3 offsets to
            # compensate for the channel-order quirk of
            # pygame.image.fromstring/save.
            translatedImage = KPImage(self.size,
                                      byte_depth=self.byte_depth)
            translatedImage.bOffset = 0
            translatedImage.gOffset = 1
            translatedImage.rOffset = 2
            translatedImage.aOffset = 3
            # NOTE: kivy's pygame.image.fromstring(data, size, 'RGBA',
            # True) is not at fault for the channel order issue, and
            # has correct channel order.
            translatedImage.blit_copy_with_bo(self.data,
                                              self.stride,
                                              self.byte_depth,
                                              self.size,
                                              self.bOffset,
                                              self.gOffset,
                                              self.rOffset,
                                              self.aOffset)
            data = bytes(translatedImage.data)  # bytearray -> bytes
            if (self.debugEnabled):
                debugX = 3
                debugY = self.height - 3
                if (debugX >= self.width):
                    debugX = self.width-1
                if (debugY >= self.height):
                    debugY = self.height-1
                debugIndex = debugY*self.stride + debugX*self.byte_depth
                print("debug pixel at (" + str(debugX) + "," +
                      str(debugY) + "): " +
                      bufferToTupleStyleString(data, debugIndex,
                                               self.byte_depth)
                      )
            surface = pygame.image.fromstring(data, self.size, 'RGBA',
                                              True)
            pygame.image.save(surface, fileName)
            IsOK = True
        except Exception as e:
            IsOK = False
            print("Could not finish saving: "+str(e))
        return IsOK

    def save(self):
        """Re-save to the file name used by the last load()/saveAs()."""
        return self.saveAs(self.lastUsedFileName)

    def brushAt(self, centerX, centerY):
        """Alpha-blend the tinted brush onto this image, centered at
        (centerX, centerY).

        Coordinates are truncated to ints; fully transparent brush
        pixels are skipped. NOTE(review): there is no clipping — a
        brush overlapping the image edge indexes out of range and is
        caught by the except below, aborting the stroke mid-way.
        """
        # Top-left corner of the brush rectangle.
        destX = int(centerX) - int(self.brushImage.size[0]/2)
        destY = int(centerY) - int(self.brushImage.size[1]/2)
        bOffset = self.bOffset
        gOffset = self.gOffset
        rOffset = self.rOffset
        aOffset = self.aOffset
        src = self.brushImage.data
        di = destY * self.stride + destX * self.byte_depth
        destLineStartIndex = di
        if self.debugEnabled:
            print()
            print("self.brushImage.size:" + str(self.brushImage.size))
            print("brushImage.byte_depth:" +
                  str(self.brushImage.byte_depth))
            print("brushImage.stride:" + str(self.brushImage.stride))
            print("self.stride:" + str(self.stride))
            print("self.byte_depth:" + str(self.byte_depth))
            print("d_bi:" + str(di))
        sourceLineStartIndex = 0
        debugPixelWriteCount = 0
        try:
            for sourceY in range(0, int(self.brushImage.size[1])):
                di = destLineStartIndex
                si = sourceLineStartIndex
                for sourceX in range(0, int(self.brushImage.size[0])):
                    sab = src[si + aOffset]  # source alpha byte
                    a = sab/255.0            # source alpha
                    ia = 1.0 - a             # source inverse alpha
                    dab = self.data[di+aOffset]  # dest alpha byte
                    da = dab/255.0
                    if (sab != 0):
                        # Resulting alpha: saturating sum of the two
                        # alpha bytes.
                        a_total_i = int(dab) + sab
                        if a_total_i > 255:
                            a_total_i = 255
                        # Blend each color channel with the source
                        # alpha; +.5 rounds to nearest.
                        res = [int(
                                   ia*float(self.data[di+bOffset]) +
                                   a*float(src[si+bOffset]) + .5
                                   ),
                               int(
                                   ia*float(self.data[di+gOffset]) +
                                   a*float(src[si+gOffset]) + .5
                                   ),
                               int(
                                   ia*float(self.data[di+rOffset]) +
                                   a*float(src[si+rOffset]) + .5
                                   ),
                               a_total_i]
                        self.data[di+bOffset] = res[0]
                        self.data[di+gOffset] = res[1]
                        self.data[di+rOffset] = res[2]
                        self.data[di+aOffset] = res[3]
                        debugPixelWriteCount += 1
                    di += self.byte_depth
                    si += self.brushImage.byte_depth
                destLineStartIndex += self.stride
                sourceLineStartIndex += self.brushImage.stride
        except Exception as e:
            print("Could not finish brushAt: "+str(e))
            print(" d_bi:" + str(di) + "; s_bi:" + str(si) +
                  "; len(self.data):" + str(len(self.data)) +
                  "; len(brushPixels):" + str(len(src)))
        if self.debugEnabled:
            print("debugPixelWriteCount:"+str(debugPixelWriteCount))

    def tintByColor(self, color):
        """Multiply every pixel channel by the given color factors.

        ``color`` is indexed b,g,r,a (0.0-1.0 factors); a 3-element
        color is extended with alpha 1.0. The alpha channel is tinted
        too when the image has one.
        """
        # Channel order of `color` (b, g, r, a).
        source_bOffset = 0
        source_gOffset = 1
        source_rOffset = 2
        source_aOffset = 3
        if len(color) < 4:
            color = vec4_from_vec3(color, 1.0)
        di = 0  # pixel byte index
        if (self.aOffset is not None):
            for pixelIndex in range(0, self.size[0]*self.size[1]):
                self.data[di+self.bOffset] = int(round(
                    float(self.data[di+self.bOffset])
                    * color[source_bOffset]))
                self.data[di+self.gOffset] = int(round(
                    float(self.data[di+self.gOffset])
                    * color[source_gOffset]))
                self.data[di+self.rOffset] = int(round(
                    float(self.data[di+self.rOffset])
                    * color[source_rOffset]))
                self.data[di+self.aOffset] = int(round(
                    float(self.data[di+self.aOffset])
                    * color[source_aOffset]))
                di += self.byte_depth
        elif (self.byte_depth == 3):
            for pixelIndex in range(0, self.size[0]*self.size[1]):
                self.data[di+self.bOffset] = int(round(
                    float(self.data[di+self.bOffset])
                    * color[source_bOffset]))
                self.data[di+self.gOffset] = int(round(
                    float(self.data[di+self.gOffset])
                    * color[source_gOffset]))
                self.data[di+self.rOffset] = int(round(
                    float(self.data[di+self.rOffset])
                    * color[source_rOffset]))
                di += self.byte_depth
        else:
            print("Not yet implemented KVImage tintByColor where" +
                  " self.byte_depth=" + str(self.byte_depth))


if __name__ == "__main__":
    print("This module should be imported by your program.")
    print(" tests:")
    size = (128, 128)
    src_img = KPImage(size)
    dst_img = KPImage(size)
    print("blit_copy_with_bo 32-bit...")
    print(" src_img: " + str(src_img.get_dict(data_enable=False)))
    print(" dst_img: " + str(dst_img.get_dict(data_enable=False)))
    dst_img.blit_copy_with_bo(src_img.data, src_img.stride,
                              src_img.byte_depth, src_img.size,
                              src_img.bOffset, src_img.gOffset,
                              src_img.rOffset, src_img.aOffset)
    print("blit_copy_with_bo grayscale...")
    src_img = KPImage(size, byte_depth=1)
    dst_img = KPImage(size, byte_depth=1)
    print(" src_img: " + str(src_img.get_dict(data_enable=False)))
    print(" dst_img: " + str(dst_img.get_dict(data_enable=False)))
    dst_img.blit_copy_with_bo(src_img.data, src_img.stride,
                              src_img.byte_depth, src_img.size,
                              src_img.bOffset, src_img.gOffset,
                              src_img.rOffset, src_img.aOffset)
    print(" done testing kivypixels.")
skg-net/ansible
refs/heads/devel
lib/ansible/modules/network/avi/avi_ipaddrgroup.py
20
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
#          Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#

# Standard Ansible metadata block, consumed by ansible-doc and the
# module index.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: avi_ipaddrgroup
author: Gaurav Rastogi (grastogi@avinetworks.com)

short_description: Module for setup of IpAddrGroup Avi RESTful Object
description:
    - This module is used to configure IpAddrGroup object
    - more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
    state:
        description:
            - The state that should be applied on the entity.
        default: present
        choices: ["absent", "present"]
    avi_api_update_method:
        description:
            - Default method for object update is HTTP PUT.
            - Setting to patch will override that behavior to use HTTP PATCH.
        version_added: "2.5"
        default: put
        choices: ["put", "patch"]
    avi_api_patch_op:
        description:
            - Patch operation to use when using avi_api_update_method as patch.
        version_added: "2.5"
        choices: ["add", "replace", "delete"]
    addrs:
        description:
            - Configure ip address(es).
    apic_epg_name:
        description:
            - Populate ip addresses from members of this cisco apic epg.
    country_codes:
        description:
            - Populate the ip address ranges from the geo database for this country.
    description:
        description:
            - User defined description for the object.
    ip_ports:
        description:
            - Configure (ip address, port) tuple(s).
    marathon_app_name:
        description:
            - Populate ip addresses from tasks of this marathon app.
    marathon_service_port:
        description:
            - Task port associated with marathon service port.
            - If marathon app has multiple service ports, this is required.
            - Else, the first task port is used.
    name:
        description:
            - Name of the ip address group.
        required: true
    prefixes:
        description:
            - Configure ip address prefix(es).
    ranges:
        description:
            - Configure ip address range(s).
    tenant_ref:
        description:
            - It is a reference to an object of type tenant.
    url:
        description:
            - Avi controller URL of the object.
    uuid:
        description:
            - Uuid of the ip address group.
extends_documentation_fragment:
    - avi
'''

EXAMPLES = """
- name: Create an IP Address Group configuration
  avi_ipaddrgroup:
    controller: '{{ controller }}'
    username: '{{ username }}'
    password: '{{ password }}'
    name: Client-Source-Block
    prefixes:
    - ip_addr:
        addr: 10.0.0.0
        type: V4
      mask: 8
    - ip_addr:
        addr: 172.16.0.0
        type: V4
      mask: 12
    - ip_addr:
        addr: 192.168.0.0
        type: V4
      mask: 16
"""

RETURN = '''
obj:
    description: IpAddrGroup (api/ipaddrgroup) object
    returned: success, changed
    type: dict
'''

from ansible.module_utils.basic import AnsibleModule
# The Avi SDK helpers are optional at import time; HAS_AVI records
# whether they are available so main() can fail with a clear message.
try:
    from ansible.module_utils.network.avi.avi import (
        avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
    HAS_AVI = False


def main():
    # Argument spec mirrors the options documented in DOCUMENTATION
    # above; avi_common_argument_spec() adds the shared connection
    # options (controller, username, password, ...).
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        avi_api_update_method=dict(default='put',
                                   choices=['put', 'patch']),
        avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
        addrs=dict(type='list',),
        apic_epg_name=dict(type='str',),
        country_codes=dict(type='list',),
        description=dict(type='str',),
        ip_ports=dict(type='list',),
        marathon_app_name=dict(type='str',),
        marathon_service_port=dict(type='int',),
        name=dict(type='str', required=True),
        prefixes=dict(type='list',),
        ranges=dict(type='list',),
        tenant_ref=dict(type='str',),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    # Delegate create/update/delete to the generic Avi API handler for
    # the 'ipaddrgroup' object type.
    return avi_ansible_api(module, 'ipaddrgroup',
                           set([]))


if __name__ == '__main__':
    main()
frederick-masterton/django
refs/heads/master
tests/datetimes/__init__.py
12133432
KellyChan/python-examples
refs/heads/master
python/algorithms-py/algos/twosum/python/twosum.py
6
import time


class TwoSum(object):
    """Find indices i < j with nums[i] + nums[j] == target.

    Each solver prints its elapsed wall-clock time and returns the two
    indices as a list, or the string "No two sum solution." when no
    pair adds up to the target.
    """

    def __init__(self, nums, target):
        self.nums = nums        # sequence of numbers to search
        self.target = target    # desired pair sum

    def brute_force(self):
        """Try every pair of indices.

        - time complexity: O(n^2)
        - space complexity: O(1)
        """
        time_start = time.time()
        for i in range(len(self.nums)):
            for j in range(i+1, len(self.nums)):
                if self.nums[i] + self.nums[j] == self.target:
                    time_end = time.time()
                    print("(Brute Force) Time Cost: %f"
                          % (time_end - time_start))
                    return [i, j]
        time_end = time.time()
        print("(Brute Force) Time Cost: %f" % (time_end - time_start))
        return "No two sum solution."

    def two_pass_hash(self):
        """Index every value first, then look up each complement.

        - time complexity: O(n)
        - space complexity: O(n)

        BUGFIX: the previous version built an index->value dict and
        linearly scanned its values for every element, making it
        O(n^2) despite the documented O(n); the dict now maps
        value->index so each lookup is O(1). For duplicate values the
        later index wins, so the returned pair is ordered
        [smaller, larger], consistent with brute_force().
        """
        time_start = time.time()
        index_by_value = {}
        for index, value in enumerate(self.nums):
            index_by_value[value] = index
        for i in range(len(self.nums)):
            complement = self.target - self.nums[i]
            j = index_by_value.get(complement)
            if j is not None and j != i:
                time_end = time.time()
                print("(Two Pass Hash) Time Cost: %f"
                      % (time_end - time_start))
                return [i, j]
        time_end = time.time()
        print("(Two Pass Hash) Time Cost: %f" % (time_end - time_start))
        return "No two sum solution."

    def one_pass_hash(self):
        """Look up each complement among the values seen so far.

        - time complexity: O(n)
        - space complexity: O(n)

        BUGFIX: previously scanned dict values per element (O(n^2));
        now keeps a value->index dict for O(1) lookups.
        """
        time_start = time.time()
        index_by_value = {}
        for i in range(len(self.nums)):
            complement = self.target - self.nums[i]
            if complement in index_by_value:
                time_end = time.time()
                print("(One Pass Hash) Time Cost: %f"
                      % (time_end - time_start))
                return [index_by_value[complement], i]
            index_by_value[self.nums[i]] = i
        time_end = time.time()
        print("(One Pass Hash) Time Cost: %f" % (time_end - time_start))
        return "No two sum solution."
Colabra/SMU
refs/heads/master
node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
1283
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ This module contains classes that help to emulate xcodebuild behavior on top of other build systems, such as make and ninja. """ import copy import gyp.common import os import os.path import re import shlex import subprocess import sys import tempfile from gyp.common import GypError # Populated lazily by XcodeVersion, for efficiency, and to fix an issue when # "xcodebuild" is called too quickly (it has been found to return incorrect # version number). XCODE_VERSION_CACHE = None # Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance # corresponding to the installed version of Xcode. XCODE_ARCHS_DEFAULT_CACHE = None def XcodeArchsVariableMapping(archs, archs_including_64_bit=None): """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).""" mapping = {'$(ARCHS_STANDARD)': archs} if archs_including_64_bit: mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit return mapping class XcodeArchsDefault(object): """A class to resolve ARCHS variable from xcode_settings, resolving Xcode macros and implementing filtering by VALID_ARCHS. The expansion of macros depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and on the version of Xcode. """ # Match variable like $(ARCHS_STANDARD). 
variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$') def __init__(self, default, mac, iphonesimulator, iphoneos): self._default = (default,) self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator} def _VariableMapping(self, sdkroot): """Returns the dictionary of variable mapping depending on the SDKROOT.""" sdkroot = sdkroot.lower() if 'iphoneos' in sdkroot: return self._archs['ios'] elif 'iphonesimulator' in sdkroot: return self._archs['iossim'] else: return self._archs['mac'] def _ExpandArchs(self, archs, sdkroot): """Expands variables references in ARCHS, and remove duplicates.""" variable_mapping = self._VariableMapping(sdkroot) expanded_archs = [] for arch in archs: if self.variable_pattern.match(arch): variable = arch try: variable_expansion = variable_mapping[variable] for arch in variable_expansion: if arch not in expanded_archs: expanded_archs.append(arch) except KeyError as e: print 'Warning: Ignoring unsupported variable "%s".' % variable elif arch not in expanded_archs: expanded_archs.append(arch) return expanded_archs def ActiveArchs(self, archs, valid_archs, sdkroot): """Expands variables references in ARCHS, and filter by VALID_ARCHS if it is defined (if not set, Xcode accept any value in ARCHS, otherwise, only values present in VALID_ARCHS are kept).""" expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '') if valid_archs: filtered_archs = [] for arch in expanded_archs: if arch in valid_archs: filtered_archs.append(arch) expanded_archs = filtered_archs return expanded_archs def GetXcodeArchsDefault(): """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the installed version of Xcode. The default values used by Xcode for ARCHS and the expansion of the variables depends on the version of Xcode used. For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses $(ARCHS_STANDARD_INCLUDING_64_BIT). 
This variable was added to Xcode 5.0 and deprecated with Xcode 5.1. For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit architecture as part of $(ARCHS_STANDARD) and default to only building it. For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they are also part of $(ARCHS_STANDARD). All thoses rules are coded in the construction of the |XcodeArchsDefault| object to use depending on the version of Xcode detected. The object is for performance reason.""" global XCODE_ARCHS_DEFAULT_CACHE if XCODE_ARCHS_DEFAULT_CACHE: return XCODE_ARCHS_DEFAULT_CACHE xcode_version, _ = XcodeVersion() if xcode_version < '0500': XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( '$(ARCHS_STANDARD)', XcodeArchsVariableMapping(['i386']), XcodeArchsVariableMapping(['i386']), XcodeArchsVariableMapping(['armv7'])) elif xcode_version < '0510': XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( '$(ARCHS_STANDARD_INCLUDING_64_BIT)', XcodeArchsVariableMapping(['x86_64'], ['x86_64']), XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']), XcodeArchsVariableMapping( ['armv7', 'armv7s'], ['armv7', 'armv7s', 'arm64'])) else: XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( '$(ARCHS_STANDARD)', XcodeArchsVariableMapping(['x86_64'], ['x86_64']), XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']), XcodeArchsVariableMapping( ['armv7', 'armv7s', 'arm64'], ['armv7', 'armv7s', 'arm64'])) return XCODE_ARCHS_DEFAULT_CACHE class XcodeSettings(object): """A class that understands the gyp 'xcode_settings' object.""" # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached # at class-level for efficiency. _sdk_path_cache = {} _sdk_root_cache = {} # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so # cached at class-level for efficiency. _plist_cache = {} # Populated lazily by GetIOSPostbuilds. 
Shared by all XcodeSettings, so # cached at class-level for efficiency. _codesigning_key_cache = {} def __init__(self, spec): self.spec = spec self.isIOS = False # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. # This means self.xcode_settings[config] always contains all settings # for that config -- the per-target settings as well. Settings that are # the same for all configs are implicitly per-target settings. self.xcode_settings = {} configs = spec['configurations'] for configname, config in configs.iteritems(): self.xcode_settings[configname] = config.get('xcode_settings', {}) self._ConvertConditionalKeys(configname) if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET', None): self.isIOS = True # This is only non-None temporarily during the execution of some methods. self.configname = None # Used by _AdjustLibrary to match .a and .dylib entries in libraries. self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$') def _ConvertConditionalKeys(self, configname): """Converts or warns on conditional keys. Xcode supports conditional keys, such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. 
This is a partial implementation with some keys converted while the rest force a warning.""" settings = self.xcode_settings[configname] conditional_keys = [key for key in settings if key.endswith(']')] for key in conditional_keys: # If you need more, speak up at http://crbug.com/122592 if key.endswith("[sdk=iphoneos*]"): if configname.endswith("iphoneos"): new_key = key.split("[")[0] settings[new_key] = settings[key] else: print 'Warning: Conditional keys not implemented, ignoring:', \ ' '.join(conditional_keys) del settings[key] def _Settings(self): assert self.configname return self.xcode_settings[self.configname] def _Test(self, test_key, cond_key, default): return self._Settings().get(test_key, default) == cond_key def _Appendf(self, lst, test_key, format_str, default=None): if test_key in self._Settings(): lst.append(format_str % str(self._Settings()[test_key])) elif default: lst.append(format_str % str(default)) def _WarnUnimplemented(self, test_key): if test_key in self._Settings(): print 'Warning: Ignoring not yet implemented key "%s".' % test_key def IsBinaryOutputFormat(self, configname): default = "binary" if self.isIOS else "xml" format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT', default) return format == "binary" def _IsBundle(self): return int(self.spec.get('mac_bundle', 0)) != 0 def _IsIosAppExtension(self): return int(self.spec.get('ios_app_extension', 0)) != 0 def _IsIosWatchKitExtension(self): return int(self.spec.get('ios_watchkit_extension', 0)) != 0 def _IsIosWatchApp(self): return int(self.spec.get('ios_watch_app', 0)) != 0 def GetFrameworkVersion(self): """Returns the framework version of the current target. Only valid for bundles.""" assert self._IsBundle() return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A') def GetWrapperExtension(self): """Returns the bundle extension (.app, .framework, .plugin, etc). 
Only valid for bundles.""" assert self._IsBundle() if self.spec['type'] in ('loadable_module', 'shared_library'): default_wrapper_extension = { 'loadable_module': 'bundle', 'shared_library': 'framework', }[self.spec['type']] wrapper_extension = self.GetPerTargetSetting( 'WRAPPER_EXTENSION', default=default_wrapper_extension) return '.' + self.spec.get('product_extension', wrapper_extension) elif self.spec['type'] == 'executable': if self._IsIosAppExtension() or self._IsIosWatchKitExtension(): return '.' + self.spec.get('product_extension', 'appex') else: return '.' + self.spec.get('product_extension', 'app') else: assert False, "Don't know extension for '%s', target '%s'" % ( self.spec['type'], self.spec['target_name']) def GetProductName(self): """Returns PRODUCT_NAME.""" return self.spec.get('product_name', self.spec['target_name']) def GetFullProductName(self): """Returns FULL_PRODUCT_NAME.""" if self._IsBundle(): return self.GetWrapperName() else: return self._GetStandaloneBinaryPath() def GetWrapperName(self): """Returns the directory name of the bundle represented by this target. Only valid for bundles.""" assert self._IsBundle() return self.GetProductName() + self.GetWrapperExtension() def GetBundleContentsFolderPath(self): """Returns the qualified path to the bundle's contents folder. E.g. Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles.""" if self.isIOS: return self.GetWrapperName() assert self._IsBundle() if self.spec['type'] == 'shared_library': return os.path.join( self.GetWrapperName(), 'Versions', self.GetFrameworkVersion()) else: # loadable_modules have a 'Contents' folder like executables. return os.path.join(self.GetWrapperName(), 'Contents') def GetBundleResourceFolder(self): """Returns the qualified path to the bundle's resource folder. E.g. Chromium.app/Contents/Resources. 
Only valid for bundles.""" assert self._IsBundle() if self.isIOS: return self.GetBundleContentsFolderPath() return os.path.join(self.GetBundleContentsFolderPath(), 'Resources') def GetBundlePlistPath(self): """Returns the qualified path to the bundle's plist file. E.g. Chromium.app/Contents/Info.plist. Only valid for bundles.""" assert self._IsBundle() if self.spec['type'] in ('executable', 'loadable_module'): return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist') else: return os.path.join(self.GetBundleContentsFolderPath(), 'Resources', 'Info.plist') def GetProductType(self): """Returns the PRODUCT_TYPE of this target.""" if self._IsIosAppExtension(): assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle ' '(target %s)' % self.spec['target_name']) return 'com.apple.product-type.app-extension' if self._IsIosWatchKitExtension(): assert self._IsBundle(), ('ios_watchkit_extension flag requires ' 'mac_bundle (target %s)' % self.spec['target_name']) return 'com.apple.product-type.watchkit-extension' if self._IsIosWatchApp(): assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle ' '(target %s)' % self.spec['target_name']) return 'com.apple.product-type.application.watchapp' if self._IsBundle(): return { 'executable': 'com.apple.product-type.application', 'loadable_module': 'com.apple.product-type.bundle', 'shared_library': 'com.apple.product-type.framework', }[self.spec['type']] else: return { 'executable': 'com.apple.product-type.tool', 'loadable_module': 'com.apple.product-type.library.dynamic', 'shared_library': 'com.apple.product-type.library.dynamic', 'static_library': 'com.apple.product-type.library.static', }[self.spec['type']] def GetMachOType(self): """Returns the MACH_O_TYPE of this target.""" # Weird, but matches Xcode. 
if not self._IsBundle() and self.spec['type'] == 'executable': return '' return { 'executable': 'mh_execute', 'static_library': 'staticlib', 'shared_library': 'mh_dylib', 'loadable_module': 'mh_bundle', }[self.spec['type']] def _GetBundleBinaryPath(self): """Returns the name of the bundle binary of by this target. E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" assert self._IsBundle() if self.spec['type'] in ('shared_library') or self.isIOS: path = self.GetBundleContentsFolderPath() elif self.spec['type'] in ('executable', 'loadable_module'): path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS') return os.path.join(path, self.GetExecutableName()) def _GetStandaloneExecutableSuffix(self): if 'product_extension' in self.spec: return '.' + self.spec['product_extension'] return { 'executable': '', 'static_library': '.a', 'shared_library': '.dylib', 'loadable_module': '.so', }[self.spec['type']] def _GetStandaloneExecutablePrefix(self): return self.spec.get('product_prefix', { 'executable': '', 'static_library': 'lib', 'shared_library': 'lib', # Non-bundled loadable_modules are called foo.so for some reason # (that is, .so and no prefix) with the xcode build -- match that. 'loadable_module': '', }[self.spec['type']]) def _GetStandaloneBinaryPath(self): """Returns the name of the non-bundle binary represented by this target. E.g. hello_world. 
Only valid for non-bundles.""" assert not self._IsBundle() assert self.spec['type'] in ( 'executable', 'shared_library', 'static_library', 'loadable_module'), ( 'Unexpected type %s' % self.spec['type']) target = self.spec['target_name'] if self.spec['type'] == 'static_library': if target[:3] == 'lib': target = target[3:] elif self.spec['type'] in ('loadable_module', 'shared_library'): if target[:3] == 'lib': target = target[3:] target_prefix = self._GetStandaloneExecutablePrefix() target = self.spec.get('product_name', target) target_ext = self._GetStandaloneExecutableSuffix() return target_prefix + target + target_ext def GetExecutableName(self): """Returns the executable name of the bundle represented by this target. E.g. Chromium.""" if self._IsBundle(): return self.spec.get('product_name', self.spec['target_name']) else: return self._GetStandaloneBinaryPath() def GetExecutablePath(self): """Returns the directory name of the bundle represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium.""" if self._IsBundle(): return self._GetBundleBinaryPath() else: return self._GetStandaloneBinaryPath() def GetActiveArchs(self, configname): """Returns the architectures this target should be built for.""" config_settings = self.xcode_settings[configname] xcode_archs_default = GetXcodeArchsDefault() return xcode_archs_default.ActiveArchs( config_settings.get('ARCHS'), config_settings.get('VALID_ARCHS'), config_settings.get('SDKROOT')) def _GetSdkVersionInfoItem(self, sdk, infoitem): # xcodebuild requires Xcode and can't run on Command Line Tools-only # systems from 10.7 onward. # Since the CLT has no SDK paths anyway, returning None is the # most sensible route and should still do the right thing. 
try: return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem]) except: pass def _SdkRoot(self, configname): if configname is None: configname = self.configname return self.GetPerConfigSetting('SDKROOT', configname, default='') def _SdkPath(self, configname=None): sdk_root = self._SdkRoot(configname) if sdk_root.startswith('/'): return sdk_root return self._XcodeSdkPath(sdk_root) def _XcodeSdkPath(self, sdk_root): if sdk_root not in XcodeSettings._sdk_path_cache: sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path') XcodeSettings._sdk_path_cache[sdk_root] = sdk_path if sdk_root: XcodeSettings._sdk_root_cache[sdk_path] = sdk_root return XcodeSettings._sdk_path_cache[sdk_root] def _AppendPlatformVersionMinFlags(self, lst): self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings(): # TODO: Implement this better? sdk_path_basename = os.path.basename(self._SdkPath()) if sdk_path_basename.lower().startswith('iphonesimulator'): self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET', '-mios-simulator-version-min=%s') else: self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET', '-miphoneos-version-min=%s') def GetCflags(self, configname, arch=None): """Returns flags that need to be added to .c, .cc, .m, and .mm compilations.""" # This functions (and the similar ones below) do not offer complete # emulation of all xcode_settings keys. They're implemented on demand. 
self.configname = configname cflags = [] sdk_root = self._SdkPath() if 'SDKROOT' in self._Settings() and sdk_root: cflags.append('-isysroot %s' % sdk_root) if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'): cflags.append('-Wconstant-conversion') if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'): cflags.append('-funsigned-char') if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'): cflags.append('-fasm-blocks') if 'GCC_DYNAMIC_NO_PIC' in self._Settings(): if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES': cflags.append('-mdynamic-no-pic') else: pass # TODO: In this case, it depends on the target. xcode passes # mdynamic-no-pic by default for executable and possibly static lib # according to mento if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'): cflags.append('-mpascal-strings') self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s') if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'): dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf') if dbg_format == 'dwarf': cflags.append('-gdwarf-2') elif dbg_format == 'stabs': raise NotImplementedError('stabs debug format is not supported yet.') elif dbg_format == 'dwarf-with-dsym': cflags.append('-gdwarf-2') else: raise NotImplementedError('Unknown debug format %s' % dbg_format) if self._Settings().get('GCC_STRICT_ALIASING') == 'YES': cflags.append('-fstrict-aliasing') elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO': cflags.append('-fno-strict-aliasing') if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'): cflags.append('-fvisibility=hidden') if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'): cflags.append('-Werror') if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'): cflags.append('-Wnewline-eof') # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or # llvm-gcc. 
It also requires a fairly recent libtool, and # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the # path to the libLTO.dylib that matches the used clang. if self._Test('LLVM_LTO', 'YES', default='NO'): cflags.append('-flto') self._AppendPlatformVersionMinFlags(cflags) # TODO: if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'): self._WarnUnimplemented('COPY_PHASE_STRIP') self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS') self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS') # TODO: This is exported correctly, but assigning to it is not supported. self._WarnUnimplemented('MACH_O_TYPE') self._WarnUnimplemented('PRODUCT_TYPE') if arch is not None: archs = [arch] else: assert self.configname archs = self.GetActiveArchs(self.configname) if len(archs) != 1: # TODO: Supporting fat binaries will be annoying. self._WarnUnimplemented('ARCHS') archs = ['i386'] cflags.append('-arch ' + archs[0]) if archs[0] in ('i386', 'x86_64'): if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'): cflags.append('-msse3') if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES', default='NO'): cflags.append('-mssse3') # Note 3rd 's'. 
if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'): cflags.append('-msse4.1') if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'): cflags.append('-msse4.2') cflags += self._Settings().get('WARNING_CFLAGS', []) if sdk_root: framework_root = sdk_root else: framework_root = '' config = self.spec['configurations'][self.configname] framework_dirs = config.get('mac_framework_dirs', []) for directory in framework_dirs: cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root)) self.configname = None return cflags def GetCflagsC(self, configname): """Returns flags that need to be added to .c, and .m compilations.""" self.configname = configname cflags_c = [] if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi': cflags_c.append('-ansi') else: self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s') cflags_c += self._Settings().get('OTHER_CFLAGS', []) self.configname = None return cflags_c def GetCflagsCC(self, configname): """Returns flags that need to be added to .cc, and .mm compilations.""" self.configname = configname cflags_cc = [] clang_cxx_language_standard = self._Settings().get( 'CLANG_CXX_LANGUAGE_STANDARD') # Note: Don't make c++0x to c++11 so that c++0x can be used with older # clangs that don't understand c++11 yet (like Xcode 4.2's). if clang_cxx_language_standard: cflags_cc.append('-std=%s' % clang_cxx_language_standard) self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s') if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'): cflags_cc.append('-fno-rtti') if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'): cflags_cc.append('-fno-exceptions') if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'): cflags_cc.append('-fvisibility-inlines-hidden') if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'): cflags_cc.append('-fno-threadsafe-statics') # Note: This flag is a no-op for clang, it only has an effect for gcc. 
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'): cflags_cc.append('-Wno-invalid-offsetof') other_ccflags = [] for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']): # TODO: More general variable expansion. Missing in many other places too. if flag in ('$inherited', '$(inherited)', '${inherited}'): flag = '$OTHER_CFLAGS' if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'): other_ccflags += self._Settings().get('OTHER_CFLAGS', []) else: other_ccflags.append(flag) cflags_cc += other_ccflags self.configname = None return cflags_cc def _AddObjectiveCGarbageCollectionFlags(self, flags): gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported') if gc_policy == 'supported': flags.append('-fobjc-gc') elif gc_policy == 'required': flags.append('-fobjc-gc-only') def _AddObjectiveCARCFlags(self, flags): if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'): flags.append('-fobjc-arc') def _AddObjectiveCMissingPropertySynthesisFlags(self, flags): if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS', 'YES', default='NO'): flags.append('-Wobjc-missing-property-synthesis') def GetCflagsObjC(self, configname): """Returns flags that need to be added to .m compilations.""" self.configname = configname cflags_objc = [] self._AddObjectiveCGarbageCollectionFlags(cflags_objc) self._AddObjectiveCARCFlags(cflags_objc) self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc) self.configname = None return cflags_objc def GetCflagsObjCC(self, configname): """Returns flags that need to be added to .mm compilations.""" self.configname = configname cflags_objcc = [] self._AddObjectiveCGarbageCollectionFlags(cflags_objcc) self._AddObjectiveCARCFlags(cflags_objcc) self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc) if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'): cflags_objcc.append('-fobjc-call-cxx-cdtors') self.configname = None return cflags_objcc def GetInstallNameBase(self): 
"""Return DYLIB_INSTALL_NAME_BASE for this target.""" # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. if (self.spec['type'] != 'shared_library' and (self.spec['type'] != 'loadable_module' or self._IsBundle())): return None install_base = self.GetPerTargetSetting( 'DYLIB_INSTALL_NAME_BASE', default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib') return install_base def _StandardizePath(self, path): """Do :standardizepath processing for path.""" # I'm not quite sure what :standardizepath does. Just call normpath(), # but don't let @executable_path/../foo collapse to foo. if '/' in path: prefix, rest = '', path if path.startswith('@'): prefix, rest = path.split('/', 1) rest = os.path.normpath(rest) # :standardizepath path = os.path.join(prefix, rest) return path def GetInstallName(self): """Return LD_DYLIB_INSTALL_NAME for this target.""" # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. if (self.spec['type'] != 'shared_library' and (self.spec['type'] != 'loadable_module' or self._IsBundle())): return None default_install_name = \ '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)' install_name = self.GetPerTargetSetting( 'LD_DYLIB_INSTALL_NAME', default=default_install_name) # Hardcode support for the variables used in chromium for now, to # unblock people using the make build. if '$' in install_name: assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/' '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), ( 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported ' 'yet in target \'%s\' (got \'%s\')' % (self.spec['target_name'], install_name)) install_name = install_name.replace( '$(DYLIB_INSTALL_NAME_BASE:standardizepath)', self._StandardizePath(self.GetInstallNameBase())) if self._IsBundle(): # These are only valid for bundles, hence the |if|. 
install_name = install_name.replace( '$(WRAPPER_NAME)', self.GetWrapperName()) install_name = install_name.replace( '$(PRODUCT_NAME)', self.GetProductName()) else: assert '$(WRAPPER_NAME)' not in install_name assert '$(PRODUCT_NAME)' not in install_name install_name = install_name.replace( '$(EXECUTABLE_PATH)', self.GetExecutablePath()) return install_name def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path): """Checks if ldflag contains a filename and if so remaps it from gyp-directory-relative to build-directory-relative.""" # This list is expanded on demand. # They get matched as: # -exported_symbols_list file # -Wl,exported_symbols_list file # -Wl,exported_symbols_list,file LINKER_FILE = r'(\S+)' WORD = r'\S+' linker_flags = [ ['-exported_symbols_list', LINKER_FILE], # Needed for NaCl. ['-unexported_symbols_list', LINKER_FILE], ['-reexported_symbols_list', LINKER_FILE], ['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting. ] for flag_pattern in linker_flags: regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern)) m = regex.match(ldflag) if m: ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \ ldflag[m.end(1):] # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, # TODO(thakis): Update ffmpeg.gyp): if ldflag.startswith('-L'): ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):]) return ldflag def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): """Returns flags that need to be passed to the linker. Args: configname: The name of the configuration to get ld flags for. product_dir: The directory where products such static and dynamic libraries are placed. This is added to the library search path. gyp_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build direcotry. """ self.configname = configname ldflags = [] # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS # can contain entries that depend on this. 
Explicitly absolutify these. for ldflag in self._Settings().get('OTHER_LDFLAGS', []): ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): ldflags.append('-Wl,-dead_strip') if self._Test('PREBINDING', 'YES', default='NO'): ldflags.append('-Wl,-prebind') self._Appendf( ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') self._Appendf( ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') self._AppendPlatformVersionMinFlags(ldflags) if 'SDKROOT' in self._Settings() and self._SdkPath(): ldflags.append('-isysroot ' + self._SdkPath()) for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): ldflags.append('-L' + gyp_to_build_path(library_path)) if 'ORDER_FILE' in self._Settings(): ldflags.append('-Wl,-order_file ' + '-Wl,' + gyp_to_build_path( self._Settings()['ORDER_FILE'])) if arch is not None: archs = [arch] else: assert self.configname archs = self.GetActiveArchs(self.configname) if len(archs) != 1: # TODO: Supporting fat binaries will be annoying. self._WarnUnimplemented('ARCHS') archs = ['i386'] ldflags.append('-arch ' + archs[0]) # Xcode adds the product directory by default. ldflags.append('-L' + product_dir) install_name = self.GetInstallName() if install_name and self.spec['type'] != 'loadable_module': ldflags.append('-install_name ' + install_name.replace(' ', r'\ ')) for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): ldflags.append('-Wl,-rpath,' + rpath) sdk_root = self._SdkPath() if not sdk_root: sdk_root = '' config = self.spec['configurations'][self.configname] framework_dirs = config.get('mac_framework_dirs', []) for directory in framework_dirs: ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() if sdk_root and is_extension: # Adds the link flags for extensions. These flags are common for all # extensions and provide loader and main function. 
# These flags reflect the compilation options used by xcode to compile # extensions. ldflags.append('-lpkstart') if XcodeVersion() < '0900': ldflags.append(sdk_root + '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit') ldflags.append('-fapplication-extension') ldflags.append('-Xlinker -rpath ' '-Xlinker @executable_path/../../Frameworks') self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s') self.configname = None return ldflags def GetLibtoolflags(self, configname): """Returns flags that need to be passed to the static linker. Args: configname: The name of the configuration to get ld flags for. """ self.configname = configname libtoolflags = [] for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []): libtoolflags.append(libtoolflag) # TODO(thakis): ARCHS? self.configname = None return libtoolflags def GetPerTargetSettings(self): """Gets a list of all the per-target settings. This will only fetch keys whose values are the same across all configurations.""" first_pass = True result = {} for configname in sorted(self.xcode_settings.keys()): if first_pass: result = dict(self.xcode_settings[configname]) first_pass = False else: for key, value in self.xcode_settings[configname].iteritems(): if key not in result: continue elif result[key] != value: del result[key] return result def GetPerConfigSetting(self, setting, configname, default=None): if configname in self.xcode_settings: return self.xcode_settings[configname].get(setting, default) else: return self.GetPerTargetSetting(setting, default) def GetPerTargetSetting(self, setting, default=None): """Tries to get xcode_settings.setting from spec. 
Assumes that the setting has the same value in all configurations and throws otherwise.""" is_first_pass = True result = None for configname in sorted(self.xcode_settings.keys()): if is_first_pass: result = self.xcode_settings[configname].get(setting, None) is_first_pass = False else: assert result == self.xcode_settings[configname].get(setting, None), ( "Expected per-target setting for '%s', got per-config setting " "(target %s)" % (setting, self.spec['target_name'])) if result is None: return default return result def _GetStripPostbuilds(self, configname, output_binary, quiet): """Returns a list of shell commands that contain the shell commands neccessary to strip this target's binary. These should be run as postbuilds before the actual postbuilds run.""" self.configname = configname result = [] if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')): default_strip_style = 'debugging' if self.spec['type'] == 'loadable_module' and self._IsBundle(): default_strip_style = 'non-global' elif self.spec['type'] == 'executable': default_strip_style = 'all' strip_style = self._Settings().get('STRIP_STYLE', default_strip_style) strip_flags = { 'all': '', 'non-global': '-x', 'debugging': '-S', }[strip_style] explicit_strip_flags = self._Settings().get('STRIPFLAGS', '') if explicit_strip_flags: strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags) if not quiet: result.append('echo STRIP\\(%s\\)' % self.spec['target_name']) result.append('strip %s %s' % (strip_flags, output_binary)) self.configname = None return result def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): """Returns a list of shell commands that contain the shell commands neccessary to massage this target's debug information. These should be run as postbuilds before the actual postbuilds run.""" self.configname = configname # For static libraries, no dSYMs are created. 
    result = []
    # dsymutil only runs when debugging symbols are generated in the
    # dwarf-with-dsym format; static libraries never get dSYMs.
    if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
        self._Test(
            'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
        self.spec['type'] != 'static_library'):
      if not quiet:
        result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
      result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
    self.configname = None
    return result

  def _GetTargetPostbuilds(self, configname, output, output_binary,
                           quiet=False):
    """Returns a list of shell commands that contain the shell commands
    to run as postbuilds for this target, before the actual postbuilds."""
    # dSYMs need to build before stripping happens.
    return (
        self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
        self._GetStripPostbuilds(configname, output_binary, quiet))

  def _GetIOSPostbuilds(self, configname, output_binary):
    """Return a shell command to codesign the iOS output binary so it can
    be deployed to a device.  This should be run as the very last step of the
    build."""
    # Code signing applies only to iOS executables.
    if not (self.isIOS and self.spec['type'] == 'executable'):
      return []

    settings = self.xcode_settings[configname]
    key = self._GetIOSCodeSignIdentityKey(settings)
    # Without a resolvable signing identity there is nothing to sign with.
    if not key:
      return []

    # Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS'] unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) if unimpl: print 'Warning: Some codesign keys not implemented, ignoring: %s' % ( ', '.join(sorted(unimpl))) return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % ( os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key, settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''), settings.get('CODE_SIGN_ENTITLEMENTS', ''), settings.get('PROVISIONING_PROFILE', '')) ] def _GetIOSCodeSignIdentityKey(self, settings): identity = settings.get('CODE_SIGN_IDENTITY') if not identity: return None if identity not in XcodeSettings._codesigning_key_cache: output = subprocess.check_output( ['security', 'find-identity', '-p', 'codesigning', '-v']) for line in output.splitlines(): if identity in line: fingerprint = line.split()[1] cache = XcodeSettings._codesigning_key_cache assert identity not in cache or fingerprint == cache[identity], ( "Multiple codesigning fingerprints for identity: %s" % identity) XcodeSettings._codesigning_key_cache[identity] = fingerprint return XcodeSettings._codesigning_key_cache.get(identity, '') def AddImplicitPostbuilds(self, configname, output, output_binary, postbuilds=[], quiet=False): """Returns a list of shell commands that should run before and after |postbuilds|.""" assert output_binary is not None pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet) post = self._GetIOSPostbuilds(configname, output_binary) return pre + postbuilds + post def _AdjustLibrary(self, library, config_name=None): if library.endswith('.framework'): l = '-framework ' + os.path.splitext(os.path.basename(library))[0] else: m = self.library_re.match(library) if m: l = '-l' + m.group(1) else: l = library sdk_root = self._SdkPath(config_name) if not sdk_root: sdk_root = '' # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of # ".dylib" without providing a real support for them. 
What it does, for # "/usr/lib" libraries, is do "-L/usr/lib -lname" which is dependent on the # library order and cause collision when building Chrome. # # Instead substitude ".tbd" to ".dylib" in the generated project when the # following conditions are both true: # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib", # - the ".dylib" file does not exists but a ".tbd" file do. library = l.replace('$(SDKROOT)', sdk_root) if l.startswith('$(SDKROOT)'): basename, ext = os.path.splitext(library) if ext == '.dylib' and not os.path.exists(library): tbd_library = basename + '.tbd' if os.path.exists(tbd_library): library = tbd_library return library def AdjustLibraries(self, libraries, config_name=None): """Transforms entries like 'Cocoa.framework' in libraries into entries like '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. """ libraries = [self._AdjustLibrary(library, config_name) for library in libraries] return libraries def _BuildMachineOSBuild(self): return GetStdout(['sw_vers', '-buildVersion']) def _XcodeIOSDeviceFamily(self, configname): family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1') return [int(x) for x in family.split(',')] def GetExtraPlistItems(self, configname=None): """Returns a dictionary with extra items to insert into Info.plist.""" if configname not in XcodeSettings._plist_cache: cache = {} cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild() xcode, xcode_build = XcodeVersion() cache['DTXcode'] = xcode cache['DTXcodeBuild'] = xcode_build sdk_root = self._SdkRoot(configname) if not sdk_root: sdk_root = self._DefaultSdkRoot() cache['DTSDKName'] = sdk_root if xcode >= '0430': cache['DTSDKBuild'] = self._GetSdkVersionInfoItem( sdk_root, 'ProductBuildVersion') else: cache['DTSDKBuild'] = cache['BuildMachineOSBuild'] if self.isIOS: cache['DTPlatformName'] = cache['DTSDKName'] if configname.endswith("iphoneos"): cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem( sdk_root, 'ProductVersion') 
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS'] else: cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator'] XcodeSettings._plist_cache[configname] = cache # Include extra plist items that are per-target, not per global # XcodeSettings. items = dict(XcodeSettings._plist_cache[configname]) if self.isIOS: items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname) return items def _DefaultSdkRoot(self): """Returns the default SDKROOT to use. Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode project, then the environment variable was empty. Starting with this version, Xcode uses the name of the newest SDK installed. """ xcode_version, xcode_build = XcodeVersion() if xcode_version < '0500': return '' default_sdk_path = self._XcodeSdkPath('') default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path) if default_sdk_root: return default_sdk_root try: all_sdks = GetStdout(['xcodebuild', '-showsdks']) except: # If xcodebuild fails, there will be no valid SDKs return '' for line in all_sdks.splitlines(): items = line.split() if len(items) >= 3 and items[-2] == '-sdk': sdk_root = items[-1] sdk_path = self._XcodeSdkPath(sdk_root) if sdk_path == default_sdk_path: return sdk_root return '' class MacPrefixHeader(object): """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. This feature consists of several pieces: * If GCC_PREFIX_HEADER is present, all compilations in that project get an additional |-include path_to_prefix_header| cflag. * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is instead compiled, and all other compilations in the project get an additional |-include path_to_compiled_header| instead. + Compiled prefix headers have the extension gch. There is one gch file for every language used in the project (c, cc, m, mm), since gch files for different languages aren't compatible. 
+ gch files themselves are built with the target's normal cflags, but they obviously don't get the |-include| flag. Instead, they need a -x flag that describes their language. + All o files in the target need to depend on the gch file, to make sure it's built before any o file is built. This class helps with some of these tasks, but it needs help from the build system for writing dependencies to the gch files, for writing build commands for the gch files, and for figuring out the location of the gch files. """ def __init__(self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output): """If xcode_settings is None, all methods on this class are no-ops. Args: gyp_path_to_build_path: A function that takes a gyp-relative path, and returns a path relative to the build directory. gyp_path_to_build_output: A function that takes a gyp-relative path and a language code ('c', 'cc', 'm', or 'mm'), and that returns a path to where the output of precompiling that path for that language should be placed (without the trailing '.gch'). """ # This doesn't support per-configuration prefix headers. Good enough # for now. self.header = None self.compile_headers = False if xcode_settings: self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER') self.compile_headers = xcode_settings.GetPerTargetSetting( 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO' self.compiled_headers = {} if self.header: if self.compile_headers: for lang in ['c', 'cc', 'm', 'mm']: self.compiled_headers[lang] = gyp_path_to_build_output( self.header, lang) self.header = gyp_path_to_build_path(self.header) def _CompiledHeader(self, lang, arch): assert self.compile_headers h = self.compiled_headers[lang] if arch: h += '.' 
+ arch return h def GetInclude(self, lang, arch=None): """Gets the cflags to include the prefix header for language |lang|.""" if self.compile_headers and lang in self.compiled_headers: return '-include %s' % self._CompiledHeader(lang, arch) elif self.header: return '-include %s' % self.header else: return '' def _Gch(self, lang, arch): """Returns the actual file name of the prefix header for language |lang|.""" assert self.compile_headers return self._CompiledHeader(lang, arch) + '.gch' def GetObjDependencies(self, sources, objs, arch=None): """Given a list of source files and the corresponding object files, returns a list of (source, object, gch) tuples, where |gch| is the build-directory relative path to the gch file each object file depends on. |compilable[i]| has to be the source file belonging to |objs[i]|.""" if not self.header or not self.compile_headers: return [] result = [] for source, obj in zip(sources, objs): ext = os.path.splitext(source)[1] lang = { '.c': 'c', '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc', '.m': 'm', '.mm': 'mm', }.get(ext, None) if lang: result.append((source, obj, self._Gch(lang, arch))) return result def GetPchBuildCommands(self, arch=None): """Returns [(path_to_gch, language_flag, language, header)]. |path_to_gch| and |header| are relative to the build directory. """ if not self.header or not self.compile_headers: return [] return [ (self._Gch('c', arch), '-x c-header', 'c', self.header), (self._Gch('cc', arch), '-x c++-header', 'cc', self.header), (self._Gch('m', arch), '-x objective-c-header', 'm', self.header), (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header), ] def XcodeVersion(): """Returns a tuple of version and build version of installed Xcode.""" # `xcodebuild -version` output looks like # Xcode 4.6.3 # Build version 4H1503 # or like # Xcode 3.2.6 # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0 # BuildVersion: 10M2518 # Convert that to '0463', '4H1503'. 
global XCODE_VERSION_CACHE if XCODE_VERSION_CACHE: return XCODE_VERSION_CACHE try: version_list = GetStdout(['xcodebuild', '-version']).splitlines() # In some circumstances xcodebuild exits 0 but doesn't return # the right results; for example, a user on 10.7 or 10.8 with # a bogus path set via xcode-select # In that case this may be a CLT-only install so fall back to # checking that version. if len(version_list) < 2: raise GypError("xcodebuild returned unexpected results") except: version = CLTVersion() if version: version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0] else: raise GypError("No Xcode or CLT version detected!") # The CLT has no build information, so we return an empty string. version_list = [version, ''] version = version_list[0] build = version_list[-1] # Be careful to convert "4.2" to "0420": version = version.split()[-1].replace('.', '') version = (version + '0' * (3 - len(version))).zfill(4) if build: build = build.split()[-1] XCODE_VERSION_CACHE = (version, build) return XCODE_VERSION_CACHE # This function ported from the logic in Homebrew's CLT version check def CLTVersion(): """Returns the version of command-line tools from pkgutil.""" # pkgutil output looks like # package-id: com.apple.pkg.CLTools_Executables # version: 5.0.1.0.1.1382131676 # volume: / # location: / # install-time: 1382544035 # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo" FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI" MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables" regex = re.compile('version: (?P<version>.+)') for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]: try: output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key]) return re.search(regex, output).groupdict()['version'] except: continue def GetStdout(cmdlist): """Returns the content of standard output returned by invoking |cmdlist|. 
Raises |GypError| if the command return with a non-zero return code.""" job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE) out = job.communicate()[0] if job.returncode != 0: sys.stderr.write(out + '\n') raise GypError('Error %d running %s' % (job.returncode, cmdlist[0])) return out.rstrip('\n') def MergeGlobalXcodeSettingsToSpec(global_dict, spec): """Merges the global xcode_settings dictionary into each configuration of the target represented by spec. For keys that are both in the global and the local xcode_settings dict, the local key gets precendence. """ # The xcode generator special-cases global xcode_settings and does something # that amounts to merging in the global xcode_settings into each local # xcode_settings dict. global_xcode_settings = global_dict.get('xcode_settings', {}) for config in spec['configurations'].values(): if 'xcode_settings' in config: new_settings = global_xcode_settings.copy() new_settings.update(config['xcode_settings']) config['xcode_settings'] = new_settings def IsMacBundle(flavor, spec): """Returns if |spec| should be treated as a bundle. Bundles are directories with a certain subdirectory structure, instead of just a single file. Bundle rules do not produce a binary but also package resources into that directory.""" is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') if is_mac_bundle: assert spec['type'] != 'none', ( 'mac_bundle targets cannot have type none (target "%s")' % spec['target_name']) return is_mac_bundle def GetMacBundleResources(product_dir, xcode_settings, resources): """Yields (output, resource) pairs for every resource in |resources|. Only call this for mac bundle targets. Args: product_dir: Path to the directory containing the output bundle, relative to the build directory. xcode_settings: The XcodeSettings of the current target. resources: A list of bundle resources, relative to the build directory. 
""" dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder()) for res in resources: output = dest # The make generator doesn't support it, so forbid it everywhere # to keep the generators more interchangable. assert ' ' not in res, ( "Spaces in resource filenames not supported (%s)" % res) # Split into (path,file). res_parts = os.path.split(res) # Now split the path into (prefix,maybe.lproj). lproj_parts = os.path.split(res_parts[0]) # If the resource lives in a .lproj bundle, add that to the destination. if lproj_parts[1].endswith('.lproj'): output = os.path.join(output, lproj_parts[1]) output = os.path.join(output, res_parts[1]) # Compiled XIB files are referred to by .nib. if output.endswith('.xib'): output = os.path.splitext(output)[0] + '.nib' # Compiled storyboard files are referred to by .storyboardc. if output.endswith('.storyboard'): output = os.path.splitext(output)[0] + '.storyboardc' yield output, res def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): """Returns (info_plist, dest_plist, defines, extra_env), where: * |info_plist| is the source plist path, relative to the build directory, * |dest_plist| is the destination plist path, relative to the build directory, * |defines| is a list of preprocessor defines (empty if the plist shouldn't be preprocessed, * |extra_env| is a dict of env variables that should be exported when invoking |mac_tool copy-info-plist|. Only call this for mac bundle targets. Args: product_dir: Path to the directory containing the output bundle, relative to the build directory. xcode_settings: The XcodeSettings of the current target. gyp_to_build_path: A function that converts paths relative to the current gyp file to paths relative to the build direcotry. """ info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') if not info_plist: return None, None, [], {} # The make generator doesn't support it, so forbid it everywhere # to keep the generators more interchangable. 
assert ' ' not in info_plist, ( "Spaces in Info.plist filenames not supported (%s)" % info_plist) info_plist = gyp_path_to_build_path(info_plist) # If explicitly set to preprocess the plist, invoke the C preprocessor and # specify any defines as -D flags. if xcode_settings.GetPerTargetSetting( 'INFOPLIST_PREPROCESS', default='NO') == 'YES': # Create an intermediate file based on the path. defines = shlex.split(xcode_settings.GetPerTargetSetting( 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default='')) else: defines = [] dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) extra_env = xcode_settings.GetPerTargetSettings() return info_plist, dest_plist, defines, extra_env def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None): """Return the environment variables that Xcode would set. See http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 for a full list. Args: xcode_settings: An XcodeSettings object. If this is None, this function returns an empty dict. built_products_dir: Absolute path to the built products dir. srcroot: Absolute path to the source root. configuration: The build configuration name. additional_settings: An optional dict with more values to add to the result. """ if not xcode_settings: return {} # This function is considered a friend of XcodeSettings, so let it reach into # its implementation details. spec = xcode_settings.spec # These are filled in on a as-needed basis. 
env = { 'BUILT_FRAMEWORKS_DIR' : built_products_dir, 'BUILT_PRODUCTS_DIR' : built_products_dir, 'CONFIGURATION' : configuration, 'PRODUCT_NAME' : xcode_settings.GetProductName(), # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME 'SRCROOT' : srcroot, 'SOURCE_ROOT': '${SRCROOT}', # This is not true for static libraries, but currently the env is only # written for bundles: 'TARGET_BUILD_DIR' : built_products_dir, 'TEMP_DIR' : '${TMPDIR}', } if xcode_settings.GetPerConfigSetting('SDKROOT', configuration): env['SDKROOT'] = xcode_settings._SdkPath(configuration) else: env['SDKROOT'] = '' if spec['type'] in ( 'executable', 'static_library', 'shared_library', 'loadable_module'): env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath() env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName() mach_o_type = xcode_settings.GetMachOType() if mach_o_type: env['MACH_O_TYPE'] = mach_o_type env['PRODUCT_TYPE'] = xcode_settings.GetProductType() if xcode_settings._IsBundle(): env['CONTENTS_FOLDER_PATH'] = \ xcode_settings.GetBundleContentsFolderPath() env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ xcode_settings.GetBundleResourceFolder() env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() install_name = xcode_settings.GetInstallName() if install_name: env['LD_DYLIB_INSTALL_NAME'] = install_name install_name_base = xcode_settings.GetInstallNameBase() if install_name_base: env['DYLIB_INSTALL_NAME_BASE'] = install_name_base if XcodeVersion() >= '0500' and not env.get('SDKROOT'): sdk_root = xcode_settings._SdkRoot(configuration) if not sdk_root: sdk_root = xcode_settings._XcodeSdkPath('') if sdk_root is None: sdk_root = '' env['SDKROOT'] = sdk_root if not additional_settings: additional_settings = {} else: # Flatten lists to strings. 
for k in additional_settings: if not isinstance(additional_settings[k], str): additional_settings[k] = ' '.join(additional_settings[k]) additional_settings.update(env) for k in additional_settings: additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) return additional_settings def _NormalizeEnvVarReferences(str): """Takes a string containing variable references in the form ${FOO}, $(FOO), or $FOO, and returns a string with all variable references in the form ${FOO}. """ # $FOO -> ${FOO} str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str) # $(FOO) -> ${FOO} matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str) for match in matches: to_replace, variable = match assert '$(' not in match, '$($(FOO)) variables not supported: ' + match str = str.replace(to_replace, '${' + variable + '}') return str def ExpandEnvVars(string, expansions): """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the expansions list. If the variable expands to something that references another variable, this variable is expanded as well if it's in env -- until no variables present in env are left.""" for k, v in reversed(expansions): string = string.replace('${' + k + '}', v) string = string.replace('$(' + k + ')', v) string = string.replace('$' + k, v) return string def _TopologicallySortedEnvVarKeys(env): """Takes a dict |env| whose values are strings that can refer to other keys, for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of env such that key2 is after key1 in L if env[key2] refers to env[key1]. Throws an Exception in case of dependency cycles. """ # Since environment variables can refer to other variables, the evaluation # order is important. Below is the logic to compute the dependency graph # and sort it. regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}') def GetEdges(node): # Use a definition of edges such that user_of_variable -> used_varible. 
# This happens to be easier in this case, since a variable's # definition contains all variables it references in a single string. # We can then reverse the result of the topological sort at the end. # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) matches = set([v for v in regex.findall(env[node]) if v in env]) for dependee in matches: assert '${' not in dependee, 'Nested variables not supported: ' + dependee return matches try: # Topologically sort, and then reverse, because we used an edge definition # that's inverted from the expected result of this function (see comment # above). order = gyp.common.TopologicallySorted(env.keys(), GetEdges) order.reverse() return order except gyp.common.CycleError, e: raise GypError( 'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None): env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, additional_settings) return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)] def GetSpecPostbuildCommands(spec, quiet=False): """Returns the list of postbuilds explicitly defined on |spec|, in a form executable by a shell.""" postbuilds = [] for postbuild in spec.get('postbuilds', []): if not quiet: postbuilds.append('echo POSTBUILD\\(%s\\) %s' % ( spec['target_name'], postbuild['postbuild_name'])) postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action'])) return postbuilds def _HasIOSTarget(targets): """Returns true if any target contains the iOS specific key IPHONEOS_DEPLOYMENT_TARGET.""" for target_dict in targets.values(): for config in target_dict['configurations'].values(): if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'): return True return False def _AddIOSDeviceConfigurations(targets): """Clone all targets and append -iphoneos to the name. 
Configure these targets to build for iOS devices and use correct architectures for those builds.""" for target_dict in targets.itervalues(): toolset = target_dict['toolset'] configs = target_dict['configurations'] for config_name, config_dict in dict(configs).iteritems(): iphoneos_config_dict = copy.deepcopy(config_dict) configs[config_name + '-iphoneos'] = iphoneos_config_dict configs[config_name + '-iphonesimulator'] = config_dict if toolset == 'target': iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos' return targets def CloneConfigurationForDeviceAndEmulator(target_dicts): """If |target_dicts| contains any iOS targets, automatically create -iphoneos targets for iOS device builds.""" if _HasIOSTarget(target_dicts): return _AddIOSDeviceConfigurations(target_dicts) return target_dicts
specialunderwear/django-easymode
refs/heads/master
easymode/i18n/meta/utils.py
3
""" utility functions used by easymode's i18n.meta package to get localized attributes from a class. """ from django.conf import settings from django.utils import translation from easymode.utils import first_match from easymode.utils.languagecode import get_real_fieldname from easymode.utils import first_match def valid_for_gettext(value): """Gettext acts weird when empty string is passes, and passing none would be even weirder""" return value not in (None, "") def get_fallback_languages(): """Retrieve the fallback languages from the settings.py""" lang = translation.get_language() fallback_list = settings.FALLBACK_LANGUAGES.get(lang, None) if fallback_list: return fallback_list return settings.FALLBACK_LANGUAGES.get(lang[:2], []) def get_localized_property(context, field=None, language=None): ''' When accessing to the name of the field itself, the value in the current language will be returned. Unless it's set, the value in the default language will be returned. ''' if language: return getattr(context, get_real_fieldname(field, language)) if hasattr(settings, 'FALLBACK_LANGUAGES'): attrs = [translation.get_language()] attrs += get_fallback_languages() else: attrs = [ translation.get_language(), translation.get_language()[:2], settings.LANGUAGE_CODE, ] def predicate(x): value = getattr(context, get_real_fieldname(field, x), None) return value if valid_for_gettext(value) else None return first_match(predicate, attrs) def get_localized_field_name(context, field): """Get the name of the localized field""" attrs = [ translation.get_language(), translation.get_language()[:2], settings.LANGUAGE_CODE ] def predicate(x): field_name = get_real_fieldname(field, x) if hasattr(context, field_name): return field_name return None return first_match(predicate, attrs) def get_field_from_model_by_name(model_class, field_name): """ Get a field by name from a model class without messing with the app cache. 
""" return first_match(lambda x: x if x.name == field_name else None, model_class._meta.fields)
ryan-talley/f5-cccl
refs/heads/master
f5_cccl/resource/ltm/profile/__init__.py
1
#!/usr/bin/env python # Copyright (c) 2017,2018, F5 Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """This module implements the F5 CCCL Profile class.""" from .profile import Profile # noqa: F401
dbentley/pants
refs/heads/master
pants-plugins/src/python/internal_backend/__init__.py
12133432
tmimori/frappe
refs/heads/develop
frappe/integrations/doctype/s3_backup_settings/__init__.py
12133432
myaskevich/python-bype
refs/heads/master
test/__init__.py
12133432
dnouri/Lasagne
refs/heads/master
nntools/theano_extensions/__init__.py
12133432