repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
obreitwi/dota2vgs
refs/heads/master
dota2vgs/vgs.py
1
#!/usr/bin/env python # encoding: utf-8 # Copyright (c) 2013-2014 Oliver Breitwieser # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. __all__ = ["Composer"] from .logcfg import log from .cfg_parser import BindParser from .lst_parser import LST_Hotkey_Parser from .commands import Bind, Alias, StatefulAlias from .overlay import GroupWriter from .misc import load_data import string import itertools as it class ParseError(Exception): pass class Composer(object): """ Composes and write the VGS system. 
""" # allowed letters for names desired_letters = string.ascii_letters\ + "_" + string.digits # prefix for aliases prefix = "vgs_" prefix_original = "ori_" prefix_current = "cur_" prefix_group = "grp_" prefix_phrase = "phr_" designator_groups = "groups" designator_cmds = "phrases" @property def recursive_elements(self): return [self.designator_groups, self.designator_cmds] def __init__(self, cfg_files, lst_files, layout_file, output_file=None, ignore_keys=None, silent=False, lineending="\r\n" # windows style by default ): """ `cfg_files` is a list of filenames from which to read the original configuration that is to be preserved. `layout_file` yaml file with layout infomration of the VGS. """ self.silent = silent # aliases to be included in the final script self.aliases = {} self.LE = lineending # read existing binds self.existing_binds = {} for cfg_file in cfg_files: b = BindParser(cfg_file, silent=self.silent) for k,v in b.get().items(): self.existing_binds[k.lower()] = v self.layout = load_data(layout_file) self.check_layout_names() self._determine_used_keys() self.key_stateful = set([]) self.duplicates = {} if ignore_keys is not None: self.used_keys -= set(ignore_keys) # see if any of the used keys have a mapping in the lst file (dota 2 # options) for lst_file in lst_files: h = LST_Hotkey_Parser(lst_file, silent=self.silent) mapping = h.get_hotkey_functions(self.used_keys) self.existing_binds.update(mapping) # adjust the existing binding for the start hotkey only self.existing_binds[self.layout["hotkey"]] =\ self.get_aname_group("start") self._setup_aliases_existing_binds() self._setup_aliases_restore() if self.layout.get("vgs_console_menu_enabled", False): self.setup_menu() self.has_menu = True else: self.has_menu = False self.layout["name"] = "start" self.setup_aliases_group(self.layout) self.additional_commands() if output_file is not None: self.write_script_file(output_file) if not self.silent: log.info("Please go to the Dota 2 options menu and delete the " 
"bindings to the following keys: {}".format(self.used_keys)) def setup_menu(self): writer_kwargs = {} def add_if_exists(layout_name, kwarg_name): if layout_name in self.layout: writer_kwargs[kwarg_name] = self.layout[layout_name] add_if_exists("vgs_menu_lines_offset", "lines_offset") add_if_exists("vgs_menu_show_lines", "lines_write_area") add_if_exists("vgs_menu_notify_time", "notify_time") add_if_exists("vgs_menu_hotkeys_min_width", "hotkey_min_width") self.console_writer = GroupWriter(**writer_kwargs) self.console_writer.add_stop_commands_to_alias(self.aliases["restore"]) self.console_writer.set_footer(["", self.console_writer.format_hotkey(self.layout["hotkey_cancel"], "Cancel..")]) def add_alias(self, name, type_=Alias): new_alias = type_(self.get_alias_name(name), lineending=self.LE) self.aliases[name] = new_alias return new_alias def get_cmd_alias(self, a_from, a_to): return "alias {} {}".format(a_from, a_to) def get_alias_name(self, name): # make sure name contains prefix if not name.startswith(self.prefix): name = self.prefix + name return name def get_aname_current(self, key, off_state=False): """ `off_state` = True will return the off_state alias name for a stateful alias. """ if self.is_key_stateful(key): if not off_state: prefix = StatefulAlias.token_state_on else: prefix = StatefulAlias.token_state_off else: prefix = "" return prefix + self.get_alias_name(self.prefix_current + key) def get_aname_original(self, key, off_state=False): """ `off_state` = True will return the off_state alias name for a stateful alias. 
""" if self.is_key_stateful(key): if not off_state: prefix = StatefulAlias.token_state_on else: prefix = StatefulAlias.token_state_off else: prefix = "" return prefix + self.get_alias_name(self.prefix_original + key) def get_aname_group(self, name): return self.get_alias_name(self.prefix_group + name) def get_aname_phrase(self, name): return self.get_alias_name(self.prefix_phrase + name) def is_key_stateful(self, key): return key in self.key_stateful def _determine_used_keys(self): # self.used_keys = set() # for now just add all ascii keys self.used_keys = set(string.lowercase) self.used_keys.add(self.layout["hotkey"]) self.used_keys.add(self.layout["hotkey_cancel"]) queue = [] queue.extend(self.layout[self.designator_groups]) while len(queue) > 0: item = queue.pop() if "hotkey" in item: self.used_keys.add(item["hotkey"]) for k in self.recursive_elements: if k in item: queue.extend(item[k]) def _setup_aliases_existing_binds(self): """ Sets up aliases containing the original key function. """ for k in self.used_keys & set(self.existing_binds.keys()): existing_bind = self.existing_binds[k] alias_type = Alias if StatefulAlias.contains_state(existing_bind): # the bind contains state, we need to account for that alias_type = StatefulAlias alias = self.add_alias(self.get_aname_original(k), type_=alias_type) if StatefulAlias.contains_state(existing_bind): self.key_stateful.add(k) alias.add(existing_bind) def add_clear_aliases(self, alias, hotkeys): """ Adds aliases that disable all aliases for the keys in hotkeys. """ for h in hotkeys: alias.add("alias {}".format(self.get_aname_current(h))) if self.is_key_stateful(h): alias.add("alias {}".format(self.get_aname_current(h, off_state=True))) def _setup_aliases_restore(self): """ Sets up the alias resetting all used keys to their original state. 
""" restore = self.add_alias("restore") self.restore_alias_name = restore.name for k in self.used_keys: restore.add("alias {current} {original}".format( current=self.get_aname_current(k), original=self.get_aname_original(k), )) if self.is_key_stateful(k): restore.add("alias {current} {original}".format( current=self.get_aname_current(k, off_state=True), original=self.get_aname_original(k, off_state=True), )) def setup_aliases_group(self, dct): """ Set up the starting alias. """ self.assure_no_duplicate_hotkeys(dct) alias = self.add_alias(self.get_aname_group(dct["name"])) alias.add(self.get_cmd_alias( self.get_aname_current(self.layout["hotkey_cancel"]), self.restore_alias_name)) # clear all other keys to prevent accidentatl keypresses clear_hotkeys = self.used_keys - set(self.get_concurrent_hotkeys(dct)) clear_hotkeys -= set([self.layout["hotkey_cancel"], self.layout["hotkey"]]) self.add_clear_aliases(alias, clear_hotkeys) for phrase in dct.get(self.designator_cmds, []): phrase_name = self.setup_phrase(phrase["name"], phrase["id"]) hotkey_name = self.get_aname_current(phrase["hotkey"]) alias.add(self.get_cmd_alias(hotkey_name, phrase_name)) for group in dct.get(self.designator_groups, []): self.setup_aliases_group(group) group_name = self.get_aname_group(group["name"]) hotkey_name = self.get_aname_current(group["hotkey"]) alias.add(self.get_cmd_alias(hotkey_name, group_name)) if self.has_menu: self.console_writer.write_group_info_to_alias(dct, alias) def assure_no_duplicate_hotkeys(self, dct): hotkeys = self.get_concurrent_hotkeys(dct) set_hotkeys = set(hotkeys) if len(hotkeys) != len(set_hotkeys): duplicate_hotkeys = self.duplicates.setdefault(dct["name"], []) for k in set_hotkeys: if hotkeys.count(k) > 1: duplicate_hotkeys.append(k) log.warn("Group {} contains duplicate hotkeys for: {}".format( dct["name"], ", ".join(duplicate_hotkeys))) def get_concurrent_hotkeys(self, dct): """ Returns all hotkeys used by group. 
NOTE: That it is a list and may have duplicates etc. """ hotkeys = [] for elem in self.recursive_elements: for item in dct.get(elem, []): hotkeys.append(item["hotkey"]) return hotkeys def setup_phrase(self, name, id): """ Set up the alias with name `name` and id `id`. Returns the name of the alias. """ alias = self.add_alias(self.get_aname_phrase(name)) alias.add("chatwheel_say {}".format(id)) alias.add(self.restore_alias_name) return alias.name def write_script_file(self, f): self.write_aliases(f) self.write_bindings(f) if self.has_menu: self.write_menu_prelude(f) f.write(self.restore_alias_name + self.LE) f.write("echo \"VGS successfully loaded!\"" + self.LE) def write_menu_prelude(self, f): for cmd in self.console_writer.start_commands(): f.write(cmd + self.LE) def write_bindings(self, file): for k in self.used_keys: b = Bind(k) b.add(self.get_aname_current(k)) file.write(b.get() + self.LE) def write_aliases(self, file): for a in self.aliases.values(): file.write(a.get() + self.LE) def check_layout_names(self, grp=None): if grp is None: grp = self.layout name = grp.get("name", "") if any((l not in self.desired_letters for l in name)): raise ParseError("Illegal character in {}.".format(name)) for g in it.chain( grp.get(self.designator_groups, []), grp.get(self.designator_cmds, [])): self.check_layout_names(g) def additional_commands(self): """ Other commands outside of the regular rebinding """ if self.layout.get("indicate_vgs_mode_via_minimap", False): self.set_indicator() def set_indicator(self): start = self.aliases[self.get_aname_group("start")] start.add("dota_minimap_hero_size {}".format( self.layout.get("minimap_hero_size_vgs_mode", 1200))) restore = self.aliases["restore"] restore.add("dota_minimap_hero_size {}".format( self.layout.get("minimap_hero_size_regular", 600)))
mrichart/ns-3-dev-git
refs/heads/rpc
src/lte/bindings/modulegen_customizations.py
120
import os def post_register_types(root_module): enabled_features = os.environ['NS3_ENABLED_FEATURES'].split(',') if 'EmuFdNetDevice' not in enabled_features: if 'ns3::EmuEpcHelper'in root_module: root_module.classes.remove(root_module['ns3::EmuEpcHelper'])
ghchinoy/tensorflow
refs/heads/master
tensorflow/tools/common/traverse_test.py
60
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for Python module traversal.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.platform import googletest from tensorflow.tools.common import test_module1 from tensorflow.tools.common import test_module2 from tensorflow.tools.common import traverse class TestVisitor(object): def __init__(self): self.call_log = [] def __call__(self, path, parent, children): self.call_log += [(path, parent, children)] class TraverseTest(googletest.TestCase): def test_cycle(self): class Cyclist(object): pass Cyclist.cycle = Cyclist visitor = TestVisitor() traverse.traverse(Cyclist, visitor) # We simply want to make sure we terminate. def test_module(self): visitor = TestVisitor() traverse.traverse(test_module1, visitor) called = [parent for _, parent, _ in visitor.call_log] self.assertIn(test_module1.ModuleClass1, called) self.assertIn(test_module2.ModuleClass2, called) def test_class(self): visitor = TestVisitor() traverse.traverse(TestVisitor, visitor) self.assertEqual(TestVisitor, visitor.call_log[0][1]) # There are a bunch of other members, but make sure that the ones we know # about are there. 
self.assertIn('__init__', [name for name, _ in visitor.call_log[0][2]]) self.assertIn('__call__', [name for name, _ in visitor.call_log[0][2]]) # There are more classes descended into, at least __class__ and # __class__.__base__, neither of which are interesting to us, and which may # change as part of Python version etc., so we don't test for them. def test_non_class(self): integer = 5 visitor = TestVisitor() traverse.traverse(integer, visitor) self.assertEqual([], visitor.call_log) if __name__ == '__main__': googletest.main()
econchick/heroku-buildpack-python
refs/heads/master
vendor/distribute-0.6.34/setuptools/dist.py
65
__all__ = ['Distribution'] import re from distutils.core import Distribution as _Distribution from setuptools.depends import Require from setuptools.command.install import install from setuptools.command.sdist import sdist from setuptools.command.install_lib import install_lib from distutils.errors import DistutilsOptionError, DistutilsPlatformError from distutils.errors import DistutilsSetupError import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd import os, distutils.log def _get_unpatched(cls): """Protect against re-patching the distutils if reloaded Also ensures that no other distutils extension monkeypatched the distutils first. """ while cls.__module__.startswith('setuptools'): cls, = cls.__bases__ if not cls.__module__.startswith('distutils'): raise AssertionError( "distutils has already been patched by %r" % cls ) return cls _Distribution = _get_unpatched(_Distribution) sequence = tuple, list def check_importable(dist, attr, value): try: ep = pkg_resources.EntryPoint.parse('x='+value) assert not ep.extras except (TypeError,ValueError,AttributeError,AssertionError): raise DistutilsSetupError( "%r must be importable 'module:attrs' string (got %r)" % (attr,value) ) def assert_string_list(dist, attr, value): """Verify that value is a string list or None""" try: assert ''.join(value)!=value except (TypeError,ValueError,AttributeError,AssertionError): raise DistutilsSetupError( "%r must be a list of strings (got %r)" % (attr,value) ) def check_nsp(dist, attr, value): """Verify that namespace packages are valid""" assert_string_list(dist,attr,value) for nsp in value: if not dist.has_contents_for(nsp): raise DistutilsSetupError( "Distribution contains no modules or packages for " + "namespace package %r" % nsp ) if '.' 
in nsp: parent = '.'.join(nsp.split('.')[:-1]) if parent not in value: distutils.log.warn( "%r is declared as a package namespace, but %r is not:" " please correct this in setup.py", nsp, parent ) def check_extras(dist, attr, value): """Verify that extras_require mapping is valid""" try: for k,v in value.items(): list(pkg_resources.parse_requirements(v)) except (TypeError,ValueError,AttributeError): raise DistutilsSetupError( "'extras_require' must be a dictionary whose values are " "strings or lists of strings containing valid project/version " "requirement specifiers." ) def assert_bool(dist, attr, value): """Verify that value is True, False, 0, or 1""" if bool(value) != value: raise DistutilsSetupError( "%r must be a boolean value (got %r)" % (attr,value) ) def check_requirements(dist, attr, value): """Verify that install_requires is a valid requirements list""" try: list(pkg_resources.parse_requirements(value)) except (TypeError,ValueError): raise DistutilsSetupError( "%r must be a string or list of strings " "containing valid project/version requirement specifiers" % (attr,) ) def check_entry_points(dist, attr, value): """Verify that entry_points map is parseable""" try: pkg_resources.EntryPoint.parse_map(value) except ValueError, e: raise DistutilsSetupError(e) def check_test_suite(dist, attr, value): if not isinstance(value,basestring): raise DistutilsSetupError("test_suite must be a string") def check_package_data(dist, attr, value): """Verify that value is a dictionary of package names to glob lists""" if isinstance(value,dict): for k,v in value.items(): if not isinstance(k,str): break try: iter(v) except TypeError: break else: return raise DistutilsSetupError( attr+" must be a dictionary mapping package names to lists of " "wildcard patterns" ) class Distribution(_Distribution): """Distribution with support for features, tests, and package data This is an enhanced version of 'distutils.dist.Distribution' that effectively adds the following new optional 
keyword arguments to 'setup()': 'install_requires' -- a string or sequence of strings specifying project versions that the distribution requires when installed, in the format used by 'pkg_resources.require()'. They will be installed automatically when the package is installed. If you wish to use packages that are not available in PyPI, or want to give your users an alternate download location, you can add a 'find_links' option to the '[easy_install]' section of your project's 'setup.cfg' file, and then setuptools will scan the listed web pages for links that satisfy the requirements. 'extras_require' -- a dictionary mapping names of optional "extras" to the additional requirement(s) that using those extras incurs. For example, this:: extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) indicates that the distribution can optionally provide an extra capability called "reST", but it can only be used if docutils and reSTedit are installed. If the user installs your package using EasyInstall and requests one of your extras, the corresponding additional requirements will be installed if needed. 'features' -- a dictionary mapping option names to 'setuptools.Feature' objects. Features are a portion of the distribution that can be included or excluded based on user options, inter-feature dependencies, and availability on the current system. Excluded features are omitted from all setup commands, including source and binary distributions, so you can create multiple distributions from the same source tree. Feature names should be valid Python identifiers, except that they may contain the '-' (minus) sign. Features can be included or excluded via the command line options '--with-X' and '--without-X', where 'X' is the name of the feature. Whether a feature is included by default, and whether you are allowed to control this from the command line, is determined by the Feature object. See the 'Feature' class for more information. 
'test_suite' -- the name of a test suite to run for the 'test' command. If the user runs 'python setup.py test', the package will be installed, and the named test suite will be run. The format is the same as would be used on a 'unittest.py' command line. That is, it is the dotted name of an object to import and call to generate a test suite. 'package_data' -- a dictionary mapping package names to lists of filenames or globs to use to find data files contained in the named packages. If the dictionary has filenames or globs listed under '""' (the empty string), those names will be searched for in every package, in addition to any names for the specific package. Data files found using these names/globs will be installed along with the package, in the same location as the package. Note that globs are allowed to reference the contents of non-package subdirectories, as long as you use '/' as a path separator. (Globs are automatically converted to platform-specific paths at runtime.) In addition to these new keywords, this class also has several new methods for manipulating the distribution's contents. For example, the 'include()' and 'exclude()' methods can be thought of as in-place add and subtract commands that add or remove packages, modules, extensions, and so on from the distribution. They are used by the feature subsystem to configure the distribution for the included and excluded features. """ _patched_dist = None def patch_missing_pkg_info(self, attrs): # Fake up a replacement for the data that would normally come from # PKG-INFO, but which might not yet be built if this is a fresh # checkout. 
# if not attrs or 'name' not in attrs or 'version' not in attrs: return key = pkg_resources.safe_name(str(attrs['name'])).lower() dist = pkg_resources.working_set.by_key.get(key) if dist is not None and not dist.has_metadata('PKG-INFO'): dist._version = pkg_resources.safe_version(str(attrs['version'])) self._patched_dist = dist def __init__ (self, attrs=None): have_package_data = hasattr(self, "package_data") if not have_package_data: self.package_data = {} self.require_features = [] self.features = {} self.dist_files = [] self.src_root = attrs and attrs.pop("src_root", None) self.patch_missing_pkg_info(attrs) # Make sure we have any eggs needed to interpret 'attrs' if attrs is not None: self.dependency_links = attrs.pop('dependency_links', []) assert_string_list(self,'dependency_links',self.dependency_links) if attrs and 'setup_requires' in attrs: self.fetch_build_eggs(attrs.pop('setup_requires')) for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): if not hasattr(self,ep.name): setattr(self,ep.name,None) _Distribution.__init__(self,attrs) if isinstance(self.metadata.version, (int,long,float)): # Some people apparently take "version number" too literally :) self.metadata.version = str(self.metadata.version) def parse_command_line(self): """Process features after parsing command line options""" result = _Distribution.parse_command_line(self) if self.features: self._finalize_features() return result def _feature_attrname(self,name): """Convert feature name to corresponding option attribute name""" return 'with_'+name.replace('-','_') def fetch_build_eggs(self, requires): """Resolve pre-setup requirements""" from pkg_resources import working_set, parse_requirements for dist in working_set.resolve( parse_requirements(requires), installer=self.fetch_build_egg ): working_set.add(dist) def finalize_options(self): _Distribution.finalize_options(self) if self.features: self._set_global_opts_from_features() for ep in 
pkg_resources.iter_entry_points('distutils.setup_keywords'): value = getattr(self,ep.name,None) if value is not None: ep.require(installer=self.fetch_build_egg) ep.load()(self, ep.name, value) if getattr(self, 'convert_2to3_doctests', None): # XXX may convert to set here when we can rely on set being builtin self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] else: self.convert_2to3_doctests = [] def fetch_build_egg(self, req): """Fetch an egg needed for building""" try: cmd = self._egg_fetcher cmd.package_index.to_scan = [] except AttributeError: from setuptools.command.easy_install import easy_install dist = self.__class__({'script_args':['easy_install']}) dist.parse_config_files() opts = dist.get_option_dict('easy_install') keep = ( 'find_links', 'site_dirs', 'index_url', 'optimize', 'site_dirs', 'allow_hosts' ) for key in opts.keys(): if key not in keep: del opts[key] # don't use any other settings if self.dependency_links: links = self.dependency_links[:] if 'find_links' in opts: links = opts['find_links'][1].split() + links opts['find_links'] = ('setup', links) cmd = easy_install( dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, always_copy=False, build_directory=None, editable=False, upgrade=False, multi_version=True, no_report=True, user=False ) cmd.ensure_finalized() self._egg_fetcher = cmd return cmd.easy_install(req) def _set_global_opts_from_features(self): """Add --with-X/--without-X options based on optional features""" go = [] no = self.negative_opt.copy() for name,feature in self.features.items(): self._set_feature(name,None) feature.validate(self) if feature.optional: descr = feature.description incdef = ' (default)' excdef='' if not feature.include_by_default(): excdef, incdef = incdef, excdef go.append(('with-'+name, None, 'include '+descr+incdef)) go.append(('without-'+name, None, 'exclude '+descr+excdef)) no['without-'+name] = 'with-'+name self.global_options = self.feature_options = go + 
self.global_options self.negative_opt = self.feature_negopt = no def _finalize_features(self): """Add/remove features and resolve dependencies between them""" # First, flag all the enabled items (and thus their dependencies) for name,feature in self.features.items(): enabled = self.feature_is_included(name) if enabled or (enabled is None and feature.include_by_default()): feature.include_in(self) self._set_feature(name,1) # Then disable the rest, so that off-by-default features don't # get flagged as errors when they're required by an enabled feature for name,feature in self.features.items(): if not self.feature_is_included(name): feature.exclude_from(self) self._set_feature(name,0) def get_command_class(self, command): """Pluggable version of get_command_class()""" if command in self.cmdclass: return self.cmdclass[command] for ep in pkg_resources.iter_entry_points('distutils.commands',command): ep.require(installer=self.fetch_build_egg) self.cmdclass[command] = cmdclass = ep.load() return cmdclass else: return _Distribution.get_command_class(self, command) def print_commands(self): for ep in pkg_resources.iter_entry_points('distutils.commands'): if ep.name not in self.cmdclass: cmdclass = ep.load(False) # don't require extras, we're not running self.cmdclass[ep.name] = cmdclass return _Distribution.print_commands(self) def _set_feature(self,name,status): """Set feature's inclusion status""" setattr(self,self._feature_attrname(name),status) def feature_is_included(self,name): """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" return getattr(self,self._feature_attrname(name)) def include_feature(self,name): """Request inclusion of feature named 'name'""" if self.feature_is_included(name)==0: descr = self.features[name].description raise DistutilsOptionError( descr + " is required, but was excluded or is not available" ) self.features[name].include_in(self) self._set_feature(name,1) def include(self,**attrs): """Add items to distribution that are 
named in keyword arguments For example, 'dist.exclude(py_modules=["x"])' would add 'x' to the distribution's 'py_modules' attribute, if it was not already there. Currently, this method only supports inclusion for attributes that are lists or tuples. If you need to add support for adding to other attributes in this or a subclass, you can add an '_include_X' method, where 'X' is the name of the attribute. The method will be called with the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' will try to call 'dist._include_foo({"bar":"baz"})', which can then handle whatever special inclusion logic is needed. """ for k,v in attrs.items(): include = getattr(self, '_include_'+k, None) if include: include(v) else: self._include_misc(k,v) def exclude_package(self,package): """Remove packages, modules, and extensions in named package""" pfx = package+'.' if self.packages: self.packages = [ p for p in self.packages if p != package and not p.startswith(pfx) ] if self.py_modules: self.py_modules = [ p for p in self.py_modules if p != package and not p.startswith(pfx) ] if self.ext_modules: self.ext_modules = [ p for p in self.ext_modules if p.name != package and not p.name.startswith(pfx) ] def has_contents_for(self,package): """Return true if 'exclude_package(package)' would do something""" pfx = package+'.' 
for p in self.iter_distribution_names(): if p==package or p.startswith(pfx): return True def _exclude_misc(self,name,value): """Handle 'exclude()' for list/tuple attrs without a special handler""" if not isinstance(value,sequence): raise DistutilsSetupError( "%s: setting must be a list or tuple (%r)" % (name, value) ) try: old = getattr(self,name) except AttributeError: raise DistutilsSetupError( "%s: No such distribution setting" % name ) if old is not None and not isinstance(old,sequence): raise DistutilsSetupError( name+": this setting cannot be changed via include/exclude" ) elif old: setattr(self,name,[item for item in old if item not in value]) def _include_misc(self,name,value): """Handle 'include()' for list/tuple attrs without a special handler""" if not isinstance(value,sequence): raise DistutilsSetupError( "%s: setting must be a list (%r)" % (name, value) ) try: old = getattr(self,name) except AttributeError: raise DistutilsSetupError( "%s: No such distribution setting" % name ) if old is None: setattr(self,name,value) elif not isinstance(old,sequence): raise DistutilsSetupError( name+": this setting cannot be changed via include/exclude" ) else: setattr(self,name,old+[item for item in value if item not in old]) def exclude(self,**attrs): """Remove items from distribution that are named in keyword arguments For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from the distribution's 'py_modules' attribute. Excluding packages uses the 'exclude_package()' method, so all of the package's contained packages, modules, and extensions are also excluded. Currently, this method only supports exclusion from attributes that are lists or tuples. If you need to add support for excluding from other attributes in this or a subclass, you can add an '_exclude_X' method, where 'X' is the name of the attribute. The method will be called with the value passed to 'exclude()'. 
So, 'dist.exclude(foo={"bar":"baz"})' will try to call 'dist._exclude_foo({"bar":"baz"})', which can then handle whatever special exclusion logic is needed. """ for k,v in attrs.items(): exclude = getattr(self, '_exclude_'+k, None) if exclude: exclude(v) else: self._exclude_misc(k,v) def _exclude_packages(self,packages): if not isinstance(packages,sequence): raise DistutilsSetupError( "packages: setting must be a list or tuple (%r)" % (packages,) ) map(self.exclude_package, packages) def _parse_command_opts(self, parser, args): # Remove --with-X/--without-X options when processing command args self.global_options = self.__class__.global_options self.negative_opt = self.__class__.negative_opt # First, expand any aliases command = args[0] aliases = self.get_option_dict('aliases') while command in aliases: src,alias = aliases[command] del aliases[command] # ensure each alias can expand only once! import shlex args[:1] = shlex.split(alias,True) command = args[0] nargs = _Distribution._parse_command_opts(self, parser, args) # Handle commands that want to consume all remaining arguments cmd_class = self.get_command_class(command) if getattr(cmd_class,'command_consumes_arguments',None): self.get_option_dict(command)['args'] = ("command line", nargs) if nargs is not None: return [] return nargs def get_cmdline_options(self): """Return a '{cmd: {opt:val}}' map of all command-line options Option names are all long, but do not include the leading '--', and contain dashes rather than underscores. If the option doesn't take an argument (e.g. '--quiet'), the 'val' is 'None'. Note that options provided by config files are intentionally excluded. 
""" d = {} for cmd,opts in self.command_options.items(): for opt,(src,val) in opts.items(): if src != "command line": continue opt = opt.replace('_','-') if val==0: cmdobj = self.get_command_obj(cmd) neg_opt = self.negative_opt.copy() neg_opt.update(getattr(cmdobj,'negative_opt',{})) for neg,pos in neg_opt.items(): if pos==opt: opt=neg val=None break else: raise AssertionError("Shouldn't be able to get here") elif val==1: val = None d.setdefault(cmd,{})[opt] = val return d def iter_distribution_names(self): """Yield all packages, modules, and extension names in distribution""" for pkg in self.packages or (): yield pkg for module in self.py_modules or (): yield module for ext in self.ext_modules or (): if isinstance(ext,tuple): name, buildinfo = ext else: name = ext.name if name.endswith('module'): name = name[:-6] yield name def handle_display_options(self, option_order): """If there were any non-global "display-only" options (--help-commands or the metadata display options) on the command line, display the requested info and return true; else return false. """ import sys if sys.version_info < (3,) or self.help_commands: return _Distribution.handle_display_options(self, option_order) # Stdout may be StringIO (e.g. in tests) import io if not isinstance(sys.stdout, io.TextIOWrapper): return _Distribution.handle_display_options(self, option_order) # Don't wrap stdout if utf-8 is already the encoding. Provides # workaround for #334. 
if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): return _Distribution.handle_display_options(self, option_order) # Print metadata in UTF-8 no matter the platform encoding = sys.stdout.encoding errors = sys.stdout.errors newline = sys.platform != 'win32' and '\n' or None line_buffering = sys.stdout.line_buffering sys.stdout = io.TextIOWrapper( sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) try: return _Distribution.handle_display_options(self, option_order) finally: sys.stdout = io.TextIOWrapper( sys.stdout.detach(), encoding, errors, newline, line_buffering) # Install it throughout the distutils for module in distutils.dist, distutils.core, distutils.cmd: module.Distribution = Distribution class Feature: """A subset of the distribution that can be excluded if unneeded/wanted Features are created using these keyword arguments: 'description' -- a short, human readable description of the feature, to be used in error messages, and option help messages. 'standard' -- if true, the feature is included by default if it is available on the current system. Otherwise, the feature is only included if requested via a command line '--with-X' option, or if another included feature requires it. The default setting is 'False'. 'available' -- if true, the feature is available for installation on the current system. The default setting is 'True'. 'optional' -- if true, the feature's inclusion can be controlled from the command line, using the '--with-X' or '--without-X' options. If false, the feature's inclusion status is determined automatically, based on 'availabile', 'standard', and whether any other feature requires it. The default setting is 'True'. 'require_features' -- a string or sequence of strings naming features that should also be included if this feature is included. Defaults to empty list. May also contain 'Require' objects that should be added/removed from the distribution. 
'remove' -- a string or list of strings naming packages to be removed from the distribution if this feature is *not* included. If the feature *is* included, this argument is ignored. This argument exists to support removing features that "crosscut" a distribution, such as defining a 'tests' feature that removes all the 'tests' subpackages provided by other features. The default for this argument is an empty list. (Note: the named package(s) or modules must exist in the base distribution when the 'setup()' function is initially called.) other keywords -- any other keyword arguments are saved, and passed to the distribution's 'include()' and 'exclude()' methods when the feature is included or excluded, respectively. So, for example, you could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be added or removed from the distribution as appropriate. A feature must include at least one 'requires', 'remove', or other keyword argument. Otherwise, it can't affect the distribution in any way. Note also that you can subclass 'Feature' to create your own specialized feature types that modify the distribution in other ways when included or excluded. See the docstrings for the various methods here for more detail. Aside from the methods, the only feature attributes that distributions look at are 'description' and 'optional'. 
""" def __init__(self, description, standard=False, available=True, optional=True, require_features=(), remove=(), **extras ): self.description = description self.standard = standard self.available = available self.optional = optional if isinstance(require_features,(str,Require)): require_features = require_features, self.require_features = [ r for r in require_features if isinstance(r,str) ] er = [r for r in require_features if not isinstance(r,str)] if er: extras['require_features'] = er if isinstance(remove,str): remove = remove, self.remove = remove self.extras = extras if not remove and not require_features and not extras: raise DistutilsSetupError( "Feature %s: must define 'require_features', 'remove', or at least one" " of 'packages', 'py_modules', etc." ) def include_by_default(self): """Should this feature be included by default?""" return self.available and self.standard def include_in(self,dist): """Ensure feature and its requirements are included in distribution You may override this in a subclass to perform additional operations on the distribution. Note that this method may be called more than once per feature, and so should be idempotent. """ if not self.available: raise DistutilsPlatformError( self.description+" is required," "but is not available on this platform" ) dist.include(**self.extras) for f in self.require_features: dist.include_feature(f) def exclude_from(self,dist): """Ensure feature is excluded from distribution You may override this in a subclass to perform additional operations on the distribution. This method will be called at most once per feature, and only after all included features have been asked to include themselves. """ dist.exclude(**self.extras) if self.remove: for item in self.remove: dist.exclude_package(item) def validate(self,dist): """Verify that feature makes sense in context of distribution This method is called by the distribution just before it parses its command line. 
It checks to ensure that the 'remove' attribute, if any, contains only valid package/module names that are present in the base distribution when 'setup()' is called. You may override it in a subclass to perform any other required validation of the feature against a target distribution. """ for item in self.remove: if not dist.has_contents_for(item): raise DistutilsSetupError( "%s wants to be able to remove %s, but the distribution" " doesn't contain any packages or modules under %s" % (self.description, item, item) ) def check_packages(dist, attr, value): for pkgname in value: if not re.match(r'\w+(\.\w+)*', pkgname): distutils.log.warn( "WARNING: %r not a valid package name; please use only" ".-separated package names in setup.py", pkgname )
desenho-sw-g5/service_control
refs/heads/devel
Trabalho_1/manage.py
1
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "service_control.settings") try: from django.core.management import execute_from_command_line except ImportError: # The above import may fail for some other reason. Ensure that the # issue is really that Django is missing to avoid masking other # exceptions on Python 2. try: import django except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) raise execute_from_command_line(sys.argv)
krikru/tensorflow-opencl
refs/heads/master
tensorflow/compiler/tests/clustering_test.py
123
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the behavior of the auto-compilation pass."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
from six.moves import xrange  # pylint: disable=redefined-builtin

from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest

# Explicit CPU device string, used to force ops OUT of the XLA test scope.
CPU_DEVICE = "/job:localhost/replica:0/task:0/cpu:0"


class ClusteringTest(XLATestCase):
  """Tests correctness of graphs mixing CPU-placed and XLA-clustered ops."""

  def testAdd(self):
    # Simple element-wise add computed entirely inside the XLA scope.
    val1 = np.array([4, 3, 2, 1], dtype=np.float32)
    val2 = np.array([5, 6, 7, 8], dtype=np.float32)
    expected = val1 + val2
    with self.test_session():
      with self.test_scope():
        input1 = constant_op.constant(val1, name="const1")
        input2 = constant_op.constant(val2, name="const2")
        output = math_ops.add(input1, input2)
      result = output.eval()
      self.assertAllClose(result, expected, rtol=1e-3)

  def testAddFromCpuMultiple(self):
    # Constants are placed on the CPU, the add inside the XLA scope; the
    # result must survive repeated evaluation of the same compiled cluster.
    val1 = np.array([4, 3, 2, 1]).astype(np.float32)
    val2 = np.array([5, 6, 7, 8]).astype(np.float32)
    expected = val1 + val2
    with self.test_session():
      with ops.device(CPU_DEVICE):
        input1 = constant_op.constant(val1, name="const1")
        input2 = constant_op.constant(val2, name="const2")
      with self.test_scope():
        output = math_ops.add(input1, input2)
      for _ in xrange(10):
        result = output.eval()
        self.assertAllClose(result, expected, rtol=1e-3)

  def testDeadlock(self):
    # Builds a graph of the form:
    #  x -> y
    #       | \
    #       z -> w
    # where x and z are placed on the CPU and y and w are placed on the XLA
    # device. If y and w are clustered for compilation, then the graph will
    # deadlock since the clustered graph will contain a self-loop.
    with self.test_session() as sess:
      with ops.device(CPU_DEVICE):
        x = array_ops.placeholder(dtypes.float32, [2])
      with self.test_scope():
        y = x * 2
      with ops.device(CPU_DEVICE):
        z = y * y
      with self.test_scope():
        w = y + z
      result = sess.run(w, {x: [1.5, 0.5]})
      self.assertAllClose(result, [12., 2.], rtol=1e-3)

  def testHostMemory(self):
    with self.test_session() as sess:
      x = array_ops.placeholder(dtypes.int32)
      with self.test_scope():
        y = x + 1
      with ops.device(CPU_DEVICE):
        # Place a computation on the CPU, so y and w cannot be merged into the
        # same JIT compilation.
        z = y * 2
      with self.test_scope():
        # Argument 'y' is a non-constant output of a previous cluster. Make
        # sure it is properly copied to host memory so it can be used as a
        # compile-time constant input for this cluster.
        w = array_ops.reshape(z, y)
      result = sess.run(w, {x: [1, 0]})
      expected = np.array([[4], [2]], dtype=np.int32)
      self.assertAllClose(expected, result, rtol=1e-3)


if __name__ == "__main__":
  googletest.main()
xrmx/django
refs/heads/master
tests/admin_inlines/test_templates.py
285
from __future__ import unicode_literals

from django.template.loader import render_to_string
from django.test import SimpleTestCase


class TestTemplates(SimpleTestCase):
    def test_javascript_escaping(self):
        """Both inline templates must JS-escape the prefix and verbose name."""
        context = {
            'inline_admin_formset': {
                'formset': {'prefix': 'my-prefix'},
                'opts': {'verbose_name': 'verbose name\\'},
            },
        }
        # Stacked and tabular inlines share the same escaping contract, so
        # assert identically against each rendered template.
        template_names = (
            'admin/edit_inline/stacked.html',
            'admin/edit_inline/tabular.html',
        )
        for template_name in template_names:
            rendered = render_to_string(template_name, context)
            self.assertIn('prefix: "my\\u002Dprefix",', rendered)
            self.assertIn('addText: "Add another Verbose name\\u005C"', rendered)
nokia-wroclaw/innovativeproject-dbshepherd
refs/heads/master
test_ssh.py
1
"""Integration tests for the ssh-shepherd tunnel service.

These tests talk to a locally running ssh-shepherd instance over the
project's ``connection`` module; when the service is unreachable,
``setUpModule`` prints a hint and the individual tests fail on the
missing connection.
"""
import unittest
import connection
from ssh_tunnelmanager import TunnelManager
from configmanager import ConfigManager, ConfigManagerError
from getpass import getpass
from kp import KeePassError, get_password

# Module-level fixtures shared by every test.
conn = None      # connection.Connection to ssh-shepherd, or None on failure
master = None    # master password for the KeePass test database


def setUpModule():
    global master
    global conn
    print("SShTest")
    master = "test"
    try:
        conn = connection.Connection()
        conn.start()
    except ConnectionRefusedError:
        print("is ssh-shepherd running?")


def tearDownModule():
    # BUG FIX: when setUpModule could not connect, ``conn`` is still None
    # and calling ``conn.stop()`` raised AttributeError during teardown.
    if conn is not None:
        conn.stop()


def connect_command_builder(connection, perm):
    """Build the underscore-delimited connect command for a config entry.

    Prefers the KeePass-stored password (``keepass`` key); falls back to the
    plain-text ``passwd`` key. Raises KeePassError when neither is usable.
    """
    try:
        command = connection["adress"] + "_" + connection["user"] + "_" + \
            get_pass(connection["keepass"]) + "_" + \
            str(connection["sshport"]) + "_" + \
            str(connection["remoteport"]) + "_" + perm
        return command
    except (KeyError, KeePassError):
        # Fall back to the plain-text password stored in the config.
        try:
            command = connection["adress"] + "_" + connection["user"] + "_" + \
                connection["passwd"] + "_" + \
                str(connection["sshport"]) + "_" + \
                str(connection["remoteport"]) + "_" + perm
            return command
        except KeyError:
            raise KeePassError("No KP or Passwd")
    # NOTE: the original ended with an unreachable ``return command`` here
    # (both branches already return or raise); it has been removed.


def get_pass(alias):
    """Look up *alias* in the test KeePass database ``test.kdb``."""
    file = "test.kdb"
    if master is None:
        raise KeePassError("Master Password Not Set")
    try:
        return get_password(file, master, alias)
    except KeePassError as e:
        print(e)
        # Re-raise without losing the original traceback.
        raise


def create_command(server, perm):
    """Build the connect command for *server* from the test config file."""
    conf = ConfigManager("config/lista_test.yaml")
    connection = conf.get(server)["connection"]
    try:
        cmd = connect_command_builder(connection, perm)
    except KeyError:
        cmd = None
    return cmd


def send_command(command):
    """Send *command* to ssh-shepherd and busy-wait for its state reply."""
    try:
        conn.send(command)
        t = None
        while t is None:
            t = conn.get_state()
        return t
    except AttributeError as e:
        # conn is None (service down) or the reply object is malformed.
        print(e)


class SShTest(unittest.TestCase):
    """End-to-end tunnel creation/removal scenarios against ssh-shepherd."""

    def test1_connection_to_ssh_shepherd(self):
        self.assertIsNotNone(conn)

    def test8_invalid_ssh_port(self):
        cmd = create_command("InvalidSSHPort", "no")
        ans = send_command(cmd)
        status = ans.split("_")[0]
        self.assertEqual("bad", status)

    def test7_invalid_user_name(self):
        cmd = create_command("InvalidUsername", "no")
        ans = send_command(cmd)
        status = ans.split("_")[0]
        self.assertEqual("bad", status)

    def test3_should_be_ok(self):
        cmd = create_command("ShouldBeOK", "no")
        ans = send_command(cmd)
        status = ans.split("_")[0]
        self.assertEqual("ok", status)

    def test6_create_permanent(self):
        conf = ConfigManager("config/lista_test.yaml")
        connection = conf.get("ShouldBeOK")["connection"]
        try:
            cmd = connect_command_builder(connection, "yes")
        except KeyError as e:
            # BUG FIX: ``"KeyError" + e`` raised TypeError (str + exception).
            self.fail("KeyError: %s" % e)
        ans = send_command(cmd)
        status = ans.split("_")[0]
        if status != "ok":
            self.fail("Unable to create tunnel")
        # Permanent tunnels must not be listed in the transient section.
        to_find = connection["adress"] + ":" + str(connection["remoteport"])
        self.assertEqual(-1, ans.find(to_find, ans.find("perm")))
        dc = cmd.split("_")[0] + ":" + cmd.split("_")[4]
        ans = send_command("clean;" + dc)

    def test5_disconnect(self):
        conf = ConfigManager("config/lista_test.yaml")
        connection = conf.get("ShouldBeOK")["connection"]
        try:
            cmd = connect_command_builder(connection, "no")
        except KeyError as e:
            self.fail("KeyError: %s" % e)
        ans = send_command(cmd)
        status = ans.split("_")[0]
        if status != "ok":
            self.fail("Unable to create tunnel")
        dc = cmd.split("_")[0] + ":" + cmd.split("_")[4]
        ans = send_command("clean;" + dc)
        # After cleaning, the tunnel must no longer appear in the reply.
        to_find = connection["adress"] + ":" + str(connection["remoteport"])
        self.assertEqual(-1, ans.find(to_find))

    def test4_list_command(self):
        conf = ConfigManager("config/lista_test.yaml")
        connection = conf.get("ShouldBeOK")["connection"]
        try:
            cmd = connect_command_builder(connection, "no")
        except KeyError as e:
            self.fail("KeyError: %s" % e)
        ans = send_command(cmd)
        status = ans.split("_")[0]
        if status != "ok":
            self.fail("Unable to create tunnel")
        ans = send_command("list")
        # The freshly created tunnel must show up in the listing.
        to_find = connection["adress"] + ":" + str(connection["remoteport"])
        self.assertNotEqual(-1, ans.find(to_find))


if __name__ == '__main__':
    unittest.main(verbosity=2)
alfredodeza/boto
refs/heads/develop
boto/s3/key.py
72
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2011, Nexenta Systems Inc. # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import email.utils import errno import hashlib import mimetypes import os import re import base64 import binascii import math from hashlib import md5 import boto.utils from boto.compat import BytesIO, six, urllib, encodebytes from boto.exception import BotoClientError from boto.exception import StorageDataError from boto.exception import PleaseRetryException from boto.provider import Provider from boto.s3.keyfile import KeyFile from boto.s3.user import User from boto import UserAgent from boto.utils import compute_md5, compute_hash from boto.utils import find_matching_headers from boto.utils import merge_headers_by_name class Key(object): """ Represents a key (object) in an S3 bucket. :ivar bucket: The parent :class:`boto.s3.bucket.Bucket`. 
:ivar name: The name of this Key object. :ivar metadata: A dictionary containing user metadata that you wish to store with the object or that has been retrieved from an existing object. :ivar cache_control: The value of the `Cache-Control` HTTP header. :ivar content_type: The value of the `Content-Type` HTTP header. :ivar content_encoding: The value of the `Content-Encoding` HTTP header. :ivar content_disposition: The value of the `Content-Disposition` HTTP header. :ivar content_language: The value of the `Content-Language` HTTP header. :ivar etag: The `etag` associated with this object. :ivar last_modified: The string timestamp representing the last time this object was modified in S3. :ivar owner: The ID of the owner of this object. :ivar storage_class: The storage class of the object. Currently, one of: STANDARD | REDUCED_REDUNDANCY | GLACIER :ivar md5: The MD5 hash of the contents of the object. :ivar size: The size, in bytes, of the object. :ivar version_id: The version ID of this object, if it is a versioned object. :ivar encrypted: Whether the object is encrypted while at rest on the server. """ DefaultContentType = 'application/octet-stream' RestoreBody = """<?xml version="1.0" encoding="UTF-8"?> <RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01"> <Days>%s</Days> </RestoreRequest>""" BufferSize = boto.config.getint('Boto', 'key_buffer_size', 8192) # The object metadata fields a user can set, other than custom metadata # fields (i.e., those beginning with a provider-specific prefix like # x-amz-meta). 
base_user_settable_fields = set(["cache-control", "content-disposition", "content-encoding", "content-language", "content-md5", "content-type", "x-robots-tag", "expires"]) _underscore_base_user_settable_fields = set() for f in base_user_settable_fields: _underscore_base_user_settable_fields.add(f.replace('-', '_')) # Metadata fields, whether user-settable or not, other than custom # metadata fields (i.e., those beginning with a provider specific prefix # like x-amz-meta). base_fields = (base_user_settable_fields | set(["last-modified", "content-length", "date", "etag"])) def __init__(self, bucket=None, name=None): self.bucket = bucket self.name = name self.metadata = {} self.cache_control = None self.content_type = self.DefaultContentType self.content_encoding = None self.content_disposition = None self.content_language = None self.filename = None self.etag = None self.is_latest = False self.last_modified = None self.owner = None self._storage_class = None self.path = None self.resp = None self.mode = None self.size = None self.version_id = None self.source_version_id = None self.delete_marker = False self.encrypted = None # If the object is being restored, this attribute will be set to True. # If the object is restored, it will be set to False. Otherwise this # value will be None. If the restore is completed (ongoing_restore = # False), the expiry_date will be populated with the expiry date of the # restored object. 
self.ongoing_restore = None self.expiry_date = None self.local_hashes = {} def __repr__(self): if self.bucket: name = u'<Key: %s,%s>' % (self.bucket.name, self.name) else: name = u'<Key: None,%s>' % self.name # Encode to bytes for Python 2 to prevent display decoding issues if not isinstance(name, str): name = name.encode('utf-8') return name def __iter__(self): return self @property def provider(self): provider = None if self.bucket and self.bucket.connection: provider = self.bucket.connection.provider return provider def _get_key(self): return self.name def _set_key(self, value): self.name = value key = property(_get_key, _set_key); def _get_md5(self): if 'md5' in self.local_hashes and self.local_hashes['md5']: return binascii.b2a_hex(self.local_hashes['md5']) def _set_md5(self, value): if value: self.local_hashes['md5'] = binascii.a2b_hex(value) elif 'md5' in self.local_hashes: self.local_hashes.pop('md5', None) md5 = property(_get_md5, _set_md5); def _get_base64md5(self): if 'md5' in self.local_hashes and self.local_hashes['md5']: md5 = self.local_hashes['md5'] if not isinstance(md5, bytes): md5 = md5.encode('utf-8') return binascii.b2a_base64(md5).decode('utf-8').rstrip('\n') def _set_base64md5(self, value): if value: if not isinstance(value, six.string_types): value = value.decode('utf-8') self.local_hashes['md5'] = binascii.a2b_base64(value) elif 'md5' in self.local_hashes: del self.local_hashes['md5'] base64md5 = property(_get_base64md5, _set_base64md5); def _get_storage_class(self): if self._storage_class is None and self.bucket: # Attempt to fetch storage class list_items = list(self.bucket.list(self.name.encode('utf-8'))) if len(list_items) and getattr(list_items[0], '_storage_class', None): self._storage_class = list_items[0]._storage_class else: # Key is not yet saved? Just use default... 
self._storage_class = 'STANDARD' return self._storage_class def _set_storage_class(self, value): self._storage_class = value storage_class = property(_get_storage_class, _set_storage_class) def get_md5_from_hexdigest(self, md5_hexdigest): """ A utility function to create the 2-tuple (md5hexdigest, base64md5) from just having a precalculated md5_hexdigest. """ digest = binascii.unhexlify(md5_hexdigest) base64md5 = encodebytes(digest) if base64md5[-1] == '\n': base64md5 = base64md5[0:-1] return (md5_hexdigest, base64md5) def handle_encryption_headers(self, resp): provider = self.bucket.connection.provider if provider.server_side_encryption_header: self.encrypted = resp.getheader( provider.server_side_encryption_header, None) else: self.encrypted = None def handle_version_headers(self, resp, force=False): provider = self.bucket.connection.provider # If the Key object already has a version_id attribute value, it # means that it represents an explicit version and the user is # doing a get_contents_*(version_id=<foo>) to retrieve another # version of the Key. In that case, we don't really want to # overwrite the version_id in this Key object. Comprende? 
if self.version_id is None or force: self.version_id = resp.getheader(provider.version_id, None) self.source_version_id = resp.getheader(provider.copy_source_version_id, None) if resp.getheader(provider.delete_marker, 'false') == 'true': self.delete_marker = True else: self.delete_marker = False def handle_restore_headers(self, response): provider = self.bucket.connection.provider header = response.getheader(provider.restore_header) if header is None: return parts = header.split(',', 1) for part in parts: key, val = [i.strip() for i in part.split('=')] val = val.replace('"', '') if key == 'ongoing-request': self.ongoing_restore = True if val.lower() == 'true' else False elif key == 'expiry-date': self.expiry_date = val def handle_addl_headers(self, headers): """ Used by Key subclasses to do additional, provider-specific processing of response headers. No-op for this base class. """ pass def open_read(self, headers=None, query_args='', override_num_retries=None, response_headers=None): """ Open this key for reading :type headers: dict :param headers: Headers to pass in the web request :type query_args: string :param query_args: Arguments to pass in the query string (ie, 'torrent') :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying GET. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. 
""" if self.resp is None: self.mode = 'r' provider = self.bucket.connection.provider self.resp = self.bucket.connection.make_request( 'GET', self.bucket.name, self.name, headers, query_args=query_args, override_num_retries=override_num_retries) if self.resp.status < 199 or self.resp.status > 299: body = self.resp.read() raise provider.storage_response_error(self.resp.status, self.resp.reason, body) response_headers = self.resp.msg self.metadata = boto.utils.get_aws_metadata(response_headers, provider) for name, value in response_headers.items(): # To get correct size for Range GETs, use Content-Range # header if one was returned. If not, use Content-Length # header. if (name.lower() == 'content-length' and 'Content-Range' not in response_headers): self.size = int(value) elif name.lower() == 'content-range': end_range = re.sub('.*/(.*)', '\\1', value) self.size = int(end_range) elif name.lower() in Key.base_fields: self.__dict__[name.lower().replace('-', '_')] = value self.handle_version_headers(self.resp) self.handle_encryption_headers(self.resp) self.handle_restore_headers(self.resp) self.handle_addl_headers(self.resp.getheaders()) def open_write(self, headers=None, override_num_retries=None): """ Open this key for writing. Not yet implemented :type headers: dict :param headers: Headers to pass in the write request :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying PUT. """ raise BotoClientError('Not Implemented') def open(self, mode='r', headers=None, query_args=None, override_num_retries=None): if mode == 'r': self.mode = 'r' self.open_read(headers=headers, query_args=query_args, override_num_retries=override_num_retries) elif mode == 'w': self.mode = 'w' self.open_write(headers=headers, override_num_retries=override_num_retries) else: raise BotoClientError('Invalid mode: %s' % mode) closed = False def close(self, fast=False): """ Close this key. 
:type fast: bool :param fast: True if you want the connection to be closed without first reading the content. This should only be used in cases where subsequent calls don't need to return the content from the open HTTP connection. Note: As explained at http://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.getresponse, callers must read the whole response before sending a new request to the server. Calling Key.close(fast=True) and making a subsequent request to the server will work because boto will get an httplib exception and close/reopen the connection. """ if self.resp and not fast: self.resp.read() self.resp = None self.mode = None self.closed = True def next(self): """ By providing a next method, the key object supports use as an iterator. For example, you can now say: for bytes in key: write bytes to a file or whatever All of the HTTP connection stuff is handled for you. """ self.open_read() data = self.resp.read(self.BufferSize) if not data: self.close() raise StopIteration return data # Python 3 iterator support __next__ = next def read(self, size=0): self.open_read() if size == 0: data = self.resp.read() else: data = self.resp.read(size) if not data: self.close() return data def change_storage_class(self, new_storage_class, dst_bucket=None, validate_dst_bucket=True): """ Change the storage class of an existing key. Depending on whether a different destination bucket is supplied or not, this will either move the item within the bucket, preserving all metadata and ACL info bucket changing the storage class or it will copy the item to the provided destination bucket, also preserving metadata and ACL info. :type new_storage_class: string :param new_storage_class: The new storage class for the Key. Possible values are: * STANDARD * REDUCED_REDUNDANCY :type dst_bucket: string :param dst_bucket: The name of a destination bucket. If not provided the current bucket of the key will be used. 
:type validate_dst_bucket: bool :param validate_dst_bucket: If True, will validate the dst_bucket by using an extra list request. """ bucket_name = dst_bucket or self.bucket.name if new_storage_class == 'STANDARD': return self.copy(bucket_name, self.name, reduced_redundancy=False, preserve_acl=True, validate_dst_bucket=validate_dst_bucket) elif new_storage_class == 'REDUCED_REDUNDANCY': return self.copy(bucket_name, self.name, reduced_redundancy=True, preserve_acl=True, validate_dst_bucket=validate_dst_bucket) else: raise BotoClientError('Invalid storage class: %s' % new_storage_class) def copy(self, dst_bucket, dst_key, metadata=None, reduced_redundancy=False, preserve_acl=False, encrypt_key=False, validate_dst_bucket=True): """ Copy this Key to another bucket. :type dst_bucket: string :param dst_bucket: The name of the destination bucket :type dst_key: string :param dst_key: The name of the destination key :type metadata: dict :param metadata: Metadata to be associated with new key. If metadata is supplied, it will replace the metadata of the source key being copied. If no metadata is supplied, the source key's metadata will be copied to the new key. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will force the storage class of the new Key to be REDUCED_REDUNDANCY regardless of the storage class of the key being copied. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type preserve_acl: bool :param preserve_acl: If True, the ACL from the source key will be copied to the destination key. If False, the destination key will have the default ACL. Note that preserving the ACL in the new key object will require two additional API calls to S3, one to retrieve the current ACL and one to set that ACL on the new object. If you don't care about the ACL, a value of False will be significantly more efficient. 
:type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :type validate_dst_bucket: bool :param validate_dst_bucket: If True, will validate the dst_bucket by using an extra list request. :rtype: :class:`boto.s3.key.Key` or subclass :returns: An instance of the newly created key object """ dst_bucket = self.bucket.connection.lookup(dst_bucket, validate_dst_bucket) if reduced_redundancy: storage_class = 'REDUCED_REDUNDANCY' else: storage_class = self.storage_class return dst_bucket.copy_key(dst_key, self.bucket.name, self.name, metadata, storage_class=storage_class, preserve_acl=preserve_acl, encrypt_key=encrypt_key, src_version_id=self.version_id) def startElement(self, name, attrs, connection): if name == 'Owner': self.owner = User(self) return self.owner else: return None def endElement(self, name, value, connection): if name == 'Key': self.name = value elif name == 'ETag': self.etag = value elif name == 'IsLatest': if value == 'true': self.is_latest = True else: self.is_latest = False elif name == 'LastModified': self.last_modified = value elif name == 'Size': self.size = int(value) elif name == 'StorageClass': self.storage_class = value elif name == 'Owner': pass elif name == 'VersionId': self.version_id = value else: setattr(self, name, value) def exists(self, headers=None): """ Returns True if the key exists :rtype: bool :return: Whether the key exists on S3 """ return bool(self.bucket.lookup(self.name, headers=headers)) def delete(self, headers=None): """ Delete this key from S3 """ return self.bucket.delete_key(self.name, version_id=self.version_id, headers=headers) def get_metadata(self, name): return self.metadata.get(name) def set_metadata(self, name, value): # Ensure that metadata that is vital to signing is in the correct # case. Applies to ``Content-Type`` & ``Content-MD5``. 
        if name.lower() == 'content-type':
            self.metadata['Content-Type'] = value
        elif name.lower() == 'content-md5':
            self.metadata['Content-MD5'] = value
        else:
            self.metadata[name] = value

        # Mirror well-known user-settable headers onto the matching
        # instance attribute (e.g. 'Cache-Control' -> self.cache_control).
        if name.lower() in Key.base_user_settable_fields:
            self.__dict__[name.lower().replace('-', '_')] = value

    def update_metadata(self, d):
        # Bulk-merge a dict of user metadata (no signing-case fixups here).
        self.metadata.update(d)

    # convenience methods for setting/getting ACL
    # NOTE: the set/get helpers below silently return None when this Key
    # is not associated with a bucket.
    def set_acl(self, acl_str, headers=None):
        if self.bucket is not None:
            self.bucket.set_acl(acl_str, self.name, headers=headers)

    def get_acl(self, headers=None):
        if self.bucket is not None:
            return self.bucket.get_acl(self.name, headers=headers)

    def get_xml_acl(self, headers=None):
        if self.bucket is not None:
            return self.bucket.get_xml_acl(self.name, headers=headers)

    def set_xml_acl(self, acl_str, headers=None):
        if self.bucket is not None:
            return self.bucket.set_xml_acl(acl_str, self.name,
                                           headers=headers)

    def set_canned_acl(self, acl_str, headers=None):
        return self.bucket.set_canned_acl(acl_str, self.name, headers)

    def get_redirect(self):
        """Return the redirect location configured for this key.

        If no redirect is configured (via set_redirect), then None
        will be returned.
        """
        # A HEAD request exposes the redirect via a response header.
        response = self.bucket.connection.make_request(
            'HEAD', self.bucket.name, self.name)
        if response.status == 200:
            # getheader returns None when the header is absent.
            return response.getheader('x-amz-website-redirect-location')
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, response.read())

    def set_redirect(self, redirect_location, headers=None):
        """Configure this key to redirect to another location.

        When the bucket associated with this key is accessed from the website
        endpoint, a 301 redirect will be issued to the specified
        `redirect_location`.

        :type redirect_location: string
        :param redirect_location: The location to redirect.
""" if headers is None: headers = {} else: headers = headers.copy() headers['x-amz-website-redirect-location'] = redirect_location response = self.bucket.connection.make_request('PUT', self.bucket.name, self.name, headers) if response.status == 200: return True else: raise self.provider.storage_response_error( response.status, response.reason, response.read()) def make_public(self, headers=None): return self.bucket.set_canned_acl('public-read', self.name, headers) def generate_url(self, expires_in, method='GET', headers=None, query_auth=True, force_http=False, response_headers=None, expires_in_absolute=False, version_id=None, policy=None, reduced_redundancy=False, encrypt_key=False): """ Generate a URL to access this key. :type expires_in: int :param expires_in: How long the url is valid for, in seconds :type method: string :param method: The method to use for retrieving the file (default is GET) :type headers: dict :param headers: Any headers to pass along in the request :type query_auth: bool :param query_auth: :type force_http: bool :param force_http: If True, http will be used instead of https. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type expires_in_absolute: bool :param expires_in_absolute: :type version_id: string :param version_id: The version_id of the object to GET. If specified this overrides any value in the key. :type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. 
:type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :rtype: string :return: The URL to access the key """ provider = self.bucket.connection.provider version_id = version_id or self.version_id if headers is None: headers = {} else: headers = headers.copy() # add headers accordingly (usually PUT case) if policy: headers[provider.acl_header] = policy if reduced_redundancy: self.storage_class = 'REDUCED_REDUNDANCY' if provider.storage_class_header: headers[provider.storage_class_header] = self.storage_class if encrypt_key: headers[provider.server_side_encryption_header] = 'AES256' headers = boto.utils.merge_meta(headers, self.metadata, provider) return self.bucket.connection.generate_url(expires_in, method, self.bucket.name, self.name, headers, query_auth, force_http, response_headers, expires_in_absolute, version_id) def send_file(self, fp, headers=None, cb=None, num_cb=10, query_args=None, chunked_transfer=False, size=None): """ Upload a file to a key into a bucket on S3. :type fp: file :param fp: The file pointer to upload. The file pointer must point point at the offset from which you wish to upload. ie. if uploading the full file, it should point at the start of the file. Normally when a file is opened for reading, the fp will point at the first byte. See the bytes parameter below for more info. :type headers: dict :param headers: The headers to pass along with the PUT request :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. Providing a negative integer will cause your callback to be called with each buffer read. :type query_args: string :param query_args: (optional) Arguments to pass in the query string. 
:type chunked_transfer: boolean :param chunked_transfer: (optional) If true, we use chunked Transfer-Encoding. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where you are splitting the file up into different ranges to be uploaded. If not specified, the default behaviour is to read all bytes from the file pointer. Less bytes may be available. """ self._send_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, query_args=query_args, chunked_transfer=chunked_transfer, size=size) def _send_file_internal(self, fp, headers=None, cb=None, num_cb=10, query_args=None, chunked_transfer=False, size=None, hash_algs=None): provider = self.bucket.connection.provider try: spos = fp.tell() except IOError: spos = None self.read_from_stream = False # If hash_algs is unset and the MD5 hasn't already been computed, # default to an MD5 hash_alg to hash the data on-the-fly. if hash_algs is None and not self.md5: hash_algs = {'md5': md5} digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {}) def sender(http_conn, method, path, data, headers): # This function is called repeatedly for temporary retries # so we must be sure the file pointer is pointing at the # start of the data. if spos is not None and spos != fp.tell(): fp.seek(spos) elif spos is None and self.read_from_stream: # if seek is not supported, and we've read from this # stream already, then we need to abort retries to # avoid setting bad data. raise provider.storage_data_error( 'Cannot retry failed request. fp does not support seeking.') # If the caller explicitly specified host header, tell putrequest # not to add a second host header. Similarly for accept-encoding. 
skips = {} if boto.utils.find_matching_headers('host', headers): skips['skip_host'] = 1 if boto.utils.find_matching_headers('accept-encoding', headers): skips['skip_accept_encoding'] = 1 http_conn.putrequest(method, path, **skips) for key in headers: http_conn.putheader(key, headers[key]) http_conn.endheaders() save_debug = self.bucket.connection.debug self.bucket.connection.debug = 0 # If the debuglevel < 4 we don't want to show connection # payload, so turn off HTTP connection-level debug output (to # be restored below). # Use the getattr approach to allow this to work in AppEngine. if getattr(http_conn, 'debuglevel', 0) < 4: http_conn.set_debuglevel(0) data_len = 0 if cb: if size: cb_size = size elif self.size: cb_size = self.size else: cb_size = 0 if chunked_transfer and cb_size == 0: # For chunked Transfer, we call the cb for every 1MB # of data transferred, except when we know size. cb_count = (1024 * 1024) / self.BufferSize elif num_cb > 1: cb_count = int( math.ceil(cb_size / self.BufferSize / (num_cb - 1.0))) elif num_cb < 0: cb_count = -1 else: cb_count = 0 i = 0 cb(data_len, cb_size) bytes_togo = size if bytes_togo and bytes_togo < self.BufferSize: chunk = fp.read(bytes_togo) else: chunk = fp.read(self.BufferSize) if not isinstance(chunk, bytes): chunk = chunk.encode('utf-8') if spos is None: # read at least something from a non-seekable fp. 
self.read_from_stream = True while chunk: chunk_len = len(chunk) data_len += chunk_len if chunked_transfer: http_conn.send('%x;\r\n' % chunk_len) http_conn.send(chunk) http_conn.send('\r\n') else: http_conn.send(chunk) for alg in digesters: digesters[alg].update(chunk) if bytes_togo: bytes_togo -= chunk_len if bytes_togo <= 0: break if cb: i += 1 if i == cb_count or cb_count == -1: cb(data_len, cb_size) i = 0 if bytes_togo and bytes_togo < self.BufferSize: chunk = fp.read(bytes_togo) else: chunk = fp.read(self.BufferSize) if not isinstance(chunk, bytes): chunk = chunk.encode('utf-8') self.size = data_len for alg in digesters: self.local_hashes[alg] = digesters[alg].digest() if chunked_transfer: http_conn.send('0\r\n') # http_conn.send("Content-MD5: %s\r\n" % self.base64md5) http_conn.send('\r\n') if cb and (cb_count <= 1 or i > 0) and data_len > 0: cb(data_len, cb_size) http_conn.set_debuglevel(save_debug) self.bucket.connection.debug = save_debug response = http_conn.getresponse() body = response.read() if not self.should_retry(response, chunked_transfer): raise provider.storage_response_error( response.status, response.reason, body) return response if not headers: headers = {} else: headers = headers.copy() # Overwrite user-supplied user-agent. 
for header in find_matching_headers('User-Agent', headers): del headers[header] headers['User-Agent'] = UserAgent # If storage_class is None, then a user has not explicitly requested # a storage class, so we can assume STANDARD here if self._storage_class not in [None, 'STANDARD']: headers[provider.storage_class_header] = self.storage_class if find_matching_headers('Content-Encoding', headers): self.content_encoding = merge_headers_by_name( 'Content-Encoding', headers) if find_matching_headers('Content-Language', headers): self.content_language = merge_headers_by_name( 'Content-Language', headers) content_type_headers = find_matching_headers('Content-Type', headers) if content_type_headers: # Some use cases need to suppress sending of the Content-Type # header and depend on the receiving server to set the content # type. This can be achieved by setting headers['Content-Type'] # to None when calling this method. if (len(content_type_headers) == 1 and headers[content_type_headers[0]] is None): # Delete null Content-Type value to skip sending that header. del headers[content_type_headers[0]] else: self.content_type = merge_headers_by_name( 'Content-Type', headers) elif self.path: self.content_type = mimetypes.guess_type(self.path)[0] if self.content_type is None: self.content_type = self.DefaultContentType headers['Content-Type'] = self.content_type else: headers['Content-Type'] = self.content_type if self.base64md5: headers['Content-MD5'] = self.base64md5 if chunked_transfer: headers['Transfer-Encoding'] = 'chunked' #if not self.base64md5: # headers['Trailer'] = "Content-MD5" else: headers['Content-Length'] = str(self.size) # This is terrible. We need a SHA256 of the body for SigV4, but to do # the chunked ``sender`` behavior above, the ``fp`` isn't available to # the auth mechanism (because closures). Detect if it's SigV4 & embelish # while we can before the auth calculations occur. 
        # SigV4 needs a SHA256 of the payload before the request is sent,
        # but the chunked ``sender`` hides the fp from the auth layer, so
        # compute and embed it here.
        if 'hmac-v4-s3' in self.bucket.connection._required_auth_capability():
            kwargs = {'fp': fp, 'hash_algorithm': hashlib.sha256}
            if size is not None:
                kwargs['size'] = size
            headers['_sha256'] = compute_hash(**kwargs)[0]
        headers['Expect'] = '100-Continue'
        headers = boto.utils.merge_meta(headers, self.metadata, provider)
        resp = self.bucket.connection.make_request(
            'PUT',
            self.bucket.name,
            self.name,
            headers,
            sender=sender,
            query_args=query_args
        )
        self.handle_version_headers(resp, force=True)
        self.handle_addl_headers(resp.getheaders())

    def should_retry(self, response, chunked_transfer=False):
        """
        Decide whether an upload response is a success or should be
        retried.

        :rtype: bool
        :return: True on success or a plainly-retryable status; False
            otherwise.  Raises on ETag/MD5 mismatch, and raises
            PleaseRetryException for a 400 RequestTimeout.
        """
        provider = self.bucket.connection.provider

        if not chunked_transfer:
            if response.status in [500, 503]:
                # 500 & 503 can be plain retries.
                return True

            if response.getheader('location'):
                # If there's a redirect, plain retry.
                return True

        if 200 <= response.status <= 299:
            self.etag = response.getheader('etag')
            md5 = self.md5
            if isinstance(md5, bytes):
                md5 = md5.decode('utf-8')

            # If you use customer-provided encryption keys, the ETag value
            # that Amazon S3 returns in the response will not be the MD5 of
            # the object.
            server_side_encryption_customer_algorithm = response.getheader(
                'x-amz-server-side-encryption-customer-algorithm', None)
            if server_side_encryption_customer_algorithm is None:
                # Integrity check: ETag is expected to be the quoted MD5
                # hex digest for non-SSE-C uploads.
                if self.etag != '"%s"' % md5:
                    raise provider.storage_data_error(
                        'ETag from S3 did not match computed MD5. '
                        '%s vs. %s' % (self.etag, self.md5))

            return True

        if response.status == 400:
            # The 400 must be trapped so the retry handler can check to
            # see if it was a timeout.
            # If ``RequestTimeout`` is present, we'll retry. Otherwise, bomb
            # out.
            body = response.read()
            err = provider.storage_response_error(
                response.status,
                response.reason,
                body
            )

            if err.error_code in ['RequestTimeout']:
                raise PleaseRetryException(
                    "Saw %s, retrying" % err.error_code,
                    response=response
                )

        return False

    def compute_md5(self, fp, size=None):
        """
        :type fp: file
        :param fp: File pointer to the file to MD5 hash.
The file pointer will be reset to the same position before the method returns. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where the file is being split in place into different parts. Less bytes may be available. """ hex_digest, b64_digest, data_size = compute_md5(fp, size=size) # Returned values are MD5 hash, base64 encoded MD5 hash, and data size. # The internal implementation of compute_md5() needs to return the # data size but we don't want to return that value to the external # caller because it changes the class interface (i.e. it might # break some code) so we consume the third tuple value here and # return the remainder of the tuple to the caller, thereby preserving # the existing interface. self.size = data_size return (hex_digest, b64_digest) def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None, num_cb=10, policy=None, reduced_redundancy=False, query_args=None, size=None): """ Store an object using the name of the Key object as the key in cloud and the contents of the data stream pointed to by 'fp' as the contents. The stream object is not seekable and total size is not known. This has the implication that we can't specify the Content-Size and Content-MD5 in the header. So for huge uploads, the delay in calculating MD5 is avoided but with a penalty of inability to verify the integrity of the uploaded data. :type fp: file :param fp: the file whose contents are to be uploaded :type headers: dict :param headers: additional HTTP headers to be sent with the PUT request. :type replace: bool :param replace: If this parameter is False, the method will first check to see if an object exists in the bucket with the same key. If it does, it won't overwrite it. The default value is True which will overwrite the object. :type cb: function :param cb: a callback function that will be called to report progress on the upload. 
The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to GS and the second representing the total number of bytes that need to be transmitted. :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter, this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type policy: :class:`boto.gs.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in GS. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where you are splitting the file up into different ranges to be uploaded. If not specified, the default behaviour is to read all bytes from the file pointer. Less bytes may be available. """ provider = self.bucket.connection.provider if not provider.supports_chunked_transfer(): raise BotoClientError('%s does not support chunked transfer' % provider.get_provider_name()) # Name of the Object should be specified explicitly for Streams. 
if not self.name or self.name == '': raise BotoClientError('Cannot determine the destination ' 'object name for the given stream') if headers is None: headers = {} if policy: headers[provider.acl_header] = policy if reduced_redundancy: self.storage_class = 'REDUCED_REDUNDANCY' if provider.storage_class_header: headers[provider.storage_class_header] = self.storage_class if self.bucket is not None: if not replace: if self.bucket.lookup(self.name): return self.send_file(fp, headers, cb, num_cb, query_args, chunked_transfer=True, size=size) def set_contents_from_file(self, fp, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, query_args=None, encrypt_key=False, size=None, rewind=False): """ Store an object in S3 using the name of the Key object as the key in S3 and the contents of the file pointed to by 'fp' as the contents. The data is read from 'fp' from its current position until 'size' bytes have been read or EOF. :type fp: file :param fp: the file whose contents to upload :type headers: dict :param headers: Additional HTTP headers that will be sent with the PUT request. :type replace: bool :param replace: If this parameter is False, the method will first check to see if an object exists in the bucket with the same key. If it does, it won't overwrite it. The default value is True which will overwrite the object. :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. 
:type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type md5: A tuple containing the hexdigest version of the MD5 checksum of the file as the first element and the Base64-encoded version of the plain checksum as the second element. This is the same format returned by the compute_md5 method. :param md5: If you need to compute the MD5 for any reason prior to upload, it's silly to have to do it twice so this param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where you are splitting the file up into different ranges to be uploaded. If not specified, the default behaviour is to read all bytes from the file pointer. Less bytes may be available. :type rewind: bool :param rewind: (optional) If True, the file pointer (fp) will be rewound to the start before any bytes are read from it. The default behaviour is False which reads from the current position of the file pointer (fp). :rtype: int :return: The number of bytes written to the key. """ provider = self.bucket.connection.provider headers = headers or {} if policy: headers[provider.acl_header] = policy if encrypt_key: headers[provider.server_side_encryption_header] = 'AES256' if rewind: # caller requests reading from beginning of fp. 
fp.seek(0, os.SEEK_SET) else: # The following seek/tell/seek logic is intended # to detect applications using the older interface to # set_contents_from_file(), which automatically rewound the # file each time the Key was reused. This changed with commit # 14ee2d03f4665fe20d19a85286f78d39d924237e, to support uploads # split into multiple parts and uploaded in parallel, and at # the time of that commit this check was added because otherwise # older programs would get a success status and upload an empty # object. Unfortuantely, it's very inefficient for fp's implemented # by KeyFile (used, for example, by gsutil when copying between # providers). So, we skip the check for the KeyFile case. # TODO: At some point consider removing this seek/tell/seek # logic, after enough time has passed that it's unlikely any # programs remain that assume the older auto-rewind interface. if not isinstance(fp, KeyFile): spos = fp.tell() fp.seek(0, os.SEEK_END) if fp.tell() == spos: fp.seek(0, os.SEEK_SET) if fp.tell() != spos: # Raise an exception as this is likely a programming # error whereby there is data before the fp but nothing # after it. fp.seek(spos) raise AttributeError('fp is at EOF. Use rewind option ' 'or seek() to data start.') # seek back to the correct position. fp.seek(spos) if reduced_redundancy: self.storage_class = 'REDUCED_REDUNDANCY' if provider.storage_class_header: headers[provider.storage_class_header] = self.storage_class # TODO - What if provider doesn't support reduced reduncancy? # What if different providers provide different classes? if hasattr(fp, 'name'): self.path = fp.name if self.bucket is not None: if not md5 and provider.supports_chunked_transfer(): # defer md5 calculation to on the fly and # we don't know anything about size yet. chunked_transfer = True self.size = None else: chunked_transfer = False if isinstance(fp, KeyFile): # Avoid EOF seek for KeyFile case as it's very inefficient. 
key = fp.getkey() size = key.size - fp.tell() self.size = size # At present both GCS and S3 use MD5 for the etag for # non-multipart-uploaded objects. If the etag is 32 hex # chars use it as an MD5, to avoid having to read the file # twice while transferring. if (re.match('^"[a-fA-F0-9]{32}"$', key.etag)): etag = key.etag.strip('"') md5 = (etag, base64.b64encode(binascii.unhexlify(etag))) if not md5: # compute_md5() and also set self.size to actual # size of the bytes read computing the md5. md5 = self.compute_md5(fp, size) # adjust size if required size = self.size elif size: self.size = size else: # If md5 is provided, still need to size so # calculate based on bytes to end of content spos = fp.tell() fp.seek(0, os.SEEK_END) self.size = fp.tell() - spos fp.seek(spos) size = self.size self.md5 = md5[0] self.base64md5 = md5[1] if self.name is None: self.name = self.md5 if not replace: if self.bucket.lookup(self.name): return self.send_file(fp, headers=headers, cb=cb, num_cb=num_cb, query_args=query_args, chunked_transfer=chunked_transfer, size=size) # return number of bytes written. return self.size def set_contents_from_filename(self, filename, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, encrypt_key=False): """ Store an object in S3 using the name of the Key object as the key in S3 and the contents of the file named by 'filename'. See set_contents_from_file method for details about the parameters. :type filename: string :param filename: The name of the file that you want to put onto S3 :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :type replace: bool :param replace: If True, replaces the contents of the file if it already exists. :type cb: function :param cb: a callback function that will be called to report progress on the upload. 
The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type md5: A tuple containing the hexdigest version of the MD5 checksum of the file as the first element and the Base64-encoded version of the plain checksum as the second element. This is the same format returned by the compute_md5 method. :param md5: If you need to compute the MD5 for any reason prior to upload, it's silly to have to do it twice so this param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :rtype: int :return: The number of bytes written to the key. """ with open(filename, 'rb') as fp: return self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key) def set_contents_from_string(self, string_data, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, encrypt_key=False): """ Store an object in S3 using the name of the Key object as the key in S3 and the string 's' as the contents. 
See set_contents_from_file method for details about the parameters. :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :type replace: bool :param replace: If True, replaces the contents of the file if it already exists. :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type md5: A tuple containing the hexdigest version of the MD5 checksum of the file as the first element and the Base64-encoded version of the plain checksum as the second element. This is the same format returned by the compute_md5 method. :param md5: If you need to compute the MD5 for any reason prior to upload, it's silly to have to do it twice so this param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. 
""" if not isinstance(string_data, bytes): string_data = string_data.encode("utf-8") fp = BytesIO(string_data) r = self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key) fp.close() return r def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, override_num_retries=None, response_headers=None): """ Retrieves a file from an S3 Key :type fp: file :param fp: File pointer to put the data into :type headers: string :param: headers to send when retrieving the files :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: Flag for whether to get a torrent for the file :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying GET. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. 
""" self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, torrent=torrent, version_id=version_id, override_num_retries=override_num_retries, response_headers=response_headers, hash_algs=None, query_args=None) def _get_file_internal(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, override_num_retries=None, response_headers=None, hash_algs=None, query_args=None): if headers is None: headers = {} save_debug = self.bucket.connection.debug if self.bucket.connection.debug == 1: self.bucket.connection.debug = 0 query_args = query_args or [] if torrent: query_args.append('torrent') if hash_algs is None and not torrent: hash_algs = {'md5': md5} digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {}) # If a version_id is passed in, use that. If not, check to see # if the Key object has an explicit version_id and, if so, use that. # Otherwise, don't pass a version_id query param. if version_id is None: version_id = self.version_id if version_id: query_args.append('versionId=%s' % version_id) if response_headers: for key in response_headers: query_args.append('%s=%s' % ( key, urllib.parse.quote(response_headers[key]))) query_args = '&'.join(query_args) self.open('r', headers, query_args=query_args, override_num_retries=override_num_retries) data_len = 0 if cb: if self.size is None: cb_size = 0 else: cb_size = self.size if self.size is None and num_cb != -1: # If size is not available due to chunked transfer for example, # we'll call the cb for every 1MB of data transferred. 
cb_count = (1024 * 1024) / self.BufferSize elif num_cb > 1: cb_count = int(math.ceil(cb_size/self.BufferSize/(num_cb-1.0))) elif num_cb < 0: cb_count = -1 else: cb_count = 0 i = 0 cb(data_len, cb_size) try: for bytes in self: fp.write(bytes) data_len += len(bytes) for alg in digesters: digesters[alg].update(bytes) if cb: if cb_size > 0 and data_len >= cb_size: break i += 1 if i == cb_count or cb_count == -1: cb(data_len, cb_size) i = 0 except IOError as e: if e.errno == errno.ENOSPC: raise StorageDataError('Out of space for destination file ' '%s' % fp.name) raise if cb and (cb_count <= 1 or i > 0) and data_len > 0: cb(data_len, cb_size) for alg in digesters: self.local_hashes[alg] = digesters[alg].digest() if self.size is None and not torrent and "Range" not in headers: self.size = data_len self.close() self.bucket.connection.debug = save_debug def get_torrent_file(self, fp, headers=None, cb=None, num_cb=10): """ Get a torrent file (see to get_file) :type fp: file :param fp: The file pointer of where to put the torrent :type headers: dict :param headers: Headers to be passed :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. """ return self.get_file(fp, headers, cb, num_cb, torrent=True) def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=None): """ Retrieve an object from S3 using the name of the Key object as the key in S3. 
Write the contents of the object to the file pointed to by 'fp'. :type fp: File -like object :param fp: :type headers: dict :param headers: additional HTTP headers that will be sent with the GET request. :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: If True, returns the contents of a torrent file as a string. :type res_upload_handler: ResumableDownloadHandler :param res_download_handler: If provided, this handler will perform the download. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. 
""" if self.bucket is not None: if res_download_handler: res_download_handler.get_file(self, fp, headers, cb, num_cb, torrent=torrent, version_id=version_id) else: self.get_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers) def get_contents_to_filename(self, filename, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=None): """ Retrieve an object from S3 using the name of the Key object as the key in S3. Store contents of the object to a file named by 'filename'. See get_contents_to_file method for details about the parameters. :type filename: string :param filename: The filename of where to put the file contents :type headers: dict :param headers: Any additional headers to send in the request :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: If True, returns the contents of a torrent file as a string. :type res_upload_handler: ResumableDownloadHandler :param res_download_handler: If provided, this handler will perform the download. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. 
If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. """ try: with open(filename, 'wb') as fp: self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, res_download_handler=res_download_handler, response_headers=response_headers) except Exception: os.remove(filename) raise # if last_modified date was sent from s3, try to set file's timestamp if self.last_modified is not None: try: modified_tuple = email.utils.parsedate_tz(self.last_modified) modified_stamp = int(email.utils.mktime_tz(modified_tuple)) os.utime(fp.name, (modified_stamp, modified_stamp)) except Exception: pass def get_contents_as_string(self, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, response_headers=None, encoding=None): """ Retrieve an object from S3 using the name of the Key object as the key in S3. Return the contents of the object as a string. See get_contents_to_file method for details about the parameters. :type headers: dict :param headers: Any additional headers to send in the request :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: If True, returns the contents of a torrent file as a string. 
:type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. :type encoding: str :param encoding: The text encoding to use, such as ``utf-8`` or ``iso-8859-1``. If set, then a string will be returned. Defaults to ``None`` and returns bytes. :rtype: bytes or str :returns: The contents of the file as bytes or a string """ fp = BytesIO() self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers) value = fp.getvalue() if encoding is not None: value = value.decode(encoding) return value def add_email_grant(self, permission, email_address, headers=None): """ Convenience method that provides a quick way to add an email grant to a key. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUT's the new ACL back to S3. :type permission: string :param permission: The permission being granted. Should be one of: (READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL). :type email_address: string :param email_address: The email address associated with the AWS account your are granting the permission to. :type recursive: boolean :param recursive: A boolean value to controls whether the command will apply the grant to all keys within the bucket or not. The default value is False. By passing a True value, the call will iterate through all keys in the bucket and apply the same grant to each key. 
CAUTION: If you have a lot of keys, this could take a long time! """ policy = self.get_acl(headers=headers) policy.acl.add_email_grant(permission, email_address) self.set_acl(policy, headers=headers) def add_user_grant(self, permission, user_id, headers=None, display_name=None): """ Convenience method that provides a quick way to add a canonical user grant to a key. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUT's the new ACL back to S3. :type permission: string :param permission: The permission being granted. Should be one of: (READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL). :type user_id: string :param user_id: The canonical user id associated with the AWS account your are granting the permission to. :type display_name: string :param display_name: An option string containing the user's Display Name. Only required on Walrus. """ policy = self.get_acl(headers=headers) policy.acl.add_user_grant(permission, user_id, display_name=display_name) self.set_acl(policy, headers=headers) def _normalize_metadata(self, metadata): if type(metadata) == set: norm_metadata = set() for k in metadata: norm_metadata.add(k.lower()) else: norm_metadata = {} for k in metadata: norm_metadata[k.lower()] = metadata[k] return norm_metadata def _get_remote_metadata(self, headers=None): """ Extracts metadata from existing URI into a dict, so we can overwrite/delete from it to form the new set of metadata to apply to a key. """ metadata = {} for underscore_name in self._underscore_base_user_settable_fields: if hasattr(self, underscore_name): value = getattr(self, underscore_name) if value: # Generate HTTP field name corresponding to "_" named field. field_name = underscore_name.replace('_', '-') metadata[field_name.lower()] = value # self.metadata contains custom metadata, which are all user-settable. 
prefix = self.provider.metadata_prefix for underscore_name in self.metadata: field_name = underscore_name.replace('_', '-') metadata['%s%s' % (prefix, field_name.lower())] = ( self.metadata[underscore_name]) return metadata def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl, headers=None): metadata_plus = self._normalize_metadata(metadata_plus) metadata_minus = self._normalize_metadata(metadata_minus) metadata = self._get_remote_metadata() metadata.update(metadata_plus) for h in metadata_minus: if h in metadata: del metadata[h] src_bucket = self.bucket # Boto prepends the meta prefix when adding headers, so strip prefix in # metadata before sending back in to copy_key() call. rewritten_metadata = {} for h in metadata: if (h.startswith('x-goog-meta-') or h.startswith('x-amz-meta-')): rewritten_h = (h.replace('x-goog-meta-', '') .replace('x-amz-meta-', '')) else: rewritten_h = h rewritten_metadata[rewritten_h] = metadata[h] metadata = rewritten_metadata src_bucket.copy_key(self.name, self.bucket.name, self.name, metadata=metadata, preserve_acl=preserve_acl, headers=headers) def restore(self, days, headers=None): """Restore an object from an archive. :type days: int :param days: The lifetime of the restored object (must be at least 1 day). If the object is already restored then this parameter can be used to readjust the lifetime of the restored object. In this case, the days param is with respect to the initial time of the request. If the object has not been restored, this param is with respect to the completion time of the request. """ response = self.bucket.connection.make_request( 'POST', self.bucket.name, self.name, data=self.RestoreBody % days, headers=headers, query_args='restore') if response.status not in (200, 202): provider = self.bucket.connection.provider raise provider.storage_response_error(response.status, response.reason, response.read())
ericfc/django
refs/heads/master
tests/migrations/test_migrations_squashed_complex/1_auto.py
1155
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): operations = [ migrations.RunPython(migrations.RunPython.noop) ]
Samsung/TizenRT
refs/heads/master
external/protobuf/python/google/protobuf/pyext/__init__.py
401
# Namespace-package bootstrap: allow multiple distributions to contribute
# modules to this package.  Prefer setuptools' pkg_resources declaration;
# fall back to the stdlib ``pkgutil`` mechanism when setuptools is absent.
try:
    __import__('pkg_resources').declare_namespace(__name__)
except ImportError:
    # No setuptools available: extend __path__ manually instead.
    __path__ = __import__('pkgutil').extend_path(__path__, __name__)
wemanuel/smry
refs/heads/master
server-auth/ls/google-cloud-sdk/lib/googlecloudsdk/bigquery/lib/__init__.py
12
# Copyright 2014 Google Inc. All Rights Reserved. """A helper library for this command group."""
gauravbose/digital-menu
refs/heads/master
digimenu2/restaurant/urls.py
2
"""URL routing for the restaurant app.

NOTE(review): views are referenced as dotted strings
(``'restaurant.views.<name>'``).  This style was deprecated in Django 1.8
and removed in 1.10; when upgrading, pass the view callables directly
(e.g. ``views.index1``).
"""
from django.conf.urls import url

from . import views  # noqa: F401 -- kept for the planned switch to
                     # direct view references (see NOTE above).

# The original pattern order is preserved exactly: Django dispatches to the
# first pattern that matches, so reordering would change routing.
urlpatterns = [
    url(r'^index.?$', 'restaurant.views.index1'),
    url(r'^Chinese.*$', 'restaurant.views.chinese'),
    url(r'^Desserts.*$', 'restaurant.views.desserts'),
    url(r'^Chaat.+$', 'restaurant.views.chaat'),
    url(r'^restaurant/index/Italian.*$', 'restaurant.views.italian'),
    url(r'^South.*$', 'restaurant.views.southindian'),
    url(r'^welcome.*$', 'restaurant.views.welcome'),
    url(r'^cart.*$', 'restaurant.views.cart'),
    url(r'^restaurant/welcome/index.*$', 'restaurant.views.index'),
    url(r'^index/welcome.*$', 'restaurant.views.welcome'),
    url(r'^kitchen.*$', 'restaurant.views.kitchen'),
    url(r'^index/index.*$', 'restaurant.views.index1'),
    url(r'^index/cart.*$', 'restaurant.views.cart1'),
    url(r'^index(/(?P<question_id>[0-9]+))+.?$', 'restaurant.views.italian'),
    url(r'^index(/(?P<question_id>[0-9]+))+.?/bill.*$', 'restaurant.views.bill'),
    url(r'^index(/(?P<question_id>[0-9]+))+.?/thankyou.*$', 'restaurant.views.thankyou'),
    url(r'^index/thankyou.*$', 'restaurant.views.thankyou1'),
    url(r'^index(/(?P<question_id>[0-9]+))+.?/status.*$', 'restaurant.views.status1'),
    url(r'^index/status.*$', 'restaurant.views.status'),
    url(r'^index(/(?P<question_id>[0-9]+))+/index.*$', 'restaurant.views.index'),
    url(r'^index(/(?P<question_id>[0-9]+))+/cart.*$', 'restaurant.views.cart'),
    url(r'^index/bill.*$', 'restaurant.views.bill1'),
]
shweta97/pyta
refs/heads/master
nodes/Assert.py
2
""" Assert astroid node An assertion. Attributes: - test (Expr) - This holds the condition, such as a Compare node, to be evaluated True or False - fail (Node | None) - Usually a str; the message shown if condition is False. If None, only AssertionError is shown. Example: - test -> x == 0 - fail -> "x isn't 0!" # AssertionError and this message if condition is False """ assert x == 0, "x isn't 0!"
skotin/EW
refs/heads/master
sites/all/modules/fckeditor/fckeditor/editor/filemanager/connectors/py/fckoutput.py
25
#!/usr/bin/env python

"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben

== BEGIN LICENSE ==

Licensed under the terms of any of the following licenses at your
choice:

 - GNU General Public License Version 2 or later (the "GPL")
   http://www.gnu.org/licenses/gpl.html

 - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
   http://www.gnu.org/licenses/lgpl.html

 - Mozilla Public License Version 1.1 or later (the "MPL")
   http://www.mozilla.org/MPL/MPL-1.1.html

== END LICENSE ==

Connector for Python (CGI and WSGI).
"""

from time import gmtime, strftime
import string


def escape(text, replace=lambda s, old, new: s.replace(old, new)):
    """
    Convert the special characters '<', '>', '&' and '"' to the HTML
    entities &lt; &gt; &amp; and &quot; as specified by RFC 1866.

    The ``replace`` parameter is retained for backwards compatibility
    with the historic signature (which defaulted to the Python 2-only
    ``string.replace`` function, removed in Python 3); it must accept
    ``(text, old, new)`` and return the replaced string.
    """
    text = replace(text, '&', '&amp;')  # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    return text


def convertToXmlAttribute(value):
    """Return ``value`` escaped for use in an XML attribute ('' for None)."""
    if value is None:
        value = ""
    return escape(value)


class BaseHttpMixin(object):
    def setHttpHeaders(self, content_type='text/xml'):
        """Prepare the HTTP headers for the response to return."""
        # Prevent the browser from caching the result.
        # Date in the past
        self.setHeader('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT')
        # always modified
        self.setHeader('Last-Modified',
                       strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
        # HTTP/1.1
        self.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate')
        self.setHeader('Cache-Control', 'post-check=0, pre-check=0')
        # HTTP/1.0
        self.setHeader('Pragma', 'no-cache')
        # Set the response format.
        self.setHeader('Content-Type', content_type + '; charset=utf-8')
        return


class BaseXmlMixin(object):
    def createXmlHeader(self, command, resourceType, currentFolder, url):
        """Return the XML header including the CurrentFolder node."""
        self.setHttpHeaders()
        # Create the XML document header
        s = """<?xml version="1.0" encoding="utf-8" ?>"""
        # Create the main connector node
        s += """<Connector command="%s" resourceType="%s">""" % (
            command, resourceType)
        # Add the current folder node
        s += """<CurrentFolder path="%s" url="%s" />""" % (
            convertToXmlAttribute(currentFolder),
            convertToXmlAttribute(url),
        )
        return s

    def createXmlFooter(self):
        """Return the XML footer."""
        return """</Connector>"""

    def sendError(self, number, text):
        """In the event of an error, return an XML-based error document."""
        self.setHttpHeaders()
        return ("""<?xml version="1.0" encoding="utf-8" ?>"""
                + """<Connector>"""
                + self.sendErrorNode(number, text)
                + """</Connector>""")

    def sendErrorNode(self, number, text):
        # Only error number 1 carries a custom (escaped) message.
        if number != 1:
            return """<Error number="%s" />""" % (number)
        else:
            return """<Error number="%s" text="%s" />""" % (
                number, convertToXmlAttribute(text))


class BaseHtmlMixin(object):
    def sendUploadResults(self, errorNo=0, fileUrl='', fileName='',
                          customMsg=''):
        """Send the results of the uploading process.

        Embeds a minified version of the document.domain automatic fix
        script (#1919); the original script can be found at
        _dev/domain_fix_template.js.
        """
        # NOTE: the original placed these descriptions as bare string
        # literals *after* the first statement, where they were dead
        # no-op expressions rather than a docstring; moved here.
        self.setHttpHeaders("text/html")
        return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();

window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
            'errorNumber': errorNo,
            'fileUrl': fileUrl.replace('"', '\\"'),
            'fileName': fileName.replace('"', '\\"'),
            'customMsg': customMsg.replace('"', '\\"'),
        }
IATI/IATI-Website-Tests
refs/heads/master
tests/test_query_builder.py
1
import pytest
from utility import utility
from web_test_base import WebTestBase


class TestQueryBuilder(WebTestBase):
    """Web tests covering the IATI Datastore query-builder pages."""

    requests_to_load = {
        'IATI Query Builder': {
            'url': 'http://datastore.iatistandard.org/query/'
        },
        'POST Example': {
            'url': 'http://datastore.iatistandard.org/query/index.php',
            'method': 'POST',
            'data': {
                'format': 'activity',
                'grouping': 'summary',
                'sample-size': '50 rows',
                'reporting-org[]': 'XM-DAC-3-1',
                'sector[]': '12181',
                'recipient-region[]': '298',
                'submit': 'Submit'
            }
        },
        'Publisher Information': {
            'url': 'http://datastore.iatistandard.org/query/helpers/groups_cache_dc.json',
            'min_response_size': 1500000
        }
    }

    @pytest.mark.parametrize("target_request",
                             ["IATI Query Builder", "POST Example"])
    def test_locate_links(self, target_request):
        """Each page must link back to the IATI Datastore documentation."""
        page = self.loaded_request_from_test_name(target_request)
        links_on_page = utility.get_links_from_page(page)

        expected_link = ("https://iatistandard.org/en/using-data/"
                         "IATI-tools-and-resources/IATI-datastore/")
        assert expected_link in links_on_page

    @pytest.mark.parametrize("target_request", ["POST Example"])
    def test_form_submit_link(self, target_request):
        """The results page must link to the equivalent API query."""
        page = self.loaded_request_from_test_name(target_request)
        links_on_page = utility.get_links_from_page(page)

        api_query_link = ("http://datastore.iatistandard.org/api/1/access/"
                          "activity.csv?reporting-org=XM-DAC-3-1"
                          "&sector=12181&recipient-region=298")
        assert api_query_link in links_on_page
eunchong/build
refs/heads/master
third_party/twisted_10_2/twisted/trial/itrial.py
61
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Interfaces for Trial.

Maintainer: Jonathan Lange
"""

import zope.interface as zi
from zope.interface import Attribute


class ITestCase(zi.Interface):
    """
    The interface that a test case must implement in order to be used in
    Trial.
    """

    failureException = zi.Attribute(
        "The exception class that is raised by failed assertions")


    def __call__(result):
        """
        Run the test. Should always do exactly the same thing as run().
        """


    def countTestCases():
        """
        Return the number of tests in this test case. Usually 1.
        """


    def id():
        """
        Return a unique identifier for the test, usually the
        fully-qualified Python name.
        """


    def run(result):
        """
        Run the test, storing the results in C{result}.

        @param result: A L{TestResult}.
        """


    def shortDescription():
        """
        Return a short description of the test.
        """


class IReporter(zi.Interface):
    """
    I report results from a run of a test suite.
    """

    stream = zi.Attribute(
        "Deprecated in Twisted 8.0. "
        "The io-stream that this reporter will write to")
    tbformat = zi.Attribute("Either 'default', 'brief', or 'verbose'")
    args = zi.Attribute(
        "Additional string argument passed from the command line")
    shouldStop = zi.Attribute(
        """
        A boolean indicating that this reporter would like the test run to
        stop.
        """)
    separator = Attribute(
        "Deprecated in Twisted 8.0. "
        "A value which will occasionally be passed to the L{write} method.")
    testsRun = Attribute(
        """
        The number of tests that seem to have been run according to this
        reporter.
        """)


    def startTest(method):
        """
        Report the beginning of a run of a single test method.

        @param method: an object that is adaptable to ITestMethod
        """


    def stopTest(method):
        """
        Report the status of a single test method

        @param method: an object that is adaptable to ITestMethod
        """


    def startSuite(name):
        """
        Deprecated in Twisted 8.0.

        Suites which wish to appear in reporter output should call this
        before running their tests.
        """


    def endSuite(name):
        """
        Deprecated in Twisted 8.0.

        Called at the end of a suite, if and only if that suite has called
        C{startSuite}.
        """


    def cleanupErrors(errs):
        """
        Deprecated in Twisted 8.0.

        Called when the reactor has been left in a 'dirty' state

        @param errs: a list of L{twisted.python.failure.Failure}s
        """


    def upDownError(userMeth, warn=True, printStatus=True):
        """
        Deprecated in Twisted 8.0.

        Called when an error occurs in a setUp* or tearDown* method

        @param warn: indicates whether or not the reporter should emit a
            warning about the error
        @type warn: Boolean

        @param printStatus: indicates whether or not the reporter should
            print the name of the method and the status message
            appropriate for the type of error
        @type printStatus: Boolean
        """


    def addSuccess(test):
        """
        Record that test passed.
        """


    def addError(test, error):
        """
        Record that a test has raised an unexpected exception.

        @param test: The test that has raised an error.

        @param error: The error that the test raised. It will either be a
            three-tuple in the style of C{sys.exc_info()} or a
            L{Failure<twisted.python.failure.Failure>} object.
        """


    def addFailure(test, failure):
        """
        Record that a test has failed with the given failure.

        @param test: The test that has failed.

        @param failure: The failure that the test failed with. It will
            either be a three-tuple in the style of C{sys.exc_info()}
            or a L{Failure<twisted.python.failure.Failure>} object.
        """


    def addExpectedFailure(test, failure, todo):
        """
        Record that the given test failed, and was expected to do so.

        @type test: L{pyunit.TestCase}
        @param test: The test which this is about.

        @type failure: L{failure.Failure}
        @param failure: The error which this test failed with.

        @type todo: L{unittest.Todo}
        @param todo: The reason for the test's TODO status.
        """


    def addUnexpectedSuccess(test, todo):
        """
        Record that the given test succeeded, despite being marked TODO
        (i.e. it was expected to fail).

        @type test: L{pyunit.TestCase}
        @param test: The test which this is about.

        @type todo: L{unittest.Todo}
        @param todo: The reason for the test's TODO status.
        """


    def addSkip(test, reason):
        """
        Record that a test has been skipped for the given reason.

        @param test: The test that has been skipped.

        @param reason: An object that the test case has specified as the
            reason for skipping the test.
        """


    def printSummary():
        """
        Deprecated in Twisted 8.0, use L{done} instead.

        Present a summary of the test results.
        """


    def printErrors():
        """
        Deprecated in Twisted 8.0, use L{done} instead.

        Present the errors that have occurred during the test run. This
        method will be called after all tests have been run.
        """


    def write(string):
        """
        Deprecated in Twisted 8.0, use L{done} instead.

        Display a string to the user, without appending a new line.
        """


    def writeln(string):
        """
        Deprecated in Twisted 8.0, use L{done} instead.

        Display a string to the user, appending a new line.
        """


    def wasSuccessful():
        """
        Return a boolean indicating whether all test results that were
        reported to this reporter were successful or not.
        """


    def done():
        """
        Called when the test run is complete.

        This gives the result object an opportunity to display a summary
        of information to the user. Once you have called C{done} on an
        L{IReporter} object, you should assume that the L{IReporter}
        object is no longer usable.
        """
tillahoffmann/tensorflow
refs/heads/master
tensorflow/python/ops/distributions/dirichlet_multinomial.py
39
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The DirichletMultinomial distribution class."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import special_math_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util


__all__ = [
    "DirichletMultinomial",
]


# NOTE: this is a runtime string (appended to method docstrings via
# `AppendDocstring` below), so its content must not be altered casually.
_dirichlet_multinomial_sample_note = """For each batch of counts,
`value = [n_0, ..., n_{K-1}]`, `P[value]` is the probability that after
sampling `self.total_count` draws from this Dirichlet-Multinomial distribution,
the number of draws falling in class `j` is `n_j`. Since this definition is
[exchangeable](https://en.wikipedia.org/wiki/Exchangeable_random_variables);
different sequences have the same counts so the probability includes a
combinatorial coefficient.

Note: `value` must be a non-negative tensor with dtype `self.dtype`, have no
fractional components, and such that
`tf.reduce_sum(value, -1) = self.total_count`. Its shape must be broadcastable
with `self.concentration` and `self.total_count`."""


class DirichletMultinomial(distribution.Distribution):
  """Dirichlet-Multinomial compound distribution.

  The Dirichlet-Multinomial distribution is parameterized by a (batch of)
  length-`K` `concentration` vectors (`K > 1`) and a `total_count` number of
  trials, i.e., the number of trials per draw from the DirichletMultinomial. It
  is defined over a (batch of) length-`K` vector `counts` such that
  `tf.reduce_sum(counts, -1) = total_count`. The Dirichlet-Multinomial is
  identically the Beta-Binomial distribution when `K = 2`.

  #### Mathematical Details

  The Dirichlet-Multinomial is a distribution over `K`-class counts, i.e., a
  length-`K` vector of non-negative integer `counts = n = [n_0, ..., n_{K-1}]`.

  The probability mass function (pmf) is,

  ```none
  pmf(n; alpha, N) = Beta(alpha + n) / (prod_j n_j!) / Z
  Z = Beta(alpha) / N!
  ```

  where:

  * `concentration = alpha = [alpha_0, ..., alpha_{K-1}]`, `alpha_j > 0`,
  * `total_count = N`, `N` a positive integer,
  * `N!` is `N` factorial, and,
  * `Beta(x) = prod_j Gamma(x_j) / Gamma(sum_j x_j)` is the
    [multivariate beta function](
    https://en.wikipedia.org/wiki/Beta_function#Multivariate_beta_function),
    and,
  * `Gamma` is the [gamma function](
    https://en.wikipedia.org/wiki/Gamma_function).

  Dirichlet-Multinomial is a [compound distribution](
  https://en.wikipedia.org/wiki/Compound_probability_distribution), i.e., its
  samples are generated as follows.

    1. Choose class probabilities:
       `probs = [p_0,...,p_{K-1}] ~ Dir(concentration)`
    2. Draw integers:
       `counts = [n_0,...,n_{K-1}] ~ Multinomial(total_count, probs)`

  The last `concentration` dimension parametrizes a single
  Dirichlet-Multinomial distribution. When calling distribution functions
  (e.g., `dist.prob(counts)`), `concentration`, `total_count` and `counts` are
  broadcast to the same shape. The last dimension of `counts` corresponds to a
  single Dirichlet-Multinomial distribution.

  Distribution parameters are automatically broadcast in all functions; see
  examples for details.

  #### Pitfalls

  The number of classes, `K`, must not exceed:
  - the largest integer representable by `self.dtype`, i.e.,
    `2**(mantissa_bits+1)` (IEEE754),
  - the maximum `Tensor` index, i.e., `2**31-1`.

  In other words,

  ```python
  K <= min(2**31-1, {
    tf.float16: 2**11,
    tf.float32: 2**24,
    tf.float64: 2**53 }[param.dtype])
  ```

  Note: This condition is validated only when `self.validate_args = True`.

  #### Examples

  ```python
  alpha = [1, 2, 3]
  n = 2
  dist = DirichletMultinomial(n, alpha)
  ```

  Creates a 3-class distribution, in which the 3rd class is most likely to be
  drawn.
  The distribution functions can be evaluated on counts.

  ```python
  # counts same shape as alpha.
  counts = [0, 0, 2]
  dist.prob(counts)  # Shape []

  # alpha will be broadcast to [[1, 2, 3], [1, 2, 3]] to match counts.
  counts = [[1, 1, 0], [1, 0, 1]]
  dist.prob(counts)  # Shape [2]

  # alpha will be broadcast to shape [5, 7, 3] to match counts.
  counts = [[...]]  # Shape [5, 7, 3]
  dist.prob(counts)  # Shape [5, 7]
  ```

  Creates a 2-batch of 3-class distributions.

  ```python
  alpha = [[1, 2, 3], [4, 5, 6]]  # Shape [2, 3]
  n = [3, 3]
  dist = DirichletMultinomial(n, alpha)

  # counts will be broadcast to [[2, 1, 0], [2, 1, 0]] to match alpha.
  counts = [2, 1, 0]
  dist.prob(counts)  # Shape [2]
  ```
  """

  # TODO(b/27419586) Change docstring for dtype of concentration once int
  # allowed.
  def __init__(self,
               total_count,
               concentration,
               validate_args=False,
               allow_nan_stats=True,
               name="DirichletMultinomial"):
    """Initialize a batch of DirichletMultinomial distributions.

    Args:
      total_count: Non-negative floating point tensor, whose dtype is the same
        as `concentration`. The shape is broadcastable to `[N1,..., Nm]` with
        `m >= 0`. Defines this as a batch of `N1 x ... x Nm` different
        Dirichlet multinomial distributions. Its components should be equal to
        integer values.
      concentration: Positive floating point tensor, whose dtype is the
        same as `n` with shape broadcastable to `[N1,..., Nm, K]` `m >= 0`.
        Defines this as a batch of `N1 x ... x Nm` different `K` class
        Dirichlet multinomial distributions.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = locals()
    with ops.name_scope(name, values=[total_count, concentration]):
      # Broadcasting works because:
      # * The broadcasting convention is to prepend dimensions of size [1], and
      #   we use the last dimension for the distribution, whereas
      #   the batch dimensions are the leading dimensions, which forces the
      #   distribution dimension to be defined explicitly (i.e. it cannot be
      #   created automatically by prepending). This forces enough explicitness.
      # * All calls involving `counts` eventually require a broadcast between
      #   `counts` and concentration.
      self._total_count = ops.convert_to_tensor(total_count, name="total_count")
      if validate_args:
        self._total_count = (
            distribution_util.embed_check_nonnegative_integer_form(
                self._total_count))
      self._concentration = self._maybe_assert_valid_concentration(
          ops.convert_to_tensor(concentration,
                                name="concentration"),
          validate_args)
      # Cached sum over the class axis; several statistics below reuse it.
      self._total_concentration = math_ops.reduce_sum(self._concentration, -1)
    super(DirichletMultinomial, self).__init__(
        dtype=self._concentration.dtype,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        parameters=parameters,
        graph_parents=[self._total_count,
                       self._concentration],
        name=name)

  @property
  def total_count(self):
    """Number of trials used to construct a sample."""
    return self._total_count

  @property
  def concentration(self):
    """Concentration parameter; expected prior counts for that coordinate."""
    return self._concentration

  @property
  def total_concentration(self):
    """Sum of last dim of concentration parameter."""
    return self._total_concentration

  def _batch_shape_tensor(self):
    return array_ops.shape(self.total_concentration)

  def _batch_shape(self):
    return self.total_concentration.get_shape()

  def _event_shape_tensor(self):
    return array_ops.shape(self.concentration)[-1:]

  def _event_shape(self):
    # Event shape depends only on total_concentration, not "n".
    return self.concentration.get_shape().with_rank_at_least(1)[-1:]

  def _sample_n(self, n, seed=None):
    n_draws = math_ops.cast(self.total_count, dtype=dtypes.int32)
    k = self.event_shape_tensor()[0]
    # Sample via the compound construction: Gamma draws (which, normalized,
    # are Dirichlet samples) feed Multinomial as unnormalized log-probs --
    # `random_ops.multinomial` accepts unnormalized logits, so no explicit
    # normalization is needed.
    unnormalized_logits = array_ops.reshape(
        math_ops.log(random_ops.random_gamma(
            shape=[n],
            alpha=self.concentration,
            dtype=self.dtype,
            seed=seed)),
        shape=[-1, k])
    draws = random_ops.multinomial(
        logits=unnormalized_logits,
        num_samples=n_draws,
        seed=distribution_util.gen_new_seed(seed, salt="dirichlet_multinomial"))
    # Aggregate per-draw class indices into per-sample count vectors.
    x = math_ops.reduce_sum(array_ops.one_hot(draws, depth=k), -2)
    final_shape = array_ops.concat([[n], self.batch_shape_tensor(), [k]], 0)
    x = array_ops.reshape(x, final_shape)
    return math_ops.cast(x, self.dtype)

  @distribution_util.AppendDocstring(_dirichlet_multinomial_sample_note)
  def _log_prob(self, counts):
    counts = self._maybe_assert_valid_sample(counts)
    ordered_prob = (
        special_math_ops.lbeta(self.concentration + counts) -
        special_math_ops.lbeta(self.concentration))
    return ordered_prob + distribution_util.log_combinations(
        self.total_count, counts)

  @distribution_util.AppendDocstring(_dirichlet_multinomial_sample_note)
  def _prob(self, counts):
    return math_ops.exp(self._log_prob(counts))

  def _mean(self):
    # E[X] = n * alpha / alpha_0; expand the summed concentration back to
    # broadcast against the class axis.
    return self.total_count * (self.concentration /
                               self.total_concentration[..., array_ops.newaxis])

  @distribution_util.AppendDocstring(
      """The covariance for each batch member is defined as the following:

      ```none
      Var(X_j) = n * alpha_j / alpha_0 * (1 - alpha_j / alpha_0) *
      (n + alpha_0) / (1 + alpha_0)
      ```

      where `concentration = alpha` and
      `total_concentration = alpha_0 = sum_j alpha_j`.

      The covariance between elements in a batch is defined as:

      ```none
      Cov(X_i, X_j) = -n * alpha_i * alpha_j / alpha_0 ** 2 *
      (n + alpha_0) / (1 + alpha_0)
      ```
      """)
  def _covariance(self):
    x = self._variance_scale_term() * self._mean()
    return array_ops.matrix_set_diag(
        -math_ops.matmul(x[..., array_ops.newaxis],
                         x[..., array_ops.newaxis, :]),  # outer prod
        self._variance())

  def _variance(self):
    scale = self._variance_scale_term()
    x = scale * self._mean()
    return x * (self.total_count * scale - x)

  def _variance_scale_term(self):
    """Helper to `_covariance` and `_variance` which computes a shared scale."""
    # We must take care to expand back the last dim whenever we use the
    # total_concentration.
    c0 = self.total_concentration[..., array_ops.newaxis]
    return math_ops.sqrt((1. + c0 / self.total_count) / (1. + c0))

  def _maybe_assert_valid_concentration(self, concentration, validate_args):
    """Checks the validity of the concentration parameter."""
    if not validate_args:
      return concentration
    concentration = distribution_util.embed_check_categorical_event_shape(
        concentration)
    return control_flow_ops.with_dependencies([
        check_ops.assert_positive(
            concentration,
            message="Concentration parameter must be positive."),
    ], concentration)

  def _maybe_assert_valid_sample(self, counts):
    """Check counts for proper shape, values, then return tensor version."""
    if not self.validate_args:
      return counts
    counts = distribution_util.embed_check_nonnegative_integer_form(counts)
    return control_flow_ops.with_dependencies([
        check_ops.assert_equal(
            self.total_count, math_ops.reduce_sum(counts, -1),
            message="counts last-dimension must sum to `self.total_count`"),
    ], counts)
jeetsukumaran/supertramp
refs/heads/master
bin/supertramp-generate-jobs.py
1
#! /usr/bin/env python

import sys
import os
import json
import random
import argparse

# SGE job-script skeleton; `{queue}` and `{commands}` are filled per job.
general_job_template = """\
#! /bin/bash
#$ -cwd
#$ -V
#$ -S /bin/bash
#$ -l h_vmem=8G
#$ -l virtual_free=8G
{queue}

{commands}
"""

def main():
    """
    Generate one SGE job script per parameter combination for
    `supertramp-simulate.py`, plus a `run-manifest.json` describing all runs.

    Parameter-selection notes (kept verbatim for provenance):

    ### 2014-09-27

    b = birth-rate
    d = "global" dispersal rate
    e = extinction rate

    P:5, C:2
    ========

        The ML estimates of the speciation rate and extinction rate for the
        Psychotria tree are 0.33 and 0 events per Myr (estimated with the
        “birthdeath” function of the R package geiger; Harmon et al. 2008),
        so, for rough comparability, the base speciation rate was set to
        lambda=0.3 in all simulations, and mu was set to 0, 0.1, or 0.3.

    P:6, C:1
    ========

        d = {0, 0.03, 0.15}
        e = {0, 0.03, 0.15}
        All combinations of parameters were used, except those where e > d

    P:7
    ===

        Number of areas: Psychotria = 4

    P:10
    ====

        b = {0.
        d = {0.001, 0.005, 0.03, 0.04, 0.12,}

    ### 2014-08-27

    Oceania
        Number of islands
            6
        Diversification Rate (APE)
            For Oceania with root age of 1.0
                2.733492
        Extinction Rate (Lagrange)
            For Oceania with root age of 1.0
                5.054e-07
            For Oceania with root age of 100000
                1.741e-13
        Dispersal Rate (Lagrange)
            For Oceania with root age of 1.0
                0.3255
            For Oceania with root age of 100000
                4.446e-06
        Simulation Settings
            Running for 100000 generations
            Extinction rate: 0
            Birth rate: 2.73 / 100000 = 2.73e-5
            Dispersal rate: 0.33 / 100000 = 3.3e-6
            Niche evolution rate: = dispersal rate

    Continental
        For Continental with root age of 1.0,
            2.8263

    ### Previously

    1 simulation generation = 100 years
    10000 simulation generations = 1e6 years
    Simulation run-time: 1000000 generations = 1e8 years
    High speciation rate: 0.001 per generation = 0.1 per MY
    Med speciation rate: 0.0001 per generation = 0.01 per MY
    Low speciation rate: 0.00001 per generation = 0.001 per MY
    Dispersal rate: 0.01, 0.5, 1.0, 2.0, 10.0 x speciation rates
    Niche evolution prob: 0.001, 0.01, 0.10, 1.0
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--venv",
            default=None,
            help="Path to Python virtual environment.")
    parser.add_argument("-z", "--random-seed",
            default=None,
            help="Seed for random number generator engine.")
    parser.add_argument("-q", "--queue",
            default="long",
            help="Name of queue to use (default: '%(default)s')")
    parser.add_argument("--no-queue",
            action="store_true",
            default=False,
            help="Do not use any queue")
    # parser.add_argument("--ngens",
    #         type=int,
    #         default=1000000,
    #         help="Number of generations to run (default = %(default)s).")
    parser.add_argument("--nreps",
            type=int,
            default=100,
            help="number of replicates (default = %(default)s).")
    args = parser.parse_args()
    # Seed the master RNG; each simulation run gets its own seed drawn from it
    # so the whole batch is reproducible from one number.
    if args.random_seed is None:
        args.random_seed = random.randint(0, sys.maxsize)
    rng = random.Random(args.random_seed)
    if args.venv is not None:
        # Resolve and validate the virtualenv activation script so failures
        # happen now, at generation time, rather than inside the cluster job.
        venv_dir = os.path.expanduser(os.path.expandvars(args.venv))
        venv_activate = os.path.abspath(os.path.join(venv_dir, "bin", "activate"))
        if not os.path.exists(venv_activate):
            raise Exception("Virtual environment activation script not found: '{}'".format(venv_activate))
        source_venv = "source {}".format(venv_activate)
    else:
        source_venv = ""
    if not args.no_queue and args.queue:
        queue = "#$ -q {}".format(args.queue)
    else:
        queue = ""
    # python_path = "python3"
    # supertramp_path = os.path.abspath(os.path.join(
    #         os.path.dirname(__file__),
    #         "supertramp-simulate.py"))
    # NOTE(review): assumes supertramp-simulate.py is on PATH of the job
    # environment -- confirm before deploying to a new cluster.
    supertramp_path = "supertramp-simulate.py"
    # dispersal_models = ["constrained", "unconstrained"]
    # birth_death_rates = [(2.73e-5,0.0), ]
    # dispersal_rates = [3.3e-6,]
    # niche_evolution_probs = [3.3e-6,]
    dispersal_models = ["constrained", "unconstrained"]
    birth_death_rates = [
            (0.003,0),
            (0.03, 0),
            ] # [(2.73e-5,0.0), ]
    dispersal_rates = [0.03, 0.15, 0.30]
    niche_evolution_probs = [0.03, 0.15, 0.5, 1.0]
    run_manifest = {}
    # Full Cartesian product of the parameter grids; one job file per cell.
    # for ngens in (int(x) for x in (1e5,)):
    for ntips in (50,):
        for dm_idx, dispersal_model in enumerate(dispersal_models):
            for bd_idx, (birth_rate, death_rate) in enumerate(birth_death_rates):
                for drf_idx, dispersal_rate in enumerate(dispersal_rates):
                    for nef_idx, niche_evolution_prob in enumerate(niche_evolution_probs):
                        # Stem encodes the full parameter set so job files and
                        # outputs are self-describing and collision-free.
                        stem = "d{dispersal_rate:10.8f}_q{niche_evolution_prob:10.8f}_b{birth_rate:10.8f}_e{death_rate:10.8f}_{dispersal_model}".format(
                                dispersal_rate=dispersal_rate,
                                niche_evolution_prob=niche_evolution_prob,
                                birth_rate=birth_rate,
                                death_rate=death_rate,
                                dispersal_model=dispersal_model,
                                )
                        output_prefix = stem
                        run_cmd = []
                        run_cmd.append(supertramp_path)
                        run_cmd.extend(["-z", str(rng.randint(0, sys.maxsize))])
                        run_cmd.extend(["--num-islands", str(6)])
                        run_cmd.extend(["--num-habitat-types", str(2)])
                        run_cmd.extend(["--nreps", str(args.nreps)])
                        run_cmd.extend(["-b", str(birth_rate)])
                        run_cmd.extend(["-d", str(death_rate)])
                        run_cmd.extend(["--niche-evolution-probability", str(niche_evolution_prob)])
                        run_cmd.extend(["--dispersal-rate", str(dispersal_rate)])
                        # run_cmd.extend(["--ngens", str(ngens)])
                        run_cmd.extend(["--target-num-tips", str(ntips)])
                        run_cmd.extend(["--output-prefix", output_prefix])
                        run_cmd.extend(["--dispersal-model", dispersal_model])
                        run_cmd = " ".join(run_cmd)
                        commands = []
                        if source_venv:
                            commands.append(source_venv)
                        commands.append(run_cmd)
                        job_filepath = stem + ".job"
                        with open(job_filepath, "w") as jobf:
                            template = general_job_template
                            jobf.write(template.format(
                                commands="\n".join(commands),
                                queue=queue,
                                ))
                        # Manifest entry mirrors the parameters baked into the
                        # job so downstream analysis can join on output_prefix.
                        run_manifest[output_prefix] = {
                                "dispersal_model"       : dispersal_model,
                                "birth_rate"            : birth_rate,
                                "death_rate"            : death_rate,
                                "dispersal_rate"        : dispersal_rate,
                                "niche_evolution_prob"  : niche_evolution_prob,
                                # "ngens"                 : ngens,
                                "ntips"                 : ntips,
                                "treefile"              : output_prefix + ".trees",
                                "logfile"               : output_prefix + ".log",
                                }
    with open("run-manifest.json", "w") as manifestf:
        json.dump(run_manifest, manifestf)

if __name__ == "__main__":
    main()
inonit/wagtail
refs/heads/master
wagtail/wagtaildocs/models.py
2
from __future__ import unicode_literals

import os.path

from taggit.managers import TaggableManager

from django.db import models
from django.db.models.signals import pre_delete
from django.dispatch.dispatcher import receiver
from django.dispatch import Signal
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible

from wagtail.wagtailadmin.taggable import TagSearchable
from wagtail.wagtailadmin.utils import get_object_usage
from wagtail.wagtailsearch import index
from wagtail.wagtailsearch.queryset import SearchableQuerySetMixin


class DocumentQuerySet(SearchableQuerySetMixin, models.QuerySet):
    """QuerySet for documents; gains `search()` from SearchableQuerySetMixin."""
    pass


@python_2_unicode_compatible
class AbstractDocument(models.Model, TagSearchable):
    """
    Base model for an uploaded document.

    Subclass this (and set WAGTAILDOCS_DOCUMENT_MODEL) to customise the
    document model; `Document` below is the default concrete implementation.
    """
    title = models.CharField(max_length=255, verbose_name=_('title'))
    file = models.FileField(upload_to='documents', verbose_name=_('file'))
    created_at = models.DateTimeField(verbose_name=_('created at'), auto_now_add=True)
    # Nulled (not cascaded) on user deletion so documents outlive their uploader.
    uploaded_by_user = models.ForeignKey(
        settings.AUTH_USER_MODEL, verbose_name=_('uploaded by user'), null=True,
        blank=True, editable=False, on_delete=models.SET_NULL
    )

    tags = TaggableManager(help_text=None, blank=True, verbose_name=_('tags'))

    objects = DocumentQuerySet.as_manager()

    search_fields = TagSearchable.search_fields + (
        index.FilterField('uploaded_by_user'),
    )

    def __str__(self):
        return self.title

    @property
    def filename(self):
        """Base name of the stored file, without any directory components."""
        return os.path.basename(self.file.name)

    @property
    def file_extension(self):
        """File extension without the leading dot (e.g. 'pdf'); '' if none."""
        return os.path.splitext(self.filename)[1][1:]

    @property
    def url(self):
        """URL of the document-serving view for this document."""
        return reverse('wagtaildocs_serve', args=[self.id, self.filename])

    def get_usage(self):
        """Return pages/objects that reference this document."""
        return get_object_usage(self)

    @property
    def usage_url(self):
        """URL of the admin page listing this document's usage."""
        return reverse('wagtaildocs:document_usage',
                       args=(self.id,))

    def is_editable_by_user(self, user):
        """Return True if `user` may edit this document (per permission policy)."""
        from wagtail.wagtaildocs.permissions import permission_policy
        return permission_policy.user_has_permission_for_instance(user, 'change', self)

    class Meta:
        abstract = True
        verbose_name = _('document')


class Document(AbstractDocument):
    """Default concrete document model."""
    admin_form_fields = (
        'title',
        'file',
        'tags'
    )


def get_document_model():
    """
    Return the document model configured by WAGTAILDOCS_DOCUMENT_MODEL,
    or the default `Document` model if the setting is absent.

    Raises ImproperlyConfigured if the setting is malformed or names a
    model that is not installed.
    """
    from django.conf import settings
    from django.apps import apps

    try:
        app_label, model_name = settings.WAGTAILDOCS_DOCUMENT_MODEL.split('.')
    except AttributeError:
        # Setting not defined: fall back to the built-in model.
        return Document
    except ValueError:
        raise ImproperlyConfigured("WAGTAILDOCS_DOCUMENT_MODEL must be of the form 'app_label.model_name'")

    document_model = apps.get_model(app_label, model_name)
    if document_model is None:
        raise ImproperlyConfigured(
            "WAGTAILDOCS_DOCUMENT_MODEL refers to model '%s' that has not been installed" %
            settings.WAGTAILDOCS_DOCUMENT_MODEL
        )
    return document_model


# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Document)
def document_delete(sender, instance, **kwargs):
    # Pass false so FileField doesn't save the model.
    instance.file.delete(False)


# Emitted by the document-serving view each time a document is downloaded.
document_served = Signal(providing_args=['request'])
sfiera/gyp
refs/heads/master
PRESUBMIT.py
12
# Copyright 2010, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
EXCLUDED_PATHS = ()


def _CheckSourceEolStyle(input_api, output_api):
  """Run the SVN EOL-style canned check over non-excluded source files.

  Shared by the upload and commit presubmit hooks (previously duplicated in
  both).  Returns a list of presubmit result objects, empty when clean.
  """
  black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
  sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
  return list(input_api.canned_checks.CheckChangeSvnEolStyle(
      input_api, output_api, sources))


def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks run on upload: EOL style only."""
  return _CheckSourceEolStyle(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks run on commit: EOL style plus tree-open status."""
  report = _CheckSourceEolStyle(input_api, output_api)
  report.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      'http://gyp-status.appspot.com/status',
      'http://gyp-status.appspot.com/current'))
  return report
philippze/django-cms
refs/heads/develop
cms/south_migrations/0022_login_required_added.py
1680
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models try: from django.contrib.auth import get_user_model except ImportError: # django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name) user_model_label = '%s.%s' % (User._meta.app_label, User._meta.model_name) user_ptr_name = '%s_ptr' % User._meta.object_name.lower() class Migration(SchemaMigration): def forwards(self, orm): # Dummy migration pass def backwards(self, orm): # Dummy migration pass models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': { 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, user_model_label: { 'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], 
{'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ( 'django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ( 'django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'cms.cmsplugin': { 'Meta': {'object_name': 'CMSPlugin'}, 'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}), 'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}), 'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}), 'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}), 'rght': 
('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.globalpagepermission': { 'Meta': {'object_name': 'GlobalPagePermission'}, 'can_add': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change_advanced_settings': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_change_permissions': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_delete': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_moderate': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_move_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_publish': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_recover_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_view': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'}) }, 'cms.page': { 'Meta': {'ordering': "('site', 'tree_id', 'lft')", 'object_name': 'Page'}, 'changed_by': ( 'django.db.models.fields.CharField', [], {'max_length': '70'}), 'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'created_by': ( 'django.db.models.fields.CharField', [], {'max_length': '70'}), 'creation_date': ('django.db.models.fields.DateTimeField', 
[], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'limit_visibility_in_menu': ( 'django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), 'login_required': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}), 'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}), 'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}), 'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}), 'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}), 'published': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'publisher_public': ( 'django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}), 'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}), 'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 
'blank': 'True'}), 'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}), 'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'template': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.pagemoderator': { 'Meta': {'object_name': 'PageModerator'}, 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'moderate_children': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_descendants': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label}) }, 'cms.pagemoderatorstate': { 'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'}, 'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True'}) }, 'cms.pagepermission': { 'Meta': {'object_name': 'PagePermission'}, 'can_add': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change': ( 'django.db.models.fields.BooleanField', [], {'default': 
'True'}), 'can_change_advanced_settings': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_change_permissions': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_delete': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_moderate': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_move_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_publish': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_view': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'grant_on': ( 'django.db.models.fields.IntegerField', [], {'default': '5'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'}) }, 'cms.pageuser': { 'Meta': {'object_name': 'PageUser', '_ormbases': [user_orm_label]}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': "orm['%s']" % user_orm_label}), 'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True', 'primary_key': 'True'}) }, 'cms.pageusergroup': { 'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': "orm['%s']" % user_orm_label}), 'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}) }, 'cms.placeholder': { 'Meta': {'object_name': 'Placeholder'}, 'default_width': ( 
'django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}) }, 'cms.title': { 'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'}, 'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'meta_keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}), 'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'slug': ( 'django.db.models.fields.SlugField', [], {'max_length': '255'}), 'title': ( 'django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ( 
'django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'sites.site': { 'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"}, 'domain': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) } } complete_apps = ['cms']
rscnt/django-cms
refs/heads/develop
cms/south_migrations/0033_auto__del_field_title_publisher_is_draft__del_field_title_publisher_st.py
1680
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models try: from django.contrib.auth import get_user_model except ImportError: # django < 1.5 from django.contrib.auth.models import User else: User = get_user_model() user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name) user_model_label = '%s.%s' % (User._meta.app_label, User._meta.model_name) user_ptr_name = '%s_ptr' % User._meta.object_name.lower() class Migration(SchemaMigration): def forwards(self, orm): # Dummy migration pass def backwards(self, orm): # Dummy migration pass models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': { 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, user_model_label: { 'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], 
{'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ( 'django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ( 'django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'cms.cmsplugin': { 'Meta': {'object_name': 'CMSPlugin'}, 'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}), 'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}), 'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}), 'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}), 'rght': 
('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.globalpagepermission': { 'Meta': {'object_name': 'GlobalPagePermission'}, 'can_add': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change_advanced_settings': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_change_permissions': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_delete': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_moderate': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_move_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_publish': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_recover_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_view': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'}) }, 'cms.page': { 'Meta': {'ordering': "('site', 'tree_id', 'lft')", 'object_name': 'Page'}, 'changed_by': ( 'django.db.models.fields.CharField', [], {'max_length': '70'}), 'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'created_by': ( 'django.db.models.fields.CharField', [], {'max_length': '70'}), 'creation_date': ('django.db.models.fields.DateTimeField', 
[], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'limit_visibility_in_menu': ( 'django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}), 'login_required': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}), 'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}), 'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}), 'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}), 'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}), 'published': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'publisher_public': ( 'django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}), 'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}), 'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 
'blank': 'True'}), 'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}), 'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'template': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.pagemoderator': { 'Meta': {'object_name': 'PageModerator'}, 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'moderate_children': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_descendants': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label}) }, 'cms.pagemoderatorstate': { 'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'}, 'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True'}) }, 'cms.pagepermission': { 'Meta': {'object_name': 'PagePermission'}, 'can_add': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change': ( 'django.db.models.fields.BooleanField', [], {'default': 
'True'}), 'can_change_advanced_settings': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_change_permissions': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_delete': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_moderate': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_move_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_publish': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_view': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'grant_on': ( 'django.db.models.fields.IntegerField', [], {'default': '5'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'}) }, 'cms.pageuser': { 'Meta': {'object_name': 'PageUser', '_ormbases': [user_orm_label]}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': "orm['%s']" % user_orm_label}), 'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True', 'primary_key': 'True'}) }, 'cms.pageusergroup': { 'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': "orm['%s']" % user_orm_label}), 'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}) }, 'cms.placeholder': { 'Meta': {'object_name': 'Placeholder'}, 'default_width': ( 
'django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}) }, 'cms.title': { 'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'}, 'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'meta_keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}), 'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'slug': ( 'django.db.models.fields.SlugField', [], {'max_length': '255'}), 'title': ( 'django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ( 
'django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'sites.site': { 'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"}, 'domain': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) } } complete_apps = ['cms']
zoho/projects-python-wrappers
refs/heads/master
projects/model/Tasklog.py
1
#$Id$

from projects.model.Log import Log


class Tasklog(Log):
    """Represents one time-log entry that is attached to a task."""

    def __init__(self):
        """Create an empty task log with default task id and task name."""
        Log.__init__(self)
        # Identifier and display name of the task this log entry belongs to.
        self.task_id = 0
        self.task_name = ""

    def set_task_id(self, task_id):
        """Set the id of the associated task.

        Args:
            task_id(str): Task id.
        """
        self.task_id = task_id

    def get_task_id(self):
        """Return the id of the associated task.

        Returns:
            str: Task id.
        """
        return self.task_id

    def set_task_name(self, name):
        """Set the name of the associated task.

        Args:
            name(str): Task name.
        """
        self.task_name = name

    def get_task_name(self):
        """Return the name of the associated task.

        Returns:
            str: Task name.
        """
        return self.task_name
kived/python-for-android
refs/heads/master
tests/test_graph.py
1
from pythonforandroid.build import Context
from pythonforandroid.graph import get_recipe_order_and_bootstrap
from pythonforandroid.bootstrap import Bootstrap
from itertools import product

import pytest

# A single build context is shared by every test in this module.
ctx = Context()

# Cartesian product of recipe name-sets and bootstraps that are expected to
# resolve successfully, plus two python3crystax/sdl2 combinations that are
# only valid with the sdl2 bootstrap.
name_sets = [['python2'],
             ['kivy']]
bootstraps = [None,
              Bootstrap.get_bootstrap('pygame', ctx),
              Bootstrap.get_bootstrap('sdl2', ctx)]
valid_combinations = list(product(name_sets, bootstraps))
valid_combinations.extend(
    [(['python3crystax'], Bootstrap.get_bootstrap('sdl2', ctx)),
     (['kivy', 'python3crystax'], Bootstrap.get_bootstrap('sdl2', ctx))])


@pytest.mark.parametrize('names,bootstrap', valid_combinations)
def test_valid_recipe_order_and_bootstrap(names, bootstrap):
    # Success criterion is simply that resolution does not raise/exit.
    get_recipe_order_and_bootstrap(ctx, names, bootstrap)


# Combinations that must be rejected: two python recipes at once, and
# python3 with the pygame bootstrap.
invalid_combinations = [[['python2', 'python3crystax'], None],
                        [['python3'], Bootstrap.get_bootstrap('pygame', ctx)]]


@pytest.mark.parametrize('names,bootstrap', invalid_combinations)
def test_invalid_recipe_order_and_bootstrap(names, bootstrap):
    # Invalid recipe/bootstrap combinations abort via sys.exit, which
    # surfaces as SystemExit.
    with pytest.raises(SystemExit):
        get_recipe_order_and_bootstrap(ctx, names, bootstrap)


def test_bootstrap_dependency_addition():
    # Resolving 'kivy' alone must pull a hostpython recipe into the build
    # order as an implicit dependency.
    build_order, python_modules, bs = get_recipe_order_and_bootstrap(
        ctx, ['kivy'], None)
    assert (('hostpython2' in build_order) or
            ('hostpython3' in build_order))


if __name__ == "__main__":
    # Manual smoke run: resolve python3 against the sdl2 bootstrap.
    get_recipe_order_and_bootstrap(ctx, ['python3'],
                                   Bootstrap.get_bootstrap('sdl2', ctx))
cgstudiomap/cgstudiomap
refs/heads/develop
main/eggs/reportlab-3.2.0-py2.7-linux-x86_64.egg/reportlab/graphics/samples/scatter_lines.py
42
#Autogenerated by ReportLab guiedit do not edit
# NOTE(review): generated sample file; only comments added below, code left as
# emitted by guiedit.
from reportlab.graphics.charts.legends import Legend
from reportlab.graphics.charts.lineplots import ScatterPlot
from reportlab.graphics.shapes import Drawing, _DrawingEditorMixin, String
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics.samples.excelcolors import *

class ScatterLines(_DrawingEditorMixin,Drawing):
    """A 200x150 sample drawing: a joined-line scatter chart with title and legend."""
    def __init__(self,width=200,height=150,*args,**kw):
        Drawing.__init__(self,width,height,*args,**kw)
        # Main chart component, exposed as self.chart.
        self._add(self,ScatterPlot(),name='chart',validate=None,desc="The main chart")
        self.chart.width = 115
        self.chart.height = 80
        self.chart.x = 30
        self.chart.y = 40
        # One stroke colour per series slot (colours come from excelcolors).
        self.chart.lines[0].strokeColor = color01
        self.chart.lines[1].strokeColor = color02
        self.chart.lines[2].strokeColor = color03
        self.chart.lines[3].strokeColor = color04
        self.chart.lines[4].strokeColor = color05
        self.chart.lines[5].strokeColor = color06
        self.chart.lines[6].strokeColor = color07
        self.chart.lines[7].strokeColor = color08
        self.chart.lines[8].strokeColor = color09
        self.chart.lines[9].strokeColor = color10
        # No point markers: lines only.
        self.chart.lines[0].symbol = None
        self.chart.lines[1].symbol = None
        self.chart.lines[2].symbol = None
        self.chart.lines[3].symbol = None
        self.chart.lines[4].symbol = None
        self.chart.lines[5].symbol = None
        self.chart.lines[6].symbol = None
        self.chart.lines[7].symbol = None
        self.chart.lines[8].symbol = None
        self.chart.lines[9].symbol = None
        self.chart.fillColor = backgroundGrey
        self.chart.lineLabels.fontName = 'Helvetica'
        self.chart.xValueAxis.labels.fontName = 'Helvetica'
        self.chart.xValueAxis.labels.fontSize = 7
        self.chart.xValueAxis.forceZero = 0
        # Two hard-coded sample series of (x, y) pairs.
        self.chart.data = [((100,100), (200,200), (250,210), (300,300), (400,500)), ((100,200), (200,300), (250,200), (300,400), (400, 600))]
        self.chart.xValueAxis.avoidBoundFrac = 1
        self.chart.xValueAxis.gridEnd = 115
        self.chart.xValueAxis.tickDown = 3
        self.chart.xValueAxis.visibleGrid = 1
        self.chart.yValueAxis.tickLeft = 3
        self.chart.yValueAxis.labels.fontName = 'Helvetica'
        self.chart.yValueAxis.labels.fontSize = 7
        # Title label above the chart, exposed as self.Title.
        self._add(self,Label(),name='Title',validate=None,desc="The title at the top of the chart")
        self.Title.fontName = 'Helvetica-Bold'
        self.Title.fontSize = 7
        self.Title.x = 100
        self.Title.y = 135
        self.Title._text = 'Chart Title'
        self.Title.maxWidth = 180
        self.Title.height = 20
        self.Title.textAnchor ='middle'
        # Legend to the right of the plot area, exposed as self.Legend.
        self._add(self,Legend(),name='Legend',validate=None,desc="The legend or key for the chart")
        self.Legend.colorNamePairs = [(color01, 'Widgets'), (color02, 'Sprockets')]
        self.Legend.fontName = 'Helvetica'
        self.Legend.fontSize = 7
        self.Legend.x = 153
        self.Legend.y = 85
        self.Legend.dxTextSpace = 5
        self.Legend.dy = 5
        self.Legend.dx = 5
        self.Legend.deltay = 5
        self.Legend.alignment ='right'
        self.chart.lineLabelFormat = None
        self.chart.xLabel = 'X Axis'
        # NOTE(review): chart.y was set to 40 above and is overwritten to 30
        # here; preserved as generated.
        self.chart.y = 30
        self.chart.yLabel = 'Y Axis'
        self.chart.yValueAxis.gridEnd = 115
        self.chart.yValueAxis.visibleGrid = 1
        self.chart.yValueAxis.labelTextFormat = '%d'
        self.chart.yValueAxis.forceZero = 1
        self.chart.xValueAxis.forceZero = 1
        # Connect the points of each series with lines.
        self.chart.joinedLines = 1
        self._add(self,0,name='preview',validate=None,desc=None)

if __name__=="__main__": #NORUNTESTS
    ScatterLines().save(formats=['pdf'],outDir=None,fnRoot='scatter_lines')
marco-mariotti/selenoprofiles
refs/heads/master
libraries/annotations/GO/__init__.py
1
#!/usr/bin/env python # -*- coding: UTF-8 -*- """ A package to work with the Gene Ontology. """ __author__ = 'Chris Lasher' __email__ = 'chris DOT lasher <AT> gmail DOT com' import ontology
betoesquivel/fil2014
refs/heads/master
filenv/lib/python2.7/site-packages/django/conf/urls/i18n.py
228
from django.conf import settings
from django.conf.urls import patterns, url
from django.core.urlresolvers import LocaleRegexURLResolver


def i18n_patterns(prefix, *args):
    """
    Adds the language code prefix to every URL pattern within this
    function. This may only be used in the root URLconf, not in an
    included URLconf.
    """
    pattern_list = patterns(prefix, *args)
    if not settings.USE_I18N:
        # i18n disabled: behave exactly like a plain patterns() call.
        return pattern_list
    # Wrap the whole pattern list in a single resolver that handles the
    # language-code prefix for every contained pattern.
    return [LocaleRegexURLResolver(pattern_list)]


# View that stores the user's language choice (django.views.i18n.set_language).
urlpatterns = patterns('',
    url(r'^setlang/$', 'django.views.i18n.set_language',
        name='set_language'),
)
kenshay/ImageScript
refs/heads/master
ProgramData/SystemFiles/Python/Lib/site-packages/scipy/interpolate/tests/test_gil.py
27
from __future__ import division, print_function, absolute_import

import itertools
import threading
import time

import numpy as np
from numpy.testing import assert_equal
import pytest

import scipy.interpolate


class TestGIL(object):
    """Check if the GIL is properly released by scipy.interpolate functions."""

    def setup_method(self):
        # Ordered log of events appended by both the worker thread and the
        # main thread; the final assertion checks their interleaving.
        self.messages = []

    def log(self, message):
        self.messages.append(message)

    def make_worker_thread(self, target, args):
        # Bind the bound method to a local so the nested class body can
        # reference it without capturing self.
        log = self.log

        class WorkerThread(threading.Thread):
            def run(self):
                log('interpolation started')
                target(*args)
                log('interpolation complete')

        return WorkerThread()

    @pytest.mark.slow
    @pytest.mark.xfail(reason='race conditions, may depend on system load')
    def test_rectbivariatespline(self):
        def generate_params(n_points):
            # Square grid of n_points x n_points with z = x * y.
            x = y = np.linspace(0, 1000, n_points)
            x_grid, y_grid = np.meshgrid(x, y)
            z = x_grid * y_grid
            return x, y, z

        def calibrate_delay(requested_time):
            # Grow the problem size until one interpolation call takes
            # longer than requested_time seconds, then reuse that input.
            for n_points in itertools.count(5000, 1000):
                args = generate_params(n_points)
                time_started = time.time()
                interpolate(*args)
                if time.time() - time_started > requested_time:
                    return args

        def interpolate(x, y, z):
            scipy.interpolate.RectBivariateSpline(x, y, z)

        args = calibrate_delay(requested_time=3)
        worker_thread = self.make_worker_thread(interpolate, args)
        worker_thread.start()
        # While the worker computes (>3s), log three times from the main
        # thread. These entries can only land between 'started' and
        # 'complete' if the GIL was released during the computation.
        for i in range(3):
            time.sleep(0.5)
            self.log('working')
        worker_thread.join()
        assert_equal(self.messages, [
            'interpolation started',
            'working',
            'working',
            'working',
            'interpolation complete',
        ])
hefen1/chromium
refs/heads/master
tools/cygprofile/check_orderfile_unittest.py
49
#!/usr/bin/python # Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unittest import check_orderfile import symbol_extractor class TestCheckOrderFile(unittest.TestCase): _SYMBOL_INFOS = [symbol_extractor.SymbolInfo('first', 0x1, 0, ''), symbol_extractor.SymbolInfo('second', 0x2, 0, ''), symbol_extractor.SymbolInfo('notProfiled', 0x4, 0, ''), symbol_extractor.SymbolInfo('third', 0x3, 0, ''),] def testMatchesSymbols(self): symbols = ['first', 'second', 'third'] (misordered_pairs_count, matched_count, missing_count) = ( check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS)) self.assertEquals( (misordered_pairs_count, matched_count, missing_count), (0, 3, 0)) def testMissingMatches(self): symbols = ['second', 'third', 'other', 'first'] (_, matched_count, unmatched_count) = ( check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS)) self.assertEquals(matched_count, 3) self.assertEquals(unmatched_count, 1) def testNoUnorderedSymbols(self): symbols = ['first', 'other', 'second', 'third', 'noMatchEither'] (misordered_pairs_count, _, _) = ( check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS)) self.assertEquals(misordered_pairs_count, 0) def testUnorderedSymbols(self): symbols = ['first', 'other', 'third', 'second', 'noMatchEither'] (misordered_pairs_count, _, _) = ( check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS)) self.assertEquals(misordered_pairs_count, 1) if __name__ == '__main__': unittest.main()
paragbaxi/qualysguard_host_list_detection
refs/heads/master
src/qualysguard_host_list_detection.py
1
from __future__ import print_function __author__ = 'Parag Baxi' # System modules import argparse import ast import ConfigParser import datetime import logging import os import sys import time from Queue import Queue from threading import Thread # Local modules import qualysapi from lxml import objectify, etree def download_hosts(i, q): """This is the worker thread function. It processes items in the queue one after another. These daemon threads go into an infinite loop, and only exit when the main thread ends. """ global c_args, datetime_format, start_time_hosts_detection # Have thread number start at 1 for human display. thread_number = i + 1 # Download assigned hosts in this thread. while True: logger.debug('Thread %s: Looking for the next enclosure' % (thread_number)) ids = q.get() # Chunk received. Start time. if not start_time_hosts_detection: start_time_hosts_detection = time.time() # Find start & end host ids for logging. if not ',' in ids: # Only one host_id or one range, no comma found. ids_range = ids else: try: thread_start = ids[:ids.index(',')] thread_end = ids[ids.rindex(',')+1:] ids_range = '%s-%s' % (thread_start, thread_end) except ValueError, e: # Only one host_id, no comma found. ids_range = ids logger.info('Thread %s: Downloading new hosts.' % (thread_number)) logger.debug('Thread %s: Downloading new hosts: %s' % (thread_number, ids)) # Set parameters. params = {'action': 'list', 'ids': ids, 'output_format': c_args.format,} # Suppress duplicate data for CSV format. if 'CSV' in params['output_format']: params.update({'suppress_duplicated_data_from_csv': '1'}) # Add user parameter options, if applicable. if c_args.parameters: user_params = ast.literal_eval(c_args.parameters) params.update(user_params) # Download host list detection chunk. response = qgc.request('/api/2.0/fo/asset/host/vm/detection/', params) q.task_done() # Don't write to file if benchmarking. 
if not c_args.benchmark: file_extension = 'csv' if c_args.format == 'XML': file_extension = 'xml' filename = '%s/%s-host_ids-%s.%s' % (c_args.output_directory, datetime_format, ids_range, file_extension) logger.debug('Writing hosts file: %s' % filename) with open(filename, 'w') as host_file: print(response, file = host_file) logger.debug('Thread %s: Finished downloading.: %s' % (thread_number, ids)) def save_config(): """ :return: Completed save. """ global host_id_start # Save start and end to file. cfgfile = open("config.ini",'w') try: Config.add_section('Host ID') except ConfigParser.DuplicateSectionError, e: # File already exists. pass Config.set('Host ID','start',host_id_start) Config.write(cfgfile) cfgfile.close() return True def find_start_host_id(id_start): """ :param id_start: Host ID to start querying. :return: Start Host ID. """ global qgc tree = qgc.request('/api/2.0/fo/asset/host/', {'action': 'list', 'id_min': str(id_start), 'details': 'None', 'truncation_limit': '1',}) # Objectify. host_list_output = objectify.fromstring(tree) # Find start ID. host_id_start = id_start = host_list_output.RESPONSE.ID_SET.ID.text return host_id_start def ids_in_id_list(tree): """Return set of extracted IPs from IP list XML. """ ids = [] # Grab all IDs and ID ranges. id_list = tree.xpath('//ID_SET/descendant::*/text()') for i in id_list: logger.debug('ID: %s' % i) if '-' in i: id_start = i[:i.find('-')] id_end = i[i.find('-')+1:] ids += range(int(id_start),int(id_end)+1) else: ids += [int(i)] return ids def chunk_to_parameter(chunk): """ :param chunk: List of numbers. :return: String of numbers, comma delimited, no spaces. """ numbers = '' for number in chunk: numbers += '%s,' % number # Remove last comma. numbers = numbers[:-1] return numbers def add_work_and_find_end_host_id(id_start, num_hosts_per_call): """ :param id_start: Host ID to start querying. :param num_hosts_per_call: Number of hosts to query per call. :return: Last host ID. 
""" global hosts_queue, logger, num_hosts chunk = [] while True: id_start += 1 logger.debug('Calling host API to identify host ids.') tree = qgc.request('/api/2.0/fo/asset/host/', {'action': 'list', 'id_min': str(id_start), 'details': 'None', 'truncation_limit': num_hosts_per_call,}) # Extract host ids. ids = ids_in_id_list(etree.fromstring(tree)) # Add length to total number of hosts. num_hosts += len(ids) logger.info('Found %s id(s), will now queue.' % str(len(ids))) logger.debug('ids found: %s' % str(ids)) # Are there any more hosts? if not ids: # No more new hosts. logger.info('No more new hosts.') # Is the current chunk incomplete? if chunk: # Send it to work queue. # Add work to the queue. logger.debug('Queuing remaining id(s): %s' % str(chunk)) hosts_queue.put(chunk_to_parameter(chunk)) break # For next round, find last host id, set to new start host id. id_start = ids[len(ids)-1] # Add hosts to work queue by popping until chunks are full. # Popping removes from end, so reverse to maintain order. ids.reverse() # Work until ids is empty. while ids: # Add to chunk. chunk.append(ids.pop()) logger.debug('id added: %s' % str(chunk[-1])) # Is chunk is full? if len(chunk) == c_args.hosts_to_download_per_call: # Add work to the queue. logger.debug('Queuing: %s' % str(chunk)) hosts_queue.put(chunk_to_parameter(chunk)) # Reset chunk. chunk = [] # Return last host, which was saved in id_start from while loop. logger.debug('Done processing up to host id: %s' % str(id_start)) return id_start # # Begin # # Set timers. start_time_hosts_detection = False start_time = time.time() # Declare the command line flags/options we want to allow. parser = argparse.ArgumentParser( description='Download hosts concurrently and efficiently via host list detection API.') # parser.add_argument('-a', '--override_all_apps', # help='Generate report for all webapps. Automatically selected for first run.') # Do not store files. 
parser.add_argument('--benchmark', action = 'store_true', help = argparse.SUPPRESS) parser.add_argument('--config', help = 'Configuration for Qualys connector.') parser.add_argument('-d', '--hosts_to_download_per_call', default=1000, help='Override default number of hosts (1000) to download per call for host vulnerability data.') parser.add_argument('-f', '--format', default='CSV_NO_METADATA', help='Set host list detection output format. (Default = CSV_NO_METADATA)') parser.add_argument('-i','--host_id_discovery_truncation_limit', default=5000, help='Override default truncation limit (5000) for host ID discovery.') parser.add_argument('-o', '--output_directory', default='data', help='Set directory for data output. (Default = data)') parser.add_argument('-p', '--parameters', help='Set host list detection parameters (Default: {\'suppress_duplicated_data_from_csv\': \'1\'})\n(Example: \"{\'include_search_list_titles\': \'SSL+certificate\', \'active_kernels_only\': \'1\'}\")') parser.add_argument('-t', '--threads', default=2, help='Number of concurrent threads to call the host list detection API with. (Default = 2)') parser.add_argument('-v', '--verbose', action = 'store_true', help='Outputs additional information to log.') # Parse arguments. c_args = parser.parse_args() c_args.hosts_to_download_per_call = int(c_args.hosts_to_download_per_call) # Create log and data directories. PATH_LOG = 'log' if not os.path.exists(PATH_LOG): os.makedirs(PATH_LOG) if not os.path.exists(c_args.output_directory): os.makedirs(c_args.output_directory) # Set log options. datetime_format = datetime.datetime.now().strftime('%Y-%m-%d.%H-%M-%S') LOG_FILENAME = '%s/%s-%s.log' % (PATH_LOG, __file__, datetime_format) # Make a global logging object. 
logger = logging.getLogger() if c_args.verbose: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) logging.getLogger('qualysapi').setLevel(logging.ERROR) logging.getLogger('requests').setLevel(logging.ERROR) # This handler writes everything to a file. logger_file = logging.FileHandler(LOG_FILENAME) logger_file.setFormatter(logging.Formatter("%(asctime)s %(name)-12s %(levelname)s %(funcName)s %(lineno)d %(message)s")) # This handler prints to screen. logger_console = logging.StreamHandler(sys.stdout) if c_args.verbose: logger_file.setLevel(logging.DEBUG) logger_console.setLevel(logging.DEBUG) else: logger_file.setLevel(logging.INFO) logger_console.setLevel(logging.ERROR) logger.addHandler(logger_file) logger.addHandler(logger_console) # Configure Qualys API connector. if c_args.config: qgc = qualysapi.connect(c_args.config) else: qgc = qualysapi.connect() # Read config file, if available. Config = ConfigParser.ConfigParser() Config.read('config.ini') try: host_id_start = Config.getint('Host ID', 'start') logger.debug('Read host_id_start from config file: %s' % str(host_id_start)) except ConfigParser.NoSectionError, e: # Discover start host_id, minimum is 1. host_id_start = 1 # Confirm start id. May be pushed back due to purging. host_id_start = int(find_start_host_id(host_id_start)) logger.debug('New host_id_start: %s' % host_id_start) # Keep track of number of hosts. num_hosts = 0 # Set up multi-threading. # Number of threads. threads = int(c_args.threads) # Set up some global variables hosts_queue = Queue() # Set up some threads to fetch the enclosures for i in range(threads): worker = Thread(target=download_hosts, args=(i, hosts_queue,)) worker.setDaemon(True) worker.start() # Find hosts and queue work. 
host_id_end = add_work_and_find_end_host_id(host_id_start, c_args.host_id_discovery_truncation_limit) logger.debug('host_id_end: %s' % str(host_id_end)) elapsed_time_host_ids = time.time() - start_time # Save configuration save_config() # Now wait for the queue to be empty, indicating that we have # processed all of the downloads. logger.info('*** All hosts queued. Waiting for downloads to complete.') hosts_queue.join() logger.info('*** Done') elapsed_time = time.time() - start_time elapsed_time_hosts_detection = time.time() - start_time_hosts_detection logger.info('Number of threads: %s' % str(c_args.threads)) logger.info('Number of hosts downloaded per call: %s' % str(c_args.hosts_to_download_per_call)) logger.info('Number of hosts downloaded: %s' % num_hosts) logger.info('Seconds elapsed to download all hosts ids: %s' % elapsed_time_host_ids) logger.info('Seconds elapsed to download all hosts detection data: %s' % elapsed_time_hosts_detection) logger.info('Seconds elapsed total: %s' % elapsed_time)
ant9000/RIOT
refs/heads/master
tests/pbkdf2/tests/01-rfc.py
8
#!/usr/bin/env python3 # Copyright (C) 2019 Freie Universität Berlin # # This file is subject to the terms and conditions of the GNU Lesser # General Public License v2.1. See the file LICENSE in the top level # directory for more details. # # Author: Juan Carrano <j.carrano@fu-berlin.de> """Vector from RFC 7914 section 11""" import os import hashlib import test_base KEY_SIZE = hashlib.sha256().digest_size v_easy = """55 ac 04 6e 56 e3 08 9f ec 16 91 c2 25 44 b6 05 f9 41 85 21 6d de 04 65 e6 8b 9d 57 c2 0d ac bc 49 ca 9c cc f1 79 b6 45 99 16 64 b3 9d 77 ef 31 7c 71 b8 45 b1 e3 0b d5 09 11 20 41 d3 a1 97 83""" v_hard = """ 4d dc d8 f6 0b 98 be 21 83 0c ee 5e f2 27 01 f9 64 1a 44 18 d0 4c 04 14 ae ff 08 87 6b 34 ab 56 a1 d4 25 a1 22 58 33 54 9a db 84 1b 51 c9 b3 17 6a 27 2b de bb a1 d0 78 47 8f 62 b3 97 f3 3c 8d""" def process_octets(s): return bytes(int(x, 16) for x in s.split())[:KEY_SIZE] VECTORS = [ ('passwd', b"salt", 1, process_octets(v_easy)) ] if os.environ.get('BOARD') == 'native': VECTORS.append(("Password", b"NaCl", 80000, process_octets(v_hard))) if __name__ == "__main__": test_base.main(VECTORS)
UbuntuBudgie/budgie-extras
refs/heads/master
budgie-app-launcher/src/budgie-app-launcher/SortHelper.py
1
#!/usr/bin/python3 # This file is part of App Launcher # Copyright © 2018-2021 Ubuntu Budgie Developers # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. import re class SortHelper(): def __init__(self): self.TAG = "budgie-app-launcher.SortHelper" def convert(self, text): if text.isdigit(): return int(text) else: return text.lower() def naturalSortAppsByName(self, app): key = app.getName() cList = [] if key is not None: for c in re.split('([0-9]+)', key): cList.append(self.convert(c)) return cList def sortedAppsByName(self, listToSort): return sorted(listToSort, key=self.naturalSortAppsByName) def naturalSortAppsByIndex(self, app): key = str(app.getIndex()) cList = [] if key is not None: for c in re.split('([0-9]+)', key): cList.append(self.convert(c)) return cList def sortedAppsByIndex(self, listToSort): return sorted(listToSort, key=self.naturalSortAppsByIndex)
binghongcha08/pyQMD
refs/heads/master
GWP/2D/1.0.8/phase.py
14
##!/usr/bin/python import numpy as np import pylab as plt import seaborn as sns sns.set_context('poster',font_scale=1.5) sns.set_style({'font.family':'Times New Roman'}) #with open("traj.dat") as f: # data = f.read() # # data = data.split('\n') # # x = [row.split(' ')[0] for row in data] # y = [row.split(' ')[1] for row in data] # # fig = plt.figure() # # ax1 = fig.add_subplot(111) # # ax1.set_title("Plot title...") # ax1.set_xlabel('your x label..') # ax1.set_ylabel('your y label...') # # ax1.plot(x,y, c='r', label='the data') # # leg = ax1.legend() #fig = plt.figure() plt.subplot(1,1,1) data = np.genfromtxt(fname='phase.dat') #data = np.loadtxt('traj.dat') nb = int(data.shape[-1]/2) for x in range(0,nb): plt.plot(data[:,x],data[:,x+nb]) #plt.figure(1) #plt.plot(x,y1,'-') #plt.plot(x,y2,'g-') plt.ylabel('Momentum') plt.xlabel('Position') #plt.title('traj') #plt.xlim(-16,16) #plt.ylim(-16,16) plt.savefig('phase.pdf') plt.show()
cc13ny/Allin
refs/heads/master
lintcode/239-[DUP]-Sliding-Window-Maximum/SlidingWindowMaximum_001.py
10
class Solution(object): def maxSlidingWindow(self, nums, k): """ :type nums: List[int] :type k: int :rtype: List[int] """ if nums == []: return [] dq = [(nums[0], 0)] for i in range(1, k): while dq != [] and nums[i] > dq[-1][0]: dq.pop() dq.append((nums[i], i)) res = [dq[0][0]] for j in range(k, len(nums)): if dq[0][1] < j - k + 1: dq.pop(0) while dq != [] and nums[j] > dq[-1][0]: dq.pop() dq.append((nums[j], j)) res.append(dq[0][0]) return res
curtacircuitos/pcb-tools
refs/heads/master
examples/pcb_example.py
1
#! /usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2016 Hamilton Kibbe <ham@hamiltonkib.be> # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """ This example demonstrates the use of pcb-tools with cairo to render composite images using the PCB interface """ import os from gerber import PCB from gerber.render import theme from gerber.render.cairo_backend import GerberCairoContext GERBER_FOLDER = os.path.abspath(os.path.join(os.path.dirname(__file__), 'gerbers')) # Create a new drawing context ctx = GerberCairoContext() # Create a new PCB instance pcb = PCB.from_directory(GERBER_FOLDER) # Render PCB top view ctx.render_layers(pcb.top_layers, os.path.join(os.path.dirname(__file__), 'pcb_top.png',), theme.THEMES['OSH Park'], max_width=800, max_height=600) # Render PCB bottom view ctx.render_layers(pcb.bottom_layers, os.path.join(os.path.dirname(__file__), 'pcb_bottom.png'), theme.THEMES['OSH Park'], max_width=800, max_height=600) # Render copper layers only ctx.render_layers(pcb.copper_layers + pcb.drill_layers, os.path.join(os.path.dirname(__file__), 'pcb_transparent_copper.png'), theme.THEMES['Transparent Copper'], max_width=800, max_height=600)
vvv1559/intellij-community
refs/heads/master
python/testData/resolve/TypeDunderDocWithInheritedClassAttr.py
35
class A(object): __doc__ = 17 class B(A): pass print(B.__doc__) # <ref>
elioth010/lugama
refs/heads/master
activate/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py
3132
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # EUCTW frequency table # Converted from big5 work # by Taiwan's Mandarin Promotion Council # <http:#www.edu.tw:81/mandr/> # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 # Char to FreqOrder table , EUCTW_TABLE_SIZE = 8102 EUCTWCharToFreqOrder = ( 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 63,7312,7313, 317,1614, 75, 222, 
159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 
439, 38,7339,1063,7340, 794, # 3158 3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 
369,1274,2194,2175,1837,4338, # 3878 1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 7511,2026,4386,3534,7512, 501,7513,4123, 
594,3431,2165,1821,3535,3432,3536,3192, # 4598 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 2600, 161,1178,4156,1982, 987,4423,1101,4157, 
631,3943,1157,3198,2420,1343,1241, # 5318 1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 7662,3349,3041,3451, 
511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 
52,7767,3047,1796,7768,7769, # 6406 7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 2253, 574,3822,1603, 295,1535, 
705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 1173, 288,2311, 
454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 #Everything below is of no interest for detection purpose 2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118 2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134 8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150 8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166 8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182 
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198 8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214 8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230 8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246 8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262 8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278 8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294 8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310 8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326 8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342 8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358 8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374 8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390 8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406 8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422 8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438 8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454 8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470 8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486 8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502 8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518 8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534 
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550 8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566 8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582 8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598 8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614 8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630 8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646 8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662 8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678 8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694 8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710 8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726 8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742 # flake8: noqa
asyan4ik/hwp6s-kernel
refs/heads/master
scripts/tracing/draw_functrace.py
14679
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2

This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack by drawing a textual but
hierarchical tree of calls. Only the functions' names and the call
time are provided.

Usage:
    Be sure that you have CONFIG_FUNCTION_TRACER
    # mount -t debugfs nodev /sys/kernel/debug
    # echo function > /sys/kernel/debug/tracing/current_tracer
    $ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
    Wait some time, but not too much; the script is a bit slow.
    Break the pipe (Ctrl + Z)
    $ scripts/draw_functrace.py < raw_trace_func > draw_functrace
    Then you have your drawn trace in draw_functrace
"""


import sys
import re


class CallTree:
    """This class provides a tree representation of the functions
    call stack. If a function has no parent in the kernel (interrupt,
    syscall, kernel thread...) then it is attached to a virtual parent
    called ROOT.
    """
    # Virtual root of the whole call forest; set up by main().
    ROOT = None

    def __init__(self, func, time=None, parent=None):
        self._func = func
        self._time = time
        # A node with no explicit parent hangs off the virtual ROOT.
        if parent is None:
            self._parent = CallTree.ROOT
        else:
            self._parent = parent
        self._children = []

    def calls(self, func, calltime):
        """If a function calls another one, call this method to insert it
        into the tree at the appropriate place.

        @return: A reference to the newly created child node.
        """
        child = CallTree(func, calltime, self)
        self._children.append(child)
        return child

    def getParent(self, func):
        """Retrieve the last parent of the current node that has the name
        given by func. If this function is not on a parent, then create it
        as a new child of root.

        @return: A reference to the parent.
        """
        tree = self
        # Walk up the ancestry until we find the caller's frame (or hit ROOT).
        while tree != CallTree.ROOT and tree._func != func:
            tree = tree._parent
        if tree == CallTree.ROOT:
            # Caller unknown: attach it as a fresh top-level frame.
            child = CallTree.ROOT.calls(func, None)
            return child
        return tree

    def __repr__(self):
        return self.__toString("", True)

    def __toString(self, branch, lastChild):
        # Render this node, then recurse into children, extending the
        # ASCII branch prefix. The last child drops the vertical bar so
        # the tree closes visually.
        if self._time is not None:
            s = "%s----%s (%s)\n" % (branch, self._func, self._time)
        else:
            s = "%s----%s\n" % (branch, self._func)
        i = 0
        if lastChild:
            branch = branch[:-1] + " "
        while i < len(self._children):
            if i != len(self._children) - 1:
                s += "%s" % self._children[i].__toString(branch +
                                                         " |", False)
            else:
                s += "%s" % self._children[i].__toString(branch +
                                                         " |", True)
            i += 1
        return s


class BrokenLineException(Exception):
    """If the last line is not complete because of the pipe breakage,
    we want to stop the processing and ignore this line.
    """
    pass


class CommentLineException(Exception):
    """If the line is a comment (as in the beginning of the trace file),
    just ignore it.
    """
    pass


def parseLine(line):
    """Parse one line of a function-tracer dump.

    @return: a (calltime, callee, caller) tuple of strings.
    Raises CommentLineException for '#' comment lines and
    BrokenLineException for lines that do not match the trace format.
    """
    line = line.strip()
    if line.startswith("#"):
        raise CommentLineException
    # Trace lines look like: "taskname-pid [cpu] time: callee <-caller".
    # Raw string avoids double-escaping the regex metacharacters.
    m = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", line)
    if m is None:
        raise BrokenLineException
    return (m.group(1), m.group(2), m.group(3))


def main():
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    tree = CallTree.ROOT

    for line in sys.stdin:
        try:
            calltime, callee, caller = parseLine(line)
        except BrokenLineException:
            # A truncated final line means the pipe broke: stop here.
            break
        except CommentLineException:
            continue
        tree = tree.getParent(caller)
        tree = tree.calls(callee, calltime)

    # print() (not the Python-2-only print statement, which is a
    # SyntaxError under Python 3) — works identically on both versions
    # for a single argument.
    print(CallTree.ROOT)


if __name__ == "__main__":
    main()
JFLABO/three.js
refs/heads/master
utils/exporters/blender/2.65/scripts/addons/io_mesh_threejs/__init__.py
28
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # ################################################################ # Init # ################################################################ bl_info = { "name": "three.js format", "author": "mrdoob, kikko, alteredq, remoe, pxf, n3tfr34k", "version": (1, 4, 0), "blender": (2, 65, 0), "location": "File > Import-Export", "description": "Import-Export three.js meshes", "warning": "", "wiki_url": "https://github.com/mrdoob/three.js/tree/master/utils/exporters/blender", "tracker_url": "https://github.com/mrdoob/three.js/issues", "category": "Import-Export"} # To support reload properly, try to access a package var, # if it's there, reload everything import bpy if "bpy" in locals(): import imp if "export_threejs" in locals(): imp.reload(export_threejs) if "import_threejs" in locals(): imp.reload(import_threejs) from bpy.props import * from bpy_extras.io_utils import ExportHelper, ImportHelper # ################################################################ # Custom properties # ################################################################ bpy.types.Object.THREE_castShadow = bpy.props.BoolProperty() bpy.types.Object.THREE_receiveShadow = bpy.props.BoolProperty() bpy.types.Object.THREE_doubleSided = 
bpy.props.BoolProperty() bpy.types.Object.THREE_exportGeometry = bpy.props.BoolProperty(default = True) bpy.types.Material.THREE_useVertexColors = bpy.props.BoolProperty() bpy.types.Material.THREE_depthWrite = bpy.props.BoolProperty(default = True) bpy.types.Material.THREE_depthTest = bpy.props.BoolProperty(default = True) THREE_material_types = [("Basic", "Basic", "Basic"), ("Phong", "Phong", "Phong"), ("Lambert", "Lambert", "Lambert")] bpy.types.Material.THREE_materialType = EnumProperty(name = "Material type", description = "Material type", items = THREE_material_types, default = "Lambert") THREE_blending_types = [("NoBlending", "NoBlending", "NoBlending"), ("NormalBlending", "NormalBlending", "NormalBlending"), ("AdditiveBlending", "AdditiveBlending", "AdditiveBlending"), ("SubtractiveBlending", "SubtractiveBlending", "SubtractiveBlending"), ("MultiplyBlending", "MultiplyBlending", "MultiplyBlending"), ("AdditiveAlphaBlending", "AdditiveAlphaBlending", "AdditiveAlphaBlending")] bpy.types.Material.THREE_blendingType = EnumProperty(name = "Blending type", description = "Blending type", items = THREE_blending_types, default = "NormalBlending") class OBJECT_PT_hello( bpy.types.Panel ): bl_label = "THREE" bl_space_type = "PROPERTIES" bl_region_type = "WINDOW" bl_context = "object" def draw(self, context): layout = self.layout obj = context.object row = layout.row() row.label(text="Selected object: " + obj.name ) row = layout.row() row.prop( obj, "THREE_exportGeometry", text="Export geometry" ) row = layout.row() row.prop( obj, "THREE_castShadow", text="Casts shadow" ) row = layout.row() row.prop( obj, "THREE_receiveShadow", text="Receives shadow" ) row = layout.row() row.prop( obj, "THREE_doubleSided", text="Double sided" ) class MATERIAL_PT_hello( bpy.types.Panel ): bl_label = "THREE" bl_space_type = "PROPERTIES" bl_region_type = "WINDOW" bl_context = "material" def draw(self, context): layout = self.layout mat = context.material row = layout.row() 
row.label(text="Selected material: " + mat.name ) row = layout.row() row.prop( mat, "THREE_materialType", text="Material type" ) row = layout.row() row.prop( mat, "THREE_blendingType", text="Blending type" ) row = layout.row() row.prop( mat, "THREE_useVertexColors", text="Use vertex colors" ) row = layout.row() row.prop( mat, "THREE_depthWrite", text="Enable depth writing" ) row = layout.row() row.prop( mat, "THREE_depthTest", text="Enable depth testing" ) # ################################################################ # Importer # ################################################################ class ImportTHREEJS(bpy.types.Operator, ImportHelper): '''Load a Three.js ASCII JSON model''' bl_idname = "import.threejs" bl_label = "Import Three.js" filename_ext = ".js" filter_glob = StringProperty(default="*.js", options={'HIDDEN'}) option_flip_yz = BoolProperty(name="Flip YZ", description="Flip YZ", default=True) recalculate_normals = BoolProperty(name="Recalculate normals", description="Recalculate vertex normals", default=True) option_worker = BoolProperty(name="Worker", description="Old format using workers", default=False) def execute(self, context): import io_mesh_threejs.import_threejs return io_mesh_threejs.import_threejs.load(self, context, **self.properties) def draw(self, context): layout = self.layout row = layout.row() row.prop(self.properties, "option_flip_yz") row = layout.row() row.prop(self.properties, "recalculate_normals") row = layout.row() row.prop(self.properties, "option_worker") # ################################################################ # Exporter - settings # ################################################################ SETTINGS_FILE_EXPORT = "threejs_settings_export.js" import os import json def file_exists(filename): """Return true if file exists and accessible for reading. Should be safer than just testing for existence due to links and permissions magic on Unix filesystems. 
@rtype: boolean """ try: f = open(filename, 'r') f.close() return True except IOError: return False def get_settings_fullpath(): return os.path.join(bpy.app.tempdir, SETTINGS_FILE_EXPORT) def save_settings_export(properties): settings = { "option_export_scene" : properties.option_export_scene, "option_embed_meshes" : properties.option_embed_meshes, "option_url_base_html" : properties.option_url_base_html, "option_copy_textures" : properties.option_copy_textures, "option_lights" : properties.option_lights, "option_cameras" : properties.option_cameras, "option_animation_morph" : properties.option_animation_morph, "option_animation_skeletal" : properties.option_animation_skeletal, "option_frame_step" : properties.option_frame_step, "option_all_meshes" : properties.option_all_meshes, "option_flip_yz" : properties.option_flip_yz, "option_materials" : properties.option_materials, "option_normals" : properties.option_normals, "option_colors" : properties.option_colors, "option_uv_coords" : properties.option_uv_coords, "option_faces" : properties.option_faces, "option_vertices" : properties.option_vertices, "option_skinning" : properties.option_skinning, "option_bones" : properties.option_bones, "option_vertices_truncate" : properties.option_vertices_truncate, "option_scale" : properties.option_scale, "align_model" : properties.align_model } fname = get_settings_fullpath() f = open(fname, "w") json.dump(settings, f) def restore_settings_export(properties): settings = {} fname = get_settings_fullpath() if file_exists(fname): f = open(fname, "r") settings = json.load(f) properties.option_vertices = settings.get("option_vertices", True) properties.option_vertices_truncate = settings.get("option_vertices_truncate", False) properties.option_faces = settings.get("option_faces", True) properties.option_normals = settings.get("option_normals", True) properties.option_colors = settings.get("option_colors", True) properties.option_uv_coords = settings.get("option_uv_coords", True) 
properties.option_materials = settings.get("option_materials", True) properties.option_skinning = settings.get("option_skinning", True) properties.option_bones = settings.get("option_bones", True) properties.align_model = settings.get("align_model", "None") properties.option_scale = settings.get("option_scale", 1.0) properties.option_flip_yz = settings.get("option_flip_yz", True) properties.option_export_scene = settings.get("option_export_scene", False) properties.option_embed_meshes = settings.get("option_embed_meshes", True) properties.option_url_base_html = settings.get("option_url_base_html", False) properties.option_copy_textures = settings.get("option_copy_textures", False) properties.option_lights = settings.get("option_lights", False) properties.option_cameras = settings.get("option_cameras", False) properties.option_animation_morph = settings.get("option_animation_morph", False) properties.option_animation_skeletal = settings.get("option_animation_skeletal", False) properties.option_frame_step = settings.get("option_frame_step", 1) properties.option_all_meshes = settings.get("option_all_meshes", True) # ################################################################ # Exporter # ################################################################ class ExportTHREEJS(bpy.types.Operator, ExportHelper): '''Export selected object / scene for Three.js (ASCII JSON format).''' bl_idname = "export.threejs" bl_label = "Export Three.js" filename_ext = ".js" option_vertices = BoolProperty(name = "Vertices", description = "Export vertices", default = True) option_vertices_deltas = BoolProperty(name = "Deltas", description = "Delta vertices", default = False) option_vertices_truncate = BoolProperty(name = "Truncate", description = "Truncate vertices", default = False) option_faces = BoolProperty(name = "Faces", description = "Export faces", default = True) option_faces_deltas = BoolProperty(name = "Deltas", description = "Delta faces", default = False) option_normals = 
BoolProperty(name = "Normals", description = "Export normals", default = True) option_colors = BoolProperty(name = "Colors", description = "Export vertex colors", default = True) option_uv_coords = BoolProperty(name = "UVs", description = "Export texture coordinates", default = True) option_materials = BoolProperty(name = "Materials", description = "Export materials", default = True) option_skinning = BoolProperty(name = "Skinning", description = "Export skin data", default = True) option_bones = BoolProperty(name = "Bones", description = "Export bones", default = True) align_types = [("None","None","None"), ("Center","Center","Center"), ("Bottom","Bottom","Bottom"), ("Top","Top","Top")] align_model = EnumProperty(name = "Align model", description = "Align model", items = align_types, default = "None") option_scale = FloatProperty(name = "Scale", description = "Scale vertices", min = 0.01, max = 1000.0, soft_min = 0.01, soft_max = 1000.0, default = 1.0) option_flip_yz = BoolProperty(name = "Flip YZ", description = "Flip YZ", default = True) option_export_scene = BoolProperty(name = "Scene", description = "Export scene", default = False) option_embed_meshes = BoolProperty(name = "Embed meshes", description = "Embed meshes", default = True) option_copy_textures = BoolProperty(name = "Copy textures", description = "Copy textures", default = False) option_url_base_html = BoolProperty(name = "HTML as url base", description = "Use HTML as url base ", default = False) option_lights = BoolProperty(name = "Lights", description = "Export default scene lights", default = False) option_cameras = BoolProperty(name = "Cameras", description = "Export default scene cameras", default = False) option_animation_morph = BoolProperty(name = "Morph animation", description = "Export animation (morphs)", default = False) option_animation_skeletal = BoolProperty(name = "Skeletal animation", description = "Export animation (skeletal)", default = False) option_frame_step = IntProperty(name = 
"Frame step", description = "Animation frame step", min = 1, max = 1000, soft_min = 1, soft_max = 1000, default = 1) option_all_meshes = BoolProperty(name = "All meshes", description = "All meshes (merged)", default = True) def invoke(self, context, event): restore_settings_export(self.properties) return ExportHelper.invoke(self, context, event) @classmethod def poll(cls, context): return context.active_object is not None def execute(self, context): print("Selected: " + context.active_object.name) if not self.properties.filepath: raise Exception("filename not set") save_settings_export(self.properties) filepath = self.filepath import io_mesh_threejs.export_threejs return io_mesh_threejs.export_threejs.save(self, context, **self.properties) def draw(self, context): layout = self.layout row = layout.row() row.label(text="Geometry:") row = layout.row() row.prop(self.properties, "option_vertices") # row = layout.row() # row.enabled = self.properties.option_vertices # row.prop(self.properties, "option_vertices_deltas") row.prop(self.properties, "option_vertices_truncate") layout.separator() row = layout.row() row.prop(self.properties, "option_faces") row = layout.row() row.enabled = self.properties.option_faces # row.prop(self.properties, "option_faces_deltas") layout.separator() row = layout.row() row.prop(self.properties, "option_normals") layout.separator() row = layout.row() row.prop(self.properties, "option_bones") row.prop(self.properties, "option_skinning") layout.separator() row = layout.row() row.label(text="Materials:") row = layout.row() row.prop(self.properties, "option_uv_coords") row.prop(self.properties, "option_colors") row = layout.row() row.prop(self.properties, "option_materials") layout.separator() row = layout.row() row.label(text="Settings:") row = layout.row() row.prop(self.properties, "align_model") row = layout.row() row.prop(self.properties, "option_flip_yz") row.prop(self.properties, "option_scale") layout.separator() row = layout.row() 
row.label(text="--------- Experimental ---------") layout.separator() row = layout.row() row.label(text="Scene:") row = layout.row() row.prop(self.properties, "option_export_scene") row.prop(self.properties, "option_embed_meshes") row = layout.row() row.prop(self.properties, "option_lights") row.prop(self.properties, "option_cameras") layout.separator() row = layout.row() row.label(text="Animation:") row = layout.row() row.prop(self.properties, "option_animation_morph") row = layout.row() row.prop(self.properties, "option_animation_skeletal") row = layout.row() row.prop(self.properties, "option_frame_step") layout.separator() row = layout.row() row.label(text="Settings:") row = layout.row() row.prop(self.properties, "option_all_meshes") row = layout.row() row.prop(self.properties, "option_copy_textures") row = layout.row() row.prop(self.properties, "option_url_base_html") layout.separator() # ################################################################ # Common # ################################################################ def menu_func_export(self, context): default_path = bpy.data.filepath.replace(".blend", ".js") self.layout.operator(ExportTHREEJS.bl_idname, text="Three.js (.js)").filepath = default_path def menu_func_import(self, context): self.layout.operator(ImportTHREEJS.bl_idname, text="Three.js (.js)") def register(): bpy.utils.register_module(__name__) bpy.types.INFO_MT_file_export.append(menu_func_export) bpy.types.INFO_MT_file_import.append(menu_func_import) def unregister(): bpy.utils.unregister_module(__name__) bpy.types.INFO_MT_file_export.remove(menu_func_export) bpy.types.INFO_MT_file_import.remove(menu_func_import) if __name__ == "__main__": register()
GunoH/intellij-community
refs/heads/master
python/testData/inspections/PyTypeCheckerInspection/Assignment.py
13
def f(): x1: int = <warning descr="Expected type 'int', got 'str' instead">'foo'</warning> x2: str = 'bar' x3: int = 0 x4: str = <warning descr="Expected type 'str', got 'int' instead">1</warning>
gitenberg-dev/pg-epubmaker
refs/heads/master
epubmaker/HTMLChunker.py
1
#!/usr/bin/env python # -*- mode: python; indent-tabs-mode: nil; -*- coding: iso-8859-1 -*- """ HTMLChunker.py Copyright 2009 by Marcello Perathoner Distributable under the GNU General Public License Version 3 or newer. Splits a HTML file into chunks. """ from __future__ import with_statement import urlparse import urllib import os import re import copy from lxml import etree import epubmaker.lib.GutenbergGlobals as gg from epubmaker.lib.GutenbergGlobals import NS from epubmaker.lib.Logger import debug, error from epubmaker.CommonOptions import Options options = Options() # MAX_CHUNK_SIZE = 300 * 1024 # bytes MAX_CHUNK_SIZE = 100 * 1024 # bytes SECTIONS = [ ('div.section', 0.0), ('div.chapter', 0.0), ('h1', 0.5), ('div', 0.5), ('h2', 0.7), ('h3', 0.75), ('p', 0.8) ] def xpath (node, path): """ xpath helper """ return node.xpath (path, namespaces = gg.NSMAP) def unicode_uri (uri): """ Normalize URI for idmap. """ return urllib.unquote (uri).decode ('utf-8') class HTMLChunker (object): """ Splits HTML tree into smaller chunks. Some epub viewers are limited in that they cannot display files larger than 300K. If our HTML happens to be longer, we have to split it up. Also smaller chunks do improve page flip times. """ def __init__ (self): self.chunks = [] self.idmap = {} self.chunk = None self.chunk_body = None self.chunk_size = 0 self.next_id = 0 self.tags = {} for tag, size in SECTIONS: self.tags[NS.xhtml[tag]] = int (size * MAX_CHUNK_SIZE) for tag in options.section_tags: self.tags[NS.xhtml[tag]] = 0 def _make_name (self, url): """ Generate a name for the chunk. """ u = list (urlparse.urlparse (url)) root, ext = os.path.splitext (u[2]) # FIXME: brain-dead kindlegen only finds links in files with # .html extension. so we just add .html to everything u[2] = "%s-%d%s.html" % (root, self.next_id, ext) self.next_id += 1 return urlparse.urlunparse (u) @staticmethod def make_template (tree): """ Make a copy with an empty html:body. 
This makes a template into which we can paste our chunks. """ template = copy.deepcopy (tree) for c in xpath (template, '//xhtml:body'): # descend while elem has only one child while len (c) == 1: c = c[0] # clear children but save attributes attributes = c.attrib.items () c.clear () # was tentative fix for patological one-element-html case # for child in c: # c.remove (child) for a in attributes: c.set (a[0], a[1]) # debug (etree.tostring (template)) return template def reset_chunk (self, template): """ start a new chunk """ self.chunk = copy.deepcopy (template) self.chunk_size = len (etree.tostring (self.chunk)) self.chunk_body = xpath (self.chunk, "//xhtml:body")[0] while len (self.chunk_body) == 1: self.chunk_body = self.chunk_body[0] def shipout_chunk (self, url, chunk_id = None, comment = None): """ ready chunk to be shipped """ if (self.chunk_size > MAX_CHUNK_SIZE): self.split (self.chunk, url) return url = unicode_uri (url) chunk_name = self._make_name (url) # the url of the whole page if not url in self.idmap: self.idmap[url] = chunk_name # fragments of the page for e in xpath (self.chunk, '//xhtml:*[@id]'): id_ = e.attrib['id'] old_id = "%s#%s" % (url, id_) # key is unicode string, # value is uri-escaped byte string # if ids get cloned while chunking, map to the first one only if old_id not in self.idmap: self.idmap[old_id] = "%s#%s" % ( chunk_name, urllib.quote (id_.encode ('utf-8'))) self.chunks.append ( { 'name' : chunk_name, 'id' : chunk_id, 'comment' : comment, 'chunk' : self.chunk, } ) debug ("Adding chunk %s (%d bytes) %s" % (chunk_name, self.chunk_size, chunk_id)) def split (self, tree, url): """ Split whole html or split chunk. Find some arbitrary points to do it. 
""" for body in xpath (tree, "//xhtml:body"): # we can't split a node that has only one child # descend while elem has only one child while len (body) == 1: body = body[0] debug ("body tag is %s" % body.tag) template = self.make_template (tree) self.reset_chunk (template) # FIXME: is this ok ??? # fixes patological one-element-body case self.chunk_body.text = body.text for child in body: if not isinstance (child, etree.ElementBase): # comments, processing instructions etc. continue child_size = len (etree.tostring (child)) try: tags = [child.tag + '.' + c for c in child.attrib['class'].split ()] tags.append (child.tag) except KeyError: tags = [child.tag] for tag in tags: if ((self.chunk_size + child_size > MAX_CHUNK_SIZE) or (tag in self.tags and self.chunk_size > self.tags[tag])): comment = ("Chunk: size=%d Split on %s" % (self.chunk_size, re.sub ('^{.*}', '', tag))) debug (comment) # find a suitable id chunk_id = None for c in self.chunk_body: if 'id' in c.attrib: chunk_id = c.get ('id') break debug ("chunk id is: %s" % (chunk_id or '')) self.shipout_chunk (url, chunk_id, comment) self.reset_chunk (template) break self.chunk_body.append (child) self.chunk_size = self.chunk_size + child_size # fixes patological one-element-body case self.chunk_body.tail = body.tail chunk_id = None if len (self.chunk_body): chunk_id = self.chunk_body[0].get ('id') comment = "Chunk: size=%d" % self.chunk_size self.shipout_chunk (url, chunk_id, comment) self.reset_chunk (template) def rewrite_links (self, f): """ Rewrite all href and src using f (). 
""" for chunk in self.chunks: # chunk['name'] = f (chunk['name']) for link in xpath (chunk['chunk'], '//xhtml:*[@href]'): url = link.get ('href') if not url.startswith('http://') and not url.startswith('https://'): link.set ('href', f (url)) for image in xpath (chunk['chunk'], '//xhtml:*[@src]'): image.set ('src', f (image.get ('src'))) for k, v in self.idmap.items (): self.idmap[k] = f (v) def rewrite_internal_links (self): """ Rewrite links to point into right chunks. Because we split the HTML into chunks, all internal links need to be rewritten to become links into the right chunk. Rewrite all internal links in all chunks. """ for chunk in self.chunks: for a in xpath (chunk['chunk'], "//xhtml:*[@href]"): try: uri = unicode_uri (a.get ('href')) a.set ('href', self.idmap[uri]) except KeyError: ur, dummy_frag = urlparse.urldefrag (uri) if ur in self.idmap: error ("HTMLChunker: Cannot rewrite internal link '%s'" % uri) def rewrite_internal_links_toc (self, toc): """ Rewrite links to point into right chunks. Because we split the HTML into chunks, all internal links need to be rewritten to become links into the right chunk. Rewrite all links in the passed toc. """ for entry in toc: try: entry[0] = self.idmap [unicode_uri (entry[0])] except KeyError: error ("HTMLChunker: Cannot rewrite toc entry '%s'" % entry[0]) del entry
AndrewGrossman/django
refs/heads/master
tests/template_backends/apps/good/templatetags/subpackage/tags.py
1426
from django.template import Library register = Library()
JCA-Developpement/Odoo
refs/heads/master
openerp/addons/base/tests/test_mail_examples.py
17
#!/usr/bin/env python # -*- coding: utf-8 -*- MISC_HTML_SOURCE = """ <font size="2" style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; ">test1</font> <div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; font-style: normal; "> <b>test2</b></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; "> <i>test3</i></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; "> <u>test4</u></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; font-size: 12px; "> <strike>test5</strike></div><div style="color: rgb(31, 31, 31); font-family: monospace; font-variant: normal; line-height: normal; "> <font size="5">test6</font></div><div><ul><li><font color="#1f1f1f" face="monospace" size="2">test7</font></li><li> <font color="#1f1f1f" face="monospace" size="2">test8</font></li></ul><div><ol><li><font color="#1f1f1f" face="monospace" size="2">test9</font> </li><li><font color="#1f1f1f" face="monospace" size="2">test10</font></li></ol></div></div> <blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><div><div><font color="#1f1f1f" face="monospace" size="2"> test11</font></div></div></div></blockquote><blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"> <blockquote style="margin: 0 0 0 40px; border: none; padding: 0px;"><div><font color="#1f1f1f" face="monospace" size="2"> test12</font></div><div><font color="#1f1f1f" face="monospace" size="2"><br></font></div></blockquote></blockquote> <font color="#1f1f1f" face="monospace" size="2"><a href="http://google.com">google</a></font> <a href="javascript:alert('malicious code')">test link</a> """ EDI_LIKE_HTML_SOURCE = """<div style="font-family: 'Lucica Grande', Ubuntu, Arial, Verdana, sans-serif; 
font-size: 12px; color: rgb(34, 34, 34); background-color: #FFF; "> <p>Hello ${object.partner_id.name},</p> <p>A new invoice is available for you: </p> <p style="border-left: 1px solid #8e0000; margin-left: 30px;"> &nbsp;&nbsp;<strong>REFERENCES</strong><br /> &nbsp;&nbsp;Invoice number: <strong>${object.number}</strong><br /> &nbsp;&nbsp;Invoice total: <strong>${object.amount_total} ${object.currency_id.name}</strong><br /> &nbsp;&nbsp;Invoice date: ${object.date_invoice}<br /> &nbsp;&nbsp;Order reference: ${object.origin}<br /> &nbsp;&nbsp;Your contact: <a href="mailto:${object.user_id.email or ''}?subject=Invoice%20${object.number}">${object.user_id.name}</a> </p> <br/> <p>It is also possible to directly pay with Paypal:</p> <a style="margin-left: 120px;" href="${object.paypal_url}"> <img class="oe_edi_paypal_button" src="https://www.paypal.com/en_US/i/btn/btn_paynowCC_LG.gif"/> </a> <br/> <p>If you have any question, do not hesitate to contact us.</p> <p>Thank you for choosing ${object.company_id.name or 'us'}!</p> <br/> <br/> <div style="width: 375px; margin: 0px; padding: 0px; background-color: #8E0000; border-top-left-radius: 5px 5px; border-top-right-radius: 5px 5px; background-repeat: repeat no-repeat;"> <h3 style="margin: 0px; padding: 2px 14px; font-size: 12px; color: #DDD;"> <strong style="text-transform:uppercase;">${object.company_id.name}</strong></h3> </div> <div style="width: 347px; margin: 0px; padding: 5px 14px; line-height: 16px; background-color: #F2F2F2;"> <span style="color: #222; margin-bottom: 5px; display: block; "> ${object.company_id.street}<br/> ${object.company_id.street2}<br/> ${object.company_id.zip} ${object.company_id.city}<br/> ${object.company_id.state_id and ('%s, ' % object.company_id.state_id.name) or ''} ${object.company_id.country_id.name or ''}<br/> </span> <div style="margin-top: 0px; margin-right: 0px; margin-bottom: 0px; margin-left: 0px; padding-top: 0px; padding-right: 0px; padding-bottom: 0px; padding-left: 0px; "> 
Phone:&nbsp; ${object.company_id.phone} </div> <div> Web :&nbsp;<a href="${object.company_id.website}">${object.company_id.website}</a> </div> </div> </div></body></html>""" OERP_WEBSITE_HTML_1 = """ <div> <div class="container"> <div class="row"> <div class="col-md-12 text-center mt16 mb16"> <h2>OpenERP HR Features</h2> <h3 class="text-muted">Manage your company most important asset: People</h3> </div> <div class="col-md-4"> <img class="img-rounded img-responsive" src="/website/static/src/img/china_thumb.jpg"> <h4 class="mt16">Streamline Recruitments</h4> <p>Post job offers and keep track of each application received. Follow applicants in your recruitment process with the smart kanban view.</p> <p>Save time by automating some communications with email templates. Resumes are indexed automatically, allowing you to easily find for specific profiles.</p> </div> <div class="col-md-4"> <img class="img-rounded img-responsive" src="/website/static/src/img/desert_thumb.jpg"> <h4 class="mt16">Enterprise Social Network</h4> <p>Break down information silos. Share knowledge and best practices amongst all employees. Follow specific people or documents and join groups of interests to share expertise and documents.</p> <p>Interact with your collegues in real time with live chat.</p> </div> <div class="col-md-4"> <img class="img-rounded img-responsive" src="/website/static/src/img/deers_thumb.jpg"> <h4 class="mt16">Leaves Management</h4> <p>Keep track of the vacation days accrued by each employee. Employees enter their requests (paid holidays, sick leave, etc), for managers to approve and validate. It's all done in just a few clicks. 
The agenda of each employee is updated accordingly.</p> </div> </div> </div> </div>""" OERP_WEBSITE_HTML_1_IN = [ 'Manage your company most important asset: People', 'img class="img-rounded img-responsive" src="/website/static/src/img/china_thumb.jpg"', ] OERP_WEBSITE_HTML_1_OUT = [ 'Break down information silos.', 'Keep track of the vacation days accrued by each employee', 'img class="img-rounded img-responsive" src="/website/static/src/img/deers_thumb.jpg', ] OERP_WEBSITE_HTML_2 = """ <div class="mt16 cke_widget_editable cke_widget_element oe_editable oe_dirty" data-oe-model="blog.post" data-oe-id="6" data-oe-field="content" data-oe-type="html" data-oe-translate="0" data-oe-expression="blog_post.content" data-cke-widget-data="{}" data-cke-widget-keep-attr="0" data-widget="oeref" contenteditable="true" data-cke-widget-editable="text"> <section class="mt16 mb16"> <div class="container"> <div class="row"> <div class="col-md-12 text-center mt16 mb32"> <h2> OpenERP Project Management </h2> <h3 class="text-muted">Infinitely flexible. Incredibly easy to use.</h3> </div> <div class="col-md-12 mb16 mt16"> <p> OpenERP's <b>collaborative and realtime</b> project management helps your team get work done. Keep track of everything, from the big picture to the minute details, from the customer contract to the billing. </p><p> Organize projects around <b>your own processes</b>. Work on tasks and issues using the kanban view, schedule tasks using the gantt chart and control deadlines in the calendar view. Every project may have it's own stages allowing teams to optimize their job. 
</p> </div> </div> </div> </section> <section class=""> <div class="container"> <div class="row"> <div class="col-md-6 mt16 mb16"> <img class="img-responsive shadow" src="/website/static/src/img/image_text.jpg"> </div> <div class="col-md-6 mt32"> <h3>Manage Your Shops</h3> <p> OpenERP's Point of Sale introduces a super clean interface with no installation required that runs online and offline on modern hardwares. </p><p> It's full integration with the company inventory and accounting, gives you real time statistics and consolidations amongst all shops without the hassle of integrating several applications. </p> </div> </div> </div> </section> <section class=""> <div class="container"> <div class="row"> <div class="col-md-6 mt32"> <h3>Enterprise Social Network</h3> <p> Make every employee feel more connected and engaged with twitter-like features for your own company. Follow people, share best practices, 'like' top ideas, etc. </p><p> Connect with experts, follow what interests you, share documents and promote best practices with OpenERP Social application. Get work done with effective collaboration across departments, geographies and business applications. 
</p> </div> <div class="col-md-6 mt16 mb16"> <img class="img-responsive shadow" src="/website/static/src/img/text_image.png"> </div> </div> </div> </section><section class=""> <div class="container"> <div class="row"> <div class="col-md-12 text-center mt16 mb32"> <h2>Our Porfolio</h2> <h4 class="text-muted">More than 500 successful projects</h4> </div> <div class="col-md-4"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/deers.jpg"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg"> </div> <div class="col-md-4"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/deers.jpg"> </div> <div class="col-md-4"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/landscape.jpg"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/china.jpg"> <img class="img-thumbnail img-responsive" src="/website/static/src/img/desert.jpg"> </div> </div> </div> </section> </div> """ OERP_WEBSITE_HTML_2_IN = [ 'management helps your team get work done', ] OERP_WEBSITE_HTML_2_OUT = [ 'Make every employee feel more connected', 'img class="img-responsive shadow" src="/website/static/src/img/text_image.png', ] TEXT_1 = """I contact you about our meeting tomorrow. Here is the schedule I propose: 9 AM: brainstorming about our new amazing business app 9.45 AM: summary 10 AM: meeting with Ignasse to present our app Is everything ok for you ? -- MySignature""" TEXT_1_IN = ["""I contact you about our meeting tomorrow. 
Here is the schedule I propose: 9 AM: brainstorming about our new amazing business app 9.45 AM: summary 10 AM: meeting with Ignasse to present our app Is everything ok for you ?"""] TEXT_1_OUT = ["""-- MySignature"""] TEXT_2 = """Salut Raoul! Le 28 oct. 2012 à 00:02, Raoul Grosbedon a écrit : > I contact you about our meeting tomorrow. Here is the schedule I propose: (quote) Of course. This seems viable. > 2012/10/27 Bert Tartopoils : >> blahblahblah (quote)? >> >> blahblahblah (quote) >> >> Bert TARTOPOILS >> bert.tartopoils@miam.miam >> > > > -- > RaoulSignature Bert TARTOPOILS bert.tartopoils@miam.miam """ TEXT_2_IN = ["Salut Raoul!", "Of course. This seems viable."] TEXT_2_OUT = ["I contact you about our meeting tomorrow. Here is the schedule I propose: (quote)", """> 2012/10/27 Bert Tartopoils : >> blahblahblah (quote)? >> >> blahblahblah (quote) >> >> Bert TARTOPOILS >> bert.tartopoils@miam.miam >> > > > -- > RaoulSignature"""] HTML_1 = """<p>I contact you about our meeting for tomorrow. Here is the schedule I propose: (keep) 9 AM: brainstorming about our new amazing business app 9.45 AM: summary 10 AM: meeting with Ignasse to present our app Is everything ok for you ? -- MySignature</p>""" HTML_1_IN = ["""I contact you about our meeting for tomorrow. Here is the schedule I propose: (keep) 9 AM: brainstorming about our new amazing business app 9.45 AM: summary 10 AM: meeting with Ignasse to present our app Is everything ok for you ?"""] HTML_1_OUT = ["""-- MySignature"""] HTML_2 = """<div> <font><span>I contact you about our meeting for tomorrow. Here is the schedule I propose:</span></font> </div> <div> <ul> <li><span>9 AM: brainstorming about our new amazing business app</span></li> <li><span>9.45 AM: summary</span></li> <li><span>10 AM: meeting with Fabien to present our app</span></li> </ul> </div> <div> <font><span>Is everything ok for you ?</span></font> </div>""" HTML_2_IN = ["<font><span>I contact you about our meeting for tomorrow. 
Here is the schedule I propose:</span></font>", "<li><span>9 AM: brainstorming about our new amazing business app</span></li>", "<li><span>9.45 AM: summary</span></li>", "<li><span>10 AM: meeting with Fabien to present our app</span></li>", "<font><span>Is everything ok for you ?</span></font>"] HTML_2_OUT = [] HTML_3 = """<div><pre>This is an answer. Regards, XXXXXX ----- Mail original -----</pre> <pre>Hi, My CRM-related question. Regards, XXXX</pre></div>""" HTML_3_IN = ["""<div><pre>This is an answer. Regards, XXXXXX ----- Mail original -----</pre>"""] HTML_3_OUT = ["Hi,", "My CRM-related question.", "Regards,"] HTML_4 = """ <div> <div>Hi Nicholas,</div> <br> <div>I'm free now. 00447710085916.</div> <br> <div>Regards,</div> <div>Nicholas</div> <br> <span id="OLK_SRC_BODY_SECTION"> <div style="font-family:Calibri; font-size:11pt; text-align:left; color:black; BORDER-BOTTOM: medium none; BORDER-LEFT: medium none; PADDING-BOTTOM: 0in; PADDING-LEFT: 0in; PADDING-RIGHT: 0in; BORDER-TOP: #b5c4df 1pt solid; BORDER-RIGHT: medium none; PADDING-TOP: 3pt"> <span style="font-weight:bold">From: </span>OpenERP Enterprise &lt;<a href="mailto:sales@openerp.com">sales@openerp.com</a>&gt;<br><span style="font-weight:bold">Reply-To: </span>&lt;<a href="mailto:sales@openerp.com">sales@openerp.com</a>&gt;<br><span style="font-weight:bold">Date: </span>Wed, 17 Apr 2013 13:30:47 +0000<br><span style="font-weight:bold">To: </span>Microsoft Office User &lt;<a href="mailto:n.saxlund@babydino.com">n.saxlund@babydino.com</a>&gt;<br><span style="font-weight:bold">Subject: </span>Re: your OpenERP.com registration<br> </div> <br> <div> <p>Hello Nicholas Saxlund, </p> <p>I noticed you recently registered to our OpenERP Online solution. </p> <p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? 
</p> <p>Best regards, </p> <pre><a href="http://openerp.com">http://openerp.com</a> Belgium: +32.81.81.37.00 U.S.: +1 (650) 307-6736 India: +91 (79) 40 500 100 </pre> </div> </span> </div>""" HTML_5 = """<div><pre>Hi, I have downloaded OpenERP installer 7.0 and successfully installed the postgresql server and the OpenERP. I created a database and started to install module by log in as administrator. However, I was not able to install any module due to "OpenERP Server Error" as shown in the attachement. Could you please let me know how could I fix this problem? &nbsp;Regards, Goh Sin Yih ________________________________ From: OpenERP Enterprise &lt;sales@openerp.com&gt; To: sinyih_goh@yahoo.com Sent: Friday, February 8, 2013 12:46 AM Subject: Feedback From Your OpenERP Trial Hello Goh Sin Yih, Thank you for having tested OpenERP Online. I noticed you started a trial of OpenERP Online (gsy) but you did not decide to keep using it. So, I just wanted to get in touch with you to get your feedback. Can you tell me what kind of application you were you looking for and why you didn't decide to continue with OpenERP? Thanks in advance for providing your feedback, Do not hesitate to contact me if you have any questions, Thanks, </pre>""" GMAIL_1 = """Hello,<div><br></div><div>Ok for me. I am replying directly in gmail, without signature.</div><div><br></div><div>Kind regards,</div><div><br></div><div>Demo.<br><br><div>On Thu, Nov 8, 2012 at 5:29 PM, <span>&lt;<a href="mailto:dummy@example.com">dummy@example.com</a>&gt;</span> wrote:<br><blockquote><div>I contact you about our meeting for tomorrow. 
Here is the schedule I propose:</div><div><ul><li>9 AM: brainstorming about our new amazing business app&lt;/span&gt;&lt;/li&gt;</li> <li>9.45 AM: summary</li><li>10 AM: meeting with Fabien to present our app</li></ul></div><div>Is everything ok for you ?</div> <div><p>--<br>Administrator</p></div> <div><p>Log in our portal at: <a href="http://localhost:8069#action=login&amp;db=mail_1&amp;login=demo">http://localhost:8069#action=login&amp;db=mail_1&amp;login=demo</a></p></div> </blockquote></div><br></div>""" GMAIL_1_IN = ['Ok for me. I am replying directly in gmail, without signature.'] GMAIL_1_OUT = ['Administrator', 'Log in our portal at:'] THUNDERBIRD_1 = """<div>On 11/08/2012 05:29 PM, <a href="mailto:dummy@example.com">dummy@example.com</a> wrote:<br></div> <blockquote> <div>I contact you about our meeting for tomorrow. Here is the schedule I propose:</div> <div> <ul><li>9 AM: brainstorming about our new amazing business app&lt;/span&gt;&lt;/li&gt;</li> <li>9.45 AM: summary</li> <li>10 AM: meeting with Fabien to present our app</li> </ul></div> <div>Is everything ok for you ?</div> <div> <p>--<br> Administrator</p> </div> <div> <p>Log in our portal at: <a href="http://localhost:8069#action=login&amp;db=mail_1&amp;token=rHdWcUART5PhEnJRaXjH">http://localhost:8069#action=login&amp;db=mail_1&amp;token=rHdWcUART5PhEnJRaXjH</a></p> </div> </blockquote> Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.<br><br> Did you receive my email about my new laptop, by the way ?<br><br> Raoul.<br><pre>-- Raoul Grosbedonn&#233;e </pre>""" THUNDERBIRD_1_IN = ['Ok for me. I am replying directly below your mail, using Thunderbird, with a signature.'] THUNDERBIRD_1_OUT = ['I contact you about our meeting for tomorrow.', 'Raoul Grosbedon'] HOTMAIL_1 = """<div> <div dir="ltr"><br>&nbsp; I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly. 
<br>&nbsp;<br>Kindest regards,<br>xxx<br> <div> <div id="SkyDrivePlaceholder"> </div> <hr id="stopSpelling"> Subject: Re: your OpenERP.com registration<br>From: xxx@xxx.xxx<br>To: xxx@xxx.xxx<br>Date: Wed, 27 Mar 2013 17:12:12 +0000 <br><br> Hello xxx, <br> I noticed you recently created an OpenERP.com account to access OpenERP Apps. <br> You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ?<br> Best regards,<br> <pre> <a href="http://openerp.com" target="_blank">http://openerp.com</a> Belgium: +32.81.81.37.00 U.S.: +1 (650) 307-6736 India: +91 (79) 40 500 100 </pre> </div> </div> </div>""" HOTMAIL_1_IN = ["I have an amazing company, i'm learning OpenERP, it is a small company yet, but plannig to grow up quickly."] HOTMAIL_1_OUT = ["Subject: Re: your OpenERP.com registration", " I noticed you recently created an OpenERP.com account to access OpenERP Apps.", "We would like to know more about your your business needs and requirements", "Belgium: +32.81.81.37.00"] MSOFFICE_1 = """ <div> <div class="WordSection1"> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module. We are a company of 25 engineers providing product design services to clients. </span> </p> <p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> I’ll install on a windows server and run a very limited trial to see how it works. If we adopt OpenERP we will probably move to Linux or look for a hosted SaaS option. 
</span> </p> <p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> <br> I am also evaluating Adempiere and maybe others. </span> </p> <p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span> </p> <p>&nbsp;</p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> I expect the trial will take 2-3 months as this is not a high priority for us. </span> </p> <p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span> </p> <p>&nbsp;</p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> Alan </span> </p> <p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span> </p> <p>&nbsp;</p> <p></p> <div> <div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in"> <p class="MsoNormal"> <b><span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;"> From: </span></b> <span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;"> OpenERP Enterprise [mailto:sales@openerp.com] <br><b>Sent:</b> Monday, 11 March, 2013 14:47<br><b>To:</b> Alan Widmer<br><b>Subject:</b> Re: your OpenERP.com registration </span> </p> <p></p> <p></p> </div> </div> <p class="MsoNormal"></p> <p>&nbsp;</p> <p>Hello Alan Widmer, </p> <p></p> <p>I noticed you recently downloaded OpenERP. </p> <p></p> <p> Uou mentioned you wish to use OpenERP in your own company. Please let me more about your business needs and requirements? When will you be available to discuss about your project? 
</p> <p></p> <p>Thanks for your interest in OpenERP, </p> <p></p> <p>Feel free to contact me if you have any questions, </p> <p></p> <p>Looking forward to hear from you soon. </p> <p></p> <pre><p>&nbsp;</p></pre> <pre>--<p></p></pre> <pre>Nicolas<p></p></pre> <pre><a href="http://openerp.com">http://openerp.com</a><p></p></pre> <pre>Belgium: +32.81.81.37.00<p></p></pre> <pre>U.S.: +1 (650) 307-6736<p></p></pre> <pre>India: +91 (79) 40 500 100<p></p></pre> <pre>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<p></p></pre> </div> </div>""" MSOFFICE_1_IN = ['Our requirements are simple. Just looking to replace some spreadsheets for tracking quotes and possibly using the timecard module.'] MSOFFICE_1_OUT = ['I noticed you recently downloaded OpenERP.', 'Uou mentioned you wish to use OpenERP in your own company.', 'Belgium: +32.81.81.37.00'] MSOFFICE_2 = """ <div> <div class="WordSection1"> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Nicolas,</span></p><p></p> <p></p> <p class="MsoNormal" style="text-indent:.5in"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">We are currently investigating the possibility of moving away from our current ERP </span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span></p><p>&nbsp;</p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Thank You</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Matt</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> 
</span></p><p>&nbsp;</p> <p></p> <div> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Raoul Petitpoil</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Poil Industries</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Information Technology</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">920 Super Street</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Sanchez, Pa 17046 USA</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Tel: xxx.xxx</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Fax: xxx.xxx</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Email: </span> <a href="mailto:raoul@petitpoil.com"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:blue">raoul@petitpoil.com</span> </a> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">www.poilindustries.com</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">www.superproducts.com</span></p><p></p> <p></p> </div> <p class="MsoNormal"> 
<span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span></p><p>&nbsp;</p> <p></p> <div> <div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0in 0in 0in"> <p class="MsoNormal"> <b> <span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;">From:</span> </b> <span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;"> OpenERP Enterprise [mailto:sales@openerp.com] <br><b>Sent:</b> Wednesday, April 17, 2013 1:31 PM<br><b>To:</b> Matt Witters<br><b>Subject:</b> Re: your OpenERP.com registration</span></p><p></p> <p></p> </div> </div> <p class="MsoNormal"></p> <p>&nbsp;</p> <p>Hello Raoul Petitpoil, </p> <p></p> <p>I noticed you recently downloaded OpenERP. </p> <p></p> <p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? 
</p> <p></p> <p>Best regards, </p> <p></p> <pre> <p>&nbsp;</p> </pre> <pre>--<p></p></pre> <pre>Nicolas<p></p></pre> <pre> <a href="http://openerp.com">http://openerp.com</a> <p></p> </pre> <pre>Belgium: +32.81.81.37.00<p></p></pre> <pre>U.S.: +1 (650) 307-6736<p></p></pre> <pre>India: +91 (79) 40 500 100<p></p></pre> <pre>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; <p></p></pre> </div> </div>""" MSOFFICE_2_IN = ['We are currently investigating the possibility'] MSOFFICE_2_OUT = ['I noticed you recently downloaded OpenERP.', 'You indicated that you wish', 'Belgium: +32.81.81.37.00'] MSOFFICE_3 = """<div> <div class="WordSection1"> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Hi Nicolas&nbsp;!</span></p><p></p> <p></p> <p class="MsoNormal"> <span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span></p><p>&nbsp;</p> <p></p> <p class="MsoNormal"> <span lang="EN-US" style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Yes I’d be glad to hear about your offers as we struggle every year with the planning/approving of LOA. </span></p><p></p> <p></p> <p class="MsoNormal"> <span lang="EN-US" style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">I saw your boss yesterday on tv and immediately wanted to test the interface. 
</span></p><p></p> <p></p> <p class="MsoNormal"> <span lang="EN-US" style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span></p><p>&nbsp;</p> <p></p> <div> <p class="MsoNormal"> <b> <span lang="NL-BE" style="font-size:10.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">Bien à vous, </span></b></p><p></p><b> </b> <p></p> <p class="MsoNormal"> <b> <span lang="NL-BE" style="font-size:10.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">Met vriendelijke groeten, </span></b></p><p></p><b> </b> <p></p> <p class="MsoNormal"> <b> <span lang="EN-GB" style="font-size:10.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">Best regards,</span></b></p><p></p><b> </b> <p></p> <p class="MsoNormal"> <b> <span lang="EN-GB" style="font-size:10.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray"> </span></b></p><p><b>&nbsp;</b></p><b> </b> <p></p> <p class="MsoNormal"> <b> <span lang="EN-GB" style="font-size:10.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">R. 
Petitpoil&nbsp;&nbsp;&nbsp; <br></span> </b> <span lang="EN-GB" style="font-size:10.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">Human Resource Manager<b><br><br>Field Resource s.a n.v.&nbsp;&nbsp;<i> <br></i></b>Hermesstraat 6A <br>1930 Zaventem</span> <span lang="EN-GB" style="font-size:8.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;;color:gray"><br></span> <b> <span lang="FR" style="font-size:10.0pt;font-family:Wingdings;color:#1F497D">(</span> </b> <b> <span lang="FR" style="font-size:9.0pt;font-family:Wingdings;color:#1F497D"> </span> </b> <b> <span lang="EN-GB" style="font-size:8.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">xxx.xxx &nbsp;</span> </b> <b> <span lang="EN-GB" style="font-size:9.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray"><br></span> </b> <b> <span lang="FR" style="font-size:10.0pt;font-family:&quot;Wingdings 2&quot;;color:#1F497D">7</span> </b> <b> <span lang="FR" style="font-size:9.0pt;font-family:&quot;Wingdings 2&quot;;color:#1F497D"> </span> </b> <b> <span lang="EN-GB" style="font-size:8.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:gray">+32 2 727.05.91<br></span> </b> <span lang="EN-GB" style="font-size:24.0pt;font-family:Webdings;color:green">P</span> <span lang="EN-GB" style="font-size:8.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;;color:green"> <b>&nbsp;&nbsp; </b></span> <b> <span lang="EN-GB" style="font-size:9.0pt;font-family:&quot;Trebuchet MS&quot;,&quot;sans-serif&quot;;color:green">Please consider the environment before printing this email.</span> </b> <span lang="EN-GB" style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:navy"> </span> <span lang="EN-GB" style="font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:navy"> </span></p><p></p> <p></p> </div> <p class="MsoNormal"> <span lang="EN-US" 
style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"> </span></p><p>&nbsp;</p> <p></p> <div> <div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm"> <p class="MsoNormal"> <b> <span lang="FR" style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;">De&nbsp;:</span> </b> <span lang="FR" style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;"> OpenERP Enterprise [mailto:sales@openerp.com] <br><b>Envoyé&nbsp;:</b> jeudi 18 avril 2013 11:31<br><b>À&nbsp;:</b> Paul Richard<br><b>Objet&nbsp;:</b> Re: your OpenERP.com registration</span></p><p></p> <p></p> </div> </div> <p class="MsoNormal"></p> <p>&nbsp;</p> <p>Hello Raoul PETITPOIL, </p> <p></p> <p>I noticed you recently registered to our OpenERP Online solution. </p> <p></p> <p>You indicated that you wish to use OpenERP in your own company. We would like to know more about your your business needs and requirements, and see how we can help you. When would you be available to discuss your project ? 
</p> <p></p> <p>Best regards, </p> <p></p> <pre> <p>&nbsp;</p> </pre> <pre>--<p></p></pre> <pre>Nicolas<p></p></pre> <pre> <a href="http://openerp.com">http://openerp.com</a> <p></p> </pre> <pre>Belgium: +32.81.81.37.00<p></p></pre> <pre>U.S.: +1 (650) 307-6736<p></p></pre> <pre>India: +91 (79) 40 500 100<p></p></pre> <pre>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; <p></p></pre> </div> </div>""" MSOFFICE_3_IN = ['I saw your boss yesterday'] MSOFFICE_3_OUT = ['I noticed you recently downloaded OpenERP.', 'You indicated that you wish', 'Belgium: +32.81.81.37.00'] # ------------------------------------------------------------ # Test cases coming from bugs # ------------------------------------------------------------ # bug: read more not apparent, strange message in read more span BUG1 = """<pre>Hi Migration Team, Paragraph 1, blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah. Paragraph 2, blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah. 
Paragraph 3, blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah blah. Thanks. Regards, -- Olivier Laurent Migration Manager OpenERP SA Chaussée de Namur, 40 B-1367 Gérompont Tel: +32.81.81.37.00 Web: http://www.openerp.com</pre>""" BUG_1_IN = [ 'Hi Migration Team', 'Paragraph 1' ] BUG_1_OUT = [ 'Olivier Laurent', 'Chaussée de Namur', '81.81.37.00', 'openerp.com', ] BUG2 = """ <div> <br> <div class="moz-forward-container"><br> <br> -------- Original Message -------- <table class="moz-email-headers-table" border="0" cellpadding="0" cellspacing="0"> <tbody> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">Subject: </th> <td>Fwd: TR: OpenERP S.A. Payment Reminder</td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">Date: </th> <td>Wed, 16 Oct 2013 14:11:13 +0200</td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">From: </th> <td>Christine Herrmann <a class="moz-txt-link-rfc2396E" href="mailto:che@openerp.com">&lt;che@openerp.com&gt;</a></td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">To: </th> <td><a class="moz-txt-link-abbreviated" href="mailto:online@openerp.com">online@openerp.com</a></td> </tr> </tbody> </table> <br> <br> <br> <div class="moz-forward-container"><br> <br> -------- Message original -------- <table class="moz-email-headers-table" border="0" cellpadding="0" cellspacing="0"> <tbody> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">Sujet: </th> <td>TR: OpenERP S.A. 
Payment Reminder</td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">Date&nbsp;: </th> <td>Wed, 16 Oct 2013 10:34:45 -0000</td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">De&nbsp;: </th> <td>Ida Siwatala <a class="moz-txt-link-rfc2396E" href="mailto:infos@inzoservices.com">&lt;infos@inzoservices.com&gt;</a></td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">Répondre à&nbsp;: </th> <td><a class="moz-txt-link-abbreviated" href="mailto:catchall@mail.odoo.com">catchall@mail.odoo.com</a></td> </tr> <tr> <th nowrap="" valign="BASELINE" align="RIGHT">Pour&nbsp;: </th> <td>Christine Herrmann (che) <a class="moz-txt-link-rfc2396E" href="mailto:che@openerp.com">&lt;che@openerp.com&gt;</a></td> </tr> </tbody> </table> <br> <br> <div> <div class="WordSection1"> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Bonjour,</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Pourriez-vous me faire un retour sur ce point.</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Cordialement</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <div> <div style="border:none;border-top:solid #B5C4DF 1.0pt;padding:3.0pt 0cm 0cm 0cm"> <p class="MsoNormal"><b><span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;">De&nbsp;:</span></b><span 
style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;"> Ida Siwatala [<a class="moz-txt-link-freetext" href="mailto:infos@inzoservices.com">mailto:infos@inzoservices.com</a>] <br> <b>Envoyé&nbsp;:</b> vendredi 4 octobre 2013 20:03<br> <b>À&nbsp;:</b> 'Followers of INZO-services-8-all-e-Maxime-Lisbonne-77176-Savigny-le-temple-France'<br> <b>Objet&nbsp;:</b> RE: OpenERP S.A. Payment Reminder</span></p> </div> </div> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Bonsoir,</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Je me permets de revenir vers vous par écrit , car j’ai fait 2 appels vers votre service en exposant mon problème, mais je n’ai pas eu de retour.</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Cela fait un mois que j’ai fait la souscription de votre produit, mais je me rends compte qu’il est pas adapté à ma situation ( fonctionnalité manquante et surtout je n’ai pas beaucoup de temps à passer à résoudre des bugs). 
</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">C’est pourquoi , j’ai demandé qu’un accord soit trouvé avec vous pour annuler le contrat (tout en vous payant le mois d’utilisation de septembre).</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Pourriez-vous me faire un retour sur ce point.</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Cordialement,</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D">Ida Siwatala</span></p> <p class="MsoNormal"><span style="font-size:11.0pt;font-family:&quot;Calibri&quot;,&quot;sans-serif&quot;;color:#1F497D"></span></p> <p>&nbsp;</p> <p class="MsoNormal"><b><span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;">De&nbsp;:</span></b><span style="font-size:10.0pt;font-family:&quot;Tahoma&quot;,&quot;sans-serif&quot;"> <a href="mailto:che@openerp.com">che@openerp.com</a> [<a href="mailto:che@openerp.com">mailto:che@openerp.com</a>] <br> <b>Envoyé&nbsp;:</b> vendredi 4 octobre 2013 17:41<br> <b>À&nbsp;:</b> <a href="mailto:infos@inzoservices.com">infos@inzoservices.com</a><br> <b>Objet&nbsp;:</b> OpenERP S.A. 
Payment Reminder</span></p> <p>&nbsp;</p> <div> <p style="background:white"><span style="font-size:9.0pt;font-family:&quot;Arial&quot;,&quot;sans-serif&quot;;color:#222222">Dear INZO services,</span></p> <p style="background:white"><span style="font-size:9.0pt;font-family:&quot;Arial&quot;,&quot;sans-serif&quot;;color:#222222">Exception made if there was a mistake of ours, it seems that the following amount stays unpaid. Please, take appropriate measures in order to carry out this payment in the next 8 days. </span></p> <p class="MsoNormal" style="background:white"><span style="font-size:9.0pt;font-family:&quot;Arial&quot;,&quot;sans-serif&quot;;color:#222222"></span></p> <p>&nbsp;</p> <table class="MsoNormalTable" style="width:100.0%;border:outset 1.5pt" width="100%" border="1" cellpadding="0"> <tbody> <tr> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal">Date de facturation</p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal">Description</p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal">Reference</p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal">Due Date</p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal">Amount (€)</p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal">Lit.</p> </td> </tr> <tr> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal"><b>2013-09-24</b></p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal"><b>2013/1121</b></p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal"><b>Enterprise - Inzo Services - Juillet 2013</b></p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal"><b>2013-09-24</b></p> </td> <td style="padding:.75pt .75pt .75pt .75pt"> <p class="MsoNormal"><b>420.0</b></p> </td> <td style="padding:.75pt .75pt .75pt .75pt"><br> </td> </tr> <tr> <td style="padding:.75pt .75pt .75pt .75pt"><br> </td> <td 
style="border:none;padding:.75pt .75pt .75pt .75pt"><br> </td> <td style="border:none;padding:.75pt .75pt .75pt .75pt"><br> </td> <td style="border:none;padding:.75pt .75pt .75pt .75pt"><br> </td> <td style="border:none;padding:.75pt .75pt .75pt .75pt"><br> </td> <td style="border:none;padding:.75pt .75pt .75pt .75pt"><br> </td> </tr> </tbody> </table> <p class="MsoNormal" style="text-align:center;background:white" align="center"><span style="font-size:9.0pt;font-family:&quot;Arial&quot;,&quot;sans-serif&quot;;color:#222222">Amount due : 420.00 € </span></p> <p style="background:white"><span style="font-size:9.0pt;font-family:&quot;Arial&quot;,&quot;sans-serif&quot;;color:#222222">Would your payment have been carried out after this mail was sent, please ignore this message. Do not hesitate to contact our accounting department. </span></p> <p class="MsoNormal" style="background:white"><span style="font-size:9.0pt;font-family:&quot;Arial&quot;,&quot;sans-serif&quot;;color:#222222"><br> Best Regards, <br> Aurore Lesage <br> OpenERP<br> Chaussée de Namur, 40 <br> B-1367 Grand Rosières <br> Tel: +32.81.81.37.00 - Fax: +32.81.73.35.01 <br> E-mail : <a href="mailto:ale@openerp.com">ale@openerp.com</a> <br> Web: <a href="http://www.openerp.com">http://www.openerp.com</a></span></p> </div> </div> </div> --<br> INZO services <small>Sent by <a style="color:inherit" href="http://www.openerp.com">OpenERP S.A.</a> using <a style="color:inherit" href="https://www.openerp.com/">OpenERP</a>.</small> <small>Access your messages and documents <a style="color:inherit" href="https://accounts.openerp.com?db=openerp#action=mail.action_mail_redirect&amp;login=che&amp;message_id=5750830">in OpenERP</a></small> <br> <pre class="moz-signature" cols="72">-- Christine Herrmann OpenERP Chaussée de Namur, 40 B-1367 Grand Rosières Tel: +32.81.81.37.00 - Fax: +32.81.73.35.01 Web: <a class="moz-txt-link-freetext" href="http://www.openerp.com">http://www.openerp.com</a> </pre> <br> </div> <br> <br> 
</div> <br> </div>""" BUG_2_IN = [ 'read more', '...', ] BUG_2_OUT = [ 'Fwd: TR: OpenERP S.A' 'fait un mois' ] # BUG 20/08/2014: READ MORE NOT APPEARING BUG3 = """<div class="oe_msg_body_long" style="/* display: none; */"><p>OpenERP has been upgraded to version 8.0.</p> <h2>What's new in this upgrade?</h2> <div class="document"> <ul> <li><p class="first">New Warehouse Management System:</p> <blockquote> <p>Schedule your picking, packing, receptions and internal moves automatically with Odoo using your own routing rules. Define push and pull rules to organize a warehouse or to manage product moves between several warehouses. Track in detail all stock moves, not only in your warehouse but wherever else it's taken as well (customers, suppliers or manufacturing locations).</p> </blockquote> </li> <li><p class="first">New Product Configurator</p> </li> <li><p class="first">Documentation generation from website forum:</p> <blockquote> <p>New module to generate a documentation from questions and responses from your forum. 
The documentation manager can define a table of content and any user, depending their karma, can link a question to an entry of this TOC.</p> </blockquote> </li> <li><p class="first">New kanban view of documents (resumes and letters in recruitement, project documents...)</p> </li> <li><p class="first">E-Commerce:</p> <blockquote> <ul class="simple"> <li>Manage TIN in contact form for B2B.</li> <li>Dedicated salesteam to easily manage leads and orders.</li> </ul> </blockquote> </li> <li><p class="first">Better Instant Messaging.</p> </li> <li><p class="first">Faster and Improved Search view: Search drawer now appears on top of the results, and is open by default in reporting views</p> </li> <li><p class="first">Improved User Interface:</p> <blockquote> <ul class="simple"> <li>Popups has changed to be more responsive on tablets and smartphones.</li> <li>New Stat Buttons: Forms views have now dynamic buttons showing some statistics abouts linked models.</li> <li>Color code to check in one look availability of components in an MRP order.</li> <li>Unified menu bar allows you to switch easily between the frontend (website) and backend</li> <li>Results panel is now scrollable independently of the menu bars, keeping the navigation, search bar and view switcher always within reach.</li> </ul> </blockquote> </li> <li><p class="first">User signature is now in HTML.</p> </li> <li><p class="first">New development API.</p> </li> <li><p class="first">Remove support for Outlook and Thunderbird plugins</p> </li> </ul> </div> <p>Enjoy the new OpenERP Online!</p><span class="oe_mail_reduce"><a href="#">read less</a></span></div>""" BUG_3_IN = [ 'read more', '...', ] BUG_3_OUT = [ 'New kanban view of documents' ]
defzzd/UserDataBase-Heroku
refs/heads/master
venv/Lib/encodings/cp932.py
817
# cp932.py: Python Unicode Codec for CP932
#
# Thin registry wrapper that exposes the C-implemented CP932 codec
# (a Shift-JIS variant) from _codecs_jp through the standard `codecs`
# machinery. Originally written by Hye-Shik Chang <perky@FreeBSD.org>.

import codecs

import _codecs_jp
import _multibytecodec as mbc

# Native codec object supplying the actual encode/decode routines.
codec = _codecs_jp.getcodec('cp932')


class Codec(codecs.Codec):
    """Stateless encoder/decoder pair delegating to the C codec."""
    encode = codec.encode
    decode = codec.decode


class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    """Incremental encoder backed by the multibyte C implementation."""
    codec = codec


class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    """Incremental decoder backed by the multibyte C implementation."""
    codec = codec


class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    """Stream reader decoding CP932 byte streams."""
    codec = codec


class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    """Stream writer encoding text to CP932 byte streams."""
    codec = codec


def getregentry():
    """Return the CodecInfo record used by the codec registry."""
    stateless = Codec()
    return codecs.CodecInfo(
        name='cp932',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
simbha/GAE-appswell
refs/heads/master
appspot/framework/vendor/tweepy/cache.py
6
# Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
#
# NOTE: this module is Python 2 code (print statement, cPickle import).
# It defines a small cache abstraction with three backends: in-memory,
# file-based, and memcached-backed.

import time
import threading
import os
import cPickle as pickle

try:
    import hashlib
except ImportError:
    # python 2.4
    import md5 as hashlib

try:
    import fcntl
except ImportError:
    # Probably on a windows system
    # TODO: use win32file
    pass


class Cache(object):
    """Cache interface"""

    def __init__(self, timeout=60):
        """Initialize the cache
        timeout: number of seconds to keep a cached entry
        """
        self.timeout = timeout

    def store(self, key, value):
        """Add new record to cache
        key: entry key
        value: data of entry
        """
        raise NotImplementedError

    def get(self, key, timeout=None):
        """Get cached entry if exists and not expired
        key: which entry to get
        timeout: override timeout with this value [optional]
        """
        raise NotImplementedError

    def count(self):
        """Get count of entries currently stored in cache"""
        raise NotImplementedError

    def cleanup(self):
        """Delete any expired entries in cache."""
        raise NotImplementedError

    def flush(self):
        """Delete all cached entries"""
        raise NotImplementedError


class MemoryCache(Cache):
    """In-memory cache"""

    def __init__(self, timeout=60):
        Cache.__init__(self, timeout)
        # Maps key -> (insert_timestamp, value).
        self._entries = {}
        # Guards every access to _entries.
        self.lock = threading.Lock()

    def __getstate__(self):
        # pickle
        # The lock is deliberately excluded: lock objects are not picklable.
        return {'entries': self._entries, 'timeout': self.timeout}

    def __setstate__(self, state):
        # unpickle
        # Recreate the lock dropped by __getstate__.
        self.lock = threading.Lock()
        self._entries = state['entries']
        self.timeout = state['timeout']

    def _is_expired(self, entry, timeout):
        # A timeout <= 0 means entries never expire.
        return timeout > 0 and (time.time() - entry[0]) >= timeout

    def store(self, key, value):
        self.lock.acquire()
        self._entries[key] = (time.time(), value)
        self.lock.release()

    def get(self, key, timeout=None):
        self.lock.acquire()
        try:
            # check to see if we have this key
            entry = self._entries.get(key)
            if not entry:
                # no hit, return nothing
                return None
            # use provided timeout in arguments if provided
            # otherwise use the one provided during init.
            if timeout is None:
                timeout = self.timeout
            # make sure entry is not expired
            if self._is_expired(entry, timeout):
                # entry expired, delete and return nothing
                del self._entries[key]
                return None
            # entry found and not expired, return it
            return entry[1]
        finally:
            self.lock.release()

    def count(self):
        return len(self._entries)

    def cleanup(self):
        self.lock.acquire()
        try:
            # Under Python 2, items() returns a list snapshot, so deleting
            # from the dict while iterating here is safe.
            for k, v in self._entries.items():
                if self._is_expired(v, self.timeout):
                    del self._entries[k]
        finally:
            self.lock.release()

    def flush(self):
        self.lock.acquire()
        self._entries.clear()
        self.lock.release()


class FileCache(Cache):
    """File-based cache"""

    # locks used to make cache thread-safe
    # Shared per cache_dir across all FileCache instances in this process.
    cache_locks = {}

    def __init__(self, cache_dir, timeout=60):
        Cache.__init__(self, timeout)
        if os.path.exists(cache_dir) is False:
            os.mkdir(cache_dir)
        self.cache_dir = cache_dir
        # Reuse a single lock per directory so multiple instances pointing
        # at the same cache_dir serialize their file access.
        if cache_dir in FileCache.cache_locks:
            self.lock = FileCache.cache_locks[cache_dir]
        else:
            self.lock = threading.Lock()
            FileCache.cache_locks[cache_dir] = self.lock

        # Pick a file-locking strategy based on the platform.
        if os.name == 'posix':
            self._lock_file = self._lock_file_posix
            self._unlock_file = self._unlock_file_posix
        elif os.name == 'nt':
            self._lock_file = self._lock_file_win32
            self._unlock_file = self._unlock_file_win32
        else:
            print 'Warning! FileCache locking not supported on this system!'
            self._lock_file = self._lock_file_dummy
            self._unlock_file = self._unlock_file_dummy

    def _get_path(self, key):
        # Cache filename is the MD5 hex digest of the key.
        md5 = hashlib.md5()
        md5.update(key)
        return os.path.join(self.cache_dir, md5.hexdigest())

    def _lock_file_dummy(self, path, exclusive=True):
        return None

    def _unlock_file_dummy(self, lock):
        return

    def _lock_file_posix(self, path, exclusive=True):
        lock_path = path + '.lock'
        if exclusive is True:
            f_lock = open(lock_path, 'w')
            fcntl.lockf(f_lock, fcntl.LOCK_EX)
        else:
            f_lock = open(lock_path, 'r')
            fcntl.lockf(f_lock, fcntl.LOCK_SH)
        # If the lock file vanished while we were acquiring (e.g. a
        # concurrent delete), give up and report no lock held.
        if os.path.exists(lock_path) is False:
            f_lock.close()
            return None
        return f_lock

    def _unlock_file_posix(self, lock):
        # Closing the file descriptor releases the fcntl lock.
        lock.close()

    def _lock_file_win32(self, path, exclusive=True):
        # TODO: implement
        return None

    def _unlock_file_win32(self, lock):
        # TODO: implement
        return

    def _delete_file(self, path):
        # Remove the cache entry and its companion .lock file, if any.
        os.remove(path)
        if os.path.exists(path + '.lock'):
            os.remove(path + '.lock')

    def store(self, key, value):
        path = self._get_path(key)
        self.lock.acquire()
        try:
            # acquire lock and open file
            f_lock = self._lock_file(path)
            datafile = open(path, 'wb')
            # write data
            pickle.dump((time.time(), value), datafile)
            # close and unlock file
            datafile.close()
            self._unlock_file(f_lock)
        finally:
            self.lock.release()

    def get(self, key, timeout=None):
        return self._get(self._get_path(key), timeout)

    def _get(self, path, timeout):
        if os.path.exists(path) is False:
            # no record
            return None
        self.lock.acquire()
        try:
            # acquire lock and open
            f_lock = self._lock_file(path, False)
            datafile = open(path, 'rb')
            # read pickled object
            created_time, value = pickle.load(datafile)
            datafile.close()
            # check if value is expired
            if timeout is None:
                timeout = self.timeout
            if timeout > 0 and (time.time() - created_time) >= timeout:
                # expired! delete from cache
                value = None
                self._delete_file(path)
            # unlock and return result
            self._unlock_file(f_lock)
            return value
        finally:
            self.lock.release()

    def count(self):
        # Count data files, skipping the .lock companions.
        c = 0
        for entry in os.listdir(self.cache_dir):
            if entry.endswith('.lock'):
                continue
            c += 1
        return c

    def cleanup(self):
        # _get with timeout=None uses self.timeout and deletes expired
        # entries as a side effect; the returned value is discarded.
        for entry in os.listdir(self.cache_dir):
            if entry.endswith('.lock'):
                continue
            self._get(os.path.join(self.cache_dir, entry), None)

    def flush(self):
        for entry in os.listdir(self.cache_dir):
            if entry.endswith('.lock'):
                continue
            self._delete_file(os.path.join(self.cache_dir, entry))


class MemCacheCache(Cache):
    """Cache interface"""

    def __init__(self, client, timeout=60):
        """Initialize the cache
        client: The memcache client
        timeout: number of seconds to keep a cached entry
        """
        self.client = client
        self.timeout = timeout

    def store(self, key, value):
        """Add new record to cache
        key: entry key
        value: data of entry
        """
        self.client.set(key, value, time=self.timeout)

    def get(self, key, timeout=None):
        """Get cached entry if exists and not expired
        key: which entry to get
        timeout: override timeout with this value [optional]. DOES NOT WORK HERE
        """
        # NOTE(review): the second `key` argument looks suspicious --
        # memcache clients conventionally take get(key) with no second
        # positional argument. Verify against the client API in use.
        return self.client.get(key, key)

    def count(self):
        """Get count of entries currently stored in cache. RETURN 0"""
        return 0

    def cleanup(self):
        """Delete any expired entries in cache. NO-OP"""
        pass

    def flush(self):
        """Delete all cached entries. NO-OP"""
        pass
ethers/btcrelay
refs/heads/f2
test/test_btcBulkStoreHeaders.py
1
from ethereum import tester from datetime import datetime, date import pytest slow = pytest.mark.slow from utilRelay import makeMerkleProof, dblSha256Flip, disablePyethLogging disablePyethLogging() class TestBtcBulkStoreHeaders(object): CONTRACT = 'test/btcBulkStoreHeaders_debug.se' BTC_ETH_CONTRACT = 'test/btc-eth_debug.se' ETHER = 10 ** 18 def setup_class(cls): cls.s = tester.state() cls.c = cls.s.abi_contract(cls.CONTRACT, endowment=2000*cls.ETHER) cls.snapshot = cls.s.snapshot() cls.seed = tester.seed def setup_method(self, method): self.s.revert(self.snapshot) tester.seed = self.seed def testBulkStore5(self): block100kPrev = 0x000000000002d01c1fccc21636b607dfd930d31d01c3a62104612a1719011250 self.c.setInitialParent(block100kPrev, 99999, 1) # 7 here, but only storing 5 headers since OOG headers = [ "0100000050120119172a610421a6c3011dd330d9df07b63616c2cc1f1cd00200000000006657a9252aacd5c0b2940996ecff952228c3067cc38d4885efb5a4ac4247e9f337221b4d4c86041b0f2b5710", "0100000006e533fd1ada86391f3f6c343204b0d278d4aaec1c0b20aa27ba0300000000006abbb3eb3d733a9fe18967fd7d4c117e4ccbbac5bec4d910d900b3ae0793e77f54241b4d4c86041b4089cc9b", "0100000090f0a9f110702f808219ebea1173056042a714bad51b916cb6800000000000005275289558f51c9966699404ae2294730c3c9f9bda53523ce50e9b95e558da2fdb261b4d4c86041b1ab1bf93", "01000000aff7e0c7dc29d227480c2aa79521419640a161023b51cdb28a3b0100000000003779fc09d638c4c6da0840c41fa625a90b72b125015fd0273f706d61f3be175faa271b4d4c86041b142dca82", "01000000e1c5ba3a6817d53738409f5e7229ffd098d481147b002941a7a002000000000077ed2af87aa4f9f450f8dbd15284720c3fd96f565a13c9de42a3c1440b7fc6a50e281b4d4c86041b08aecda2", "0100000079cda856b143d9db2c1caff01d1aecc8630d30625d10e8b4b8b0000000000000b50cc069d6a3e33e3ff84a5c41d9d3febe7c770fdcc96b2c3ff60abe184f196367291b4d4c86041b8fa45d63", "0100000045dc58743362fe8d8898a7506faa816baed7d391c9bc0b13b0da00000000000021728a2f4f975cc801cb3c672747f1ead8a946b2702b7bd52f7b86dd1aa0c975c02a1b4d4c86041b7b47546d" ] count = 5 strings = "" for i in 
range(count): strings += headers[i] headerBins = strings.decode('hex') res = self.c.bulkStoreHeader(headerBins, count, profiling=True) print('GAS: '+str(res['gas'])) assert res['output'] == count-1 + 100000 def testTx1In300K(self): hh = self.bulkStore10From300K() txIndex = 1 # block300k tx[1] 7301b595279ece985f0c415e420e425451fcf7f684fcce087ba14d10ffec1121 txStr = '01000000014dff4050dcee16672e48d755c6dd25d324492b5ea306f85a3ab23b4df26e16e9000000008c493046022100cb6dc911ef0bae0ab0e6265a45f25e081fc7ea4975517c9f848f82bc2b80a909022100e30fb6bb4fb64f414c351ed3abaed7491b8f0b1b9bcd75286036df8bfabc3ea5014104b70574006425b61867d2cbb8de7c26095fbc00ba4041b061cf75b85699cb2b449c6758741f640adffa356406632610efb267cb1efa0442c207059dd7fd652eeaffffffff020049d971020000001976a91461cf5af7bb84348df3fd695672e53c7d5b3f3db988ac30601c0c060000001976a914fd4ed114ef85d350d6d40ed3f6dc23743f8f99c488ac00000000' btcAddr = 0x61cf5af7bb84348df3fd695672e53c7d5b3f3db9 self.checkRelay(txStr, txIndex, btcAddr, hh) # this is a static test. 
for a broader test, # there's a veryslow dynamic test that calls randomTxVerify in test_txVerify.py @slow def testRelay300K(self): hh = self.bulkStore10From300K() txIndex = 3 txStr = '0100000002acf17f885a83c7a221ab64fda59bce530b95a131a16eff3470a6cccac6b2d312000000006b483045022100a71b9fe6d94918b436e7b949f6c49407f25e4e39fc7fe20cf22e787def43cb5602200b52e999c0e75eaef28bd97609465ff41d7dad99e06b219997c3df452251e903012102e7d08484e6c4c26bd2a3aabab09c65bbdcb4a6bba0ee5cf7008ef19b9540f818ffffffff71d4a7c7fe372cb80d7170b96a8a2b8c5a0b0015f7877f50e6709fc78f1766ae010000006b483045022100c82137d106505ab32febf6ba3a607fe62cd4a4ab96fef67bf4e379405c40836302202cf6d85f4a0e811728870d649ceb47b24986599f0f09c252e9b62c94df6a2bb5012102e7d08484e6c4c26bd2a3aabab09c65bbdcb4a6bba0ee5cf7008ef19b9540f818ffffffff0200743ba40b0000001976a91429a158767437cd82ccf4bd3e34ecd16c267fc36388ace093a7ca000000001976a9140b31340661bb7a4165736ca2fc6509164b1dc96488ac00000000' btcAddr = 0x29a158767437cd82ccf4bd3e34ecd16c267fc363 self.checkRelay(txStr, txIndex, btcAddr, hh) txIndex = 9 txStr = '0100000001afb74944e62f5182cfeac80d2039e263f84a41af698aa9d04dd8a23c452bba04010000006a473044022023ff1153282b9e7f6b6d2d5ead84f9a6ab8cf15c91955160c3e79c49cf06802102203f882a1d71c0eff3985e276c4bb01c4d5762632cba3ce43818c60d31b76d8a450121034809fb967e89f40e4509179af367658263e0236ed9251a3b9f85c995715f7c38ffffffff02fbe9cd32000000001976a9142677ecb4e52311a83a3239f5fbff8c5325f7ebc988ac055aebc9000000001976a9146c6c734aeca979d3e190762cabd70b26e05f105f88ac00000000' btcAddr = 0x2677ecb4e52311a83a3239f5fbff8c5325f7ebc9 self.checkRelay(txStr, txIndex, btcAddr, hh) txIndex = 216 txStr = 
'0100000008e8cd5987582c32393e41358baf37c1558de6ab061be42a497692cdea5784b1e9000000006b483045022100d44a3d698afde6df43f6a2387d6356716dde81e743676d8abe6efc0f7a196a56022036cc81b319a24605463a47a2f0605a9e1496a26e71990ef32301b118f53691ab0121033a6942b7436d179f1fa03434fc2b0f7e66841f826cd2d61a3472487c06125f3bffffffff2e17d5b8ccc0d7a4a3d009d52705be770513a47dba906eb505c8396d66df3811000000006a47304402203327261d1740dd33d0ca10a7e28ddb2862ed05dffaf81d685151e019a3e751fb0220185b9537123789b2c5200bb4f0aa098dfe6ffbc627537b6aaac5357369d4cf9c0121024276cb31dcdc70e06e6cd6e562283344e8fcf68f267dd199b3c8f140cd4d13c8ffffffff8d5929ffe66222b8cd3414a20ecfcea7d4a711100f41d762d56d825d7a786f71010000006a4730440220751c8bf1ba2d9fe5eed684d83d2083e0f083d55f338c71677e23917ee32dd06c022063e0542d41632d8e3a011fbe583a7728677e47f85cbc343441c2f111ef6c314b01210264af1414a01efb0c0381767acc16cf5f271ba49d3a3272b60520da7c95a85c2bffffffff2268b04342ecd75f79e6f14ea4c1ec11e22d7b649512f369400c17e3a860affc010000006b483045022100f5a817f1a03694d274e1c504f419c689d3e3d0823ced260dad6198dcf1b39256022042a027133f67606b349b228b29c5435dd294872d5feb122ade756e3e237a937e012103fff38371a436bbfd74b19d315d367625f38b68b2f785b53369108cf49da60f4bffffffffc23512daf04dc476a7b3b5f3ff7bbf7ae04364f31903163ad5b53602d342a4b7010000006b483045022100c74e3f77e4e7dd5f89a534612d8b09b3424094eda2ab974811285809c1e3b0b3022012f1d330b4c5b78127f2aa8bf2ea95ffe46491b5f59c17257c474ce620a72e0201210301c0b0cc55c74009051ad4f91ada8b57223cd1d56eaf87cc15363fc7c7041150ffffffffc3fba4804928ca0d22f1c74d722e817fe4011999d0dbfd5f748108011104fafd000000006b483045022100e15317e47d656da19c832af9cab8248bc1bd28fd3536227cec45b8ed28757c1b02202981cdb408577ee31d4e0c8fdc11e76bc97dd504ee9ce0299b10a605e9f58b75012103bd1b995eeba595c304d5e2ebe22ac793c1a355729e99bb0218820b6b0a284cbfffffffff8f091001be58b491b753bde74f1b3938b2674ef2f4db4c2cb509dabd392cb9bd000000006a47304402205090d866742584a66fa8663addcae8e089c9d13fb104b9e466d6ce20ed01e502022003649ec623e72e93fa7d7afafd9eeec9b8214e068011af126a5d1f4e7659b8f30
1210257bd210ebbe37034dd9823603ba3f6776f61c16d9b5969f465e4c256546ae453ffffffffe03387cbf249d6e15b8ff59975879e491d11b31e8ad5765392aaa544d3203197010000006b483045022100bf8874e4dbdbfc75b19e97f2e0ea5b46b9febb5f84aaf4fd4620031b9571f48d02201c60c69f476990848f1066e0ccc8e2f3dc2d90c329670c4b51debdd17494688d012103353989af6f20bab1c63ac8f87cf0365347b66ca490650154a2da8437d679bd7cffffffff023f9d6923000000001976a9144a0fe1a4b5bbe9dfbe878b64e136735d6cc083e588acc3a50f00000000001976a91409ca07592f3e5b404b9c490422f469216203f19688ac00000000' btcAddr = 0x4a0fe1a4b5bbe9dfbe878b64e136735d6cc083e5 self.checkRelay(txStr, txIndex, btcAddr, hh) # 8 ins, 2 outs txIndex = 122 txStr = '01000000013934e0623827d294bf8c99f3c3d886d92c11a8e1374157bdf09a3ec47c0ccbed010000006c493046022100d434b7d03802ee591130f2ee07c724da8cca5e67f880a23c5d44d1f551b8e5ab022100e9bb1c900a97cea0a72decc2d74cd059352885e49c7b91b22ebd764a79d780d0012102ee03e3e29926fa137dc78b15ae63b9dc9bf370065dbea04fb710eee500479117ffffffff02b0b8122a000000001976a914add23e3d2d757c9b201005fced7cbdd79a00cfe588ac60a72000000000001976a914954f0607399062d4d791b9b0b349e3817f0711b288ac00000000' btcAddr = 0xadd23e3d2d757c9b201005fced7cbdd79a00cfe5 self.checkRelay(txStr, txIndex, btcAddr, hh) # 2 ins, 3 outs txIndex = 50 txStr = '01000000015f43d26fc7ea7049a2fc63a5cd47e767ac1f8cd8bf388045e06dc5faab9e9756010000006b483045022100a51893da50d180cd3481625ce7193a43cf54b9c5ca6eedda75cef471c1afc19c022008255285e37be092ce9d793f8430cef6cc1ad12de50c56a870ee6b443f6068eb012103481e57ba7df07d0b29a827a2380f83bd349002fab509bc865c62f43e79baeb33ffffffff0356dffdfd000000001976a914c9dea40941945cf8a8955c4ee3be117d195df0f488ac20ebb304000000001976a914a9a955323f97ec609bc334fc65cc700913aa66e688acccef9201000000001976a9146f4664e7632d6e2fefc065e540eba4b71ebb371f88ac00000000' btcAddr = 0xc9dea40941945cf8a8955c4ee3be117d195df0f4 self.checkRelay(txStr, txIndex, btcAddr, hh) def testDifficulty(self): self.bulkStore11FromGenesis() cumulDiff = self.c.getCumulativeDifficulty() assert cumulDiff == 11 + 1 # 
+1 since setInitialParent was called with imaginary block blockDifficulty = self.c.getAverageBlockDifficulty() assert blockDifficulty == 10 def bulkStore11FromGenesis(self): numBlock = 11 self.c.setInitialParent(0, 0, 1) strings = "" with open("test/headers/firstEleven.txt") as f: for header in f: strings += header[:-1] # [:-1] to remove trailing \n headerBins = strings.decode('hex') # print('@@@ hb: ', headerBins) self.c.bulkStoreHeader(headerBins, numBlock) def bulkStore10From300K(self): startBlockNum = 300000 numBlock = 10 block300kPrev = 0x000000000000000067ecc744b5ae34eebbde14d21ca4db51652e4d67e155f07e self.c.setInitialParent(block300kPrev, startBlockNum-1, 1) strings = "" i = 1 with open("test/headers/100from300k.txt") as f: for header in f: strings += header[:-1] # [:-1] to remove trailing \n if i==numBlock: break i += 1 headerBins = strings.decode('hex') # print('@@@ hb: ', headerBins) res = self.c.bulkStoreHeader(headerBins, numBlock, profiling=True) print('GAS: '+str(res['gas'])) assert res['output'] == numBlock-1 + startBlockNum # block 300000 # data from pybitcointools, eg # >>> from bitcoin import * # >>> blocknum=300000 # >>> header = get_block_header_data(blocknum) # >>> hashes = get_txs_in_block(blocknum) header = {'nonce': 222771801, 'hash': u'000000000000000082ccf8f1557c5d40b21edabb18d2d691cfbf87118bac7254', 'timestamp': 1399703554, 'merkle_root': u'915c887a2d9ec3f566a648bedcf4ed30d0988e22268cfe43ab5b0cf8638999d3', 'version': 2, 'prevhash': u'000000000000000067ecc744b5ae34eebbde14d21ca4db51652e4d67e155f07e', 'bits': 419465580} hashes = [u'b39fa6c39b99683ac8f456721b270786c627ecb246700888315991877024b983', u'7301b595279ece985f0c415e420e425451fcf7f684fcce087ba14d10ffec1121', u'6961d06e4a921834bbf729a94d7ab423b18ddd92e5ce9661b7b871d852f1db74', u'85e72c0814597ec52d2d178b7125af0e3cfa07821912ca81bf4b1fbe4b4b70f2', u'25ca9ce6e118225fd0e95febe6d835cdb95bf9e57aa2ca99ea2f140a86ca334f', u'a52997fa37fee82c0bf16638f5ec66bb0df999034c6b21bf9b8747c1abed994f', 
u'dd9aaf33afe6f8364a190904afcc5004fd973527be5a23f68bd7b6bd40f84c59', u'83ff2b04fe5e19f2650c5fedc706a26ab314e9edc40aed106373adaa36f6bf12', u'3c412d497cb5d83fff8270062e9fe6c1fba147eed156887081dddfcc117e854c', u'5a0ce1166ff8e6800416b1aa25f1577e233f230bd21204a6505fa6ee5a9c5fc6', u'3184aa6ccaed5f3e41fc34045970cee7501b68795c235108debd1c9a5dfec1a4', u'80bf2f098684a5db1ce0b14c0adc75efec6710a040eacfb81f64917c34e69ca5', u'80f6247937daa9ffd866e616abd337177d734a35f847669c41ec358817f3a7e8', u'3f4735eb3beb164150000b90fba6055bcff7a08ecba9352b7d29f404a658d2c9', u'c33240a15d4e252ec0284e4079776843780a7ea8836bd91f8fb8217ca23eed9b', u'15796981d90b9ecbce09a9e8a7b4f447566f2f859b808f4e940fb3b6ac17d3d5', u'60e9a2b1f7120d329fd41f1b48f5f1b3a3a581212bec241fd8f1f2a37a06efeb', u'b31214324f4c4d59540346de6cb692c02fdd31674486bff4981a8a7d8db74b6e', u'8cd58bdc6b27fd9b664e2499e9ae7d8fd8ab61757a14e9face89b1d3ce72e8d4', u'430116ff8ba9331cd0aee2530841661a9f138655eb3f013cb159d4387a5c8d75', u'07985ed09f592d6a5d87f20631f31f0e844a63eb6fdca293be5b8cdd966b5bc3', u'05cea126741dcc14e8b04a71c7b3a20790ac650a34c2af3d26ad591897a745bb', u'697699797160497048258d35a491929772d3afdb4d39e2f3b1215f69f34f95e4', u'83088c0ad83575d56013dffe44a6820d5931ec7b2df0a72a7008123714541230', u'f14255e8fa5618b03a6f99dff6c0635f565278dd951d776daa392ef8a22314cf', u'81335877f5ba07432f642206de7bb30367e3da8fa48dc91139795ecb6571e39e', u'3a87b229b9db4a9562d1453106f7b61aa67e05631cf1068cf37af910101c1b7d', u'4f4c7d7e0677f5109cc91287a7635ffb4906dc91d1287f0d8661cec0e2dcdbaa', u'0c3570cc17b033ca526fb06e3d9945aab06b279c11aee3e947a6f76b9d938cad', u'e9ce733b8397a3e39ed484e2766095e2cc3fb80e62e6c01281223d4e98516125', u'7711457b2611b68b0dd03291cbad3e56015233f7861cf087686ac8b7a6bcd2a1', u'3b4eaaa4c92e5e79f2a2b5a91517290a8134b0581cb5146b5bb34abb2da6105a', u'4d4a6f9408042e48cfd78a0151c8e01aaec829a6bd0154c9aff8020fedb19910', u'200f10f9a80042bc2ea3ce10678a22c2f2f65e8c9eb77cb02616ab994d78bc64', 
u'9fa29ce548cdc746ebfca93778a13f2aec7ca337b3713a508371cc895926c4e1', u'38a7fb461f35590b1b637accc0bc7cfbd8dc97cd29494f91f6de7ab650b77662', u'692229d9ddcac96288015bbbf9ef2ff21ea1f332c258ac09d67ea90a89643e1e', u'cbad0a4ee0b505a4b004e1ec9e8d845ffaab9fe006841a5de0f57f45975f638d', u'fd0fe9667036af10d633674cdd216b8f3dfe8dea8530c29735bb8494ded454d3', u'73eee43d7fd9ab2d38323a25ee776f79dd05e37e0bac5c58c3537fdf8bebb03a', u'dbb2f7d327e746cc5271f0184446319d13fdab43f7cd9c9bdb1780fb51333195', u'bb420523868848e1b60ffe28a2f5a657e7db424e11aaacca19c992eb67805349', u'd826449f965893e8a9e16e7c5eab237250167623bc2464a146bca66fa17bc859', u'a258557069b65f7376a0b183b113d09a6488998ab3b37f3b22e653064673a4f9', u'ac4e61fab92d01541734295dd8b47647989b696282d94f7e03f3d517a2c8fe0f', u'14778e8e4f139ea8d3802c9390f95d762748acd44af0bd5f990ff96254694539', u'cd127347cb166b6dc07b54e9a1212122b965116f44f64c8a181e0f1ed13a7a94', u'e9eed99ee6ad90d43fbcf01fba43c644f0d1267b7821f252ee796a29772a9da0', u'160b3ac2c0e8554a4693f618099027c183ca8528f16bca39bdf594f012e0d259', u'8d3c7ac9c640836e344dda3b2d8969d674e203cc85e1b42b364b9cd379bbc54c', u'4d493d0803f6f66755f2527c09bde9e37cf829e036f2a408c0ad40e981011808', u'ee170c8b853905e267e046da730de4efc5924b9d45a138703962bd88a427841e', u'014150195ba681ac08be9773d01cebea757139c262381b83b68505b0865df37f', u'7f3717dfb7530b2ac2f3d5891872a80616c0c9114cb813123151ef604fcec2c8', u'bac470d551be0ddcf575f6241692a059029e554b3bd759e29cb582df2c452ef1', u'f902e9c1e85fbbb970a68c54f07793b82441341cb84edfeeddbc5008dd3fa42a', u'5c42701ec8b6fa449e294c063d63a1378a82335892d8a968867e507456d6dd46', u'eec726af5ba61dfc183f23e2561af3dbc72e17567e63de8919c383aed0b34789', u'e8c8ffe8259fb98535ba4b3028b3d15726ccec383a263bfb47735d614aa1c8c5', u'79d57b6337a02b36e7fb341da626cd3f641f4393b35ad750da1f18710ec7132a', u'32e19411618c67afc97dc6ecb188d9d3a7c179ba652191f52d019a1db85eaa39', u'ef2ba16f90b39a6982df6f70600d683529d328a8b88d5f03cd9611ad2c67d898', 
u'313ef3fcf6a27a5cc376422e10a7a3c835aa5cce098ca206d35998f1f950907f', u'bb727c8f9cc138ec17a36355473b902f8170b415e62b537c9d9514a29b18195b', u'bd89b3e925e1a98cdfc323e27cd096f331bb8b61dce58e55277418acd4e6b81c', u'eab98223941ddf74dc5f24c50c18c94fe37de0b976ae57cf12877d2adea492c7', u'59157efb4c7ffd44d76c9e26f2aab16288d408caa821ed6840312984cab397c2', u'4a4343f87d0a15583b738447d4b252d29ec6014b364f75a6b82febdc85f77d36', u'bd2fe3c72fdff8ff934195bfbec9efde589eecc7e2486d1a27137490b934f0bb', u'5a73af81efbf315df6db9eda21e52c45462da2c2f4d9cdaa0aabaedafbe5a0c3', u'01df07dd63c90be88c2c6fdb7499f26f6af085cb9aaa71ef4a896abc7619c5e7', u'2eec110d8d89c01952cf48e71ea297c039eff7536034ebc5e6b2635d6125a636', u'93f0a25cb92a4fdde230335d8236b21309375800bfd114bad79d2b78a741b236', u'9a597af7f8cd737115368afde26fa3ddbd61f1852a30896d146b003bd96c0e28', u'6aba765dd5c019b226789bc4bcd69f27819161765d2264e231bd43afa31d5329', u'67230ecd856ea2a271d11a20e7686b8a1d8ea17b8ee39deb4999d8742619c287', u'81df42b9d145c7306aa363069bbb5dfc39f27169ba4cc0c0fe05fe957a40d5ed', u'2079d746f4ba56140cfd5ff6e8d870c7798cfa08c83984e726614ee9fe09636d', u'6eacf3862e3d96b06269971ce96e955d87e1eb036156d0f32a25dbdd83bf9ad6', u'f9a6c8c0a9d9104925b826c2f88be0e39701e2ed609a0047dff77b758427c967', u'e1822ccb952aff1d5453081116cab5dd193aa01012265153e20073b574559fc5', u'67616a53c479b20693ef82bc61b0f1d3dd6219b7b39eba3adf849666ac269d31', u'6ac0be3b5fe96e5aab1a68b073ec47ef281e4b9f1757565182a10b4d969ad979', u'c955e04d7d129735914785506fe945edcc11afdcbfff5f0c145eca5989d1ba9d', u'8bff2d6bd3371ab20b3025d017f98c7830360cdc138f8949636a375ad862ad5a', u'0c6b4796eb26bd092bc99d2f71100d15de77e9ab89d878d10940610221756e4d', u'f290713ba603832d71bd1cb055b275cbdc112bbb773079ae208adf9376ad4d8b', u'54929ba3c1b9a6a96e67a17c0ba7c63322a8852d6cba771fb8c18c6964b352d0', u'dd91af04b80457a741d0e02e4d5d9199fbca7eafc173a9db9c0c8ac39e575f4a', u'c08deba97b231b535e843a30b25ea5e82ac84b0dde44315fada98eec0e5fe0f5', 
u'187eb5e2503c24afbdd396c113262cfcf9a8b48ed64476f36705c9d2a14b6a1f', u'a891400adcff3db30dae2a5e6d57399c84a9467bfe5b1f8040ed3861f91413e9', u'0f771706adf8399f088489917251174402107d92bc0d28fdfdcfa48ad2b37429', u'9f12792e30797baf6a25f3128e4d89c8ffd46b667c2391a5cafeb5a8817a4118', u'd74ed5ef608c251f8ae1b06b6ca5e20ed365b96399c70a2973d03596b97ca1a8', u'6621d46781aae5ea3aea9016b517196bc8a39b4d2412dbe8ec37f148c841f992', u'b2f15a79fdb8b77966089eb53f3a7950ff5de9f0a79f0883600cc2b545b42491', u'cebe405a224b64774a9b394620c83fde847511d92145cbdc1459de2cadb93e50', u'1d2f946d69b5f48ea84dd4ed30f468126bc3d605d737d14151e7b144863e9ed6', u'dfe4429838aaff73694700a1ac2de0f18b4cc9c5c350525b0528769718a76ac2', u'd398d7ec7d847be58f32ccebb90c9a1489b51b8f02dd3ca69bd43e6695ea01ba', u'1272c9e1e3acf67ca2359076a6eb5d2ee81632ee4720b1abd18e206cf2ba58e8', u'0a8d85932d7acf42b175ce94785732db179946678f42b91bd8ea12834f5732d4', u'b0f72f99a1b8155f4e08aa5fd7b7bf1ef6a724c9f17e3b99ab3d732a66903514', u'a1be755bfc1d7bac848231bc54a0bfad250441cb0841ba5c37ef55a26ea0ffa7', u'293d0d15766fac48b94b2cf188b5bd5d53c640749f65efc747554bcc03aa6577', u'3af197c25dc773bd09b6b2f36bca282a14ddf00922a570606214bf3180bb8420', u'2ec52b1c0c312e28790a2b88014cd2bba7a8ec06902aeed321dac8b7c9098cb2', u'fabef88911c58225014d5758dd59e46916a9a122848d8c7fa8230197afc8849d', u'140d297a04eeb94a464db48da32efe7801dba32c6ecb5411c9057bb32e4f2955', u'6ec656b73c7d04e7bbc3fcefc7359c91b74040cc79fc21cfd247bef2d230f73a', u'bdae0e5dfa9f494413586dd8aca440b0bbea849a8cd9c2342ec1f27e320b971d', u'691ae0704e080e04411ebca008ce1b7ff4a05f8b67f6da2ee8aca0f32657c50e', u'e6cd0bf28ad5f512f5fdcb65b053b1662f562b3734bfc17ac952af892a767df8', u'df244e11f1bd63cc0c28073c1bb418daa8a1f7da67e6e2e7b140e2f05c8450c3', u'60ee29e4256ea0b275e6b230a531cf40f348277dd3e118948365d11cac4d9131', u'f8c9b22b2cc94966cf47e5be1c3dbc54a6e3619c08b6b957339159a504c97804', u'702451ad78d73bd3a3b3793049d6382a819750177648a0429da49b485d842173', 
u'45cac6176a75416c666aad603ec085e0948beae98539791a6bfec69fa5e241ac', u'0bceb62c2fe23caf701800f24e44f0278838bac015f7567f8dbfdc4d3a49d093', u'7360377a882c923bb415583777cf0b340a4fffc63350c53891564b4955976b7e', u'22d5669af43229adfcf2157de9477b013265d6fd70b1cacf7048457c65243fe4', u'c36bb17c97ebd644fc7ab9362b2d04ad63c9fae8c7f9047199c1d408df419913', u'556d7c72dd4636a5cba7fc3dedf57cb34555bf913dc09e187bdcb489c3804726', u'e48b08df0afa01a7339335fb6b6964100d11985765cbc6afcde990fd65856a9b', u'9cc064bbce74a2c56ce12b0b59fc7267a2618a35e1d8c66f642efd6d033a9681', u'12998f415cefa518a76278d82d6088d7aa512c2a1a8c3e91d826aa415c809992', u'e8b85649bdf57c0927c2f36486f1147c53ae153a2e3d6cbab1cf238d8ab65e7b', u'fafe668f8725ad2df033f3e5f86a793ac23a58283d992c1a0ff610579b56362a', u'd111d3a9bd8f499946309764effcd6bd7f1b1bb0f7f1d5562ecb88ede72bff31', u'8bec816469f65b34cc517d65b69dadc7f94fa3e19c5e34ce66351d6bd76d46b8', u'a0dbb7cf37e49483be11ce690c7c4333bd50d47df3194efcc29f141fb37e1d16', u'2855f1cf7548aad0ba6d865d9c97c95f0c7bf8dc65af38df49ad74835bc7c937', u'fa571daf7590ae943afd2b6c9072b5a47864d4e4d2055c3b4acb4ac1343c48e3', u'34d5c5fe77e6869b1b383637d058a33946dbe103edc927a0e38d26545aec8d9e', u'f87c866a8782370f004070ae1caf8b91323f1c93186a76024d439127dcd87c1a', u'2b1a908eaa8add861703d10976e64ac7946c8b83be65d66c2de4bcbac4e3cb4e', u'fea520c626ab8832451e589a4e430723a70157de7bdfbda10b61b0c618c2744f', u'1f5bbe466a99c2f75a7bc126372f56323abeaa2c2b66f972745cf153194dd71f', u'af3fcfa1440bc4d40436c69ca1d63d3bee61ffd7f41124e58a8eeac371ef9876', u'e8c5e36001052efaa2b5df0b28457002887ae73b6e49ce77f4229ad0e04a5caa', u'2865ec584fa147a52c644b744f8044502d3625e7637b55fc43f42533f3918791', u'c574cbf9ee51c461ad7b1a946760df0e996c174e331ec7c0d3ae1818d38442e2', u'f7f96a71f0e33be4665f8856b89549d46f663a1f5d5ca91efa10b0d2ee969a1a', u'ece6636a6afb756930b25915fd71940f8cb0f398348866a73cd100ddf54774e7', u'1a1ae6e74af37afbb8ed812f779cb505410f439b380d08c33832fb7291e4fa3d', 
u'86c7d896023f918da17cbc411110999e094b0c9434e457711eb54c56ca7b7786', u'f2f814c488de5382fcca21deabac38cc7aabdbdc9eff28131a426b9c09069e56', u'c567df44fa6c0a613394524943c2a01258da8e7f7fe532cc2bb3a2176f202c74', u'dd26818f759d5566ad3de82bfee739d138ba55e4d46315fbef4f5f4c000aa8b1', u'20cf5a1ff51f2c4ccb7e842b09238167e1951be18f409439ba7d0b074a9aa035', u'0842b84421b9f010ef4390fb70e7fd0e25a868bf3823822f168c49a0247ba552', u'62650f017ac2eb9c75a0544d992c527062823d579108545cb2a5479fb359b8a9', u'543153fff73eb3a86f7d33934889aded91626e2ddd8d6391cfd3183f94467057', u'c699b2e258c8f2659dd6288f303929485c03093b9b195b76ad2a72dc1942aff4', u'5f8ee140aa6e9458d8cc98fd04de577d9f1d5f7ba22d1aaea9dbf18d29f4aad5', u'195624d25e16e2f21477c0e944dadbd8562eb95d7d6f02a10eb9d1706ccd6be0', u'9f8a4be4106745a1c9615c24dcb33e676fc20b263b790dcc5938bd25c9cfa7b6', u'c61d7b9e7f27c88c90cb88cf5c14cd2733dfa126ca916c967b5425e13d6dbef2', u'fcc5e7327547595acec6d5496dbb7047bbd3d7bef15d8a99614507791a0e919c', u'12e58986ff3f692e1d9811d4b7504f9ab12a8328d01cb384561372a8977fcb4f', u'36bf83edfee9780bf71d5ae2e5564682c093ee856e0df242df68cf36f636cde3', u'7aa7d8dc67d83997271055fad15ba87439caca44cb887d8494e2e3d7a035489e', u'6bcc5699a9d768e7c32a7cbacecf4e04c0a3b47664435e85c267f35e62b34a40', u'464ce88d4978fd2aeb1d07b9859cf4bbc7d64835da34c8908a422cd69b05764d', u'4081f9bc98d0fe6c1be648ba58d3038f178d367033d5504826d89dc01b641883', u'8fddc291012eec652f89ec544c25a62d5c7b56485cb62aa84b4cdd1e5b9bc6e3', u'4ea733208b1287b4b6158f40631dca4aea9003cfca273593efe7c55fbc2ac93b', u'26327d0878a2e51007bc4f6f8eb731729a6ece8c0c749d0b0462b7d27ed9dec4', u'79e6e69805cf4148decdb67fb230242f176456070e989fbba170a202ae6e8be0', u'5f706df8103ee22d87715c5d766af8a965c9f3d3ab393663acecd1a88f9e5732', u'5ee987ebefa35b7a7ee95e5239fc04f5777e6b5e290c056174d87636d388d783', u'173f27313b60ead70171fbd0ab755c7d6e40d1336f1c427d34bf138615121a64', u'c29fd604274e072c2bd4a879dcfa516b5c8d278a2123bcdd58d52b9040353137', 
u'e4655d09893a5b5d96fc911aa73140a67f8ccdb43354522f125706e7f0a9c3f2', u'31f27a1e742f9590c4a3b2a9c6e95d44f265cb328d52cc0dcb17a133920a84b1', u'1f5dff66b173bce3eb9899a2e03525e0ce4ce26d21b311c11361df9c097bf4ab', u'94bf9d213499a3dbb6938c81f92a7584a9d74912534519f7372548477eb582e2', u'fcaa773d66d90f0509375bf9fd47fdb665b215959095b140d30ebc07435f6672', u'9b3cbace712f1fef331b91a4a557395d771c00f177f286276d8f039a4f508837', u'7cf112ba96c4999533ca3f2d27b3b7580ec90658918d10cb4f161ec35102a815', u'91fbee07653d38151b3ac628624716c7ed0822245335368b4cffdafdfddba2a4', u'8b516fc42ee3cfe729559e53df509aac06f2d2ea67b4198d75f50ba57968556f', u'fabf8e33fdcc6d874a2799d2006286f7584a9e4b6962271df7183f7f40c801fa', u'42afd1587aeaff52e62569d8ba87a26839e2c0213ace92518fd0762dd52b00e2', u'e4962898a6f3c54337f38ce9e941092748f1d16aa214fa8094b0fc7b643c6d25', u'f071a9340dcc73755a775083bb1cf50f7c4ce43afc0faeabae324c47c74b506b', u'50cf012c35ad0a5f38506fa510e81122e1db8942916c71ed3ab09ed81ec2322b', u'fa67048a1d19de38779d9a784980f1fbbc2cffac51e9cf2eb5428cf50463e173', u'7f55b1be49234705fe38c0b20b21bb8b80ebeef7caa115e59163f732c93abadf', u'141e4ea2fa3c9bf9984d03ff081d21555f8ccc7a528326cea96221ca6d476566', u'09636b32593267f1aec7cf7ac36b6a51b8ef158f5648d1d27882492b7908ca2e', u'5bf2fc8b3d5fd649104ea4a0996a263a22d9a27bb8bbf7c5620ddfa59180415d', u'bb98981ae53448e37f9f9b92308532ebbaf2cbe1cf678971b3c48f8556e3bace', u'4507c1c9eb8898e959fe563f26410f74ee898f57e799fce6d2bb3c9e7768d2e0', u'45f347c4811a168cb9a517b1a67f4c27ceeefa0d6fe92b62f4f32023282759c1', u'8b8bb1e04c132542b2e4fec124bbf1587e8206e635ada0c5bc77adc84a69d71c', u'8f05b10b8bcd0f9c646133c2a6c2b862b987d4bab1fc543e203d225f51d332f2', u'92951b259f6f50af6b0c9615b02f4cd32adaa68b44b0a1f341e51b8d066f7242', u'8c2c452e01c8e1fedcce515d8eeaeb554fb8d3a9199c8f7487b43f97dff78b24', u'b3a8743a49ebc07d51063f05cfee21e0550c88bd066ba4848d3407d3b83caa67', u'6bf1e0eaacd3a028314404fcf7070857718d9d6b1effef8ae30b1dd12daeecc0', 
u'4de44365c2d8cccc47146eddc30a3ce1c0f3cf2f02a76d5bc448ab9df8e9a50a', u'a7fa6bb651795f601bc5b30d46f2ebc54c1f4e64660ba3e2c9aa4ff67b5b2b44', u'0f22eceac6512674cefc616ec02e8602e39b7988335d354b4b9e46b202a4d1e6', u'076c4630f3233c40f2d081ca5205c0511e3ae151a843d07a241a367efc459d8a', u'381c90c62d292e883a9954f4cb30910a22a2ecf623f81c479e3bb62c076ae15c', u'f2b36993c8b6954a49fc8101a5a73be1c12e1dc54607e4155f6cc89050dcdc3f', u'17e92b3249912210d734ea870724e5aa89d14fdac19775c6f3fb27b0443b578f', u'df9d82e7d64773876cba3f238d3314156a9c047bea43f74b3d333e58e32c02e8', u'1f001521ff4d15c9cf5e0573b7626089f0fc3adc80f3a95efe27a5bf11d42cdc', u'fc15cc1b201179302bb865ad94157dab220200feca696db1719b6305ce438caf', u'af5a9d4881abbb86176fec8f19b70e2bec2ba331e55660c84a65ae56858f2140', u'b4fcb470c50bb9de902d78b068ff8fcbd43f2a703405f12c67c33b8f0d142333', u'7e59df63b51452499b8e8916e077e1a8b031ff2a3578fa7362d6b8847fb0b087', u'8e3c4705b993a7b62088586e8ad07d4d53dbca75cf3d7dcfaed856b0724a9e36', u'8ad2f603082827e24cabf1079e90216d5c0ad9071cf326a88625ac22fd654635', u'686ffa6fa20cb99f5b66741546e854363569d6b6557481d41947b5b4243e02a1', u'278d14dbae58d976299144c063727c0d8608b2df09d1d6f98d08dcd016db1d3f', u'f2486038276fe29dad82f1c163119b01c33638a2699ab7c0e97a6c4a1d7ecc14', u'83e276a9ab6d8936435e222d0dbd25bfce1b6a05e25d3cb311ca800aebe26cc3', u'ea81826abb33c8e2001daa1adf1d807d958cb007422cc77c1873cf302bd338b3', u'b6c73b487847f9f23716dd4aa37572d0ef33a061e5763a5277dafc1e09c0804a', u'edab80d85ad6cb79c42eb909d36031268cf801b79d70c3aa314e9485b8aeb071', u'8570dece43243e0e648edf63803c4c4a92ef2f84085db5219c2d932a80f6c68c', u'64d02bff2385b2406bcce5084cffffcf2b251ad19cf2da353a83a01eaf248907', u'ba7e4f59b925eddd31c349392ab92655e30ea41f66b90441ac190310f05f8eea', u'2f637d397e7a7f475b31d7cbac564ffc52ff7a2e826590c1a07b67c863e819dc', u'10d3aa0309d9f6ac4a58a75563ca49667965b6a9f454eef10b024b5f91eb030f', u'446249cf6bd83ee255cae174194a03e9c653648f219eed3a9d0edffd1892ce19', 
u'07fa53991a585d45fef3d8434a4004c56e335a936efc8fb77776481c9fdd88ea', u'9aa48344a6f4d316c0be11d3591c4a0597af167bc68208234ed479f71486a5b4', u'e0c4a881e591e1d05000443821b0c524c81236ea4248f39985635afade584166', u'75a98ce35b869772adbf643b3f8acadfa5b46b4cd8bfef26f9e079c517018285', u'e67f95cd4d5682f2b9d4e19b658baae692d669e550e6e3337c07d7395800c5a9', u'9a23b701a614b81746c0a44caa8b393844f94aaa8a13b57666a6813464e72f94', u'3b115dcc8a5d1ae060b9be8bdfc697155f6cf40f10bbfb8ab22d14306a9828cb'] return [header, hashes] # this is consistent with the assumption that the ether address is the output # following the 'btcAddr' and that the outputs are standard scripts # (OP_DUP OP_HASH160 <address> OP_EQUALVERIFY OP_CHECKSIG) def checkRelay(self, txStr, txIndex, btcAddr, hh): [header, hashes] = hh [txHash, txIndex, siblings, txBlockHash] = makeMerkleProof(header, hashes, txIndex) # verify the proof and then hand the proof to the btc-eth contract, which will check # the tx outputs and send ether as appropriate BTC_ETH = self.s.abi_contract(self.BTC_ETH_CONTRACT, endowment=2000*self.ETHER, sender=tester.k1) assert BTC_ETH.setTrustedBtcRelay(self.c.address, sender=tester.k1) == 1 assert BTC_ETH.testingonlySetBtcAddr(btcAddr, sender=tester.k1) == 1 res = self.c.relayTx(txStr, txHash, txIndex, siblings, txBlockHash, BTC_ETH.address, profiling=True) indexOfBtcAddr = txStr.find(format(btcAddr, 'x')) ethAddrBin = txStr[indexOfBtcAddr+68:indexOfBtcAddr+108].decode('hex') # assumes ether addr is after btcAddr print('@@@@ ethAddrHex: '+ethAddrBin.encode('hex')) userEthBalance = self.s.block.get_balance(ethAddrBin) print('USER ETH BALANCE: '+str(userEthBalance)) expEtherBalance = 13 assert userEthBalance == expEtherBalance assert res['output'] == 1 # ether was transferred # exchange contract is owned by tester.k1, while # relay contract is owned by tester.k0 # Thus k0 is NOT allowed to reclaim ether using the same tx assert 0 == self.c.relayTx(txStr, txHash, txIndex, siblings, txBlockHash, 
BTC_ETH.address) # skip since OOG @pytest.mark.skipif(True,reason='skip') @slow # @pytest.mark.veryslow def testBulkStore120(self): startBlockNum = 300000 numBlock = 60 block300kPrev = 0x000000000000000067ecc744b5ae34eebbde14d21ca4db51652e4d67e155f07e self.c.setInitialParent(block300kPrev, startBlockNum-1, 1) nLoop = 2 j = 0 with open("test/headers/500from300k.txt") as f: while j < nLoop: i = 1 strings = "" for header in f: strings += header[:-1] if i==numBlock: break i += 1 headerBins = strings.decode('hex') # [:-1] to remove trailing \n res = self.c.bulkStoreHeader(headerBins, numBlock) assert res == (numBlock * (j+1)) j += 1 # startTime = datetime.now().time() # endTime = datetime.now().time() # # duration = datetime.combine(date.today(), endTime) - datetime.combine(date.today(), startTime) # print("********** duration: "+str(duration)+" ********** start:"+str(startTime)+" end:"+str(endTime)) # assert res == numBlock # skip since OOG @pytest.mark.skipif(True,reason='skip') def testBulkStore60(self): startBlockNum = 300000 numBlock = 60 block300kPrev = 0x000000000000000067ecc744b5ae34eebbde14d21ca4db51652e4d67e155f07e self.c.setInitialParent(block300kPrev, startBlockNum-1, 1) strings = "" i = 1 with open("test/headers/500from300k.txt") as f: for header in f: strings += header[:-1] if i==numBlock: break i += 1 headerBins = strings.decode('hex') # [:-1] to remove trailing \n # print('@@@ hb: ', headerBins) startTime = datetime.now().time() res = self.c.bulkStoreHeader(headerBins, numBlock, profiling=True) endTime = datetime.now().time() duration = datetime.combine(date.today(), endTime) - datetime.combine(date.today(), startTime) print("********** duration: "+str(duration)+" ********** start:"+str(startTime)+" end:"+str(endTime)) print('GAS: '+str(res['gas'])) assert res['output'] == numBlock
m-messiah/dzzzzr-bot
refs/heads/master
useragents.py
1
# coding=utf-8 USERAGENTS = ( '(Windows NT 6.2; WOW64) AppleWebKit/537.22 (KHTML, like Gecko) ' 'Chrome/25.0.1364.172 YaBrowser/1.7.1364.22194 Safari/537.22', '(X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3', '(X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Ubuntu Chromium/28.0.1500.71 Chrome/28.0.1500.71 Safari/537.36', '(Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 ' 'Firefox/3.5.3 (.NET CLR 3.5.30729)', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(Windows NT 6.1; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/536.30.1 ' '(KHTML, like Gecko) Version/6.0.5 Safari/536.30.1', '(iPhone; CPU iPhone OS 7_0_2 like Mac OS X) AppleWebKit/537.51.1 ' '(KHTML, like Gecko) Version/7.0 Mobile/11A501 Safari/9537.53', '(iPad; CPU OS 7_0 like Mac OS X) AppleWebKit/537.51.1 ' '(KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53', '(iPhone; CPU iPhone OS 6_1_3 like Mac OS X) AppleWebKit/536.26 ' '(KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25', '(Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.66 Safari/537.36', '(Linux; Android 4.1.2; GT-I9100 Build/JZO54K) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.82 Mobile Safari/537.36', '(Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 ' 'Firefox/3.5.3 (.NET CLR 3.5.30729)', '(Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 ' 'Firefox/3.5.1', '(Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 ' '(KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1', '(compatible; MSIE 8.0; Windows NT 6.1; WOW64; ' 'Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)', '(compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; ' '.NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; ' '.NET CLR 
3.0.30729)', '(compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)', '(compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)', '(compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0)', '(compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; ' '.NET CLR 2.0.50727; InfoPath.2)', '(Windows; U; MSIE 7.0; Windows NT 6.0; en-US)', '(compatible; MSIE 7.0; Windows NT 5.1)', '(compatible; MSIE 6.1; Windows XP)', '(Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(Windows NT 6.1; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/536.30.1 ' '(KHTML, like Gecko) Version/6.0.5 Safari/536.30.1', '(Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.69 Safari/537.36', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.101 Safari/537.36', '(Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0', '(Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36', '(Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)', '(Windows NT 6.1; rv:24.0) Gecko/20100101 Firefox/24.0', '(Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(Windows NT 5.1; rv:24.0) Gecko/20100101 Firefox/24.0', '(iPhone; CPU iPhone OS 7_0_2 like Mac OS X) AppleWebKit/537.51.1 ' '(KHTML, like Gecko) Version/7.0 Mobile/11A501 Safari/9537.53', '(Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(X11; 
Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0', '(Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0', '(Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(Windows NT 6.2; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0', '(iPad; CPU OS 7_0_2 like Mac OS X) AppleWebKit/537.51.1 ' '(KHTML, like Gecko) Version/7.0 Mobile/11A501 Safari/9537.53', '(Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/536.30.1 ' '(KHTML, like Gecko) Version/6.0.5 Safari/536.30.1', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.66 Safari/537.36', '(compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)', '(Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.101 Safari/537.36', '(X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.66 Safari/537.36', '(X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Ubuntu Chromium/28.0.1500.71 Chrome/28.0.1500.71 Safari/537.36', '(Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_9) AppleWebKit/537.71 ' '(KHTML, like Gecko) Version/7.0 Safari/537.71', '(Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.101 Safari/537.36', '(Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.69 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.66 Safari/537.36', '(Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/536.30.1 ' '(KHTML, like Gecko) Version/6.0.5 Safari/536.30.1', '(X11; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0', '(X11; Ubuntu; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0', 
'(Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.69 Safari/537.36', '(Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36', '(compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)', '(Macintosh; Intel Mac OS X 10.7; rv:24.0) Gecko/20100101 Firefox/24.0', '(Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/534.59.10 ' '(KHTML, like Gecko) Version/5.1.9 Safari/534.59.10', '(Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.69 Safari/537.36', '(compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)', '(Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.66 Safari/537.36', '(iPhone; CPU iPhone OS 7_0 like Mac OS X) AppleWebKit/537.51.1 ' '(KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53', '(X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0)', '(Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.101 Safari/537.36', '(Macintosh; Intel Mac OS X 10.6; rv:24.0) Gecko/20100101 Firefox/24.0', '(Macintosh; Intel Mac OS X 10.8; rv:23.0) Gecko/20100101 Firefox/23.0', '(Windows NT 5.1; rv:23.0) Gecko/20100101 Firefox/23.0', '(iPad; CPU OS 7_0 like Mac OS X) AppleWebKit/537.51.1 ' '(KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53', '(Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36', '(iPhone; CPU iPhone OS 6_1_3 like Mac OS X) AppleWebKit/536.26 ' '(KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25', '(Windows NT 6.1; rv:23.0) Gecko/20100101 Firefox/23.0', '(X11; Linux 
i686) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Ubuntu Chromium/28.0.1500.71 Chrome/28.0.1500.71 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36', '(Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.101 Safari/537.36', '(Windows NT 6.1; WOW64) Presto/2.12.388 Version/12.16', '(iPad; CPU OS 6_1_3 like Mac OS X) AppleWebKit/536.26 ' '(KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25', '(Windows NT 6.2; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0', '(Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.66 Safari/537.36', '(Linux; Android 4.1.2; GT-I9100 Build/JZO54K) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.82 Mobile Safari/537.36', '(Windows NT 6.0; rv:24.0) Gecko/20100101 Firefox/24.0', '(Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.66 Safari/537.36', '(X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.65 Safari/537.36', '(X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.101 Safari/537.36', '(Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.69 Safari/537.36', '(compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0)', '(Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36', '(Windows NT 6.1; WOW64; rv:25.0) Gecko/20100101 Firefox/25.0', '(iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 ' '(KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3', '(Windows NT 6.1; WOW64; rv:17.0) Gecko/20100101 Firefox/17.0', '(X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/29.0.1547.76 Safari/537.36', '(Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.66 
Safari/537.36', '(X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) ' 'Chrome/26.0.1410.63 Safari/537.31', '(Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 ' '(KHTML, like Gecko) Chrome/30.0.1599.69 Safari/537.36', '(Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.66 Safari/537.36', '(Windows NT 6.3; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0', '(X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/30.0.1599.66 Safari/537.36', )
robocoder/zaproxy
refs/heads/wip/socks-proxy
python/api/src/zapv2/params.py
1
# Zed Attack Proxy (ZAP) and its related class files. # # ZAP is an HTTP/HTTPS proxy for assessing web application security. # # Copyright 2014 the ZAP development team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ This file was automatically generated. """ class params(object): def __init__(self, zap): self.zap = zap def params(self, site=''): """ Shows the parameters for the specified site, or for all sites if the site is not specified """ return self.zap._request(self.zap.base + 'params/view/params/', {'site' : site}).get('params')
lmazuel/azure-sdk-for-python
refs/heads/master
azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/express_route_circuit_authorization.py
1
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .sub_resource import SubResource class ExpressRouteCircuitAuthorization(SubResource): """Authorization in an ExpressRouteCircuit resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param authorization_key: The authorization key. :type authorization_key: str :param authorization_use_status: AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'. Possible values include: 'Available', 'InUse' :type authorization_use_status: str or ~azure.mgmt.network.v2018_01_01.models.AuthorizationUseStatus :param provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :type provisioning_state: str :param name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. 
:vartype etag: str """ _validation = { 'etag': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'}, 'authorization_use_status': {'key': 'properties.authorizationUseStatus', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def __init__(self, **kwargs): super(ExpressRouteCircuitAuthorization, self).__init__(**kwargs) self.authorization_key = kwargs.get('authorization_key', None) self.authorization_use_status = kwargs.get('authorization_use_status', None) self.provisioning_state = kwargs.get('provisioning_state', None) self.name = kwargs.get('name', None) self.etag = None
DanForever/TimeSync
refs/heads/master
GAE/lib/requests/packages/urllib3/packages/ordered_dict.py
1093
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. # Passes Python2.7's test suite and incorporates all the latest updates. # Copyright 2009 Raymond Hettinger, released under the MIT License. # http://code.activestate.com/recipes/576693/ try: from thread import get_ident as _get_ident except ImportError: from dummy_thread import get_ident as _get_ident try: from _abcoll import KeysView, ValuesView, ItemsView except ImportError: pass class OrderedDict(dict): 'Dictionary that remembers insertion order' # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. # Big-O running times for all methods are the same as for regular dictionaries. # The internal self.__map dictionary maps keys to links in a doubly linked list. # The circular doubly linked list starts and ends with a sentinel element. # The sentinel element never gets deleted (this simplifies the algorithm). # Each link is stored as a list of length three: [PREV, NEXT, KEY]. def __init__(self, *args, **kwds): '''Initialize an ordered dictionary. Signature is the same as for regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. ''' if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: self.__root except AttributeError: self.__root = root = [] # sentinel node root[:] = [root, root, None] self.__map = {} self.__update(*args, **kwds) def __setitem__(self, key, value, dict_setitem=dict.__setitem__): 'od.__setitem__(i, y) <==> od[i]=y' # Setting a new item creates a new link which goes at the end of the linked # list, and the inherited dictionary is updated with the new key/value pair. 
if key not in self: root = self.__root last = root[0] last[1] = root[0] = self.__map[key] = [last, root, key] dict_setitem(self, key, value) def __delitem__(self, key, dict_delitem=dict.__delitem__): 'od.__delitem__(y) <==> del od[y]' # Deleting an existing item uses self.__map to find the link which is # then removed by updating the links in the predecessor and successor nodes. dict_delitem(self, key) link_prev, link_next, key = self.__map.pop(key) link_prev[1] = link_next link_next[0] = link_prev def __iter__(self): 'od.__iter__() <==> iter(od)' root = self.__root curr = root[1] while curr is not root: yield curr[2] curr = curr[1] def __reversed__(self): 'od.__reversed__() <==> reversed(od)' root = self.__root curr = root[0] while curr is not root: yield curr[2] curr = curr[0] def clear(self): 'od.clear() -> None. Remove all items from od.' try: for node in self.__map.itervalues(): del node[:] root = self.__root root[:] = [root, root, None] self.__map.clear() except AttributeError: pass dict.clear(self) def popitem(self, last=True): '''od.popitem() -> (k, v), return and remove a (key, value) pair. Pairs are returned in LIFO order if last is true or FIFO order if false. 
''' if not self: raise KeyError('dictionary is empty') root = self.__root if last: link = root[0] link_prev = link[0] link_prev[1] = root root[0] = link_prev else: link = root[1] link_next = link[1] root[1] = link_next link_next[0] = root key = link[2] del self.__map[key] value = dict.pop(self, key) return key, value # -- the following methods do not depend on the internal structure -- def keys(self): 'od.keys() -> list of keys in od' return list(self) def values(self): 'od.values() -> list of values in od' return [self[key] for key in self] def items(self): 'od.items() -> list of (key, value) pairs in od' return [(key, self[key]) for key in self] def iterkeys(self): 'od.iterkeys() -> an iterator over the keys in od' return iter(self) def itervalues(self): 'od.itervalues -> an iterator over the values in od' for k in self: yield self[k] def iteritems(self): 'od.iteritems -> an iterator over the (key, value) items in od' for k in self: yield (k, self[k]) def update(*args, **kwds): '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
If E is a dict instance, does: for k in E: od[k] = E[k] If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] Or if E is an iterable of items, does: for k, v in E: od[k] = v In either case, this is followed by: for k, v in F.items(): od[k] = v ''' if len(args) > 2: raise TypeError('update() takes at most 2 positional ' 'arguments (%d given)' % (len(args),)) elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] # Make progressively weaker assumptions about "other" other = () if len(args) == 2: other = args[1] if isinstance(other, dict): for key in other: self[key] = other[key] elif hasattr(other, 'keys'): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value __update = update # let subclasses override update without breaking __init__ __marker = object() def pop(self, key, default=__marker): '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' if key in self: result = self[key] del self[key] return result if default is self.__marker: raise KeyError(key) return default def setdefault(self, key, default=None): 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' if key in self: return self[key] self[key] = default return default def __repr__(self, _repr_running={}): 'od.__repr__() <==> repr(od)' call_key = id(self), _get_ident() if call_key in _repr_running: return '...' 
_repr_running[call_key] = 1 try: if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key] def __reduce__(self): 'Return state information for pickling' items = [[k, self[k]] for k in self] inst_dict = vars(self).copy() for k in vars(OrderedDict()): inst_dict.pop(k, None) if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def copy(self): 'od.copy() -> a shallow copy of od' return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S and values equal to v (which defaults to None). ''' d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive while comparison to a regular mapping is order-insensitive. ''' if isinstance(other, OrderedDict): return len(self)==len(other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): return not self == other # -- the following methods are only used in Python 2.7 -- def viewkeys(self): "od.viewkeys() -> a set-like object providing a view on od's keys" return KeysView(self) def viewvalues(self): "od.viewvalues() -> an object providing a view on od's values" return ValuesView(self) def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self)
taaviteska/django
refs/heads/master
tests/i18n/contenttypes/tests.py
75
import os from django.contrib.contenttypes.models import ContentType from django.test import TestCase, override_settings from django.utils import translation @override_settings( USE_I18N=True, LOCALE_PATHS=[ os.path.join(os.path.dirname(__file__), 'locale'), ], LANGUAGE_CODE='en', LANGUAGES=[ ('en', 'English'), ('fr', 'French'), ], ) class ContentTypeTests(TestCase): def test_verbose_name(self): company_type = ContentType.objects.get(app_label='i18n', model='company') with translation.override('en'): self.assertEqual(str(company_type), 'Company') with translation.override('fr'): self.assertEqual(str(company_type), 'Société')
rajsadho/django
refs/heads/master
tests/forms_tests/widget_tests/test_radioselect.py
161
from django.forms import RadioSelect from .base import WidgetTest class RadioSelectTest(WidgetTest): widget = RadioSelect() def test_render(self): self.check_html(self.widget, 'beatle', 'J', choices=self.beatles, html=( """<ul> <li><label><input checked="checked" type="radio" name="beatle" value="J" /> John</label></li> <li><label><input type="radio" name="beatle" value="P" /> Paul</label></li> <li><label><input type="radio" name="beatle" value="G" /> George</label></li> <li><label><input type="radio" name="beatle" value="R" /> Ringo</label></li> </ul>""" )) def test_nested_choices(self): nested_choices = ( ('unknown', 'Unknown'), ('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))), ('Video', (('vhs', 'VHS'), ('dvd', 'DVD'))), ) html = """ <ul id="media"> <li> <label for="media_0"><input id="media_0" name="nestchoice" type="radio" value="unknown" /> Unknown</label> </li> <li>Audio<ul id="media_1"> <li> <label for="media_1_0"><input id="media_1_0" name="nestchoice" type="radio" value="vinyl" /> Vinyl</label> </li> <li><label for="media_1_1"><input id="media_1_1" name="nestchoice" type="radio" value="cd" /> CD</label></li> </ul></li> <li>Video<ul id="media_2"> <li><label for="media_2_0"><input id="media_2_0" name="nestchoice" type="radio" value="vhs" /> VHS</label></li> <li> <label for="media_2_1"> <input checked="checked" id="media_2_1" name="nestchoice" type="radio" value="dvd" /> DVD </label> </li> </ul></li> </ul> """ self.check_html( self.widget, 'nestchoice', 'dvd', choices=nested_choices, attrs={'id': 'media'}, html=html, ) def test_constructor_attrs(self): """ Attributes provided at instantiation are passed to the constituent inputs. 
""" widget = RadioSelect(attrs={'id': 'foo'}) html = """ <ul id="foo"> <li> <label for="foo_0"><input checked="checked" type="radio" id="foo_0" value="J" name="beatle" /> John</label> </li> <li><label for="foo_1"><input type="radio" id="foo_1" value="P" name="beatle" /> Paul</label></li> <li><label for="foo_2"><input type="radio" id="foo_2" value="G" name="beatle" /> George</label></li> <li><label for="foo_3"><input type="radio" id="foo_3" value="R" name="beatle" /> Ringo</label></li> </ul> """ self.check_html(widget, 'beatle', 'J', choices=self.beatles, html=html) def test_render_attrs(self): """ Attributes provided at render-time are passed to the constituent inputs. """ html = """ <ul id="bar"> <li> <label for="bar_0"><input checked="checked" type="radio" id="bar_0" value="J" name="beatle" /> John</label> </li> <li><label for="bar_1"><input type="radio" id="bar_1" value="P" name="beatle" /> Paul</label></li> <li><label for="bar_2"><input type="radio" id="bar_2" value="G" name="beatle" /> George</label></li> <li><label for="bar_3"><input type="radio" id="bar_3" value="R" name="beatle" /> Ringo</label></li> </ul> """ self.check_html(self.widget, 'beatle', 'J', choices=self.beatles, attrs={'id': 'bar'}, html=html)
chidaobanjiu/MANA2077
refs/heads/master
routes/__init__.py
3
from flask import session

from models.user import User


def current_user():
    """Return the User identified by the session's 'user_id', per User.find_by.

    Falls back to an empty-string id when no user is logged in; what
    User.find_by returns for a miss is defined by the model layer.
    """
    uid = session.get('user_id', '')
    return User.find_by(id=uid)


def admin():
    """Return the session's user.

    NOTE(review): this was a byte-for-byte duplicate of current_user() and
    performs NO admin-role check — presumably callers enforce authorization
    elsewhere. TODO confirm before relying on the name.
    """
    return current_user()
zenodo/invenio
refs/heads/zenodo-master
invenio/utils/washers.py
20
# -*- coding: utf-8 -*- # This file is part of Invenio. # Copyright (C) 2012, 2013, 2014, 2015 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. # """Washing utilities.""" import types def wash_urlargd(form, content): """Wash the complete form based on the specification in content. Content is a dictionary containing the field names as a key, and a tuple (type, default) as value. 'type' can be list, unicode, invenio.legacy.wsgi.utils.StringField, int, tuple, or invenio.legacy.wsgi.utils.Field (for file uploads). The specification automatically includes the 'ln' field, which is common to all queries. Arguments that are not defined in 'content' are discarded. .. note:: In case `list` or `tuple` were asked for, we assume that `list` or `tuple` of strings is to be returned. Therefore beware when you want to use ``wash_urlargd()`` for multiple file upload forms. :returns: argd dictionary that can be used for passing function parameters by keywords. """ result = {} for k, (dst_type, default) in content.items(): try: value = form[k] except KeyError: result[k] = default continue src_type = type(value) # First, handle the case where we want all the results. In # this case, we need to ensure all the elements are strings, # and not Field instances. 
if src_type in (list, tuple): if dst_type is list: result[k] = [x for x in value] continue if dst_type is tuple: result[k] = tuple([x for x in value]) continue # in all the other cases, we are only interested in the # first value. value = value[0] # Allow passing argument modyfing function. if isinstance(dst_type, types.FunctionType): result[k] = dst_type(value) continue # Maybe we already have what is expected? Then don't change # anything. if isinstance(value, dst_type): result[k] = value continue # Since we got here, 'value' is sure to be a single symbol, # not a list kind of structure anymore. if dst_type in (int, float, long, bool): try: result[k] = dst_type(value) except: result[k] = default elif dst_type is tuple: result[k] = (value, ) elif dst_type is list: result[k] = [value] else: raise ValueError( 'cannot cast form value %s of type %r into type %r' % ( value, src_type, dst_type)) return result def wash_html_id(dirty): """Strip non-alphabetic or newline characters from a given string. It can be used as a HTML element ID (also with jQuery and in all browsers). :param dirty: the string to wash :returns: the HTML ID ready string """ import re if not dirty[0].isalpha(): # we make sure that the first character is a lowercase letter dirty = 'i' + dirty non_word = re.compile(r'[^\w]+') return non_word.sub('', dirty)
sushengyang/CoreNLP
refs/heads/master
scripts/arabic-segmenter/output_to_tedeval.py
41
#!/usr/bin/env python2.7 # -*- coding: utf-8 -*- import sys import codecs import re def convert(untok_filename, tok_filename): with uopen(untok_filename, 'r') as input, \ uopen(tok_filename, 'r') as output, \ uopen(tok_filename + '.segmentation', 'w') as seg, \ uopen(tok_filename + '.ftree', 'w') as tree: convert_files(input, output, seg, tree) def get_filenames(argv): if len(argv) != 3: print 'Usage: %s <untok> <tok>' % argv[0] print ' where' print ' <untok> is the untokenized input file that was fed to the segmenter' print ' <tok> is the existing segmenter output file' print ' <tok>.segmentation will be the generated TEDEval seg file' print ' <tok>.ftree will be the generated TEDEval tree file' exit(1) return argv[1], argv[2] def uopen(filename, mode): return codecs.open(filename, mode, encoding='utf-8') def convert_files(input, output, seg, tree): for input_line, output_line in zip(input, output): process_line(input_line, output_line, seg, tree) def process_line(input_line, output_line, seg, tree): tree.write('(root') input_words = sanitize(input_line).split(' ') output_words = merge_segments(output_line).split(' ') input_words = filter_deletions(input_words) output_words = filter_deletions(output_words) assert len(input_words) == len(output_words), str((input_line, output_line, input_words, output_words)) for input_word, output_word in zip(input_words, output_words): for segment in output_word.split(':'): tree.write(' (seg %s)' % segment) seg.write('%s\t%s\n' % (input_word, output_word)) seg.write('\n') tree.write(')\n') def filter_deletions(words): ''' Some tokens (ones consisting solely of a diacritic or tatweel) are deleted by one or both segmenters. This deletes all such tokens from the output to try to balance out the sentence. 
''' return [word for word in words if not is_deleted(word)] def is_deleted(word): return re.match(u'^[~_\u0640\u064b-\u065e\u0670]*$', word) is not None # tatweel dagger alif # most diacritics def merge_segments(line): return re.sub(r'\$(\w+)\$', r'#\1#', re.sub(r'\(', r'#lp#', re.sub(r'\)', r'#rp#', re.sub(r'([^ ])# ', r'\1:', re.sub(r' \+([^ ])', r':\1', re.sub(r'([^ ])# \+([^ ])', r'\1:\2', re.sub(r':', r'$pm$', re.sub(r'#(\w+)#', r'$\1$', line[:-1])))))))) def sanitize(line): return re.sub(r'\(', r'#lp#', re.sub(r'\)', r'#rp#', re.sub(r':', r'#pm#', line[:-1]))) if __name__ == '__main__': untok, tok = get_filenames(sys.argv) convert(untok, tok)
faithsws/WWR
refs/heads/master
RouterIF.py
1
import socket
import threading
import time

import Utils


class Connection(threading.Thread):
    """One accepted client socket, serviced on its own receive thread."""

    def __init__(self, conn, addr, parent):
        threading.Thread.__init__(self)
        self.conn = conn      # connected socket returned by accept()
        self.addr = addr      # peer address tuple
        self.parent = parent  # owning RouteIF, used to deregister on exit
        self.mac = ""         # NOTE(review): never assigned here — filled by other code? confirm
        self.info = ""

    def run(self):
        """Receive loop: print incoming data until the peer closes or errors."""
        while True:
            try:
                buf = self.conn.recv(65524)
                if len(buf) == 0:
                    # recv() returning an empty buffer means the peer closed.
                    raise Exception("socket failed")
                print(buf)
            except Exception as ex:  # fix: "except Exception,ex" is invalid Python 3
                print(ex)
                try:
                    # fix: close the client socket so the fd is not leaked.
                    self.conn.close()
                except Exception:
                    pass
                self.parent.RemoveConnection(self)
                return


class RouteIF:
    """TCP listener that spawns a Connection thread per accepted client."""

    def __init__(self):
        self.connections = []

    def RemoveConnection(self, conn):
        """Deregister a finished Connection."""
        self.connections.remove(conn)

    def StartServer(self):
        """Run the accept loop in a background thread; report counts forever."""
        th = threading.Thread(target=self.Server)
        th.start()
        while True:
            time.sleep(1)
            print(len(self.connections))

    def Server(self):
        """Bind, listen, and hand each accepted socket to a Connection thread.

        Port and backlog come from the plugin configuration.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(("0.0.0.0", Utils.PluginConfig['RouteIF']['port']))
        sock.listen(Utils.PluginConfig['RouteIF']['maxConnections'])
        try:
            while True:
                connection, address = sock.accept()
                conn = Connection(connection, address, self)
                self.connections.append(conn)
                conn.start()
        except Exception as ex:  # fix: "except Exception,ex" is invalid Python 3
            print(ex)
bboalimoe/keras
refs/heads/master
examples/mnist_irnn.py
70
from __future__ import absolute_import from __future__ import print_function import numpy as np np.random.seed(1337) # for reproducibility from keras.datasets import mnist from keras.models import Sequential from keras.layers.core import Dense, Activation from keras.initializations import normal, identity from keras.layers.recurrent import SimpleRNN, LSTM from keras.optimizers import RMSprop from keras.utils import np_utils ''' This is a reproduction of the IRNN experiment with pixel-by-pixel sequential MNIST in "A Simple Way to Initialize Recurrent Networks of Rectified Linear Units " by Quoc V. Le, Navdeep Jaitly, Geoffrey E. Hinton arXiv:1504.00941v2 [cs.NE] 7 Apr 201 http://arxiv.org/pdf/1504.00941v2.pdf Optimizer is replaced with RMSprop which yields more stable and steady improvement. Reaches 0.93 train/test accuracy after 900 epochs (which roughly corresponds to 1687500 steps in the original paper.) ''' batch_size = 32 nb_classes = 10 nb_epochs = 200 hidden_units = 100 learning_rate = 1e-6 clip_norm = 1.0 BPTT_truncate = 28*28 # the data, shuffled and split between train and test sets (X_train, y_train), (X_test, y_test) = mnist.load_data() X_train = X_train.reshape(X_train.shape[0], -1, 1) X_test = X_test.reshape(X_test.shape[0], -1, 1) X_train = X_train.astype("float32") X_test = X_test.astype("float32") X_train /= 255 X_test /= 255 print('X_train shape:', X_train.shape) print(X_train.shape[0], 'train samples') print(X_test.shape[0], 'test samples') # convert class vectors to binary class matrices Y_train = np_utils.to_categorical(y_train, nb_classes) Y_test = np_utils.to_categorical(y_test, nb_classes) print('Evaluate IRNN...') model = Sequential() model.add(SimpleRNN(input_dim=1, output_dim=hidden_units, init=lambda shape: normal(shape, scale=0.001), inner_init=lambda shape: identity(shape, scale=1.0), activation='relu', truncate_gradient=BPTT_truncate)) model.add(Dense(hidden_units, nb_classes)) model.add(Activation('softmax')) rmsprop = 
RMSprop(lr=learning_rate) model.compile(loss='categorical_crossentropy', optimizer=rmsprop) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epochs, show_accuracy=True, verbose=1, validation_data=(X_test, Y_test)) scores = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0) print('IRNN test score:', scores[0]) print('IRNN test accuracy:', scores[1]) print('Compare to LSTM...') model = Sequential() model.add(LSTM(1, hidden_units)) model.add(Dense(hidden_units, nb_classes)) model.add(Activation('softmax')) rmsprop = RMSprop(lr=learning_rate) model.compile(loss='categorical_crossentropy', optimizer=rmsprop) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epochs, show_accuracy=True, verbose=1, validation_data=(X_test, Y_test)) scores = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0) print('LSTM test score:', scores[0]) print('LSTM test accuracy:', scores[1])
protochron/carbon
refs/heads/master
lib/carbon/aggregator/receiver.py
3
from carbon.instrumentation import increment
from carbon.aggregator.rules import RuleManager
from carbon.aggregator.buffers import BufferManager
from carbon.rewrite import RewriteRuleManager
from carbon import events, log


def process(metric, datapoint):
    """Route one (metric, datapoint) through rewrite and aggregation rules.

    The datapoint is fed into an aggregation buffer for every matching rule;
    if the (post-rewritten) metric matched no rule, it is emitted unchanged.
    """
    increment('datapointsReceived')

    # Pre-aggregation rewrites may rename the metric before rule matching.
    for rewrite_rule in RewriteRuleManager.preRules:
        metric = rewrite_rule.apply(metric)

    matched = []
    for agg_rule in RuleManager.rules:
        target = agg_rule.get_aggregate_metric(metric)
        if target is None:
            continue
        matched.append(target)

        buf = BufferManager.get_buffer(target)
        if not buf.configured:
            buf.configure_aggregation(agg_rule.frequency, agg_rule.aggregation_func)
        buf.input(datapoint)

    # Post-aggregation rewrites apply only to the pass-through name.
    for rewrite_rule in RewriteRuleManager.postRules:
        metric = rewrite_rule.apply(metric)

    if metric not in matched:
        log.msg("Couldn't match metric %s with any aggregation rule. Passing on un-aggregated." % metric)
        events.metricGenerated(metric, datapoint)
bkpathak/HackerRank-Problems
refs/heads/master
python/sorting/merge_using_heap.py
2
import heapq


def addtoheap(h, i, it):
    """Push iterator *it*'s next value onto heap *h*, tagged with index *i*.

    Silently does nothing when the iterator is exhausted, which is how
    drained inputs drop out of the merge.
    """
    try:
        heapq.heappush(h, (next(it), i))
    except StopIteration:
        pass


def mergek(*lists):
    """Lazily yield the values of the pre-sorted inputs in sorted order.

    Classic k-way merge: keep one (value, source-index) pair per input on a
    min-heap, pop the smallest, and refill from the iterator it came from.
    (The standard library offers heapq.merge for the same job.)
    """
    # fix: list() is required — under Python 3, map() returns a one-shot
    # iterator that cannot be indexed as its[i] below.
    its = list(map(iter, lists))
    h = []
    for i, it in enumerate(its):
        addtoheap(h, i, it)
    while h:
        v, i = heapq.heappop(h)
        addtoheap(h, i, its[i])
        yield v


if __name__ == '__main__':
    # Demo moved under a main guard (was a module-level Python 2 print);
    # the unused `data = [[]]` binding was removed.
    print(' '.join(str(x) for x in mergek([1, 3, 5], [2, 4, 6], [7, 8, 9], [10])))
pridemusvaire/yowsup
refs/heads/master
yowsup/layers/protocol_groups/protocolentities/iq_groups_create.py
41
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .iq_groups import GroupsIqProtocolEntity


class CreateGroupsIqProtocolEntity(GroupsIqProtocolEntity):
    '''
    Entity for a "create group" iq request with the given subject and
    initial participant list:

    <iq type="set" id="{{id}}" xmlns="w:g2", to="g.us">
        <create subject="{{subject}}">
            <participant jid="{{jid}}"></participant>
        </create>
    </iq>
    '''
    def __init__(self, subject, _id = None, participants = None):
        # Group-management iqs always target the "g.us" service with type "set".
        super(CreateGroupsIqProtocolEntity, self).__init__(to = "g.us", _id = _id, _type = "set")
        self.setProps(subject)
        self.setParticipants(participants or [])

    def setProps(self, subject):
        # subject: display name of the group being created
        self.subject = subject

    def setParticipants(self, participants):
        # participants: list of jid strings to invite at creation time
        self.participantList = participants

    def toProtocolTreeNode(self):
        # Build the base <iq> node, then attach <create subject=...> carrying
        # one <participant jid=...> child per invitee.
        node = super(CreateGroupsIqProtocolEntity, self).toProtocolTreeNode()
        cnode = ProtocolTreeNode("create",{ "subject": self.subject})
        participantNodes = [ ProtocolTreeNode("participant", { "jid": participant }) for participant in self.participantList ]
        cnode.addChildren(participantNodes)
        node.addChild(cnode)
        return node

    @staticmethod
    def fromProtocolTreeNode(node):
        # Parse the generic group iq first, then re-brand the instance to this
        # class and pull the create-specific data from the <create> child.
        entity = super(CreateGroupsIqProtocolEntity,CreateGroupsIqProtocolEntity).fromProtocolTreeNode(node)
        entity.__class__ = CreateGroupsIqProtocolEntity
        entity.setProps(node.getChild("create").getAttributeValue("subject"))
        participantList = []
        for participantNode in node.getChild("create").getAllChildren():
            participantList.append(participantNode["jid"])
        entity.setParticipants(participantList)
        return entity
ybellavance/python-for-android
refs/heads/master
python3-alpha/python3-src/Lib/distutils/tests/test_config_cmd.py
47
"""Tests for distutils.command.config.""" import unittest import os import sys from test.support import run_unittest from distutils.command.config import dump_file, config from distutils.tests import support from distutils import log class ConfigTestCase(support.LoggingSilencer, support.TempdirManager, unittest.TestCase): def _info(self, msg, *args): for line in msg.splitlines(): self._logs.append(line) def setUp(self): super(ConfigTestCase, self).setUp() self._logs = [] self.old_log = log.info log.info = self._info def tearDown(self): log.info = self.old_log super(ConfigTestCase, self).tearDown() def test_dump_file(self): this_file = os.path.splitext(__file__)[0] + '.py' f = open(this_file) try: numlines = len(f.readlines()) finally: f.close() dump_file(this_file, 'I am the header') self.assertEqual(len(self._logs), numlines+1) def test_search_cpp(self): if sys.platform == 'win32': return pkg_dir, dist = self.create_dist() cmd = config(dist) # simple pattern searches match = cmd.search_cpp(pattern='xxx', body='// xxx') self.assertEqual(match, 0) match = cmd.search_cpp(pattern='_configtest', body='// xxx') self.assertEqual(match, 1) def test_finalize_options(self): # finalize_options does a bit of transformation # on options pkg_dir, dist = self.create_dist() cmd = config(dist) cmd.include_dirs = 'one%stwo' % os.pathsep cmd.libraries = 'one' cmd.library_dirs = 'three%sfour' % os.pathsep cmd.ensure_finalized() self.assertEqual(cmd.include_dirs, ['one', 'two']) self.assertEqual(cmd.libraries, ['one']) self.assertEqual(cmd.library_dirs, ['three', 'four']) def test_clean(self): # _clean removes files tmp_dir = self.mkdtemp() f1 = os.path.join(tmp_dir, 'one') f2 = os.path.join(tmp_dir, 'two') self.write_file(f1, 'xxx') self.write_file(f2, 'xxx') for f in (f1, f2): self.assertTrue(os.path.exists(f)) pkg_dir, dist = self.create_dist() cmd = config(dist) cmd._clean(f1, f2) for f in (f1, f2): self.assertTrue(not os.path.exists(f)) def test_suite(): return 
unittest.makeSuite(ConfigTestCase) if __name__ == "__main__": run_unittest(test_suite())
drammock/mne-python
refs/heads/main
tutorials/intro/50_configure_mne.py
7
# -*- coding: utf-8 -*- """ .. _tut-configure-mne: Configuring MNE-Python ====================== This tutorial covers how to configure MNE-Python to suit your local system and your analysis preferences. We begin by importing the necessary Python modules: """ import os import mne ############################################################################### # .. _config-get-set: # # Getting and setting configuration variables # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # # Configuration variables are read and written using the functions # :func:`mne.get_config` and :func:`mne.set_config`. To read a specific # configuration variable, pass its name to :func:`~mne.get_config` as the # ``key`` parameter (``key`` is the first parameter so you can pass it unnamed # if you want): print(mne.get_config('MNE_USE_CUDA')) print(type(mne.get_config('MNE_USE_CUDA'))) ############################################################################### # Note that the string values read from the JSON file are not parsed in any # way, so :func:`~mne.get_config` returns a string even for true/false config # values, rather than a Python :ref:`boolean <bltin-boolean-values>`. # Similarly, :func:`~mne.set_config` will only set string values (or ``None`` # values, to unset a variable): try: mne.set_config('MNE_USE_CUDA', True) except TypeError as err: print(err) ############################################################################### # If you're unsure whether a config variable has been set, there is a # convenient way to check it and provide a fallback in case it doesn't exist: # :func:`~mne.get_config` has a ``default`` parameter. print(mne.get_config('missing_config_key', default='fallback value')) ############################################################################### # There are also two convenience modes of :func:`~mne.get_config`. 
The first # will return a :class:`dict` containing all config variables (and their # values) that have been set on your system; this is done by passing # ``key=None`` (which is the default, so it can be omitted): print(mne.get_config()) # same as mne.get_config(key=None) ############################################################################### # The second convenience mode will return a :class:`tuple` of all the keys that # MNE-Python recognizes and uses, regardless of whether they've been set on # your system. This is done by passing an empty string ``''`` as the ``key``: print(mne.get_config(key='')) ############################################################################### # It is possible to add config variables that are not part of the recognized # list, by passing any arbitrary key to :func:`~mne.set_config`. This will # yield a warning, however, which is a nice check in cases where you meant to # set a valid key but simply misspelled it: mne.set_config('MNEE_USE_CUUDAA', 'false') ############################################################################### # Let's delete that config variable we just created. To unset a config # variable, use :func:`~mne.set_config` with ``value=None``. Since we're still # dealing with an unrecognized key (as far as MNE-Python is concerned) we'll # still get a warning, but the key will be unset: mne.set_config('MNEE_USE_CUUDAA', None) assert 'MNEE_USE_CUUDAA' not in mne.get_config('') ############################################################################### # Where configurations are stored # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # # MNE-Python stores configuration variables in a `JSON`_ file. By default, this # file is located in :file:`{%USERPROFILE%}\\.mne\\mne-python.json` on Windows # and :file:`{$HOME}/.mne/mne-python.json` on Linux or macOS. You can get the # full path to the config file with :func:`mne.get_config_path`. 
print(mne.get_config_path()) ############################################################################### # However it is not a good idea to directly edit files in the :file:`.mne` # directory; use the getting and setting functions described in :ref:`the # previous section <config-get-set>`. # # If for some reason you want to load the configuration from a different # location, you can pass the ``home_dir`` parameter to # :func:`~mne.get_config_path`, specifying the parent directory of the # :file:`.mne` directory where the configuration file you wish to load is # stored. # # # Using environment variables # ^^^^^^^^^^^^^^^^^^^^^^^^^^^ # # For compatibility with :doc:`MNE-C <../../install/mne_c>`, MNE-Python # also reads and writes `environment variables`_ to specify configuration. This # is done with the same functions that read and write the JSON configuration, # and is controlled with the parameters ``use_env`` and ``set_env``. By # default, :func:`~mne.get_config` will check :data:`os.environ` before # checking the MNE-Python JSON file; to check *only* the JSON file use # ``use_env=False``. To demonstrate, here's an environment variable that is not # specific to MNE-Python (and thus is not in the JSON config file): # make sure it's not in the JSON file (no error means our assertion held): assert mne.get_config('PATH', use_env=False) is None # but it *is* in the environment: print(mne.get_config('PATH')) ############################################################################### # Also by default, :func:`~mne.set_config` will set values in both the JSON # file and in :data:`os.environ`; to set a config variable *only* in the JSON # file use ``set_env=False``. 
Here we'll use :func:`print` statement to confirm # that an environment variable is being created and deleted (we could have used # the Python :ref:`assert statement <assert>` instead, but it doesn't print any # output when it succeeds so it's a little less obvious): mne.set_config('foo', 'bar', set_env=False) print('foo' in os.environ.keys()) mne.set_config('foo', 'bar') print('foo' in os.environ.keys()) mne.set_config('foo', None) # unsetting a key deletes var from environment print('foo' in os.environ.keys()) ############################################################################### # .. _tut_logging: # # Logging # ^^^^^^^ # # One important configuration variable is ``MNE_LOGGING_LEVEL``. Throughout the # module, messages are generated describing the actions MNE-Python is taking # behind-the-scenes. How you set ``MNE_LOGGING_LEVEL`` determines how many of # those messages you see. The default logging level on a fresh install of # MNE-Python is ``info``: print(mne.get_config('MNE_LOGGING_LEVEL')) ############################################################################### # The logging levels that can be set as config variables are ``debug``, # ``info``, ``warning``, ``error``, and ``critical``. Around 90% of the log # messages in MNE-Python are ``info`` messages, so for most users the choice is # between ``info`` (tell me what is happening) and ``warning`` (tell me only if # something worrisome happens). The ``debug`` logging level is intended for # MNE-Python developers. # # # In :ref:`an earlier section <config-get-set>` we saw how # :func:`mne.set_config` is used to change the logging level for the current # Python session and all future sessions. To change the logging level only for # the current Python session, you can use :func:`mne.set_log_level` instead. 
# The :func:`~mne.set_log_level` function takes the same five string options # that are used for the ``MNE_LOGGING_LEVEL`` config variable; additionally, it # can accept :class:`int` or :class:`bool` values that are equivalent to those # strings. The equivalencies are given in this table: # # .. _table-log-levels: # # +----------+---------+---------+ # | String | Integer | Boolean | # +==========+=========+=========+ # | DEBUG | 10 | | # +----------+---------+---------+ # | INFO | 20 | True | # +----------+---------+---------+ # | WARNING | 30 | False | # +----------+---------+---------+ # | ERROR | 40 | | # +----------+---------+---------+ # | CRITICAL | 50 | | # +----------+---------+---------+ # # With many MNE-Python functions it is possible to change the logging level # temporarily for just that function call, by using the ``verbose`` parameter. # To illustrate this, we'll load some sample data with different logging levels # set. First, with log level ``warning``: kit_data_path = os.path.join(os.path.abspath(os.path.dirname(mne.__file__)), 'io', 'kit', 'tests', 'data', 'test.sqd') raw = mne.io.read_raw_kit(kit_data_path, verbose='warning') ############################################################################### # No messages were generated, because none of the messages were of severity # "warning" or worse. Next, we'll load the same file with log level ``info`` # (the default level): raw = mne.io.read_raw_kit(kit_data_path, verbose='info') ############################################################################### # This time, we got a few messages about extracting information from the file, # converting that information into the MNE-Python :class:`~mne.Info` format, # etc. 
Finally, if we request ``debug``-level information, we get even more # detail: raw = mne.io.read_raw_kit(kit_data_path, verbose='debug') ############################################################################### # We've been passing string values to the ``verbose`` parameter, but we can see # from :ref:`the table above <table-log-levels>` that ``verbose=True`` will # give us the ``info`` messages and ``verbose=False`` will suppress them; this # is a useful shorthand to use in scripts, so you don't have to remember the # specific names of the different logging levels. One final note: # ``verbose=None`` (which is the default for functions that have a ``verbose`` # parameter) will fall back on whatever logging level was most recently set by # :func:`mne.set_log_level`, or if that hasn't been called during the current # Python session, it will fall back to the value of # ``mne.get_config('MNE_LOGGING_LEVEL')``. # # # .. LINKS # # .. _json: https://en.wikipedia.org/wiki/JSON # .. _`environment variables`: https://wikipedia.org/wiki/Environment_variable
mibexsoftware/alfred-stash-workflow
refs/heads/master
workflow/src/lib/requests/packages/chardet/hebrewprober.py
2928
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Shy Shalom # Portions created by the Initial Developer are Copyright (C) 2005 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .charsetprober import CharSetProber from .constants import eNotMe, eDetecting from .compat import wrap_ord # This prober doesn't actually recognize a language or a charset. # It is a helper prober for the use of the Hebrew model probers ### General ideas of the Hebrew charset recognition ### # # Four main charsets exist in Hebrew: # "ISO-8859-8" - Visual Hebrew # "windows-1255" - Logical Hebrew # "ISO-8859-8-I" - Logical Hebrew # "x-mac-hebrew" - ?? Logical Hebrew ?? # # Both "ISO" charsets use a completely identical set of code points, whereas # "windows-1255" and "x-mac-hebrew" are two different proper supersets of # these code points. 
windows-1255 defines additional characters in the range # 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific # diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. # x-mac-hebrew defines similar additional code points but with a different # mapping. # # As far as an average Hebrew text with no diacritics is concerned, all four # charsets are identical with respect to code points. Meaning that for the # main Hebrew alphabet, all four map the same values to all 27 Hebrew letters # (including final letters). # # The dominant difference between these charsets is their directionality. # "Visual" directionality means that the text is ordered as if the renderer is # not aware of a BIDI rendering algorithm. The renderer sees the text and # draws it from left to right. The text itself when ordered naturally is read # backwards. A buffer of Visual Hebrew generally looks like so: # "[last word of first line spelled backwards] [whole line ordered backwards # and spelled backwards] [first word of first line spelled backwards] # [end of line] [last word of second line] ... etc' " # adding punctuation marks, numbers and English text to visual text is # naturally also "visual" and from left to right. # # "Logical" directionality means the text is ordered "naturally" according to # the order it is read. It is the responsibility of the renderer to display # the text from right to left. A BIDI algorithm is used to place general # punctuation marks, numbers and English text in the text. # # Texts in x-mac-hebrew are almost impossible to find on the Internet. From # what little evidence I could find, it seems that its general directionality # is Logical. # # To sum up all of the above, the Hebrew probing mechanism knows about two # charsets: # Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are # backwards while line order is natural. 
For charset recognition purposes # the line order is unimportant (In fact, for this implementation, even # word order is unimportant). # Logical Hebrew - "windows-1255" - normal, naturally ordered text. # # "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be # specifically identified. # "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew # that contain special punctuation marks or diacritics is displayed with # some unconverted characters showing as question marks. This problem might # be corrected using another model prober for x-mac-hebrew. Due to the fact # that x-mac-hebrew texts are so rare, writing another model prober isn't # worth the effort and performance hit. # #### The Prober #### # # The prober is divided between two SBCharSetProbers and a HebrewProber, # all of which are managed, created, fed data, inquired and deleted by the # SBCSGroupProber. The two SBCharSetProbers identify that the text is in # fact some kind of Hebrew, Logical or Visual. The final decision about which # one is it is made by the HebrewProber by combining final-letter scores # with the scores of the two SBCharSetProbers to produce a final answer. # # The SBCSGroupProber is responsible for stripping the original text of HTML # tags, English characters, numbers, low-ASCII punctuation characters, spaces # and new lines. It reduces any sequence of such characters to a single space. # The buffer fed to each prober in the SBCS group prober is pure text in # high-ASCII. # The two SBCharSetProbers (model probers) share the same language model: # Win1255Model. # The first SBCharSetProber uses the model normally as any other # SBCharSetProber does, to recognize windows-1255, upon which this model was # built. The second SBCharSetProber is told to make the pair-of-letter # lookup in the language model backwards. This in practice exactly simulates # a visual Hebrew model using the windows-1255 logical Hebrew model. 
# # The HebrewProber is not using any language model. All it does is look for # final-letter evidence suggesting the text is either logical Hebrew or visual # Hebrew. Disjointed from the model probers, the results of the HebrewProber # alone are meaningless. HebrewProber always returns 0.00 as confidence # since it never identifies a charset by itself. Instead, the pointer to the # HebrewProber is passed to the model probers as a helper "Name Prober". # When the Group prober receives a positive identification from any prober, # it asks for the name of the charset identified. If the prober queried is a # Hebrew model prober, the model prober forwards the call to the # HebrewProber to make the final decision. In the HebrewProber, the # decision is made according to the final-letters scores maintained and Both # model probers scores. The answer is returned in the form of the name of the # charset identified, either "windows-1255" or "ISO-8859-8". # windows-1255 / ISO-8859-8 code points of interest FINAL_KAF = 0xea NORMAL_KAF = 0xeb FINAL_MEM = 0xed NORMAL_MEM = 0xee FINAL_NUN = 0xef NORMAL_NUN = 0xf0 FINAL_PE = 0xf3 NORMAL_PE = 0xf4 FINAL_TSADI = 0xf5 NORMAL_TSADI = 0xf6 # Minimum Visual vs Logical final letter score difference. # If the difference is below this, don't rely solely on the final letter score # distance. MIN_FINAL_CHAR_DISTANCE = 5 # Minimum Visual vs Logical model score difference. # If the difference is below this, don't rely at all on the model score # distance. 
MIN_MODEL_DISTANCE = 0.01

VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"


class HebrewProber(CharSetProber):
    """Decide between logical (windows-1255) and visual (ISO-8859-8) Hebrew.

    This prober only accumulates final-letter statistics; actual charset
    recognition is done by the two model probers registered through
    set_model_probers().  It therefore never reports a confidence of its
    own and acts purely as the tie-breaking "name prober".
    """

    # Final-form letters may legitimately end a word.
    _FINAL_LETTERS = (FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE, FINAL_TSADI)
    # NORMAL_TSADI is deliberately excluded: the apostrophe that often
    # follows a tsadi is turned into a space upstream, which would make a
    # perfectly normal tsadi look word-final.  Normal Pe/Kaf occasionally
    # end loan words ('Pop', 'Winamp'), but they are rare enough that
    # keeping them as non-final evidence is a net win.
    _NON_FINAL_LETTERS = (NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE)

    def __init__(self):
        CharSetProber.__init__(self)
        self._mLogicalProber = None
        self._mVisualProber = None
        self.reset()

    def reset(self):
        """Clear the final-letter scores and the two-character lookbehind."""
        self._mFinalCharLogicalScore = 0
        self._mFinalCharVisualScore = 0
        # Seed the lookbehind with spaces so the very first character of the
        # stream is treated as the start of a word.
        self._mPrev = ' '
        self._mBeforePrev = ' '

    def set_model_probers(self, logicalProber, visualProber):
        # The probers are owned by the group prober; we only hold references.
        self._mLogicalProber = logicalProber
        self._mVisualProber = visualProber

    def is_final(self, c):
        """Return True when *c* is one of the five Hebrew final-form letters."""
        return wrap_ord(c) in self._FINAL_LETTERS

    def is_non_final(self, c):
        """Return True when *c* is a normal form that should not end a word."""
        return wrap_ord(c) in self._NON_FINAL_LETTERS

    def feed(self, aBuf):
        """Accumulate final-letter evidence from *aBuf*.

        Per multi-letter word:
          * ends with a final letter      -> +1 logical score (case 1)
          * ends with a non-final letter  -> +1 visual score  (case 2)
          * starts with a final letter    -> +1 visual score  (case 3)

        Final letters in the middle of words are ignored: they say nothing
        about the text direction either way.
        """
        if self.get_state() == eNotMe:
            # Both model probers have already given up; nothing to decide.
            return eNotMe

        # Keep only high-bit characters; everything 7-bit becomes a space so
        # word-boundary detection below keeps working.
        aBuf = self.filter_high_bit_only(aBuf)

        for ch in aBuf:
            if ch == ' ':
                # A word just ended.  Skip one-letter words, i.e. cases
                # where the previous character was itself preceded by a
                # space.
                if self._mBeforePrev != ' ':
                    if self.is_final(self._mPrev):
                        # case (1) [-2:not space][-1:final][cur:space]
                        self._mFinalCharLogicalScore += 1
                    elif self.is_non_final(self._mPrev):
                        # case (2) [-2:not space][-1:non-final][cur:space]
                        self._mFinalCharVisualScore += 1
            elif self._mBeforePrev == ' ' and self.is_final(self._mPrev):
                # case (3) [-2:space][-1:final][cur:not space]
                # (cur != ' ' is guaranteed by this branch.)
                self._mFinalCharVisualScore += 1
            self._mBeforePrev = self._mPrev
            self._mPrev = ch

        # Keep detecting until both model probers return eNotMe
        # (handled at the top of this method).
        return eDetecting

    def get_charset_name(self):
        """Return the charset the accumulated evidence points to."""
        final_diff = (self._mFinalCharLogicalScore
                      - self._mFinalCharVisualScore)

        # Dominant final-letter evidence decides outright.
        if final_diff >= MIN_FINAL_CHAR_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if final_diff <= -MIN_FINAL_CHAR_DISTANCE:
            return VISUAL_HEBREW_NAME

        # Otherwise fall back on the model probers' confidence gap.
        model_diff = (self._mLogicalProber.get_confidence()
                      - self._mVisualProber.get_confidence())
        if model_diff > MIN_MODEL_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if model_diff < -MIN_MODEL_DISTANCE:
            return VISUAL_HEBREW_NAME

        # Still inconclusive: any visual lean wins, otherwise default to
        # logical Hebrew.
        if final_diff < 0.0:
            return VISUAL_HEBREW_NAME
        return LOGICAL_HEBREW_NAME

    def get_state(self):
        # Remain active as long as at least one model prober is active.
        if (self._mLogicalProber.get_state() == eNotMe
                and self._mVisualProber.get_state() == eNotMe):
            return eNotMe
        return eDetecting
Mic92/ansible
refs/heads/devel
lib/ansible/utils/hashing.py
28
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from ansible.errors import AnsibleError
from ansible.utils.unicode import to_bytes

# Note, sha1 is the only hash algorithm compatible with python2.4 and with
# FIPS-140 mode (as of 11-2014)
try:
    from hashlib import sha1 as sha1
except ImportError:
    from sha import sha as sha1

# Backwards compat only
try:
    from hashlib import md5 as _md5
except ImportError:
    try:
        from md5 import md5 as _md5
    except ImportError:
        # Assume we're running in FIPS mode here
        _md5 = None


def secure_hash_s(data, hash_func=sha1):
    ''' Return a secure hash hex digest of data.

    Non-string data is coerced with "%s" first; text that cannot be fed to
    the digest directly (non-ASCII on python2) is hashed as its utf-8
    byte representation instead.
    '''

    digest = hash_func()
    try:
        if not isinstance(data, basestring):
            data = "%s" % data
        digest.update(data)
    except UnicodeEncodeError:
        # Unicode text with non-ASCII characters: hash the utf-8 bytes.
        digest.update(data.encode('utf-8'))
    return digest.hexdigest()


def secure_hash(filename, hash_func=sha1):
    ''' Return a secure hash hex digest of local file, None if file is not
    present or a directory.

    Raises AnsibleError if the file exists but cannot be read.
    '''

    if not os.path.exists(to_bytes(filename, errors='strict')) or os.path.isdir(to_bytes(filename, errors='strict')):
        return None
    digest = hash_func()
    blocksize = 64 * 1024
    try:
        # Use a context manager so the handle is closed even when read()
        # raises; the previous explicit open()/close() pair leaked the
        # file descriptor on a mid-read IOError.
        with open(to_bytes(filename, errors='strict'), 'rb') as infile:
            block = infile.read(blocksize)
            while block:
                digest.update(block)
                block = infile.read(blocksize)
    except IOError as e:
        raise AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
    return digest.hexdigest()


# The checksum algorithm must match with the algorithm in ShellModule.checksum() method
checksum = secure_hash
checksum_s = secure_hash_s


# Backwards compat functions.  Some modules include md5s in their return values
# Continue to support that for now.  As of ansible-1.8, all of those modules
# should also return "checksum" (sha1 for now)
# Do not use md5 unless it is needed for:
# 1) Optional backwards compatibility
# 2) Compliance with a third party protocol
#
# MD5 will not work on systems which are FIPS-140-2 compliant.

def md5s(data):
    '''Return the md5 hex digest of data; ValueError in FIPS mode.'''
    if not _md5:
        raise ValueError('MD5 not available.  Possibly running in FIPS mode')
    return secure_hash_s(data, _md5)


def md5(filename):
    '''Return the md5 hex digest of a local file; ValueError in FIPS mode.'''
    if not _md5:
        raise ValueError('MD5 not available.  Possibly running in FIPS mode')
    return secure_hash(filename, _md5)
rspavel/spack
refs/heads/develop
var/spack/repos/builtin/packages/libxfont2/package.py
5
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Libxfont2(AutotoolsPackage, XorgPackage): """libXfont provides the core of the legacy X11 font system, handling the index files (fonts.dir, fonts.alias, fonts.scale), the various font file formats, and rasterizing them. It is used by the X servers, the X Font Server (xfs), and some font utilities (bdftopcf for instance), but should not be used by normal X11 clients. X11 clients access fonts via either the new API's in libXft, or the legacy API's in libX11.""" homepage = "http://cgit.freedesktop.org/xorg/lib/libXfont" xorg_mirror_path = "lib/libXfont2-2.0.1.tar.gz" version('2.0.1', sha256='381b6b385a69343df48a082523c856aed9042fbbc8ee0a6342fb502e4321230a') depends_on('libfontenc') depends_on('freetype') depends_on('xtrans', type='build') depends_on('xproto', type='build') depends_on('fontsproto@2.1.3:', type='build') depends_on('pkgconfig', type='build') depends_on('util-macros', type='build')
xpansa/purchase-workflow
refs/heads/8.0
purchase_requisition_transport_document/__init__.py
253
# -*- coding: utf-8 -*- from . import model
gitprouser/appengine-bottle-skeleton
refs/heads/master
lib/requests_toolbelt/auth/_digest_auth_compat.py
33
"""Provide a compatibility layer for requests.auth.HTTPDigestAuth.""" import requests class _ThreadingDescriptor(object): def __init__(self, prop, default): self.prop = prop self.default = default def __get__(self, obj, objtype=None): return getattr(obj._thread_local, self.prop, self.default) def __set__(self, obj, value): setattr(obj._thread_local, self.prop, value) class _HTTPDigestAuth(requests.auth.HTTPDigestAuth): init = _ThreadingDescriptor('init', True) last_nonce = _ThreadingDescriptor('last_nonce', '') nonce_count = _ThreadingDescriptor('nonce_count', 0) chal = _ThreadingDescriptor('chal', {}) pos = _ThreadingDescriptor('pos', None) num_401_calls = _ThreadingDescriptor('num_401_calls', 1) if requests.__build__ < 0x020800: HTTPDigestAuth = requests.auth.HTTPDigestAuth else: HTTPDigestAuth = _HTTPDigestAuth
darjeeling/django
refs/heads/master
tests/migrations/migrations_test_apps/conflicting_app_with_dependencies/migrations/0002_conflicting_second.py
133
from django.db import migrations, models


class Migration(migrations.Migration):
    """Second leaf migration used by the migration-conflict test scenarios."""

    dependencies = [("conflicting_app_with_dependencies", "0001_initial")]

    operations = [
        migrations.CreateModel(
            name="Something",
            fields=[("id", models.AutoField(primary_key=True))],
        ),
    ]
ljrepos/hypertable
refs/heads/master
src/py/ThriftClient/gen-py/hyperthrift/gen2/ttypes.py
3
# # Autogenerated by Thrift Compiler (0.9.2) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py:new_style # from thrift.Thrift import TType, TMessageType, TException, TApplicationException import hyperthrift.gen.ttypes from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol, TProtocol try: from thrift.protocol import fastbinary except: fastbinary = None class HqlResult(object): """ Result type of HQL queries <dl> <dt>results</dt> <dd>String results from metadata queries</dd> <dt>cells</dt> <dd>Resulting table cells of for buffered queries</dd> <dt>scanner</dt> <dd>Resulting scanner ID for unbuffered queries</dd> <dt>mutator</dt> <dd>Resulting mutator ID for unflushed modifying queries</dd> </dl> Attributes: - results - cells - scanner - mutator """ thrift_spec = ( None, # 0 (1, TType.LIST, 'results', (TType.STRING,None), None, ), # 1 (2, TType.LIST, 'cells', (TType.STRUCT,(hyperthrift.gen.ttypes.Cell, hyperthrift.gen.ttypes.Cell.thrift_spec)), None, ), # 2 (3, TType.I64, 'scanner', None, None, ), # 3 (4, TType.I64, 'mutator', None, None, ), # 4 ) def __init__(self, results=None, cells=None, scanner=None, mutator=None,): self.results = results self.cells = cells self.scanner = scanner self.mutator = mutator def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.LIST: self.results = [] (_etype3, _size0) = iprot.readListBegin() for _i4 in xrange(_size0): _elem5 = iprot.readString(); self.results.append(_elem5) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.LIST: self.cells = [] (_etype9, _size6) 
= iprot.readListBegin() for _i10 in xrange(_size6): _elem11 = hyperthrift.gen.ttypes.Cell() _elem11.read(iprot) self.cells.append(_elem11) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I64: self.scanner = iprot.readI64(); else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I64: self.mutator = iprot.readI64(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('HqlResult') if self.results is not None: oprot.writeFieldBegin('results', TType.LIST, 1) oprot.writeListBegin(TType.STRING, len(self.results)) for iter12 in self.results: oprot.writeString(iter12) oprot.writeListEnd() oprot.writeFieldEnd() if self.cells is not None: oprot.writeFieldBegin('cells', TType.LIST, 2) oprot.writeListBegin(TType.STRUCT, len(self.cells)) for iter13 in self.cells: iter13.write(oprot) oprot.writeListEnd() oprot.writeFieldEnd() if self.scanner is not None: oprot.writeFieldBegin('scanner', TType.I64, 3) oprot.writeI64(self.scanner) oprot.writeFieldEnd() if self.mutator is not None: oprot.writeFieldBegin('mutator', TType.I64, 4) oprot.writeI64(self.mutator) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __hash__(self): value = 17 value = (value * 31) ^ hash(self.results) value = (value * 31) ^ hash(self.cells) value = (value * 31) ^ hash(self.scanner) value = (value * 31) ^ hash(self.mutator) return value def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == 
other) class HqlResult2(object): """ Same as HqlResult except with cell as array Attributes: - results - cells - scanner - mutator """ thrift_spec = ( None, # 0 (1, TType.LIST, 'results', (TType.STRING,None), None, ), # 1 (2, TType.LIST, 'cells', (TType.LIST,(TType.STRING,None)), None, ), # 2 (3, TType.I64, 'scanner', None, None, ), # 3 (4, TType.I64, 'mutator', None, None, ), # 4 ) def __init__(self, results=None, cells=None, scanner=None, mutator=None,): self.results = results self.cells = cells self.scanner = scanner self.mutator = mutator def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.LIST: self.results = [] (_etype17, _size14) = iprot.readListBegin() for _i18 in xrange(_size14): _elem19 = iprot.readString(); self.results.append(_elem19) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.LIST: self.cells = [] (_etype23, _size20) = iprot.readListBegin() for _i24 in xrange(_size20): _elem25 = [] (_etype29, _size26) = iprot.readListBegin() for _i30 in xrange(_size26): _elem31 = iprot.readString(); _elem25.append(_elem31) iprot.readListEnd() self.cells.append(_elem25) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I64: self.scanner = iprot.readI64(); else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I64: self.mutator = iprot.readI64(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, 
(self.__class__, self.thrift_spec))) return oprot.writeStructBegin('HqlResult2') if self.results is not None: oprot.writeFieldBegin('results', TType.LIST, 1) oprot.writeListBegin(TType.STRING, len(self.results)) for iter32 in self.results: oprot.writeString(iter32) oprot.writeListEnd() oprot.writeFieldEnd() if self.cells is not None: oprot.writeFieldBegin('cells', TType.LIST, 2) oprot.writeListBegin(TType.LIST, len(self.cells)) for iter33 in self.cells: oprot.writeListBegin(TType.STRING, len(iter33)) for iter34 in iter33: oprot.writeString(iter34) oprot.writeListEnd() oprot.writeListEnd() oprot.writeFieldEnd() if self.scanner is not None: oprot.writeFieldBegin('scanner', TType.I64, 3) oprot.writeI64(self.scanner) oprot.writeFieldEnd() if self.mutator is not None: oprot.writeFieldBegin('mutator', TType.I64, 4) oprot.writeI64(self.mutator) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __hash__(self): value = 17 value = (value * 31) ^ hash(self.results) value = (value * 31) ^ hash(self.cells) value = (value * 31) ^ hash(self.scanner) value = (value * 31) ^ hash(self.mutator) return value def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other) class HqlResultAsArrays(object): """ Same as HqlResult except with cell as array Attributes: - results - cells - scanner - mutator """ thrift_spec = ( None, # 0 (1, TType.LIST, 'results', (TType.STRING,None), None, ), # 1 (2, TType.LIST, 'cells', (TType.LIST,(TType.STRING,None)), None, ), # 2 (3, TType.I64, 'scanner', None, None, ), # 3 (4, TType.I64, 'mutator', None, None, ), # 4 ) def __init__(self, results=None, cells=None, scanner=None, mutator=None,): self.results = results self.cells = cells self.scanner = scanner self.mutator 
= mutator def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.LIST: self.results = [] (_etype38, _size35) = iprot.readListBegin() for _i39 in xrange(_size35): _elem40 = iprot.readString(); self.results.append(_elem40) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 2: if ftype == TType.LIST: self.cells = [] (_etype44, _size41) = iprot.readListBegin() for _i45 in xrange(_size41): _elem46 = [] (_etype50, _size47) = iprot.readListBegin() for _i51 in xrange(_size47): _elem52 = iprot.readString(); _elem46.append(_elem52) iprot.readListEnd() self.cells.append(_elem46) iprot.readListEnd() else: iprot.skip(ftype) elif fid == 3: if ftype == TType.I64: self.scanner = iprot.readI64(); else: iprot.skip(ftype) elif fid == 4: if ftype == TType.I64: self.mutator = iprot.readI64(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('HqlResultAsArrays') if self.results is not None: oprot.writeFieldBegin('results', TType.LIST, 1) oprot.writeListBegin(TType.STRING, len(self.results)) for iter53 in self.results: oprot.writeString(iter53) oprot.writeListEnd() oprot.writeFieldEnd() if self.cells is not None: oprot.writeFieldBegin('cells', TType.LIST, 2) oprot.writeListBegin(TType.LIST, len(self.cells)) for iter54 in self.cells: oprot.writeListBegin(TType.STRING, len(iter54)) for iter55 in iter54: 
oprot.writeString(iter55) oprot.writeListEnd() oprot.writeListEnd() oprot.writeFieldEnd() if self.scanner is not None: oprot.writeFieldBegin('scanner', TType.I64, 3) oprot.writeI64(self.scanner) oprot.writeFieldEnd() if self.mutator is not None: oprot.writeFieldBegin('mutator', TType.I64, 4) oprot.writeI64(self.mutator) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __hash__(self): value = 17 value = (value * 31) ^ hash(self.results) value = (value * 31) ^ hash(self.cells) value = (value * 31) ^ hash(self.scanner) value = (value * 31) ^ hash(self.mutator) return value def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other)
aakar77/DockSeatResourIsoCompr
refs/heads/master
docker-python-sdk/DockerApiClass.py
1
''' Author: Aakar Jinwala Description: This class is an abstraction layer over Docker Python methods and its methods. It provides automating the tasks for docker container methods using Docker API. Get the log stream from the container. Get the log from the container. Get the container ID. Get the statistics regarding a container. Get Process IDs of the process running inside the doker container Important Note: This class is only for managing the containers and Not for creating docker images. Docker images can also be created using the Python Docker SDK. It is not my focus currently. For more Information about the Docker SDk for python: https://docker-py.readthedocs.io/en/stable/ Special Note: Three ways for creating a docker container 1) Use Docker SDK docker run method - using detached = false - Will run container in foreground 2) Use Docker SDK docker run method - using detached = True - Will run the container in backrgoud; returns container class object 3) Use Docker SDK docker create method; gives container class object and then invoke start method on the container object. 2) and 3) method gives container object ''' import docker from itertools import izip import json import datetime import time import csv class docker_sdk_abstraction(): def __init__(self): ''' <Purpose> Initializes the Docker API client object. Initializes the Docker API container class object to None. container_obj will be initialized while container_create / container_run methods. <Arguments> None ''' self.docker_api_obj = docker.from_env() self.container_obj = None self.process_set = set() self.container_start_timestamp = None self.stats_dict = None # Following are the Getter methods for getting Docker Api Container Object attributes def get_container_id(self): ''' <Purpose> For returning the container object id attribute. 
id attribute is container's id <Arguments> None <Return> Returns the container ID to which the object is pointing to ''' return self.container_obj.id def get_container_id_short(self): ''' <Purpose> Get method for container object short id(truncated to 10 character) attribute. <Arguments> None <Return> Returns the 10 charcter container ID to which the object is pointing to ''' return self.container_obj.short_id def get_container_name(self): ''' <Purpose> Get method for container object name attribute. It is by default assigned by the docker container if not specified while docker run / docker create <Arguments> None <Return> Returns the 10 charcter container ID to which the object is pointing to ''' return self.container_obj.name def get_container_image_name(self): ''' <Purpose> Get method for the container's object image attribute. <Arguments> None <Return> Returns the container image name for example <Image: 'python-prog:latest'> ''' return str(self.container_obj.image) def get_container_status(self): return self.container_obj.status def get_container_process(self): return self.process_set def set_container_process(self): if(self.get_container_status() != "exited"): # docker container object top method, it gives process ids currently running in the form of a list process_dict = self.container_obj.top() nested_list = process_dict.get("Processes") for list_a in nested_list: self.process_set.add(list_a[1]) # Process ID self.process_set.add(list_a[2]) # Parent Process print self.get_container_process() """ It gives the process ID of processes running inside the container in format like {u'Processes': [[u'root', u'27138', u'27121', u'30', u'16:36', u'?', u'00:00:01', u'mplayer -benchmark -vo null -ao null ./Sintel.mp4']], u'Titles': [u'UID', u'PID', u'PPID', u'C', u'STIME', u'TTY', u'TIME', u'CMD']} Made a set attribute that stores the process running inside the docker container. 
""" # Following are the class methods def container_create(self, docker_image_tag_name, container_arguments): ''' <Purpose> Create a docker container using containers.create method. Inigtializes the docker API container class object. <Arguments> 1) Image name for which container is to created. 2) A Dictonary which can be used for setting up the arguments for the containers.create() method. ''' print container_arguments self.container_obj = self.docker_api_obj.containers.create(docker_image_tag_name, **container_arguments) def container_start(self): ''' <Purpose> Invoke Docker API container class object start method. Starts the docker container <Arguments> None ''' self.container_obj.start() def container_run(self, docker_image_tag_name, detach_mode): if (detach_mode == False): # Docker container will run on foreground # Output = docker container logs # Will not return untill container execution completes container_run_log = self.docker_api_obj.containers.run(docker_image_tag_name, detach=detach_mode) return container_run_log else: # Docker container won't run in foreground # Output of the containers.run method = Container class object self.container_obj = self.docker_ai_obj.containers.run(docker_image_tag_name, detach=detach_mode) def container_log(self): ''' <Purpose> This method is for getting the container log after container has stopped running. 
It creates a log file with the filename as container short id + output-file.log <Arguments> None <Return> None ''' #Updating container's processes self.set_container_process() #Calling container object logs method - stream is False and Follow is True container_end_log = self.container_obj.logs(stdout = True, stderr = True, stream = False, follow = True) # Formatting the log output container_end_log.replace("\r", "\n") # Creating file name filename = self.get_container_id_short() +"-"+self.get_container_image_name()+"-output-file.log" # Creating and writting into the log file log_file_obj = open(filename, "w+") log_file_obj.write(container_end_log) log_file_obj.close() def container_log_stream(self): ''' <Purpose> This method is for getting the container log throughout the container execution. It creates a log file with the filename as container short id + image name + output-file.log. This method will return back only after the container has completed its execution. i.e. status = exited. <Arguments> None <Return> None ''' # Reloading the container object attributes, especially needed for the status self.container_obj.reload() # Creating file name for the log file filename = self.get_container_id_short()+"-"+self.get_container_image_name()+"-output-file.log" log_file_obj = open(filename, "w+") #update container procees set attribute self.container_obj.set_container_process() # Gives generator stream object helper log_stream = self.container_obj.logs(stdout = True, stderr = True, stream = True, follow = True) for data in izip(log_stream): # Reloading the container object atrributes, more concerned for container status = exited self.container_obj.reload() # Formatting the stream data tuple data = "".join(data) data.replace("\r", "\n") # Dumping the data into file json.dump(data, log_file_obj) #update container procees set attribute self.set_container_process() """ Break the generator stream, once the container status turns exicted. 
If not, it will produce 0 values for all the other fields. """ if(self.get_container_status() == "exited"): stat_file_obj.close() break def container_stats_stream(self): ''' <Purpose> This method is for getting the statistics stream during the container execution. It creates a stats file with the filename as container short id + stat-file.log. This method will return back only after the container has completed its execution. i.e. status = exited Next Task would be: Manually logging cpu and memory data and calculating average over them. A suggestion by Lukas to log these stats into a CSV file and parse the file after completion. <Arguments> None <Return> None ''' self.get_container_process() # Updating the container object attributes self.container_obj.reload() # Creating file name. filename = self.get_container_id_short() +"-"+self.get_container_image_name()+"-stats-file.csv" #stat_file_obj = open(filename, "w+") # Gives generator stream object helper stats_stream = self.container_obj.stats(decode=True, stream = True) writer = csv.writer(open(filename, 'w+')) for stats_tuple in izip(stats_stream): # Updating the container object attributes, especially the container status self.container_obj.reload() # Getting memory stat dictionary read_timestamp = stats_tuple[0]['read'] preread_timestamp = stats_tuple[0]['preread'] memory_stat = stats_tuple[0]['memory_stats'] memory_usage = memory_stat.get('usage') memory_limit = memory_stat.get('limit') memory_max_usage = memory_stat.get('max_usage') csv_row = [read_timestamp, preread_timestamp, memory_usage, memory_limit, memory_max_usage ] #print csv_row writer.writerow(csv_row) # If the container has exited, close the file object and break the for loopn if(self.get_container_status() == "exited"): stat_file_obj.close() break """ # Getting CPU stats dictonary cpu_stat = stats_tuple[0]['cpu_stats'] # Dumping the stats stream data, in the file json.dump(stats_tuple, stat_file_obj, indent = 4) #update container procees set 
attribute self.set_container_process() """ def list_containers(self, method_options): # sending a dict of container arguments. It returns a list of containers list_containers = self.docker_api_obj(**method_options) ############################# object1 = docker_sdk_abstraction() """ Important note here ------------------------- cpu_cpus Datatype = int or String cpu_shares Datatype = int only mem_limit = if int specify memory limit in bytes or can specify values like 200m 4000k 1g More options available at https://docker-py.readthedocs.io/en/stable/containers.html """ container_arguments = { 'cpuset_cpus': "1", 'cpu_shares': 2000, 'mem_limit': "200m" } object1.container_create("docker-mplayer-i", container_arguments) object1.container_start() #print object1.get_container_image_name() #object1.container_log_stream() object1.container_stats_stream() object1.container_log() """ #print object1.get_container_image() #while(object1.get_container_status == "running"): # pass # object1.start_container() # print object1.get_container_name() """
xujun10110/golismero
refs/heads/master
thirdparty_libs/nltk/inference/__init__.py
17
# Natural Language Toolkit: Inference # # Copyright (C) 2001-2012 NLTK Project # Author: Dan Garrette <dhgarrette@gmail.com> # Ewan Klein <ewan@inf.ed.ac.uk> # # URL: <http://www.nltk.org/> # For license information, see LICENSE.TXT """ Classes and interfaces for theorem proving and model building. """ from api import ParallelProverBuilder, ParallelProverBuilderCommand from mace import Mace, MaceCommand from prover9 import Prover9, Prover9Command from resolution import ResolutionProver, ResolutionProverCommand from tableau import TableauProver, TableauProverCommand from discourse import (ReadingCommand, CfgReadingCommand, DrtGlueReadingCommand, DiscourseTester)
0x0all/scikit-learn
refs/heads/master
sklearn/decomposition/tests/test_truncated_svd.py
240
"""Test truncated SVD transformer.""" import numpy as np import scipy.sparse as sp from sklearn.decomposition import TruncatedSVD from sklearn.utils import check_random_state from sklearn.utils.testing import (assert_array_almost_equal, assert_equal, assert_raises, assert_greater, assert_array_less) # Make an X that looks somewhat like a small tf-idf matrix. # XXX newer versions of SciPy have scipy.sparse.rand for this. shape = 60, 55 n_samples, n_features = shape rng = check_random_state(42) X = rng.randint(-100, 20, np.product(shape)).reshape(shape) X = sp.csr_matrix(np.maximum(X, 0), dtype=np.float64) X.data[:] = 1 + np.log(X.data) Xdense = X.A def test_algorithms(): svd_a = TruncatedSVD(30, algorithm="arpack") svd_r = TruncatedSVD(30, algorithm="randomized", random_state=42) Xa = svd_a.fit_transform(X)[:, :6] Xr = svd_r.fit_transform(X)[:, :6] assert_array_almost_equal(Xa, Xr) comp_a = np.abs(svd_a.components_) comp_r = np.abs(svd_r.components_) # All elements are equal, but some elements are more equal than others. 
assert_array_almost_equal(comp_a[:9], comp_r[:9]) assert_array_almost_equal(comp_a[9:], comp_r[9:], decimal=3) def test_attributes(): for n_components in (10, 25, 41): tsvd = TruncatedSVD(n_components).fit(X) assert_equal(tsvd.n_components, n_components) assert_equal(tsvd.components_.shape, (n_components, n_features)) def test_too_many_components(): for algorithm in ["arpack", "randomized"]: for n_components in (n_features, n_features+1): tsvd = TruncatedSVD(n_components=n_components, algorithm=algorithm) assert_raises(ValueError, tsvd.fit, X) def test_sparse_formats(): for fmt in ("array", "csr", "csc", "coo", "lil"): Xfmt = Xdense if fmt == "dense" else getattr(X, "to" + fmt)() tsvd = TruncatedSVD(n_components=11) Xtrans = tsvd.fit_transform(Xfmt) assert_equal(Xtrans.shape, (n_samples, 11)) Xtrans = tsvd.transform(Xfmt) assert_equal(Xtrans.shape, (n_samples, 11)) def test_inverse_transform(): for algo in ("arpack", "randomized"): # We need a lot of components for the reconstruction to be "almost # equal" in all positions. XXX Test means or sums instead? 
tsvd = TruncatedSVD(n_components=52, random_state=42) Xt = tsvd.fit_transform(X) Xinv = tsvd.inverse_transform(Xt) assert_array_almost_equal(Xinv, Xdense, decimal=1) def test_integers(): Xint = X.astype(np.int64) tsvd = TruncatedSVD(n_components=6) Xtrans = tsvd.fit_transform(Xint) assert_equal(Xtrans.shape, (n_samples, tsvd.n_components)) def test_explained_variance(): # Test sparse data svd_a_10_sp = TruncatedSVD(10, algorithm="arpack") svd_r_10_sp = TruncatedSVD(10, algorithm="randomized", random_state=42) svd_a_20_sp = TruncatedSVD(20, algorithm="arpack") svd_r_20_sp = TruncatedSVD(20, algorithm="randomized", random_state=42) X_trans_a_10_sp = svd_a_10_sp.fit_transform(X) X_trans_r_10_sp = svd_r_10_sp.fit_transform(X) X_trans_a_20_sp = svd_a_20_sp.fit_transform(X) X_trans_r_20_sp = svd_r_20_sp.fit_transform(X) # Test dense data svd_a_10_de = TruncatedSVD(10, algorithm="arpack") svd_r_10_de = TruncatedSVD(10, algorithm="randomized", random_state=42) svd_a_20_de = TruncatedSVD(20, algorithm="arpack") svd_r_20_de = TruncatedSVD(20, algorithm="randomized", random_state=42) X_trans_a_10_de = svd_a_10_de.fit_transform(X.toarray()) X_trans_r_10_de = svd_r_10_de.fit_transform(X.toarray()) X_trans_a_20_de = svd_a_20_de.fit_transform(X.toarray()) X_trans_r_20_de = svd_r_20_de.fit_transform(X.toarray()) # helper arrays for tests below svds = (svd_a_10_sp, svd_r_10_sp, svd_a_20_sp, svd_r_20_sp, svd_a_10_de, svd_r_10_de, svd_a_20_de, svd_r_20_de) svds_trans = ( (svd_a_10_sp, X_trans_a_10_sp), (svd_r_10_sp, X_trans_r_10_sp), (svd_a_20_sp, X_trans_a_20_sp), (svd_r_20_sp, X_trans_r_20_sp), (svd_a_10_de, X_trans_a_10_de), (svd_r_10_de, X_trans_r_10_de), (svd_a_20_de, X_trans_a_20_de), (svd_r_20_de, X_trans_r_20_de), ) svds_10_v_20 = ( (svd_a_10_sp, svd_a_20_sp), (svd_r_10_sp, svd_r_20_sp), (svd_a_10_de, svd_a_20_de), (svd_r_10_de, svd_r_20_de), ) svds_sparse_v_dense = ( (svd_a_10_sp, svd_a_10_de), (svd_a_20_sp, svd_a_20_de), (svd_r_10_sp, svd_r_10_de), (svd_r_20_sp, 
svd_r_20_de), ) # Assert the 1st component is equal for svd_10, svd_20 in svds_10_v_20: assert_array_almost_equal( svd_10.explained_variance_ratio_, svd_20.explained_variance_ratio_[:10], decimal=5, ) # Assert that 20 components has higher explained variance than 10 for svd_10, svd_20 in svds_10_v_20: assert_greater( svd_20.explained_variance_ratio_.sum(), svd_10.explained_variance_ratio_.sum(), ) # Assert that all the values are greater than 0 for svd in svds: assert_array_less(0.0, svd.explained_variance_ratio_) # Assert that total explained variance is less than 1 for svd in svds: assert_array_less(svd.explained_variance_ratio_.sum(), 1.0) # Compare sparse vs. dense for svd_sparse, svd_dense in svds_sparse_v_dense: assert_array_almost_equal(svd_sparse.explained_variance_ratio_, svd_dense.explained_variance_ratio_) # Test that explained_variance is correct for svd, transformed in svds_trans: total_variance = np.var(X.toarray(), axis=0).sum() variances = np.var(transformed, axis=0) true_explained_variance_ratio = variances / total_variance assert_array_almost_equal( svd.explained_variance_ratio_, true_explained_variance_ratio, )
Alshak/clowdflows
refs/heads/master
workflows/mysql/db_test.py
4
import mysql.connector as sql # Widget 1 con = sql.connect(user='root', password='', host='localhost', database='test') # Widget 2 # Tole bo DB Context Object # - izberi relacije # - izberi stolpce # - povezi kljuce cursor = con.cursor() cursor.execute('SHOW tables') tables = [table for (table,) in cursor] cols = {} for table in tables: cursor.execute("SELECT column_name FROM information_schema.columns WHERE table_name = '%s'" % table) cols[table] = [col for (col,) in cursor] print cols main_table = 'trains' connected = {} cursor.execute( "SELECT table_name, column_name, referenced_table_name, referenced_column_name \ FROM information_schema.KEY_COLUMN_USAGE \ WHERE referenced_table_name IS NOT NULL") for (table, col, ref_table, ref_col) in cursor: connected[(table, ref_table)] = (col, ref_col) print connected # Widget 3 # - iz DB context obj zgeneriraj proper .b in .f/.n fajle
dfang/odoo
refs/heads/10.0
addons/calendar/models/__init__.py
23
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import ir_attachment import ir_http import res_partner import mail_message import calendar
youdonghai/intellij-community
refs/heads/master
python/testData/refactoring/rename/renameImportModuleAs/before/a.py
83
import f<caret>oo as foo foo.f()
tgckpg/wenku10
refs/heads/unsigned-master
unittest.py
1
#!/usr/bin/env python3
"""One-shot hand-shake server for unit testing.

Listens on PORT, accepts a single client, and waits for the text
"CanITest".  Replies "GoAhead" on a match, "No" on any other payload,
then shuts down.
"""
import re
import socket
import collections
import sys

# Symbolic name meaning all available interfaces
HOST = ""
# Arbitrary non-privileged port
PORT = 9730

# TCP: SOCK_STREAM
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, PORT))
s.listen(1)

print( "Listening on: " + str(PORT) )
conn, addr = s.accept()
print( "Connected by: ", addr )
while 1:
    data = conn.recv(2048)
    if not data:
        # Bug fix: recv() returns b"" once the peer closes the connection.
        # Without this check, b"".decode() != "CanITest" and `elif data` is
        # falsy, so the original loop spun forever calling recv() on a dead
        # socket.
        print( "Connection closed by peer" )
        break
    if( data.decode( "utf-8" ) == "CanITest" ):
        print( "Received UnitTestSignal" )
        conn.sendall( b"GoAhead" )
        break
    else:
        # data is non-empty here, so the original `elif( data )` guard is
        # always true and reduces to a plain else.
        # NOTE(review): decode() above can raise UnicodeDecodeError on binary
        # garbage -- acceptable for a test harness, flagged for awareness.
        print( "Unknow signal:", data )
        conn.sendall( b"No" )
        break
# Bug fix: close the accepted connection as well as the listening socket.
conn.close()
s.close()
daeseokyoun/youtube-dl
refs/heads/youtube_search
youtube_dl/extractor/afreecatv.py
16
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..compat import ( compat_urllib_parse_urlparse, compat_urlparse, ) from ..utils import ( ExtractorError, int_or_none, xpath_element, xpath_text, ) class AfreecaTVIE(InfoExtractor): IE_DESC = 'afreecatv.com' _VALID_URL = r'''(?x)^ https?://(?:(live|afbbs|www)\.)?afreeca(?:tv)?\.com(?::\d+)? (?: /app/(?:index|read_ucc_bbs)\.cgi| /player/[Pp]layer\.(?:swf|html)) \?.*?\bnTitleNo=(?P<id>\d+)''' _TESTS = [{ 'url': 'http://live.afreecatv.com:8079/app/index.cgi?szType=read_ucc_bbs&szBjId=dailyapril&nStationNo=16711924&nBbsNo=18605867&nTitleNo=36164052&szSkin=', 'md5': 'f72c89fe7ecc14c1b5ce506c4996046e', 'info_dict': { 'id': '36164052', 'ext': 'mp4', 'title': '데일리 에이프릴 요정들의 시상식!', 'thumbnail': 're:^https?://(?:video|st)img.afreecatv.com/.*$', 'uploader': 'dailyapril', 'uploader_id': 'dailyapril', 'upload_date': '20160503', } }, { 'url': 'http://afbbs.afreecatv.com:8080/app/read_ucc_bbs.cgi?nStationNo=16711924&nTitleNo=36153164&szBjId=dailyapril&nBbsNo=18605867', 'info_dict': { 'id': '36153164', 'title': "BJ유트루와 함께하는 '팅커벨 메이크업!'", 'thumbnail': 're:^https?://(?:video|st)img.afreecatv.com/.*$', 'uploader': 'dailyapril', 'uploader_id': 'dailyapril', }, 'playlist_count': 2, 'playlist': [{ 'md5': 'd8b7c174568da61d774ef0203159bf97', 'info_dict': { 'id': '36153164_1', 'ext': 'mp4', 'title': "BJ유트루와 함께하는 '팅커벨 메이크업!'", 'upload_date': '20160502', }, }, { 'md5': '58f2ce7f6044e34439ab2d50612ab02b', 'info_dict': { 'id': '36153164_2', 'ext': 'mp4', 'title': "BJ유트루와 함께하는 '팅커벨 메이크업!'", 'upload_date': '20160502', }, }], }, { 'url': 'http://www.afreecatv.com/player/Player.swf?szType=szBjId=djleegoon&nStationNo=11273158&nBbsNo=13161095&nTitleNo=36327652', 'only_matching': True, }] @staticmethod def parse_video_key(key): video_key = {} m = re.match(r'^(?P<upload_date>\d{8})_\w+_(?P<part>\d+)$', key) if m: video_key['upload_date'] = m.group('upload_date') video_key['part'] = m.group('part') 
return video_key def _real_extract(self, url): video_id = self._match_id(url) parsed_url = compat_urllib_parse_urlparse(url) info_url = compat_urlparse.urlunparse(parsed_url._replace( netloc='afbbs.afreecatv.com:8080', path='/api/video/get_video_info.php')) video_xml = self._download_xml(info_url, video_id) if xpath_element(video_xml, './track/video/file') is None: raise ExtractorError('Specified AfreecaTV video does not exist', expected=True) title = xpath_text(video_xml, './track/title', 'title') uploader = xpath_text(video_xml, './track/nickname', 'uploader') uploader_id = xpath_text(video_xml, './track/bj_id', 'uploader id') duration = int_or_none(xpath_text(video_xml, './track/duration', 'duration')) thumbnail = xpath_text(video_xml, './track/titleImage', 'thumbnail') entries = [] for i, video_file in enumerate(video_xml.findall('./track/video/file')): video_key = self.parse_video_key(video_file.get('key', '')) if not video_key: continue entries.append({ 'id': '%s_%s' % (video_id, video_key.get('part', i + 1)), 'title': title, 'upload_date': video_key.get('upload_date'), 'duration': int_or_none(video_file.get('duration')), 'url': video_file.text, }) info = { 'id': video_id, 'title': title, 'uploader': uploader, 'uploader_id': uploader_id, 'duration': duration, 'thumbnail': thumbnail, } if len(entries) > 1: info['_type'] = 'multi_video' info['entries'] = entries elif len(entries) == 1: info['url'] = entries[0]['url'] info['upload_date'] = entries[0].get('upload_date') else: raise ExtractorError( 'No files found for the specified AfreecaTV video, either' ' the URL is incorrect or the video has been made private.', expected=True) return info
bbc/kamaelia
refs/heads/master
Code/Python/Kamaelia/Kamaelia/Apps/Compose/GUI.py
3
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1) # # (1) Kamaelia Contributors are listed in the AUTHORS file and at # http://www.kamaelia.org/AUTHORS - please extend this file, # not this notice. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------- from Kamaelia.UI.Tk.TkWindow import TkWindow from Kamaelia.Support.Tk.Scrolling import ScrollingMenu from Axon.Ipc import producerFinished, shutdownMicroprocess import Tkinter class ArgumentsPanel(Tkinter.Frame): def __init__(self, parent, theclass): Tkinter.Frame.__init__(self, parent) self.theclass = theclass # build widgets row=0 if self.theclass['classdoc']: self.classdoclabel = Tkinter.Label(self, text = self.theclass['classdoc'], justify="left") self.classdoclabel['font'] = " ".join(self.classdoclabel['font'].split(" ")[0:2]) self.classdoclabel.grid(row=row, column=0,columnspan=2, sticky=Tkinter.N+Tkinter.E+Tkinter.W+Tkinter.S, padx=4, pady=4) row+=1 if self.theclass['initdoc']: self.initdoclabel = Tkinter.Label(self, text = self.theclass['initdoc'], justify="left") self.initdoclabel['font'] = " ".join(self.initdoclabel['font'].split(" ")[0:2]) self.initdoclabel.grid(row=row, column=0, columnspan=2, sticky=Tkinter.N+Tkinter.E+Tkinter.W+Tkinter.S, padx=4, pady=4) row+=1 self.label = Tkinter.Label(self, text="ARGUMENTS:") self.label.grid(row=row, column=0, 
columnspan=2,sticky=Tkinter.W+Tkinter.S, padx=4, pady=4) row+=1 # enumerate std args self.args = [] for arg in self.theclass['args']['std']: arglabel = Tkinter.Label(self, text=arg[0]) arglabel.grid(row=row,column=0, sticky=Tkinter.E) svar = Tkinter.StringVar() argfield = Tkinter.Entry(self, bg="white", textvariable=svar, takefocus=1) default="" if len(arg)>=2: default = arg[1] svar.set(default) argfield.grid(row=row,column=1, sticky=Tkinter.W) self.args.append( (arg[0], svar, default) ) row+=1 # now do * and ** args for argname in ["*","**"]: if self.theclass['args'][argname]: arglabel = Tkinter.Label(self, text=argname) arglabel.grid(row=row,column=0, sticky=Tkinter.E) arglabel = None svar = Tkinter.StringVar() argfield = Tkinter.Entry(self, bg="white", textvariable=svar, takefocus=1) argfield.grid(row=row,column=1, sticky=Tkinter.W) self.args.append( (argname, svar, "") ) row+=1 # self.rowconfigure(row, weight=1) # self.grid() def getDef(self): return { "name" : self.theclass['class'], "module" : self.theclass['module'], "instantiation" : self.getInstantiation() } def getInstantiation(self): """Return the instantiation string""" argstr = "" prefix = "" for (argname, svar, default) in self.args: text = svar.get().strip() default = default.strip() if argname != "*" and argname != "**": if default=="" or text != default: if not text: text = "<<unspecified>>" argstr = argstr + prefix + argname + " = " + text prefix=", " else: if text: argstr = argstr + prefix + text prefix=", " return argstr class BuilderControlsGUI(TkWindow): def __init__(self, classes): self.selectedComponent = None self.uid = 1 self.classes = classes super(BuilderControlsGUI, self).__init__() def setupWindow(self): items = [] lookup = {} # This is a bit of a nasty hack really ... :-) # Why is this a hack ? 
self.window.title("Pipeline Builder") self.addframe = Tkinter.Frame(self.window, borderwidth=2, relief=Tkinter.GROOVE) self.addframe.grid(row=0, column=0, sticky=Tkinter.N+Tkinter.E+Tkinter.W+Tkinter.S, padx=4, pady=4) def menuCallback(index, text): self.click_menuChoice(lookup[text]) print (self.classes[0]) for theclass in self.classes: lookup[ theclass['module']+"."+theclass['class'] ] = theclass items.append(theclass['module']+"."+theclass['class']) self.choosebutton = ScrollingMenu(self.addframe, items, command = menuCallback) self.choosebutton.grid(row=0, column=0, columnspan=2, sticky=Tkinter.N) self.argPanel = None self.argCanvas = Tkinter.Canvas(self.addframe, relief=Tkinter.SUNKEN, borderwidth=2) self.argCanvas.grid(row=1, column=0, sticky=Tkinter.N+Tkinter.S+Tkinter.E+Tkinter.W) self.argCanvasWID = self.argCanvas.create_window(0,0, anchor=Tkinter.NW) self.argCanvasScroll = Tkinter.Scrollbar(self.addframe, orient=Tkinter.VERTICAL) self.argCanvasScroll.grid(row=1, column=1, sticky=Tkinter.N+Tkinter.S+Tkinter.E) self.argCanvasScroll['command'] = self.argCanvas.yview self.argCanvas['yscrollcommand'] = self.argCanvasScroll.set self.click_menuChoice(self.classes[1]) self.addbutton = Tkinter.Button(self.addframe, text="ADD Component", command=self.click_addComponent ) self.addbutton.grid(row=2, column=0, columnspan=2, sticky=Tkinter.S) self.addframe.rowconfigure(1, weight=1) self.addframe.columnconfigure(0, weight=1) self.remframe = Tkinter.Frame(self.window, borderwidth=2, relief=Tkinter.GROOVE) self.remframe.grid(row=1, column=0, columnspan=2, sticky=Tkinter.S+Tkinter.E+Tkinter.W, padx=4, pady=4) self.selectedlabel = Tkinter.Label(self.remframe, text="<no component selected>") self.selectedlabel.grid(row=0, column=0, sticky=Tkinter.S) self.delbutton = Tkinter.Button(self.remframe, text="REMOVE Component", command=self.click_removeComponent ) self.delbutton.grid(row=1, column=0, sticky=Tkinter.S) self.delbutton.config(state=Tkinter.DISABLED) 
self.window.rowconfigure(0, weight=1) self.window.columnconfigure(0, weight=1) self.window.protocol("WM_DELETE_WINDOW", self.handleCloseWindowRequest ) def main(self): while not self.isDestroyed(): if self.dataReady("inbox"): data = self.recv("inbox") if data[0].upper() == "SELECT": if data[1].upper() == "NODE": self.componentSelected(data[2]) while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") self.window.destroy() self.tkupdate() yield 1 def handleCloseWindowRequest(self): self.send( shutdownMicroprocess(self), "signal") self.window.destroy() def makeUID(self): uid = self.uid self.uid += 1 return uid def componentSelected(self, component): self.selectedComponent = component if component == None: self.delbutton.config(state=Tkinter.DISABLED) self.selectedlabel["text"] = "<no component selected>" else: self.delbutton.config(state=Tkinter.NORMAL) self.selectedlabel["text"] = repr(component[0]) def click_addComponent(self): # add to the pipeline and wire it in c = self.argPanel.getDef() c["id"] = ( c['name'], repr(self.makeUID()) ) msg = ("ADD", c['id'], c['name'], c, self.selectedComponent) self.send( msg, "outbox") def click_removeComponent(self): if self.selectedComponent: self.send( ("DEL", self.selectedComponent), "outbox") def click_chooseComponent(self): pass def click_menuChoice(self, theclass): if self.argPanel != None: self.argPanel.destroy() self.argPanel = ArgumentsPanel(self.argCanvas, theclass) self.argPanel.update_idletasks() self.argCanvas.itemconfigure(self.argCanvasWID, window=self.argPanel) self.argCanvas['scrollregion'] = self.argCanvas.bbox("all") # ------------------------------------------------------------------- class TextOutputGUI(TkWindow): def __init__(self, title): self.title = title self.allreceived = True super(TextOutputGUI, self).__init__() def setupWindow(self): self.textbox = Tkinter.Text(self.window, 
cnf={"state":Tkinter.DISABLED} ) self.window.title(self.title) self.textbox.grid(row=0, column=0, sticky=Tkinter.N+Tkinter.E+Tkinter.W+Tkinter.S) self.window.rowconfigure(0, weight=1) self.window.columnconfigure(0, weight=1) self.window.protocol("WM_DELETE_WINDOW", self.handleCloseWindowRequest ) def main(self): while not self.isDestroyed(): if self.dataReady("inbox"): self.textbox.config(state=Tkinter.NORMAL) # enable editing if self.allreceived: self.allreceived = False self.textbox.delete(1.0, Tkinter.END) while self.dataReady("inbox"): data = self.recv("inbox") if data == None: self.allreceived = True else: self.textbox.insert(Tkinter.END, data) self.textbox.config(state=Tkinter.DISABLED) # disable editing while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished): self.send(msg, "signal") self.window.destroy() self.tkupdate() yield 1 def handleCloseWindowRequest(self): self.send( shutdownMicroprocess(self), "signal") self.window.destroy()
jakobsa/wolfssl
refs/heads/master
wrapper/python/wolfcrypt/ciphers.py
4
# ciphers.py # # Copyright (C) 2006-2016 wolfSSL Inc. # # This file is part of wolfSSL. (formerly known as CyaSSL) # # wolfSSL is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # wolfSSL is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA from wolfcrypt._ffi import ffi as _ffi from wolfcrypt._ffi import lib as _lib from wolfcrypt.utils import t2b from wolfcrypt.random import Random from wolfcrypt.exceptions import * # key direction flags _ENCRYPTION = 0 _DECRYPTION = 1 # feedback modes MODE_ECB = 1 # Electronic Code Book MODE_CBC = 2 # Cipher Block Chaining MODE_CFB = 3 # Cipher Feedback MODE_OFB = 5 # Output Feedback MODE_CTR = 6 # Counter _FEEDBACK_MODES = [MODE_ECB, MODE_CBC, MODE_CFB, MODE_OFB, MODE_CTR] class _Cipher(object): """ A **PEP 272: Block Encryption Algorithms** compliant **Symmetric Key Cipher**. 
""" def __init__(self, key, mode, IV=None): if mode not in _FEEDBACK_MODES: raise ValueError("this mode is not supported") if mode == MODE_CBC: if IV is None: raise ValueError("this mode requires an 'IV' string") else: raise ValueError("this mode is not supported by this cipher") if self.key_size: if self.key_size != len(key): raise ValueError("key must be %d in length" % self.key_size) elif self._key_sizes: if len(key) not in self._key_sizes: raise ValueError("key must be %s in length" % self._key_sizes) else: if not len(key): raise ValueError("key must not be 0 in length") if IV is not None and len(IV) != self.block_size: raise ValueError("IV must be %d in length" % self.block_size) self._native_object = _ffi.new(self._native_type) self._enc = None self._dec = None self._key = t2b(key) if IV: self._IV = t2b(IV) else: self._IV = t2b("\0" * self.block_size) @classmethod def new(cls, key, mode, IV=None, **kwargs): """ Returns a ciphering object, using the secret key contained in the string **key**, and using the feedback mode **mode**, which must be one of MODE_* defined in this module. If **mode** is MODE_CBC or MODE_CFB, **IV** must be provided and must be a string of the same length as the block size. Not providing a value of **IV** will result in a ValueError exception being raised. """ return cls(key, mode, IV) def encrypt(self, string): """ Encrypts a non-empty string, using the key-dependent data in the object, and with the appropriate feedback mode. The string's length must be an exact multiple of the algorithm's block size or, in CFB mode, of the segment size. Returns a string containing the ciphertext. 
""" string = t2b(string) if not string or len(string) % self.block_size: raise ValueError( "string must be a multiple of %d in length" % self.block_size) if self._enc is None: self._enc = _ffi.new(self._native_type) ret = self._set_key(_ENCRYPTION) if ret < 0: raise WolfCryptError("Invalid key error (%d)" % ret) result = t2b("\0" * len(string)) ret = self._encrypt(result, string) if ret < 0: raise WolfCryptError("Encryption error (%d)" % ret) return result def decrypt(self, string): """ Decrypts **string**, using the key-dependent data in the object and with the appropriate feedback mode. The string's length must be an exact multiple of the algorithm's block size or, in CFB mode, of the segment size. Returns a string containing the plaintext. """ string = t2b(string) if not string or len(string) % self.block_size: raise ValueError( "string must be a multiple of %d in length" % self.block_size) if self._dec is None: self._dec = _ffi.new(self._native_type) ret = self._set_key(_DECRYPTION) if ret < 0: raise WolfCryptError("Invalid key error (%d)" % ret) result = t2b("\0" * len(string)) ret = self._decrypt(result, string) if ret < 0: raise WolfCryptError("Decryption error (%d)" % ret) return result class Aes(_Cipher): """ The **Advanced Encryption Standard** (AES), a.k.a. Rijndael, is a symmetric-key cipher standardized by **NIST**. 
""" block_size = 16 key_size = None # 16, 24, 32 _key_sizes = [16, 24, 32] _native_type = "Aes *" def _set_key(self, direction): if direction == _ENCRYPTION: return _lib.wc_AesSetKey( self._enc, self._key, len(self._key), self._IV, _ENCRYPTION) else: return _lib.wc_AesSetKey( self._dec, self._key, len(self._key), self._IV, _DECRYPTION) def _encrypt(self, destination, source): return _lib.wc_AesCbcEncrypt(self._enc, destination, source,len(source)) def _decrypt(self, destination, source): return _lib.wc_AesCbcDecrypt(self._dec, destination, source,len(source)) class Des3(_Cipher): """ **Triple DES** (3DES) is the common name for the **Triple Data Encryption Algorithm** (TDEA or Triple DEA) symmetric-key block cipher, which applies the **Data Encryption Standard** (DES) cipher algorithm three times to each data block. """ block_size = 8 key_size = 24 _native_type = "Des3 *" def _set_key(self, direction): if direction == _ENCRYPTION: return _lib.wc_Des3_SetKey(self._enc,self._key,self._IV,_ENCRYPTION) else: return _lib.wc_Des3_SetKey(self._dec,self._key,self._IV,_DECRYPTION) def _encrypt(self, destination, source): return _lib.wc_Des3_CbcEncrypt(self._enc,destination,source,len(source)) def _decrypt(self, destination, source): return _lib.wc_Des3_CbcDecrypt(self._dec,destination,source,len(source)) class _Rsa(object): RSA_MIN_PAD_SIZE = 11 def __init__(self): self.native_object = _ffi.new("RsaKey *") ret = _lib.wc_InitRsaKey(self.native_object, _ffi.NULL) if ret < 0: raise WolfCryptError("Invalid key error (%d)" % ret) self._random = Random() def __del__(self): if self.native_object: _lib.wc_FreeRsaKey(self.native_object) class RsaPublic(_Rsa): def __init__(self, key): key = t2b(key) _Rsa.__init__(self) idx = _ffi.new("word32*") idx[0] = 0 ret = _lib.wc_RsaPublicKeyDecode(key, idx, self.native_object, len(key)) if ret < 0: raise WolfCryptError("Invalid key error (%d)" % ret) self.output_size = _lib.wc_RsaEncryptSize(self.native_object) if self.output_size <= 0: raise 
WolfCryptError("Invalid key error (%d)" % self.output_size) def encrypt(self, plaintext): """ Encrypts **plaintext**, using the public key data in the object. The plaintext's length must not be greater than: **self.output_size - self.RSA_MIN_PAD_SIZE** Returns a string containing the ciphertext. """ plaintext = t2b(plaintext) ciphertext = t2b("\0" * self.output_size) ret = _lib.wc_RsaPublicEncrypt(plaintext, len(plaintext), ciphertext, len(ciphertext), self.native_object, self._random.native_object) if ret != self.output_size: raise WolfCryptError("Encryption error (%d)" % ret) return ciphertext def verify(self, signature): """ Verifies **signature**, using the public key data in the object. The signature's length must be equal to: **self.output_size** Returns a string containing the plaintext. """ signature = t2b(signature) plaintext = t2b("\0" * self.output_size) ret = _lib.wc_RsaSSL_Verify(signature, len(signature), plaintext, len(plaintext), self.native_object) if ret < 0: raise WolfCryptError("Verify error (%d)" % ret) return plaintext[:ret] class RsaPrivate(RsaPublic): def __init__(self, key): key = t2b(key) _Rsa.__init__(self) idx = _ffi.new("word32*") idx[0] = 0 ret = _lib.wc_RsaPrivateKeyDecode(key, idx, self.native_object,len(key)) if ret < 0: raise WolfCryptError("Invalid key error (%d)" % ret) self.output_size = _lib.wc_RsaEncryptSize(self.native_object) if self.output_size <= 0: raise WolfCryptError("Invalid key error (%d)" % self.output_size) def decrypt(self, ciphertext): """ Decrypts **ciphertext**, using the private key data in the object. The ciphertext's length must be equal to: **self.output_size** Returns a string containing the plaintext. 
""" ciphertext = t2b(ciphertext) plaintext = t2b("\0" * self.output_size) ret = _lib.wc_RsaPrivateDecrypt(ciphertext, len(ciphertext), plaintext, len(plaintext), self.native_object) if ret < 0: raise WolfCryptError("Decryption error (%d)" % ret) return plaintext[:ret] def sign(self, plaintext): """ Signs **plaintext**, using the private key data in the object. The plaintext's length must not be greater than: **self.output_size - self.RSA_MIN_PAD_SIZE** Returns a string containing the signature. """ plaintext = t2b(plaintext) signature = t2b("\0" * self.output_size) ret = _lib.wc_RsaSSL_Sign(plaintext, len(plaintext), signature, len(signature), self.native_object, self._random.native_object) if ret != self.output_size: raise WolfCryptError("Signature error (%d)" % ret) return signature
fonea/velon
refs/heads/master
themes/frontend/assets/vendor/node_modules/node-sass/node_modules/node-gyp/gyp/gyp_main.py
1452
#!/usr/bin/env python # Copyright (c) 2009 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import sys # Make sure we're using the version of pylib in this repo, not one installed # elsewhere on the system. sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) import gyp if __name__ == '__main__': sys.exit(gyp.script_main())
pbrod/numpy
refs/heads/master
benchmarks/benchmarks/bench_scalar.py
11
from .common import Benchmark, TYPES1

import numpy as np


class ScalarMath(Benchmark):
    # Test scalar math, note that each of these is run repeatedly to offset
    # the function call overhead to some degree.
    # Parametrized over the dtype names in TYPES1; asv reports one timing per
    # dtype.
    params = [TYPES1]
    param_names = ["type"]

    def setup(self, typename):
        # A single NumPy scalar of the parametrized dtype; every timed method
        # operates on this one value.
        self.num = np.dtype(typename).type(2)

    def time_addition(self, typename):
        # Ten chained scalar + scalar additions.  `res` is intentionally
        # unused: the expression exists only to be timed, and the repetition
        # amortizes the benchmark's per-call overhead.
        n = self.num
        res = n + n + n + n + n + n + n + n + n + n

    def time_addition_pyint(self, typename):
        # Same as above but mixing the NumPy scalar with Python ints, which
        # exercises the int -> scalar conversion path on every operation.
        n = self.num
        res = n + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1 + 1

    def time_multiplication(self, typename):
        # Ten chained scalar multiplications; `res` deliberately discarded.
        n = self.num
        res = n * n * n * n * n * n * n * n * n * n

    def time_power_of_two(self, typename):
        # Ten independent n**2 evaluations (built as a tuple) to time the
        # scalar power fast path for a small integer exponent.
        n = self.num
        res = n**2, n**2, n**2, n**2, n**2, n**2, n**2, n**2, n**2, n**2

    def time_abs(self, typename):
        # Ten nested abs() calls on the scalar; result discarded on purpose.
        n = self.num
        res = abs(abs(abs(abs(abs(abs(abs(abs(abs(abs(n))))))))))
joshfriend/werkzeug
refs/heads/master
examples/plnt/database.py
45
# -*- coding: utf-8 -*- """ plnt.database ~~~~~~~~~~~~~ The database definitions for the planet. :copyright: (c) 2009 by the Werkzeug Team, see AUTHORS for more details. :license: BSD. """ from sqlalchemy import MetaData, Table, Column, ForeignKey, Boolean, \ Integer, String, DateTime from sqlalchemy.orm import dynamic_loader, scoped_session, create_session, \ mapper from plnt.utils import application, local_manager def new_db_session(): return create_session(application.database_engine, autoflush=True, autocommit=False) metadata = MetaData() session = scoped_session(new_db_session, local_manager.get_ident) blog_table = Table('blogs', metadata, Column('id', Integer, primary_key=True), Column('name', String(120)), Column('description', String), Column('url', String(200)), Column('feed_url', String(250)) ) entry_table = Table('entries', metadata, Column('id', Integer, primary_key=True), Column('blog_id', Integer, ForeignKey('blogs.id')), Column('guid', String(200), unique=True), Column('title', String(140)), Column('url', String(200)), Column('text', String), Column('pub_date', DateTime), Column('last_update', DateTime) ) class Blog(object): query = session.query_property() def __init__(self, name, url, feed_url, description=u''): self.name = name self.url = url self.feed_url = feed_url self.description = description def __repr__(self): return '<%s %r>' % (self.__class__.__name__, self.url) class Entry(object): query = session.query_property() def __repr__(self): return '<%s %r>' % (self.__class__.__name__, self.guid) mapper(Entry, entry_table) mapper(Blog, blog_table, properties=dict( entries=dynamic_loader(Entry, backref='blog') ))
etalab/udata
refs/heads/master
udata/tests/cli/test_cli_base.py
2
def test_cli_help(cli): '''Should display help without errors''' cli() cli('-?') cli('-h') cli('--help') def test_cli_log_and_printing(cli): '''Should properly log and print''' cli('test log') def test_cli_version(cli): '''Should display version without errors''' cli('--version')
amenonsen/ansible
refs/heads/devel
lib/ansible/modules/network/f5/bigip_user.py
21
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = r''' --- module: bigip_user short_description: Manage user accounts and user attributes on a BIG-IP description: - Manage user accounts and user attributes on a BIG-IP. Typically this module operates only on the REST API users and not the CLI users. When specifying C(root), you may only change the password. Your other parameters will be ignored in this case. Changing the C(root) password is not an idempotent operation. Therefore, it will change it every time this module attempts to change it. version_added: 2.4 options: full_name: description: - Full name of the user. type: str username_credential: description: - Name of the user to create, remove or modify. - The C(root) user may not be removed. type: str required: True aliases: - name password_credential: description: - Set the users password to this unencrypted value. C(password_credential) is required when creating a new account. type: str shell: description: - Optionally set the users shell. type: str choices: - bash - none - tmsh partition_access: description: - Specifies the administrative partition to which the user has access. C(partition_access) is required when creating a new account. Should be in the form "partition:role". - Valid roles include C(acceleration-policy-editor), C(admin), C(application-editor), C(auditor), C(certificate-manager), C(guest), C(irule-manager), C(manager), C(no-access), C(operator), C(resource-admin), C(user-manager), C(web-application-security-administrator), and C(web-application-security-editor). - Partition portion of tuple should be an existing partition or the value 'all'. 
type: list state: description: - Whether the account should exist or not, taking action if the state is different from what is stated. type: str choices: - present - absent default: present update_password: description: - C(always) will allow to update passwords if the user chooses to do so. C(on_create) will only set the password for newly created users. - When C(username_credential) is C(root), this value will be forced to C(always). type: str choices: - always - on_create default: always partition: description: - Device partition to manage resources on. type: str default: Common version_added: 2.5 notes: - Requires BIG-IP versions >= 12.0.0 extends_documentation_fragment: f5 author: - Tim Rupp (@caphrim007) - Wojciech Wypior (@wojtek0806) ''' EXAMPLES = r''' - name: Add the user 'johnd' as an admin bigip_user: username_credential: johnd password_credential: password full_name: John Doe partition_access: all:admin update_password: on_create state: present provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Change the user "johnd's" role and shell bigip_user: username_credential: johnd partition_access: NewPartition:manager shell: tmsh state: present provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Make the user 'johnd' an admin and set to advanced shell bigip_user: name: johnd partition_access: all:admin shell: bash state: present provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Remove the user 'johnd' bigip_user: name: johnd state: absent provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Update password bigip_user: state: present username_credential: johnd password_credential: newsupersecretpassword provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost # Note that the second time this task runs, it would fail because # The password has been changed. 
Therefore, it is recommended that # you either, # # * Put this in its own playbook that you run when you need to # * Put this task in a `block` # * Include `ignore_errors` on this task - name: Change the Admin password bigip_user: state: present username_credential: admin password_credential: NewSecretPassword provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Change the root user's password bigip_user: username_credential: root password_credential: secret state: present provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost ''' RETURN = r''' full_name: description: Full name of the user returned: changed and success type: str sample: John Doe partition_access: description: - List of strings containing the user's roles and which partitions they are applied to. They are specified in the form "partition:role". returned: changed and success type: list sample: ['all:admin'] shell: description: The shell assigned to the user account returned: changed and success type: str sample: tmsh ''' import os import tempfile from ansible.module_utils._text import to_bytes try: from BytesIO import BytesIO except ImportError: from io import BytesIO from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import env_fallback from ansible.module_utils.six import string_types from distutils.version import LooseVersion try: from library.module_utils.network.f5.bigip import F5RestClient from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import AnsibleF5Parameters from library.module_utils.network.f5.common import f5_argument_spec from library.module_utils.network.f5.icontrol import tmos_version from library.module_utils.network.f5.icontrol import upload_file except ImportError: from ansible.module_utils.network.f5.bigip import F5RestClient from ansible.module_utils.network.f5.common import F5ModuleError from 
ansible.module_utils.network.f5.common import AnsibleF5Parameters from ansible.module_utils.network.f5.common import f5_argument_spec from ansible.module_utils.network.f5.icontrol import tmos_version from ansible.module_utils.network.f5.icontrol import upload_file try: # Crypto is used specifically for changing the root password via # tmsh over REST. # # We utilize the crypto library to encrypt the contents of a file # before we upload it, and then decrypt it on-box to change the # password. # # To accomplish such a process, we need to be able to encrypt the # temporary file with the public key found on the box. # # These libraries are used to do the encryption. # # Note that, if these are not available, the ability to change the # root password is disabled and the user will be notified as such # by a failure of the module. # # These libraries *should* be available on most Ansible controllers # by default though as crypto is a dependency of Ansible. # from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import hashes HAS_CRYPTO = True except ImportError: HAS_CRYPTO = False class Parameters(AnsibleF5Parameters): api_map = { 'partitionAccess': 'partition_access', 'description': 'full_name', } updatables = [ 'partition_access', 'full_name', 'shell', 'password_credential', ] returnables = [ 'shell', 'partition_access', 'full_name', 'username_credential', 'password_credential', ] api_attributes = [ 'shell', 'partitionAccess', 'description', 'name', 'password', ] @property def partition_access(self): """Partition access values will require some transformation. This operates on both user and device returned values. Check if the element is a string from user input in the format of name:role, if it is split it and create dictionary out of it. 
If the access value is a dictionary (returned from device, or already processed) and contains nameReference key, delete it and append the remaining dictionary element into a list. If the nameReference key is removed just append the dictionary into the list. Returns: List of dictionaries. Each item in the list is a dictionary which contains the ``name`` of the partition and the ``role`` to allow on that partition. """ if self._values['partition_access'] is None: return result = [] part_access = self._values['partition_access'] for access in part_access: if isinstance(access, dict): if 'nameReference' in access: del access['nameReference'] result.append(access) else: result.append(access) if isinstance(access, string_types): acl = access.split(':') if acl[0].lower() == 'all': acl[0] = 'all-partitions' value = dict( name=acl[0], role=acl[1] ) result.append(value) return result @property def temp_upload_file(self): if self._values['temp_upload_file'] is None: f = tempfile.NamedTemporaryFile() name = os.path.basename(f.name) self._values['temp_upload_file'] = name return self._values['temp_upload_file'] class ApiParameters(Parameters): @property def shell(self): if self._values['shell'] in [None, 'none']: return None return self._values['shell'] class ModuleParameters(Parameters): @property def shell(self): if self._values['shell'] in [None, 'none']: return None return self._values['shell'] class Changes(Parameters): def to_return(self): result = {} for returnable in self.returnables: try: result[returnable] = getattr(self, returnable) except Exception: pass result = self._filter_params(result) return result class UsableChanges(Changes): @property def password(self): if self._values['password_credential'] is None: return None return self._values['password_credential'] class ReportableChanges(Changes): pass class Difference(object): def __init__(self, want, have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return 
result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 @property def password_credential(self): if self.want.password_credential is None: return None if self.want.update_password in ['always']: return self.want.password_credential return None @property def shell(self): if self.want.shell is None: if self.have.shell is not None: return 'none' else: return None if self.want.shell == 'bash': self._validate_shell_parameter() if self.want.shell == self.have.shell: return None else: return self.want.shell if self.want.shell != self.have.shell: return self.want.shell def _validate_shell_parameter(self): """Method to validate shell parameters. Raise when shell attribute is set to 'bash' with roles set to either 'admin' or 'resource-admin'. NOTE: Admin and Resource-Admin roles automatically enable access to all partitions, removing any other roles that the user might have had. There are few other roles which do that but those roles, do not allow bash. """ err = "Shell access is only available to " \ "'admin' or 'resource-admin' roles." 
permit = ['admin', 'resource-admin'] have = self.have.partition_access if not any(r['role'] for r in have if r['role'] in permit): raise F5ModuleError(err) if self.want.partition_access is not None: want = self.want.partition_access if not any(r['role'] for r in want if r['role'] in permit): raise F5ModuleError(err) class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.kwargs = kwargs def exec_module(self): if self.is_root_username_credential(): manager = self.get_manager('root') elif self.is_version_less_than_13(): manager = self.get_manager('v1') else: manager = self.get_manager('v2') return manager.exec_module() def get_manager(self, type): if type == 'root': return RootUserManager(**self.kwargs) elif type == 'v1': return UnpartitionedManager(**self.kwargs) elif type == 'v2': return PartitionedManager(**self.kwargs) def is_version_less_than_13(self): """Checks to see if the TMOS version is less than 13 Anything less than BIG-IP 13.x does not support users on different partitions. 
:return: Bool """ version = tmos_version(self.client) if LooseVersion(version) < LooseVersion('13.0.0'): return True else: return False def is_root_username_credential(self): user = self.module.params.get('username_credential', None) if user == 'root': return True return False class BaseManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.module.deprecate( msg=warning['msg'], version=warning['version'] ) def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = UsableChanges(params=changed) def _update_changed_options(self): diff = Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): changed.update(change) else: changed[k] = change if changed: self.changes = UsableChanges(params=changed) return True return False def exec_module(self): changed = False result = dict() state = self.want.state if state == "present": changed = self.present() elif state == "absent": changed = self.absent() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def present(self): if self.exists(): return self.update() else: return self.create() def absent(self): if self.exists(): return self.remove() return False def should_update(self): result = self._update_changed_options() if result: return True return False def update(self): self.have = 
self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def remove(self): if self.module.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the user.") return True def create(self): self.validate_create_parameters() if self.want.shell == 'bash': self.validate_shell_parameter() self._set_changed_options() if self.module.check_mode: return True self.create_on_device() return True def validate_shell_parameter(self): """Method to validate shell parameters. Raise when shell attribute is set to 'bash' with roles set to either 'admin' or 'resource-admin'. NOTE: Admin and Resource-Admin roles automatically enable access to all partitions, removing any other roles that the user might have had. There are few other roles which do that but those roles, do not allow bash. """ err = "Shell access is only available to " \ "'admin' or 'resource-admin' roles." permit = ['admin', 'resource-admin'] if self.want.partition_access is not None: want = self.want.partition_access if not any(r['role'] for r in want if r['role'] in permit): raise F5ModuleError(err) def validate_create_parameters(self): if self.want.partition_access is None: err = "The 'partition_access' option " \ "is required when creating a resource." 
raise F5ModuleError(err) class UnpartitionedManager(BaseManager): def exists(self): uri = "https://{0}:{1}/mgmt/tm/auth/user/{2}".format( self.client.provider['server'], self.client.provider['server_port'], self.want.username_credential ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError: return False if resp.status == 404 or 'code' in response and response['code'] == 404: return False return True def create_on_device(self): params = self.changes.api_params() params['name'] = self.want.username_credential uri = "https://{0}:{1}/mgmt/tm/auth/user/".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return response['selfLink'] def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/auth/user/{2}".format( self.client.provider['server'], self.client.provider['server_port'], self.want.username_credential ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def remove_from_device(self): uri = "https://{0}:{1}/mgmt/tm/auth/user/{2}".format( self.client.provider['server'], self.client.provider['server_port'], self.want.username_credential ) response = self.client.api.delete(uri) if response.status == 200: return True raise F5ModuleError(response.content) def read_current_from_device(self): uri = "https://{0}:{1}/mgmt/tm/auth/user/{2}".format( self.client.provider['server'], self.client.provider['server_port'], self.want.username_credential ) resp = 
self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return ApiParameters(params=response) class PartitionedManager(BaseManager): def exists(self): response = self.list_users_on_device() if 'items' in response: collection = [x for x in response['items'] if x['name'] == self.want.username_credential] if len(collection) == 1: return True elif len(collection) == 0: return False else: raise F5ModuleError( "Multiple users with the provided name were found!" ) return False def create_on_device(self): params = self.changes.api_params() params['name'] = self.want.username_credential params['partition'] = self.want.partition uri = "https://{0}:{1}/mgmt/tm/auth/user/".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 404, 409, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return True def read_current_from_device(self): response = self.list_users_on_device() collection = [x for x in response['items'] if x['name'] == self.want.username_credential] if len(collection) == 1: user = collection.pop() return ApiParameters(params=user) elif len(collection) == 0: raise F5ModuleError( "No accounts with the provided name were found." ) else: raise F5ModuleError( "Multiple users with the provided name were found!" 
) def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/auth/user/{2}".format( self.client.provider['server'], self.client.provider['server_port'], self.want.username_credential ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 404, 409, 403]: if 'message' in response: if 'updated successfully' not in response['message']: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def remove_from_device(self): uri = "https://{0}:{1}/mgmt/tm/auth/user/{2}".format( self.client.provider['server'], self.client.provider['server_port'], self.want.username_credential ) response = self.client.api.delete(uri) if response.status == 200: return True raise F5ModuleError(response.content) def list_users_on_device(self): uri = "https://{0}:{1}/mgmt/tm/auth/user/".format( self.client.provider['server'], self.client.provider['server_port'], ) query = "?$filter=partition+eq+'{0}'".format(self.want.partition) resp = self.client.api.get(uri + query) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return response class RootUserManager(BaseManager): def exec_module(self): if not HAS_CRYPTO: raise F5ModuleError( "An installed and up-to-date python 'cryptography' package is " "required to change the 'root' password." ) changed = False result = dict() state = self.want.state if state == "present": changed = self.present() elif state == "absent": raise F5ModuleError( "You may not remove the root user." 
) reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def exists(self): return True def update(self): public_key = self.get_public_key_from_device() public_key = self.extract_key(public_key) encrypted = self.encrypt_password_change_file( public_key, self.want.password_credential ) self.upload_to_device(encrypted, self.want.temp_upload_file) result = self.update_on_device() self.remove_uploaded_file_from_device(self.want.temp_upload_file) return result def encrypt_password_change_file(self, public_key, password): # This function call requires that the public_key be expressed in bytes pub = serialization.load_pem_public_key( to_bytes(public_key), backend=default_backend() ) message = to_bytes("{0}\n{0}\n".format(password)) ciphertext = pub.encrypt( message, # OpenSSL craziness # # Using this padding because it is the only one that works with # the OpenSSL on BIG-IP at this time. padding.PKCS1v15(), # # OAEP is the recommended padding to use for encrypting, however, two # things are wrong with it on BIG-IP. # # The first is that one of the parameters required to decrypt the data # is not supported by the OpenSSL version on BIG-IP. A "parameter setting" # error is raised when you attempt to use the OAEP parameters to specify # hashing algorithms. # # This is validated by this thread here # # https://mta.openssl.org/pipermail/openssl-dev/2017-September/009745.html # # Were is supported, we could use OAEP, but the second problem is that OAEP # is not the default mode of the ``openssl`` command. Therefore, we need # to adjust the command we use to decrypt the encrypted file when it is # placed on BIG-IP. # # The correct (and recommended if BIG-IP ever upgrades OpenSSL) code is # shown below. 
# # padding.OAEP( # mgf=padding.MGF1(algorithm=hashes.SHA256()), # algorithm=hashes.SHA256(), # label=None # ) # # Additionally, the code in ``update_on_device()`` would need to be changed # to pass the correct command line arguments to decrypt the file. ) return BytesIO(ciphertext) def extract_key(self, content): """Extracts the public key from the openssl command output over REST The REST output includes some extra output that is not relevant to the public key. This function attempts to only return the valid public key data from the openssl output Args: content: The output from the REST API command to view the public key. Returns: string: The discovered public key """ lines = content.split("\n") start = lines.index('-----BEGIN PUBLIC KEY-----') end = lines.index('-----END PUBLIC KEY-----') result = "\n".join(lines[start:end + 1]) return result def update_on_device(self): errors = ['Bad password', 'password change canceled', 'based on a dictionary word'] # Decrypting logic # # The following commented out command will **not** work on BIG-IP versions # utilizing OpenSSL 1.0.11-fips (15 Jan 2015). # # The reason is because that version of OpenSSL does not support the various # ``-pkeyopt`` parameters shown below. # # Nevertheless, I am including it here as a possible future enhancement in # case the method currently in use stops working. # # This command overrides defaults provided by OpenSSL because I am not # sure how long the defaults will remain the defaults. Probably as long # as it took OpenSSL to reach 1.0... # # openssl = [ # 'openssl', 'pkeyutl', '-in', '/var/config/rest/downloads/{0}'.format(self.want.temp_upload_file), # '-decrypt', '-inkey', '/config/ssl/ssl.key/default.key', # '-pkeyopt', 'rsa_padding_mode:oaep', '-pkeyopt', 'rsa_oaep_md:sha256', # '-pkeyopt', 'rsa_mgf1_md:sha256' # ] # # The command we actually use is (while not recommended) also the only one # that works. 
It forgoes the usage of OAEP and uses the defaults that come # with OpenSSL (PKCS1v15) # # See this link for information on the parameters used # # https://www.openssl.org/docs/manmaster/man1/pkeyutl.html # # If you change the command below, you will need to additionally change # how the encryption is done in ``encrypt_password_change_file()``. # openssl = [ 'openssl', 'pkeyutl', '-in', '/var/config/rest/downloads/{0}'.format(self.want.temp_upload_file), '-decrypt', '-inkey', '/config/ssl/ssl.key/default.key', ] cmd = '-c "{0} | tmsh modify auth password root"'.format(' '.join(openssl)) params = dict( command='run', utilCmdArgs=cmd ) uri = "https://{0}:{1}/mgmt/tm/util/bash".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() if 'commandResult' in response: if any(x for x in errors if x in response['commandResult']): raise F5ModuleError(response['commandResult']) except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return True def upload_to_device(self, content, name): """Uploads a file-like object via the REST API to a given filename Args: content: The file-like object whose content to upload name: The remote name of the file to store the content in. The final location of the file will be in /var/config/rest/downloads. Returns: void """ url = 'https://{0}:{1}/mgmt/shared/file-transfer/uploads'.format( self.client.provider['server'], self.client.provider['server_port'] ) try: upload_file(self.client, url, content, name) except F5ModuleError: raise F5ModuleError( "Failed to upload the file." 
) def remove_uploaded_file_from_device(self, name): filepath = '/var/config/rest/downloads/{0}'.format(name) params = { "command": "run", "utilCmdArgs": filepath } uri = "https://{0}:{1}/mgmt/tm/util/unix-rm".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def get_public_key_from_device(self): cmd = '-c "openssl rsa -in /config/ssl/ssl.key/default.key -pubout"' params = dict( command='run', utilCmdArgs=cmd ) uri = "https://{0}:{1}/mgmt/tm/util/bash".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) if 'commandResult' in response: return response['commandResult'] return None class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True argument_spec = dict( username_credential=dict( required=True, aliases=['name'] ), password_credential=dict( no_log=True, ), partition_access=dict( type='list' ), full_name=dict(), shell=dict( choices=['none', 'bash', 'tmsh'] ), update_password=dict( default='always', choices=['always', 'on_create'] ), state=dict(default='present', choices=['absent', 'present']), partition=dict( default='Common', fallback=(env_fallback, ['F5_PARTITION']) ) ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode ) 
try: mm = ModuleManager(module=module) results = mm.exec_module() module.exit_json(**results) except F5ModuleError as ex: module.fail_json(msg=str(ex)) if __name__ == '__main__': main()
omni5cience/django-inlineformfield
refs/heads/master
.tox/py27/lib/python2.7/site-packages/django/contrib/gis/tests/geoadmin/models.py
110
from django.contrib.gis.db import models from django.contrib.gis import admin from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class City(models.Model): name = models.CharField(max_length=30) point = models.PointField() objects = models.GeoManager() class Meta: app_label = 'geoadmin' def __str__(self): return self.name admin.site.register(City, admin.OSMGeoAdmin)
MSeifert04/numpy
refs/heads/master
numpy/f2py/tests/test_parameter.py
11
from __future__ import division, absolute_import, print_function import os import pytest import numpy as np from numpy.testing import assert_raises, assert_equal from . import util def _path(*a): return os.path.join(*((os.path.dirname(__file__),) + a)) class TestParameters(util.F2PyTest): # Check that intent(in out) translates as intent(inout) sources = [_path('src', 'parameter', 'constant_real.f90'), _path('src', 'parameter', 'constant_integer.f90'), _path('src', 'parameter', 'constant_both.f90'), _path('src', 'parameter', 'constant_compound.f90'), _path('src', 'parameter', 'constant_non_compound.f90'), ] @pytest.mark.slow def test_constant_real_single(self): # non-contiguous should raise error x = np.arange(6, dtype=np.float32)[::2] assert_raises(ValueError, self.module.foo_single, x) # check values with contiguous array x = np.arange(3, dtype=np.float32) self.module.foo_single(x) assert_equal(x, [0 + 1 + 2*3, 1, 2]) @pytest.mark.slow def test_constant_real_double(self): # non-contiguous should raise error x = np.arange(6, dtype=np.float64)[::2] assert_raises(ValueError, self.module.foo_double, x) # check values with contiguous array x = np.arange(3, dtype=np.float64) self.module.foo_double(x) assert_equal(x, [0 + 1 + 2*3, 1, 2]) @pytest.mark.slow def test_constant_compound_int(self): # non-contiguous should raise error x = np.arange(6, dtype=np.int32)[::2] assert_raises(ValueError, self.module.foo_compound_int, x) # check values with contiguous array x = np.arange(3, dtype=np.int32) self.module.foo_compound_int(x) assert_equal(x, [0 + 1 + 2*6, 1, 2]) @pytest.mark.slow def test_constant_non_compound_int(self): # check values x = np.arange(4, dtype=np.int32) self.module.foo_non_compound_int(x) assert_equal(x, [0 + 1 + 2 + 3*4, 1, 2, 3]) @pytest.mark.slow def test_constant_integer_int(self): # non-contiguous should raise error x = np.arange(6, dtype=np.int32)[::2] assert_raises(ValueError, self.module.foo_int, x) # check values with contiguous array x = 
np.arange(3, dtype=np.int32) self.module.foo_int(x) assert_equal(x, [0 + 1 + 2*3, 1, 2]) @pytest.mark.slow def test_constant_integer_long(self): # non-contiguous should raise error x = np.arange(6, dtype=np.int64)[::2] assert_raises(ValueError, self.module.foo_long, x) # check values with contiguous array x = np.arange(3, dtype=np.int64) self.module.foo_long(x) assert_equal(x, [0 + 1 + 2*3, 1, 2]) @pytest.mark.slow def test_constant_both(self): # non-contiguous should raise error x = np.arange(6, dtype=np.float64)[::2] assert_raises(ValueError, self.module.foo, x) # check values with contiguous array x = np.arange(3, dtype=np.float64) self.module.foo(x) assert_equal(x, [0 + 1*3*3 + 2*3*3, 1*3, 2*3]) @pytest.mark.slow def test_constant_no(self): # non-contiguous should raise error x = np.arange(6, dtype=np.float64)[::2] assert_raises(ValueError, self.module.foo_no, x) # check values with contiguous array x = np.arange(3, dtype=np.float64) self.module.foo_no(x) assert_equal(x, [0 + 1*3*3 + 2*3*3, 1*3, 2*3]) @pytest.mark.slow def test_constant_sum(self): # non-contiguous should raise error x = np.arange(6, dtype=np.float64)[::2] assert_raises(ValueError, self.module.foo_sum, x) # check values with contiguous array x = np.arange(3, dtype=np.float64) self.module.foo_sum(x) assert_equal(x, [0 + 1*3*3 + 2*3*3, 1*3, 2*3])
xyzy/androguard
refs/heads/master
demos/geinimi_analysis.py
38
#!/usr/bin/env python import sys import hashlib import pyDes PATH_INSTALL = "./" sys.path.append(PATH_INSTALL + "./") sys.path.append(PATH_INSTALL + "/core") sys.path.append(PATH_INSTALL + "/core/bytecodes") sys.path.append(PATH_INSTALL + "/core/analysis") from androguard import * import analysis TEST = "./geinimi/geinimi.apk" _a = AndroguardS( TEST ) _x = analysis.VMAnalysis( _a.get_vm() ) #print _a.get_strings() KEY = "\x01\x02\x03\x04\x05\x06\x07\x08" _des = pyDes.des( KEY ) #_x.tainted_packages.export_call_graph("toto.dot", "Lcom/swampy/sexpos/pos") tainted_string = _x.tainted_variables.get_string( "DES" ) if tainted_string != None : print "\t -->", tainted_string.get_info() for path in tainted_string.get_paths() : print "\t\t =>", path.get_access_flag(), path.get_method().get_class_name(), path.get_method().get_name(), path.get_method().get_descriptor(), path.get_bb().get_name(), "%x" % ( path.get_bb().start + path.get_idx() ) tainted_field = _x.tainted_variables.get_field( "Lcom/swampy/sexpos/pos/e/k;", "b", "[B" ) if tainted_field != None : print "\t -->", tainted_field.get_info() for path in tainted_field.get_paths() : print "\t\t =>", path.get_access_flag(), path.get_method().get_class_name(), path.get_method().get_name(), path.get_method().get_descriptor(), path.get_bb().get_name(), "%x" % (path.get_bb().start + path.get_idx() ) tainted_field = _x.tainted_variables.get_field( "Lcom/swampy/sexpos/pos/e/p;", "a", "[[B" ) if tainted_field != None : print "\t -->", tainted_field.get_info() for path in tainted_field.get_paths() : print "\t\t =>", path.get_access_flag(), path.get_method().get_class_name(), path.get_method().get_name(), path.get_method().get_descriptor(), path.get_bb().get_name(), "%x" % (path.get_bb().start + path.get_idx() ) if path.get_access_flag() == "W" : b = "" for ins in path.get_method().get_code().get_bc().get() : if ins.get_name() == "FILL-ARRAY-DATA" : b += ins.get_data() print repr( _des.decrypt( b ) ) tainted_field = 
_x.tainted_variables.get_field( "Lcom/swampy/sexpos/pos/a;", "g", "Ljava/lang/String;" ) if tainted_field != None : print "\t -->", tainted_field.get_info() for path in tainted_field.get_paths() : print "\t\t =>", path.get_access_flag(), path.get_method().get_class_name(), path.get_method().get_name(), path.get_method().get_descriptor(), path.get_bb().get_name(), "%x" % (path.get_bb().start + path.get_idx() ) tainted_method = _x.tainted_packages.get_method( "Lcom/swampy/sexpos/pos/e/q;", "a", "(Ljava/lang/String;)Ljava/lang/String;" ) for path in tainted_method : print path.get_access_flag(), path.get_method().get_class_name(), path.get_method().get_name(), path.get_method().get_descriptor(), path.get_bb().get_name(), "%x" % (path.get_bb().start + path.get_idx() )
TOCyna/tabelinha
refs/heads/master
flask/lib/python2.7/site-packages/passlib/handlers/pbkdf2.py
19
"""passlib.handlers.pbkdf - PBKDF2 based hashes""" #============================================================================= # imports #============================================================================= # core from binascii import hexlify, unhexlify from base64 import b64encode, b64decode import re import logging; log = logging.getLogger(__name__) from warnings import warn # site # pkg from passlib.utils import ab64_decode, ab64_encode, to_unicode from passlib.utils.compat import b, bytes, str_to_bascii, u, uascii_to_str, unicode from passlib.utils.pbkdf2 import pbkdf2 import passlib.utils.handlers as uh # local __all__ = [ "pbkdf2_sha1", "pbkdf2_sha256", "pbkdf2_sha512", "cta_pbkdf2_sha1", "dlitz_pbkdf2_sha1", "grub_pbkdf2_sha512", ] #============================================================================= # #============================================================================= class Pbkdf2DigestHandler(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): "base class for various pbkdf2_{digest} algorithms" #=================================================================== # class attrs #=================================================================== #--GenericHandler-- setting_kwds = ("salt", "salt_size", "rounds") checksum_chars = uh.HASH64_CHARS #--HasSalt-- default_salt_size = 16 min_salt_size = 0 max_salt_size = 1024 #--HasRounds-- default_rounds = None # set by subclass min_rounds = 1 max_rounds = 0xffffffff # setting at 32-bit limit for now rounds_cost = "linear" #--this class-- _prf = None # subclass specified prf identifier # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide sanity check. # the underlying pbkdf2 specifies no bounds for either. # NOTE: defaults chosen to be at least as large as pbkdf2 rfc recommends... 
# >8 bytes of entropy in salt, >1000 rounds # increased due to time since rfc established #=================================================================== # methods #=================================================================== @classmethod def from_string(cls, hash): rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) salt = ab64_decode(salt.encode("ascii")) if chk: chk = ab64_decode(chk.encode("ascii")) return cls(rounds=rounds, salt=salt, checksum=chk) def to_string(self, withchk=True): salt = ab64_encode(self.salt).decode("ascii") if withchk and self.checksum: chk = ab64_encode(self.checksum).decode("ascii") else: chk = None return uh.render_mc3(self.ident, self.rounds, salt, chk) def _calc_checksum(self, secret): if isinstance(secret, unicode): secret = secret.encode("utf-8") return pbkdf2(secret, self.salt, self.rounds, self.checksum_size, self._prf) def create_pbkdf2_hash(hash_name, digest_size, rounds=12000, ident=None, module=__name__): "create new Pbkdf2DigestHandler subclass for a specific hash" name = 'pbkdf2_' + hash_name if ident is None: ident = u("$pbkdf2-%s$") % (hash_name,) prf = "hmac-%s" % (hash_name,) base = Pbkdf2DigestHandler return type(name, (base,), dict( __module__=module, # so ABCMeta won't clobber it. name=name, ident=ident, _prf = prf, default_rounds=rounds, checksum_size=digest_size, encoded_checksum_size=(digest_size*4+2)//3, __doc__="""This class implements a generic ``PBKDF2-%(prf)s``-based password hash, and follows the :ref:`password-hash-api`. It supports a variable-length salt, and a variable number of rounds. The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: :type salt: bytes :param salt: Optional salt bytes. If specified, the length must be between 0-1024 bytes. If not specified, a %(dsc)d byte salt will be autogenerated (this is recommended). 
:type salt_size: int :param salt_size: Optional number of bytes to use when autogenerating new salts. Defaults to 16 bytes, but can be any value between 0 and 1024. :type rounds: int :param rounds: Optional number of rounds to use. Defaults to %(dr)d, but must be within ``range(1,1<<32)``. :type relaxed: bool :param relaxed: By default, providing an invalid value for one of the other keywords will result in a :exc:`ValueError`. If ``relaxed=True``, and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` will be issued instead. Correctable errors include ``rounds`` that are too small or too large, and ``salt`` strings that are too long. .. versionadded:: 1.6 """ % dict(prf=prf.upper(), dsc=base.default_salt_size, dr=rounds) )) #------------------------------------------------------------------------ # derived handlers #------------------------------------------------------------------------ pbkdf2_sha1 = create_pbkdf2_hash("sha1", 20, 60000, ident=u("$pbkdf2$")) pbkdf2_sha256 = create_pbkdf2_hash("sha256", 32, 20000) pbkdf2_sha512 = create_pbkdf2_hash("sha512", 64, 19000) ldap_pbkdf2_sha1 = uh.PrefixWrapper("ldap_pbkdf2_sha1", pbkdf2_sha1, "{PBKDF2}", "$pbkdf2$", ident=True) ldap_pbkdf2_sha256 = uh.PrefixWrapper("ldap_pbkdf2_sha256", pbkdf2_sha256, "{PBKDF2-SHA256}", "$pbkdf2-sha256$", ident=True) ldap_pbkdf2_sha512 = uh.PrefixWrapper("ldap_pbkdf2_sha512", pbkdf2_sha512, "{PBKDF2-SHA512}", "$pbkdf2-sha512$", ident=True) #============================================================================= # cryptacular's pbkdf2 hash #============================================================================= # bytes used by cta hash for base64 values 63 & 64 CTA_ALTCHARS = b("-_") class cta_pbkdf2_sha1(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): """This class implements Cryptacular's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. It supports a variable-length salt, and a variable number of rounds. 
The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: :type salt: bytes :param salt: Optional salt bytes. If specified, it may be any length. If not specified, a one will be autogenerated (this is recommended). :type salt_size: int :param salt_size: Optional number of bytes to use when autogenerating new salts. Defaults to 16 bytes, but can be any value between 0 and 1024. :type rounds: int :param rounds: Optional number of rounds to use. Defaults to 60000, must be within ``range(1,1<<32)``. :type relaxed: bool :param relaxed: By default, providing an invalid value for one of the other keywords will result in a :exc:`ValueError`. If ``relaxed=True``, and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` will be issued instead. Correctable errors include ``rounds`` that are too small or too large, and ``salt`` strings that are too long. .. versionadded:: 1.6 """ #=================================================================== # class attrs #=================================================================== #--GenericHandler-- name = "cta_pbkdf2_sha1" setting_kwds = ("salt", "salt_size", "rounds") ident = u("$p5k2$") # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a # sanity check. underlying algorithm (and reference implementation) # allows effectively unbounded values for both of these parameters. 
#--HasSalt-- default_salt_size = 16 min_salt_size = 0 max_salt_size = 1024 #--HasRounds-- default_rounds = pbkdf2_sha1.default_rounds min_rounds = 1 max_rounds = 0xffffffff # setting at 32-bit limit for now rounds_cost = "linear" #=================================================================== # formatting #=================================================================== # hash $p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0= # ident $p5k2$ # rounds 1000 # salt ZxK4ZBJCfQg= # chk jJZVscWtO--p1-xIZl6jhO2LKR0= # NOTE: rounds in hex @classmethod def from_string(cls, hash): # NOTE: passlib deviation - forbidding zero-padded rounds rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, handler=cls) salt = b64decode(salt.encode("ascii"), CTA_ALTCHARS) if chk: chk = b64decode(chk.encode("ascii"), CTA_ALTCHARS) return cls(rounds=rounds, salt=salt, checksum=chk) def to_string(self, withchk=True): salt = b64encode(self.salt, CTA_ALTCHARS).decode("ascii") if withchk and self.checksum: chk = b64encode(self.checksum, CTA_ALTCHARS).decode("ascii") else: chk = None return uh.render_mc3(self.ident, self.rounds, salt, chk, rounds_base=16) #=================================================================== # backend #=================================================================== def _calc_checksum(self, secret): if isinstance(secret, unicode): secret = secret.encode("utf-8") return pbkdf2(secret, self.salt, self.rounds, 20, "hmac-sha1") #=================================================================== # eoc #=================================================================== #============================================================================= # dlitz's pbkdf2 hash #============================================================================= class dlitz_pbkdf2_sha1(uh.HasRounds, uh.HasSalt, uh.GenericHandler): """This class implements Dwayne Litzenberger's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. 
It supports a variable-length salt, and a variable number of rounds. The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: :type salt: str :param salt: Optional salt string. If specified, it may be any length, but must use the characters in the regexp range ``[./0-9A-Za-z]``. If not specified, a 16 character salt will be autogenerated (this is recommended). :type salt_size: int :param salt_size: Optional number of bytes to use when autogenerating new salts. Defaults to 16 bytes, but can be any value between 0 and 1024. :type rounds: int :param rounds: Optional number of rounds to use. Defaults to 60000, must be within ``range(1,1<<32)``. :type relaxed: bool :param relaxed: By default, providing an invalid value for one of the other keywords will result in a :exc:`ValueError`. If ``relaxed=True``, and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` will be issued instead. Correctable errors include ``rounds`` that are too small or too large, and ``salt`` strings that are too long. .. versionadded:: 1.6 """ #=================================================================== # class attrs #=================================================================== #--GenericHandler-- name = "dlitz_pbkdf2_sha1" setting_kwds = ("salt", "salt_size", "rounds") ident = u("$p5k2$") # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a # sanity check. underlying algorithm (and reference implementation) # allows effectively unbounded values for both of these parameters. #--HasSalt-- default_salt_size = 16 min_salt_size = 0 max_salt_size = 1024 salt_chars = uh.HASH64_CHARS #--HasRounds-- # NOTE: for security, the default here is set to match pbkdf2_sha1, # even though this hash's extra block makes it twice as slow. 
default_rounds = pbkdf2_sha1.default_rounds min_rounds = 1 max_rounds = 0xffffffff # setting at 32-bit limit for now rounds_cost = "linear" #=================================================================== # formatting #=================================================================== # hash $p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g # ident $p5k2$ # rounds c # salt u9HvcT4d # chk Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g # rounds in lowercase hex, no zero padding @classmethod def from_string(cls, hash): rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, default_rounds=400, handler=cls) return cls(rounds=rounds, salt=salt, checksum=chk) def to_string(self, withchk=True): rounds = self.rounds if rounds == 400: rounds = None # omit rounds measurement if == 400 return uh.render_mc3(self.ident, rounds, self.salt, checksum=self.checksum if withchk else None, rounds_base=16) #=================================================================== # backend #=================================================================== def _calc_checksum(self, secret): if isinstance(secret, unicode): secret = secret.encode("utf-8") salt = str_to_bascii(self.to_string(withchk=False)) result = pbkdf2(secret, salt, self.rounds, 24, "hmac-sha1") return ab64_encode(result).decode("ascii") #=================================================================== # eoc #=================================================================== #============================================================================= # crowd #============================================================================= class atlassian_pbkdf2_sha1(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): """This class implements the PBKDF2 hash used by Atlassian. It supports a fixed-length salt, and a fixed number of rounds. 
The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keyword: :type salt: bytes :param salt: Optional salt bytes. If specified, the length must be exactly 16 bytes. If not specified, a salt will be autogenerated (this is recommended). :type relaxed: bool :param relaxed: By default, providing an invalid value for one of the other keywords will result in a :exc:`ValueError`. If ``relaxed=True``, and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` will be issued instead. Correctable errors include ``salt`` strings that are too long. .. versionadded:: 1.6 """ #--GenericHandler-- name = "atlassian_pbkdf2_sha1" setting_kwds =("salt",) ident = u("{PKCS5S2}") checksum_size = 32 _stub_checksum = b("\x00") * 32 #--HasRawSalt-- min_salt_size = max_salt_size = 16 @classmethod def from_string(cls, hash): hash = to_unicode(hash, "ascii", "hash") ident = cls.ident if not hash.startswith(ident): raise uh.exc.InvalidHashError(cls) data = b64decode(hash[len(ident):].encode("ascii")) salt, chk = data[:16], data[16:] return cls(salt=salt, checksum=chk) def to_string(self): data = self.salt + (self.checksum or self._stub_checksum) hash = self.ident + b64encode(data).decode("ascii") return uascii_to_str(hash) def _calc_checksum(self, secret): # TODO: find out what crowd's policy is re: unicode if isinstance(secret, unicode): secret = secret.encode("utf-8") # crowd seems to use a fixed number of rounds. return pbkdf2(secret, self.salt, 10000, 32, "hmac-sha1") #============================================================================= # grub #============================================================================= class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): """This class implements Grub's pbkdf2-hmac-sha512 hash, and follows the :ref:`password-hash-api`. It supports a variable-length salt, and a variable number of rounds. 
The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: :type salt: bytes :param salt: Optional salt bytes. If specified, the length must be between 0-1024 bytes. If not specified, a 64 byte salt will be autogenerated (this is recommended). :type salt_size: int :param salt_size: Optional number of bytes to use when autogenerating new salts. Defaults to 64 bytes, but can be any value between 0 and 1024. :type rounds: int :param rounds: Optional number of rounds to use. Defaults to 19000, but must be within ``range(1,1<<32)``. :type relaxed: bool :param relaxed: By default, providing an invalid value for one of the other keywords will result in a :exc:`ValueError`. If ``relaxed=True``, and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` will be issued instead. Correctable errors include ``rounds`` that are too small or too large, and ``salt`` strings that are too long. .. versionadded:: 1.6 """ name = "grub_pbkdf2_sha512" setting_kwds = ("salt", "salt_size", "rounds") ident = u("grub.pbkdf2.sha512.") # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a # sanity check. the underlying pbkdf2 specifies no bounds for either, # and it's not clear what grub specifies. 
default_salt_size = 64 min_salt_size = 0 max_salt_size = 1024 default_rounds = pbkdf2_sha512.default_rounds min_rounds = 1 max_rounds = 0xffffffff # setting at 32-bit limit for now rounds_cost = "linear" @classmethod def from_string(cls, hash): rounds, salt, chk = uh.parse_mc3(hash, cls.ident, sep=u("."), handler=cls) salt = unhexlify(salt.encode("ascii")) if chk: chk = unhexlify(chk.encode("ascii")) return cls(rounds=rounds, salt=salt, checksum=chk) def to_string(self, withchk=True): salt = hexlify(self.salt).decode("ascii").upper() if withchk and self.checksum: chk = hexlify(self.checksum).decode("ascii").upper() else: chk = None return uh.render_mc3(self.ident, self.rounds, salt, chk, sep=u(".")) def _calc_checksum(self, secret): # TODO: find out what grub's policy is re: unicode if isinstance(secret, unicode): secret = secret.encode("utf-8") return pbkdf2(secret, self.salt, self.rounds, 64, "hmac-sha512") #============================================================================= # eof #=============================================================================
kubeflow/pipelines
refs/heads/master
sdk/python/tests/compiler/testdata/uri_artifacts.py
1
# Copyright 2020 The Kubeflow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Pipeline DSL code for testing URI-based artifact passing.""" from kfp import compiler from kfp import components from kfp import dsl # Patch to make the test result deterministic. class Coder: def __init__(self, ): self._code_id = 0 def get_code(self, ): self._code_id += 1 return '{code:0{num_chars:}d}'.format( code=self._code_id, num_chars=dsl._for_loop.LoopArguments.NUM_CODE_CHARS) dsl.ParallelFor._get_unique_id_code = Coder().get_code write_to_gcs = components.load_component_from_text(""" name: Write to GCS inputs: - {name: text, type: String, description: 'Content to be written to GCS'} outputs: - {name: output_gcs_path, type: String, description: 'GCS file path'} implementation: container: image: google/cloud-sdk:slim command: - sh - -c - | set -e -x echo "$0" | gsutil cp - "$1" - {inputValue: text} - {outputUri: output_gcs_path} """) read_from_gcs = components.load_component_from_text(""" name: Read from GCS inputs: - {name: input_gcs_path, type: String, description: 'GCS file path'} implementation: container: image: google/cloud-sdk:slim command: - sh - -c - | set -e -x gsutil cat "$0" - {inputUri: input_gcs_path} """) def flip_coin_op(): """Flip a coin and output heads or tails randomly.""" return dsl.ContainerOp( name='Flip coin', image='python:alpine3.6', command=['sh', '-c'], arguments=['python -c "import random; result = \'heads\' if random.randint(0,1) == 0 ' 'else \'tails\'; 
print(result)" | tee /tmp/output'], file_outputs={'output': '/tmp/output'} ) @dsl.pipeline( name='uri-artifact-pipeline', pipeline_root='gs://my-bucket/my-output-dir') def uri_artifact(text='Hello world!'): task_1 = write_to_gcs(text=text) task_2 = read_from_gcs( input_gcs_path=task_1.outputs['output_gcs_path']) # Test use URI within ParFor loop. loop_args = [1, 2, 3, 4] with dsl.ParallelFor(loop_args) as loop_arg: loop_task_2 = read_from_gcs( input_gcs_path=task_1.outputs['output_gcs_path']) # Test use URI within condition. flip = flip_coin_op() with dsl.Condition(flip.output == 'heads'): condition_task_2 = read_from_gcs( input_gcs_path=task_1.outputs['output_gcs_path']) if __name__ == '__main__': compiler.Compiler().compile(uri_artifact, __file__ + '.tar.gz')
jpshort/odoo
refs/heads/8.0
addons/lunch/report/order.py
377
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from openerp.report import report_sxw from openerp.osv import osv class order(report_sxw.rml_parse): def get_lines(self, user,objects): lines=[] for obj in objects: if user.id==obj.user_id.id: lines.append(obj) return lines def get_total(self, user,objects): lines=[] for obj in objects: if user.id==obj.user_id.id: lines.append(obj) total=0.0 for line in lines: total+=line.price self.net_total+=total return total def get_nettotal(self): return self.net_total def get_users(self, objects): users=[] for obj in objects: if obj.user_id not in users: users.append(obj.user_id) return users def get_note(self,objects): notes=[] for obj in objects: notes.append(obj.note) return notes def __init__(self, cr, uid, name, context): super(order, self).__init__(cr, uid, name, context) self.net_total=0.0 self.localcontext.update({ 'time': time, 'get_lines': self.get_lines, 'get_users': self.get_users, 'get_total': self.get_total, 'get_nettotal': self.get_nettotal, 'get_note': self.get_note, }) class report_lunchorder(osv.AbstractModel): _name = 
'report.lunch.report_lunchorder' _inherit = 'report.abstract_report' _template = 'lunch.report_lunchorder' _wrapped_report_class = order # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
bbiiggppiigg/NTHUOJ_web
refs/heads/master
contest/forms.py
3
''' The MIT License (MIT) Copyright (c) 2014 NTHUOJ team Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' import datetime from django import forms from django.views.generic.edit import UpdateView from contest.models import Contest from contest.models import Clarification from contest.contest_info import get_freeze_time_datetime from users.models import User from datetimewidget.widgets import DateTimeWidget, DateWidget, TimeWidget from problem.models import Problem from django.db.models import Q class ContestForm(forms.ModelForm): dateTimeOptions = { 'format': 'yyyy-mm-dd hh:ii:00', 'todayBtn': 'true', 'minuteStep': 1, } start_time = forms.DateTimeField(widget=DateTimeWidget(options=dateTimeOptions, bootstrap_version=3)) end_time = forms.DateTimeField(widget=DateTimeWidget(options=dateTimeOptions, bootstrap_version=3)) def __init__(self, *args, **kwargs): super(ContestForm, self).__init__(*args, **kwargs) # access object through self.instance... 
initial = kwargs.get('initial',{}) user = initial.get('user',User()) owner = initial.get('owner',User()) method = initial.get('method','') self.fields['coowner'].queryset = User.objects.exclude( Q(user_level=User.USER)|Q(pk = owner)) if method == 'GET': contest_id = initial.get('id',0) # if user not is admin # get all problem when user is admin if not user.has_admin_auth(): # edit contest if contest_id: contest = Contest.objects.get(pk = contest_id) contest_problems = contest.problem.all().distinct() self.fields['problem'].queryset = Problem.objects.filter( Q(visible = True)|Q(owner = user)).distinct() | contest_problems # create contest else: self.fields['problem'].queryset = Problem.objects.filter( Q(visible = True)|Q(owner = user)) elif method == 'POST': self.fields['problem'].queryset = Problem.objects.all() class Meta: model = Contest fields = ( 'cname', 'owner', 'coowner', 'start_time', 'end_time', 'freeze_time', 'problem', 'is_homework', 'open_register', ) def clean_freeze_time(self): start_time = self.cleaned_data.get("start_time") freeze_time = self.cleaned_data.get("freeze_time") end_time = self.cleaned_data.get("end_time") if type(end_time) is datetime.datetime: if end_time - datetime.timedelta(minutes = freeze_time) <= start_time: raise forms.ValidationError("Freeze time cannot longer than Contest duration.") return freeze_time def clean_end_time(self): start_time = self.cleaned_data.get("start_time") end_time = self.cleaned_data.get("end_time") if end_time <= start_time: raise forms.ValidationError("End time cannot be earlier than start time.") return end_time class ClarificationForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(ClarificationForm, self).__init__(*args, **kwargs) #only problems contest contains will be shown in list initial = kwargs.get('initial',{}) contest = initial.get('contest',{}) if type(contest) is Contest: contest_id = contest.id the_contest = Contest.objects.get(id=contest_id) self.fields['problem'] = 
forms.ChoiceField(choices=[(problem.id,problem.pname) for problem in the_contest.problem.all()]) class Meta: model = Clarification fields = ( 'contest', 'problem', 'content', 'asker', ) widgets = { 'content': forms.Textarea(), } class ReplyForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(ReplyForm, self).__init__(*args, **kwargs) #only problems contest contains will be shown in list initial = kwargs.get('initial',{}) contest = initial.get('contest',{}) if type(contest) is Contest: clarifications = Clarification.objects.filter(contest = contest) self.fields['clarification'] = forms.ChoiceField( choices=[(clarification.id,clarification.content) for clarification in clarifications.all()]) class Meta: model = Clarification fields = ( 'reply', 'replier', 'reply_time', 'reply_all' ) widgets = { 'reply': forms.Textarea(), }
funtoo/portage-funtoo
refs/heads/2013-06-07
pym/portage/package/ebuild/_config/special_env_vars.py
1
# Copyright 2010-2013 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from __future__ import unicode_literals __all__ = ( 'case_insensitive_vars', 'default_globals', 'env_blacklist', \ 'environ_filter', 'environ_whitelist', 'environ_whitelist_re', ) import re # Blacklisted variables are internal variables that are never allowed # to enter the config instance from the external environment or # configuration files. env_blacklist = frozenset(( "A", "AA", "CATEGORY", "DEPEND", "DESCRIPTION", "EAPI", "EBUILD_FORCE_TEST", "EBUILD_PHASE", "EBUILD_PHASE_FUNC", "EBUILD_SKIP_MANIFEST", "ED", "EMERGE_FROM", "EPREFIX", "EROOT", "GREP_OPTIONS", "HDEPEND", "HOMEPAGE", "INHERITED", "IUSE", "IUSE_EFFECTIVE", "KEYWORDS", "LICENSE", "MERGE_TYPE", "PDEPEND", "PF", "PKGUSE", "PORTAGE_BACKGROUND", "PORTAGE_BACKGROUND_UNMERGE", "PORTAGE_BUILDDIR_LOCKED", "PORTAGE_BUILT_USE", "PORTAGE_CONFIGROOT", "PORTAGE_INTERNAL_CALLER", "PORTAGE_IUSE", "PORTAGE_NONFATAL", "PORTAGE_PIPE_FD", "PORTAGE_REPO_NAME", "PORTAGE_USE", "PROPERTIES", "PROVIDE", "RDEPEND", "REPOSITORY", "RESTRICT", "ROOT", "SLOT", "SRC_URI" )) environ_whitelist = [] # Whitelisted variables are always allowed to enter the ebuild # environment. Generally, this only includes special portage # variables. Ebuilds can unset variables that are not whitelisted # and rely on them remaining unset for future phases, without them # leaking back in from various locations (bug #189417). It's very # important to set our special BASH_ENV variable in the ebuild # environment in order to prevent sandbox from sourcing /etc/profile # in it's bashrc (causing major leakage). 
environ_whitelist += [ "ACCEPT_LICENSE", "BASH_ENV", "BUILD_PREFIX", "COLUMNS", "D", "DISTDIR", "DOC_SYMLINKS_DIR", "EAPI", "EBUILD", "EBUILD_FORCE_TEST", "EBUILD_PHASE", "EBUILD_PHASE_FUNC", "ECLASSDIR", "ECLASS_DEPTH", "ED", "EMERGE_FROM", "EPREFIX", "EROOT", "FEATURES", "FILESDIR", "HOME", "MERGE_TYPE", "NOCOLOR", "PATH", "PKGDIR", "PKGUSE", "PKG_LOGDIR", "PKG_TMPDIR", "PORTAGE_ACTUAL_DISTDIR", "PORTAGE_ARCHLIST", "PORTAGE_BASHRC", "PM_EBUILD_HOOK_DIR", "PORTAGE_BINPKG_FILE", "PORTAGE_BINPKG_TAR_OPTS", "PORTAGE_BINPKG_TMPFILE", "PORTAGE_BIN_PATH", "PORTAGE_BUILDDIR", "PORTAGE_BUILD_GROUP", "PORTAGE_BUILD_USER", "PORTAGE_BUNZIP2_COMMAND", "PORTAGE_BZIP2_COMMAND", "PORTAGE_COLORMAP", "PORTAGE_COMPRESS", "PORTAGE_COMPRESS_EXCLUDE_SUFFIXES", "PORTAGE_CONFIGROOT", "PORTAGE_DEBUG", "PORTAGE_DEPCACHEDIR", "PORTAGE_DOHTML_UNWARNED_SKIPPED_EXTENSIONS", "PORTAGE_DOHTML_UNWARNED_SKIPPED_FILES", "PORTAGE_DOHTML_WARN_ON_SKIPPED_FILES", "PORTAGE_EBUILD_EXIT_FILE", "PORTAGE_FEATURES", "PORTAGE_GID", "PORTAGE_GRPNAME", "PORTAGE_INTERNAL_CALLER", "PORTAGE_INST_GID", "PORTAGE_INST_UID", "PORTAGE_IPC_DAEMON", "PORTAGE_IUSE", "PORTAGE_LOG_FILE", "PORTAGE_OVERRIDE_EPREFIX", "PORTAGE_PIPE_FD", "PORTAGE_PYM_PATH", "PORTAGE_PYTHON", "PORTAGE_QUIET", "PORTAGE_REPO_NAME", "PORTAGE_RESTRICT", "PORTAGE_SIGPIPE_STATUS", "PORTAGE_TMPDIR", "PORTAGE_UPDATE_ENV", "PORTAGE_USERNAME", "PORTAGE_VERBOSE", "PORTAGE_WORKDIR_MODE", "PORTDIR", "PORTDIR_OVERLAY", "PREROOTPATH", "PROFILE_PATHS", "REPLACING_VERSIONS", "REPLACED_BY_VERSION", "ROOT", "ROOTPATH", "T", "TMP", "TMPDIR", "USE_EXPAND", "USE_ORDER", "WORKDIR", "XARGS", "__PORTAGE_TEST_HARDLINK_LOCKS", ] # user config variables environ_whitelist += [ "DOC_SYMLINKS_DIR", "INSTALL_MASK", "PKG_INSTALL_MASK" ] environ_whitelist += [ "A", "AA", "CATEGORY", "P", "PF", "PN", "PR", "PV", "PVR" ] # misc variables inherited from the calling environment environ_whitelist += [ "COLORTERM", "DISPLAY", "EDITOR", "LESS", "LESSOPEN", "LOGNAME", "LS_COLORS", 
"PAGER", "TERM", "TERMCAP", "USER", 'ftp_proxy', 'http_proxy', 'no_proxy', ] # tempdir settings environ_whitelist += [ "TMPDIR", "TEMP", "TMP", ] # localization settings environ_whitelist += [ "LANG", "LC_COLLATE", "LC_CTYPE", "LC_MESSAGES", "LC_MONETARY", "LC_NUMERIC", "LC_TIME", "LC_PAPER", "LC_ALL", ] # other variables inherited from the calling environment environ_whitelist += [ "CVS_RSH", "ECHANGELOG_USER", "GPG_AGENT_INFO", "SSH_AGENT_PID", "SSH_AUTH_SOCK", "STY", "WINDOW", "XAUTHORITY", ] environ_whitelist = frozenset(environ_whitelist) environ_whitelist_re = re.compile(r'^(CCACHE_|DISTCC_).*') # Filter selected variables in the config.environ() method so that # they don't needlessly propagate down into the ebuild environment. environ_filter = [] # Exclude anything that could be extremely long here (like SRC_URI) # since that could cause execve() calls to fail with E2BIG errors. For # example, see bug #262647. environ_filter += [ 'DEPEND', 'RDEPEND', 'PDEPEND', 'SRC_URI', ] # misc variables inherited from the calling environment environ_filter += [ "INFOPATH", "MANPATH", "USER", ] # variables that break bash environ_filter += [ "HISTFILE", "POSIXLY_CORRECT", ] # portage config variables and variables set directly by portage environ_filter += [ "ACCEPT_CHOSTS", "ACCEPT_KEYWORDS", "ACCEPT_PROPERTIES", "ACCEPT_RESTRICT", "AUTOCLEAN", "CLEAN_DELAY", "COLLISION_IGNORE", "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "DCO_SIGNED_OFF_BY", "EGENCACHE_DEFAULT_OPTS", "EMERGE_DEFAULT_OPTS", "EMERGE_LOG_DIR", "EMERGE_WARNING_DELAY", "FETCHCOMMAND", "FETCHCOMMAND_FTP", "FETCHCOMMAND_HTTP", "FETCHCOMMAND_HTTPS", "FETCHCOMMAND_RSYNC", "FETCHCOMMAND_SFTP", "GENTOO_MIRRORS", "NOCONFMEM", "O", "PORTAGE_BACKGROUND", "PORTAGE_BACKGROUND_UNMERGE", "PORTAGE_BINHOST", "PORTAGE_BUILDDIR_LOCKED", "PORTAGE_CHECKSUM_FILTER", "PORTAGE_ELOG_CLASSES", "PORTAGE_ELOG_MAILFROM", "PORTAGE_ELOG_MAILSUBJECT", "PORTAGE_ELOG_MAILURI", "PORTAGE_ELOG_SYSTEM", "PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS", 
"PORTAGE_FETCH_RESUME_MIN_SIZE", "PORTAGE_GPG_DIR", "PORTAGE_GPG_KEY", "PORTAGE_GPG_SIGNING_COMMAND", "PORTAGE_IONICE_COMMAND", "PORTAGE_PACKAGE_EMPTY_ABORT", "PORTAGE_REPO_DUPLICATE_WARN", "PORTAGE_RO_DISTDIRS", "PORTAGE_SSH_OPTS", "PORTAGE_SYNC_STALE", "PORTAGE_USE", "PORTAGE_XATTR_EXCLUDE", "PORT_LOGDIR", "PORT_LOGDIR_CLEAN", "QUICKPKG_DEFAULT_OPTS", "REPOMAN_DEFAULT_OPTS", "RESUMECOMMAND", "RESUMECOMMAND_FTP", "RESUMECOMMAND_HTTP", "RESUMECOMMAND_HTTPS", "RESUMECOMMAND_SFTP", "SYNC", "UNINSTALL_IGNORE", "USE_EXPAND_HIDDEN", "USE_ORDER", "__PORTAGE_HELPER" ] environ_filter = frozenset(environ_filter) # Variables that are not allowed to have per-repo or per-package # settings. global_only_vars = frozenset([ "CONFIG_PROTECT", ]) default_globals = { 'ACCEPT_LICENSE': '* -@EULA', 'ACCEPT_PROPERTIES': '*', 'PORTAGE_BZIP2_COMMAND': 'bzip2', } validate_commands = ('PORTAGE_BZIP2_COMMAND', 'PORTAGE_BUNZIP2_COMMAND',) # To enhance usability, make some vars case insensitive # by forcing them to lower case. case_insensitive_vars = ('AUTOCLEAN', 'NOCOLOR',)
deeplook/bokeh
refs/heads/master
examples/plotting/file/iris.py
45
from bokeh.sampledata.iris import flowers from bokeh.plotting import figure, show, output_file colormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'} flowers['color'] = flowers['species'].map(lambda x: colormap[x]) output_file("iris.html", title="iris.py example") p = figure(title = "Iris Morphology") p.xaxis.axis_label = 'Petal Length' p.yaxis.axis_label = 'Petal Width' p.circle(flowers["petal_length"], flowers["petal_width"], color=flowers["color"], fill_alpha=0.2, size=10, ) show(p)
tomz/ruby-spark
refs/heads/master
benchmark/comparison/python.py
2
import os import math from time import time from random import random from operator import add from pyspark import SparkContext sc = SparkContext(appName="Python", master="local[*]") log_file = open(os.environ.get('PYTHON_LOG'), 'w') def log(*values): values = map(lambda x: str(x), values) log_file.write(';'.join(values)) log_file.write('\n') workers = int(os.environ.get('WORKERS')) numbers_count = int(os.environ.get('NUMBERS_COUNT')) text_file = os.environ.get('TEXT_FILE') numbers = range(numbers_count) floats = [float(i) for i in numbers] with open(text_file) as t: strings = t.read().split("\n") # ============================================================================= # Serialization # ============================================================================= t = time() rdd_numbers = sc.parallelize(numbers, workers) t = time() - t log('NumbersSerialization', t) t = time() rdd_floats = sc.parallelize(floats, workers) t = time() - t log('FloatsSerialization', t) t = time() rdd_strings = sc.parallelize(strings, workers) t = time() - t log('StringsSerialization', t) # ============================================================================= # Computing # ============================================================================= # --- Is prime? 
--------------------------------------------------------------- def is_prime(x): if x < 2: return [x, False] elif x == 2: return [x, True] elif x % 2 == 0: return [x, False] else: upper = int(math.sqrt(float(x))) result = True i = 3 while i <= upper: if x % i == 0: result = False break i += 2 return [x, result] t = time() rdd_numbers.map(is_prime).collect() t = time() - t log('IsPrime', t) # --- Matrix multiplication --------------------------------------------------- matrix_size = int(os.environ.get('MATRIX_SIZE')) matrix = [] for row in range(matrix_size): matrix.append([]) for col in range(matrix_size): matrix[row].append(row+col) def multiplication_func(matrix): matrix = list(matrix) size = len(matrix) new_matrix = [] for row in range(size): new_matrix.append([]) for col in range(size): result = 0 for i in range(size): result += matrix[row][i] * matrix[col][i] new_matrix[row].append(result) return new_matrix t = time() rdd = sc.parallelize(matrix, 1) rdd.mapPartitions(multiplication_func).collect() t = time() - t log('MatrixMultiplication', t) # --- Pi digits --------------------------------------------------------------- # http://rosettacode.org/wiki/Pi#Python pi_digit = int(os.environ.get('PI_DIGIT')) def pi_func(size): size = size.next() result = '' q, r, t, k, n, l = 1, 0, 1, 1, 3, 3 while size > 0: if 4*q+r-t < n*t: result += str(n) size -= 1 nr = 10*(r-n*t) n = ((10*(3*q+r))//t)-10*n q *= 10 r = nr else: nr = (2*q+r)*l nn = (q*(7*k)+2+(r*l))//(t*l) q *= k t *= l l += 2 k += 1 n = nn r = nr return [result] t = time() rdd = sc.parallelize([pi_digit], 1) rdd.mapPartitions(pi_func).collect() t = time() - t log('PiDigit', t) log_file.close()
jaimahajan1997/sympy
refs/heads/master
sympy/polys/domains/realfield.py
63
"""Implementation of :class:`RealField` class. """ from __future__ import print_function, division from sympy.polys.domains.field import Field from sympy.polys.domains.simpledomain import SimpleDomain from sympy.polys.domains.characteristiczero import CharacteristicZero from sympy.polys.domains.mpelements import MPContext from sympy.polys.polyerrors import DomainError, CoercionFailed from sympy.core.numbers import Float from sympy.utilities import public @public class RealField(Field, CharacteristicZero, SimpleDomain): """Real numbers up to the given precision. """ rep = 'RR' is_RealField = is_RR = True is_Exact = False is_Numerical = True has_assoc_Ring = False has_assoc_Field = True _default_precision = 53 @property def has_default_precision(self): return self.precision == self._default_precision @property def precision(self): return self._context.prec @property def dps(self): return self._context.dps @property def tolerance(self): return self._context.tolerance def __init__(self, prec=_default_precision, dps=None, tol=None): context = MPContext(prec, dps, tol) context._parent = self self._context = context self.dtype = context.mpf self.zero = self.dtype(0) self.one = self.dtype(1) def __eq__(self, other): return (isinstance(other, RealField) and self.precision == other.precision and self.tolerance == other.tolerance) def __hash__(self): return hash((self.__class__.__name__, self.dtype, self.precision, self.tolerance)) def to_sympy(self, element): """Convert ``element`` to SymPy number. """ return Float(element, self.dps) def from_sympy(self, expr): """Convert SymPy's number to ``dtype``. 
""" number = expr.evalf(n=self.dps) if number.is_Number: return self.dtype(number) else: raise CoercionFailed("expected real number, got %s" % expr) def from_ZZ_python(self, element, base): return self.dtype(element) def from_QQ_python(self, element, base): return self.dtype(element.numerator) / element.denominator def from_ZZ_gmpy(self, element, base): return self.dtype(int(element)) def from_QQ_gmpy(self, element, base): return self.dtype(int(element.numerator)) / int(element.denominator) def from_RealField(self, element, base): if self == base: return element else: return self.dtype(element) def from_ComplexField(self, element, base): if not element.imag: return self.dtype(element.real) def to_rational(self, element, limit=True): """Convert a real number to rational number. """ return self._context.to_rational(element, limit) def get_ring(self): """Returns a ring associated with ``self``. """ return self def get_exact(self): """Returns an exact domain associated with ``self``. """ from sympy.polys.domains import QQ return QQ def gcd(self, a, b): """Returns GCD of ``a`` and ``b``. """ return self.one def lcm(self, a, b): """Returns LCM of ``a`` and ``b``. """ return a*b def almosteq(self, a, b, tolerance=None): """Check if ``a`` and ``b`` are almost equal. """ return self._context.almosteq(a, b, tolerance)
yencarnacion/jaikuengine
refs/heads/master
.google_appengine/lib/django-1.5/django/contrib/formtools/tests/wizard/wizardtests/urls.py
319
from django.conf.urls import patterns, url from django.contrib.formtools.tests.wizard.wizardtests.forms import ( SessionContactWizard, CookieContactWizard, Page1, Page2, Page3, Page4) urlpatterns = patterns('', url(r'^wiz_session/$', SessionContactWizard.as_view( [('form1', Page1), ('form2', Page2), ('form3', Page3), ('form4', Page4)])), url(r'^wiz_cookie/$', CookieContactWizard.as_view( [('form1', Page1), ('form2', Page2), ('form3', Page3), ('form4', Page4)])), url(r'^wiz_other_template/$', CookieContactWizard.as_view( [('form1', Page1), ('form2', Page2), ('form3', Page3), ('form4', Page4)], template_name='other_wizard_form.html')), )
leohmoraes/tablib
refs/heads/develop
tablib/packages/odf/attrconverters.py
64
# -*- coding: utf-8 -*- # Copyright (C) 2006-2010 Søren Roug, European Environment Agency # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Contributor(s): # from namespaces import * import re, types pattern_color = re.compile(r'#[0-9a-fA-F]{6}') pattern_vector3D = re.compile(r'\([ ]*-?([0-9]+(\.[0-9]*)?|\.[0-9]+)([ ]+-?([0-9]+(\.[0-9]*)?|\.[0-9]+)){2}[ ]*\)') def make_NCName(arg): for c in (':',' '): arg = arg.replace(c,"_%x_" % ord(c)) return arg def cnv_anyURI(attribute, arg, element): return unicode(arg) def cnv_boolean(attribute, arg, element): if arg.lower() in ("false","no"): return "false" if arg: return "true" return "false" # Potentially accept color values def cnv_color(attribute, arg, element): """ A RGB color in conformance with §5.9.11 of [XSL], that is a RGB color in notation “#rrggbb”, where rr, gg and bb are 8-bit hexadecimal digits. 
""" return str(arg) def cnv_configtype(attribute, arg, element): if str(arg) not in ("boolean", "short", "int", "long", "double", "string", "datetime", "base64Binary"): raise ValueError, "'%s' not allowed" % str(arg) return str(arg) def cnv_data_source_has_labels(attribute, arg, element): if str(arg) not in ("none","row","column","both"): raise ValueError, "'%s' not allowed" % str(arg) return str(arg) # Understand different date formats def cnv_date(attribute, arg, element): """ A dateOrDateTime value is either an [xmlschema-2] date value or an [xmlschema-2] dateTime value. """ return str(arg) def cnv_dateTime(attribute, arg, element): """ A dateOrDateTime value is either an [xmlschema-2] date value or an [xmlschema-2] dateTime value. """ return str(arg) def cnv_double(attribute, arg, element): return str(arg) def cnv_duration(attribute, arg, element): return str(arg) def cnv_family(attribute, arg, element): """ A style family """ if str(arg) not in ("text", "paragraph", "section", "ruby", "table", "table-column", "table-row", "table-cell", "graphic", "presentation", "drawing-page", "chart"): raise ValueError, "'%s' not allowed" % str(arg) return str(arg) def __save_prefix(attribute, arg, element): prefix = arg.split(':',1)[0] if prefix == arg: return unicode(arg) namespace = element.get_knownns(prefix) if namespace is None: #raise ValueError, "'%s' is an unknown prefix" % str(prefix) return unicode(arg) p = element.get_nsprefix(namespace) return unicode(arg) def cnv_formula(attribute, arg, element): """ A string containing a formula. Formulas do not have a predefined syntax, but the string should begin with a namespace prefix, followed by a “:” (COLON, U+003A) separator, followed by the text of the formula. The namespace bound to the prefix determines the syntax and semantics of the formula. 
""" return __save_prefix(attribute, arg, element) def cnv_ID(attribute, arg, element): return str(arg) def cnv_IDREF(attribute, arg, element): return str(arg) def cnv_integer(attribute, arg, element): return str(arg) def cnv_legend_position(attribute, arg, element): if str(arg) not in ("start", "end", "top", "bottom", "top-start", "bottom-start", "top-end", "bottom-end"): raise ValueError, "'%s' not allowed" % str(arg) return str(arg) pattern_length = re.compile(r'-?([0-9]+(\.[0-9]*)?|\.[0-9]+)((cm)|(mm)|(in)|(pt)|(pc)|(px))') def cnv_length(attribute, arg, element): """ A (positive or negative) physical length, consisting of magnitude and unit, in conformance with the Units of Measure defined in §5.9.13 of [XSL]. """ global pattern_length if not pattern_length.match(arg): raise ValueError, "'%s' is not a valid length" % arg return arg def cnv_lengthorpercent(attribute, arg, element): failed = False try: return cnv_length(attribute, arg, element) except: failed = True try: return cnv_percent(attribute, arg, element) except: failed = True if failed: raise ValueError, "'%s' is not a valid length or percent" % arg return arg def cnv_metavaluetype(attribute, arg, element): if str(arg) not in ("float", "date", "time", "boolean", "string"): raise ValueError, "'%s' not allowed" % str(arg) return str(arg) def cnv_major_minor(attribute, arg, element): if arg not in ('major','minor'): raise ValueError, "'%s' is not either 'minor' or 'major'" % arg pattern_namespacedToken = re.compile(r'[0-9a-zA-Z_]+:[0-9a-zA-Z._\-]+') def cnv_namespacedToken(attribute, arg, element): global pattern_namespacedToken if not pattern_namespacedToken.match(arg): raise ValueError, "'%s' is not a valid namespaced token" % arg return __save_prefix(attribute, arg, element) def cnv_NCName(attribute, arg, element): """ NCName is defined in http://www.w3.org/TR/REC-xml-names/#NT-NCName Essentially an XML name minus ':' """ if type(arg) in types.StringTypes: return make_NCName(arg) else: return 
arg.getAttrNS(STYLENS, 'name') # This function takes either an instance of a style (preferred) # or a text string naming the style. If it is a text string, then it must # already have been converted to an NCName # The text-string argument is mainly for when we build a structure from XML def cnv_StyleNameRef(attribute, arg, element): try: return arg.getAttrNS(STYLENS, 'name') except: return arg # This function takes either an instance of a style (preferred) # or a text string naming the style. If it is a text string, then it must # already have been converted to an NCName # The text-string argument is mainly for when we build a structure from XML def cnv_DrawNameRef(attribute, arg, element): try: return arg.getAttrNS(DRAWNS, 'name') except: return arg # Must accept list of Style objects def cnv_NCNames(attribute, arg, element): return ' '.join(arg) def cnv_nonNegativeInteger(attribute, arg, element): return str(arg) pattern_percent = re.compile(r'-?([0-9]+(\.[0-9]*)?|\.[0-9]+)%') def cnv_percent(attribute, arg, element): global pattern_percent if not pattern_percent.match(arg): raise ValueError, "'%s' is not a valid length" % arg return arg # Real one doesn't allow floating point values pattern_points = re.compile(r'-?[0-9]+,-?[0-9]+([ ]+-?[0-9]+,-?[0-9]+)*') #pattern_points = re.compile(r'-?[0-9.]+,-?[0-9.]+([ ]+-?[0-9.]+,-?[0-9.]+)*') def cnv_points(attribute, arg, element): global pattern_points if type(arg) in types.StringTypes: if not pattern_points.match(arg): raise ValueError, "x,y are separated by a comma and the points are separated by white spaces" return arg else: try: strarg = ' '.join([ "%d,%d" % p for p in arg]) except: raise ValueError, "Points must be string or [(0,0),(1,1)] - not %s" % arg return strarg def cnv_positiveInteger(attribute, arg, element): return str(arg) def cnv_string(attribute, arg, element): return unicode(arg) def cnv_textnoteclass(attribute, arg, element): if str(arg) not in ("footnote", "endnote"): raise ValueError, "'%s' not 
allowed" % str(arg) return str(arg) # Understand different time formats def cnv_time(attribute, arg, element): return str(arg) def cnv_token(attribute, arg, element): return str(arg) pattern_viewbox = re.compile(r'-?[0-9]+([ ]+-?[0-9]+){3}$') def cnv_viewbox(attribute, arg, element): global pattern_viewbox if not pattern_viewbox.match(arg): raise ValueError, "viewBox must be four integers separated by whitespaces" return arg def cnv_xlinkshow(attribute, arg, element): if str(arg) not in ("new", "replace", "embed"): raise ValueError, "'%s' not allowed" % str(arg) return str(arg) attrconverters = { ((ANIMNS,u'audio-level'), None): cnv_double, ((ANIMNS,u'color-interpolation'), None): cnv_string, ((ANIMNS,u'color-interpolation-direction'), None): cnv_string, ((ANIMNS,u'command'), None): cnv_string, ((ANIMNS,u'formula'), None): cnv_string, ((ANIMNS,u'id'), None): cnv_ID, ((ANIMNS,u'iterate-interval'), None): cnv_duration, ((ANIMNS,u'iterate-type'), None): cnv_string, ((ANIMNS,u'name'), None): cnv_string, ((ANIMNS,u'sub-item'), None): cnv_string, ((ANIMNS,u'value'), None): cnv_string, # ((DBNS,u'type'), None): cnv_namespacedToken, ((CHARTNS,u'attached-axis'), None): cnv_string, ((CHARTNS,u'class'), (CHARTNS,u'grid')): cnv_major_minor, ((CHARTNS,u'class'), None): cnv_namespacedToken, ((CHARTNS,u'column-mapping'), None): cnv_string, ((CHARTNS,u'connect-bars'), None): cnv_boolean, ((CHARTNS,u'data-label-number'), None): cnv_string, ((CHARTNS,u'data-label-symbol'), None): cnv_boolean, ((CHARTNS,u'data-label-text'), None): cnv_boolean, ((CHARTNS,u'data-source-has-labels'), None): cnv_data_source_has_labels, ((CHARTNS,u'deep'), None): cnv_boolean, ((CHARTNS,u'dimension'), None): cnv_string, ((CHARTNS,u'display-label'), None): cnv_boolean, ((CHARTNS,u'error-category'), None): cnv_string, ((CHARTNS,u'error-lower-indicator'), None): cnv_boolean, ((CHARTNS,u'error-lower-limit'), None): cnv_string, ((CHARTNS,u'error-margin'), None): cnv_string, ((CHARTNS,u'error-percentage'), 
None): cnv_string, ((CHARTNS,u'error-upper-indicator'), None): cnv_boolean, ((CHARTNS,u'error-upper-limit'), None): cnv_string, ((CHARTNS,u'gap-width'), None): cnv_string, ((CHARTNS,u'interpolation'), None): cnv_string, ((CHARTNS,u'interval-major'), None): cnv_string, ((CHARTNS,u'interval-minor-divisor'), None): cnv_string, ((CHARTNS,u'japanese-candle-stick'), None): cnv_boolean, ((CHARTNS,u'label-arrangement'), None): cnv_string, ((CHARTNS,u'label-cell-address'), None): cnv_string, ((CHARTNS,u'legend-align'), None): cnv_string, ((CHARTNS,u'legend-position'), None): cnv_legend_position, ((CHARTNS,u'lines'), None): cnv_boolean, ((CHARTNS,u'link-data-style-to-source'), None): cnv_boolean, ((CHARTNS,u'logarithmic'), None): cnv_boolean, ((CHARTNS,u'maximum'), None): cnv_string, ((CHARTNS,u'mean-value'), None): cnv_boolean, ((CHARTNS,u'minimum'), None): cnv_string, ((CHARTNS,u'name'), None): cnv_string, ((CHARTNS,u'origin'), None): cnv_string, ((CHARTNS,u'overlap'), None): cnv_string, ((CHARTNS,u'percentage'), None): cnv_boolean, ((CHARTNS,u'pie-offset'), None): cnv_string, ((CHARTNS,u'regression-type'), None): cnv_string, ((CHARTNS,u'repeated'), None): cnv_nonNegativeInteger, ((CHARTNS,u'row-mapping'), None): cnv_string, ((CHARTNS,u'scale-text'), None): cnv_boolean, ((CHARTNS,u'series-source'), None): cnv_string, ((CHARTNS,u'solid-type'), None): cnv_string, ((CHARTNS,u'spline-order'), None): cnv_string, ((CHARTNS,u'spline-resolution'), None): cnv_string, ((CHARTNS,u'stacked'), None): cnv_boolean, ((CHARTNS,u'style-name'), None): cnv_StyleNameRef, ((CHARTNS,u'symbol-height'), None): cnv_string, ((CHARTNS,u'symbol-name'), None): cnv_string, ((CHARTNS,u'symbol-type'), None): cnv_string, ((CHARTNS,u'symbol-width'), None): cnv_string, ((CHARTNS,u'text-overlap'), None): cnv_boolean, ((CHARTNS,u'three-dimensional'), None): cnv_boolean, ((CHARTNS,u'tick-marks-major-inner'), None): cnv_boolean, ((CHARTNS,u'tick-marks-major-outer'), None): cnv_boolean, 
((CHARTNS,u'tick-marks-minor-inner'), None): cnv_boolean, ((CHARTNS,u'tick-marks-minor-outer'), None): cnv_boolean, ((CHARTNS,u'values-cell-range-address'), None): cnv_string, ((CHARTNS,u'vertical'), None): cnv_boolean, ((CHARTNS,u'visible'), None): cnv_boolean, ((CONFIGNS,u'name'), None): cnv_formula, ((CONFIGNS,u'type'), None): cnv_configtype, ((DR3DNS,u'ambient-color'), None): cnv_string, ((DR3DNS,u'back-scale'), None): cnv_string, ((DR3DNS,u'backface-culling'), None): cnv_string, ((DR3DNS,u'center'), None): cnv_string, ((DR3DNS,u'close-back'), None): cnv_boolean, ((DR3DNS,u'close-front'), None): cnv_boolean, ((DR3DNS,u'depth'), None): cnv_length, ((DR3DNS,u'diffuse-color'), None): cnv_string, ((DR3DNS,u'direction'), None): cnv_string, ((DR3DNS,u'distance'), None): cnv_length, ((DR3DNS,u'edge-rounding'), None): cnv_string, ((DR3DNS,u'edge-rounding-mode'), None): cnv_string, ((DR3DNS,u'emissive-color'), None): cnv_string, ((DR3DNS,u'enabled'), None): cnv_boolean, ((DR3DNS,u'end-angle'), None): cnv_string, ((DR3DNS,u'focal-length'), None): cnv_length, ((DR3DNS,u'horizontal-segments'), None): cnv_string, ((DR3DNS,u'lighting-mode'), None): cnv_boolean, ((DR3DNS,u'max-edge'), None): cnv_string, ((DR3DNS,u'min-edge'), None): cnv_string, ((DR3DNS,u'normals-direction'), None): cnv_string, ((DR3DNS,u'normals-kind'), None): cnv_string, ((DR3DNS,u'projection'), None): cnv_string, ((DR3DNS,u'shade-mode'), None): cnv_string, ((DR3DNS,u'shadow'), None): cnv_string, ((DR3DNS,u'shadow-slant'), None): cnv_nonNegativeInteger, ((DR3DNS,u'shininess'), None): cnv_string, ((DR3DNS,u'size'), None): cnv_string, ((DR3DNS,u'specular'), None): cnv_boolean, ((DR3DNS,u'specular-color'), None): cnv_string, ((DR3DNS,u'texture-filter'), None): cnv_string, ((DR3DNS,u'texture-generation-mode-x'), None): cnv_string, ((DR3DNS,u'texture-generation-mode-y'), None): cnv_string, ((DR3DNS,u'texture-kind'), None): cnv_string, ((DR3DNS,u'texture-mode'), None): cnv_string, ((DR3DNS,u'transform'), None): 
cnv_string, ((DR3DNS,u'vertical-segments'), None): cnv_string, ((DR3DNS,u'vpn'), None): cnv_string, ((DR3DNS,u'vrp'), None): cnv_string, ((DR3DNS,u'vup'), None): cnv_string, ((DRAWNS,u'align'), None): cnv_string, ((DRAWNS,u'angle'), None): cnv_integer, ((DRAWNS,u'archive'), None): cnv_string, ((DRAWNS,u'auto-grow-height'), None): cnv_boolean, ((DRAWNS,u'auto-grow-width'), None): cnv_boolean, ((DRAWNS,u'background-size'), None): cnv_string, ((DRAWNS,u'blue'), None): cnv_string, ((DRAWNS,u'border'), None): cnv_string, ((DRAWNS,u'caption-angle'), None): cnv_string, ((DRAWNS,u'caption-angle-type'), None): cnv_string, ((DRAWNS,u'caption-escape'), None): cnv_string, ((DRAWNS,u'caption-escape-direction'), None): cnv_string, ((DRAWNS,u'caption-fit-line-length'), None): cnv_boolean, ((DRAWNS,u'caption-gap'), None): cnv_string, ((DRAWNS,u'caption-line-length'), None): cnv_length, ((DRAWNS,u'caption-point-x'), None): cnv_string, ((DRAWNS,u'caption-point-y'), None): cnv_string, ((DRAWNS,u'caption-id'), None): cnv_IDREF, ((DRAWNS,u'caption-type'), None): cnv_string, ((DRAWNS,u'chain-next-name'), None): cnv_string, ((DRAWNS,u'class-id'), None): cnv_string, ((DRAWNS,u'class-names'), None): cnv_NCNames, ((DRAWNS,u'code'), None): cnv_string, ((DRAWNS,u'color'), None): cnv_string, ((DRAWNS,u'color-inversion'), None): cnv_boolean, ((DRAWNS,u'color-mode'), None): cnv_string, ((DRAWNS,u'concave'), None): cnv_string, ((DRAWNS,u'concentric-gradient-fill-allowed'), None): cnv_boolean, ((DRAWNS,u'contrast'), None): cnv_string, ((DRAWNS,u'control'), None): cnv_IDREF, ((DRAWNS,u'copy-of'), None): cnv_string, ((DRAWNS,u'corner-radius'), None): cnv_length, ((DRAWNS,u'corners'), None): cnv_positiveInteger, ((DRAWNS,u'cx'), None): cnv_string, ((DRAWNS,u'cy'), None): cnv_string, ((DRAWNS,u'data'), None): cnv_string, ((DRAWNS,u'decimal-places'), None): cnv_string, ((DRAWNS,u'display'), None): cnv_string, ((DRAWNS,u'display-name'), None): cnv_string, ((DRAWNS,u'distance'), None): 
cnv_lengthorpercent, ((DRAWNS,u'dots1'), None): cnv_integer, ((DRAWNS,u'dots1-length'), None): cnv_lengthorpercent, ((DRAWNS,u'dots2'), None): cnv_integer, ((DRAWNS,u'dots2-length'), None): cnv_lengthorpercent, ((DRAWNS,u'end-angle'), None): cnv_double, ((DRAWNS,u'end'), None): cnv_string, ((DRAWNS,u'end-color'), None): cnv_string, ((DRAWNS,u'end-glue-point'), None): cnv_nonNegativeInteger, ((DRAWNS,u'end-guide'), None): cnv_length, ((DRAWNS,u'end-intensity'), None): cnv_string, ((DRAWNS,u'end-line-spacing-horizontal'), None): cnv_string, ((DRAWNS,u'end-line-spacing-vertical'), None): cnv_string, ((DRAWNS,u'end-shape'), None): cnv_IDREF, ((DRAWNS,u'engine'), None): cnv_namespacedToken, ((DRAWNS,u'enhanced-path'), None): cnv_string, ((DRAWNS,u'escape-direction'), None): cnv_string, ((DRAWNS,u'extrusion-allowed'), None): cnv_boolean, ((DRAWNS,u'extrusion-brightness'), None): cnv_string, ((DRAWNS,u'extrusion'), None): cnv_boolean, ((DRAWNS,u'extrusion-color'), None): cnv_boolean, ((DRAWNS,u'extrusion-depth'), None): cnv_double, ((DRAWNS,u'extrusion-diffusion'), None): cnv_string, ((DRAWNS,u'extrusion-first-light-direction'), None): cnv_string, ((DRAWNS,u'extrusion-first-light-harsh'), None): cnv_boolean, ((DRAWNS,u'extrusion-first-light-level'), None): cnv_string, ((DRAWNS,u'extrusion-light-face'), None): cnv_boolean, ((DRAWNS,u'extrusion-metal'), None): cnv_boolean, ((DRAWNS,u'extrusion-number-of-line-segments'), None): cnv_integer, ((DRAWNS,u'extrusion-origin'), None): cnv_double, ((DRAWNS,u'extrusion-rotation-angle'), None): cnv_double, ((DRAWNS,u'extrusion-rotation-center'), None): cnv_string, ((DRAWNS,u'extrusion-second-light-direction'), None): cnv_string, ((DRAWNS,u'extrusion-second-light-harsh'), None): cnv_boolean, ((DRAWNS,u'extrusion-second-light-level'), None): cnv_string, ((DRAWNS,u'extrusion-shininess'), None): cnv_string, ((DRAWNS,u'extrusion-skew'), None): cnv_double, ((DRAWNS,u'extrusion-specularity'), None): cnv_string, 
((DRAWNS,u'extrusion-viewpoint'), None): cnv_string, ((DRAWNS,u'fill'), None): cnv_string, ((DRAWNS,u'fill-color'), None): cnv_string, ((DRAWNS,u'fill-gradient-name'), None): cnv_string, ((DRAWNS,u'fill-hatch-name'), None): cnv_string, ((DRAWNS,u'fill-hatch-solid'), None): cnv_boolean, ((DRAWNS,u'fill-image-height'), None): cnv_lengthorpercent, ((DRAWNS,u'fill-image-name'), None): cnv_DrawNameRef, ((DRAWNS,u'fill-image-ref-point'), None): cnv_string, ((DRAWNS,u'fill-image-ref-point-x'), None): cnv_string, ((DRAWNS,u'fill-image-ref-point-y'), None): cnv_string, ((DRAWNS,u'fill-image-width'), None): cnv_lengthorpercent, ((DRAWNS,u'filter-name'), None): cnv_string, ((DRAWNS,u'fit-to-contour'), None): cnv_boolean, ((DRAWNS,u'fit-to-size'), None): cnv_boolean, ((DRAWNS,u'formula'), None): cnv_string, ((DRAWNS,u'frame-display-border'), None): cnv_boolean, ((DRAWNS,u'frame-display-scrollbar'), None): cnv_boolean, ((DRAWNS,u'frame-margin-horizontal'), None): cnv_string, ((DRAWNS,u'frame-margin-vertical'), None): cnv_string, ((DRAWNS,u'frame-name'), None): cnv_string, ((DRAWNS,u'gamma'), None): cnv_string, ((DRAWNS,u'glue-point-leaving-directions'), None): cnv_string, ((DRAWNS,u'glue-point-type'), None): cnv_string, ((DRAWNS,u'glue-points'), None): cnv_string, ((DRAWNS,u'gradient-step-count'), None): cnv_string, ((DRAWNS,u'green'), None): cnv_string, ((DRAWNS,u'guide-distance'), None): cnv_string, ((DRAWNS,u'guide-overhang'), None): cnv_length, ((DRAWNS,u'handle-mirror-horizontal'), None): cnv_boolean, ((DRAWNS,u'handle-mirror-vertical'), None): cnv_boolean, ((DRAWNS,u'handle-polar'), None): cnv_string, ((DRAWNS,u'handle-position'), None): cnv_string, ((DRAWNS,u'handle-radius-range-maximum'), None): cnv_string, ((DRAWNS,u'handle-radius-range-minimum'), None): cnv_string, ((DRAWNS,u'handle-range-x-maximum'), None): cnv_string, ((DRAWNS,u'handle-range-x-minimum'), None): cnv_string, ((DRAWNS,u'handle-range-y-maximum'), None): cnv_string, ((DRAWNS,u'handle-range-y-minimum'), 
None): cnv_string, ((DRAWNS,u'handle-switched'), None): cnv_boolean, # ((DRAWNS,u'id'), None): cnv_ID, # ((DRAWNS,u'id'), None): cnv_nonNegativeInteger, # ?? line 6581 in RNG ((DRAWNS,u'id'), None): cnv_string, ((DRAWNS,u'image-opacity'), None): cnv_string, ((DRAWNS,u'kind'), None): cnv_string, ((DRAWNS,u'layer'), None): cnv_string, ((DRAWNS,u'line-distance'), None): cnv_string, ((DRAWNS,u'line-skew'), None): cnv_string, ((DRAWNS,u'luminance'), None): cnv_string, ((DRAWNS,u'marker-end-center'), None): cnv_boolean, ((DRAWNS,u'marker-end'), None): cnv_string, ((DRAWNS,u'marker-end-width'), None): cnv_length, ((DRAWNS,u'marker-start-center'), None): cnv_boolean, ((DRAWNS,u'marker-start'), None): cnv_string, ((DRAWNS,u'marker-start-width'), None): cnv_length, ((DRAWNS,u'master-page-name'), None): cnv_StyleNameRef, ((DRAWNS,u'may-script'), None): cnv_boolean, ((DRAWNS,u'measure-align'), None): cnv_string, ((DRAWNS,u'measure-vertical-align'), None): cnv_string, ((DRAWNS,u'mime-type'), None): cnv_string, ((DRAWNS,u'mirror-horizontal'), None): cnv_boolean, ((DRAWNS,u'mirror-vertical'), None): cnv_boolean, ((DRAWNS,u'modifiers'), None): cnv_string, ((DRAWNS,u'name'), None): cnv_NCName, # ((DRAWNS,u'name'), None): cnv_string, ((DRAWNS,u'nav-order'), None): cnv_IDREF, ((DRAWNS,u'nohref'), None): cnv_string, ((DRAWNS,u'notify-on-update-of-ranges'), None): cnv_string, ((DRAWNS,u'object'), None): cnv_string, ((DRAWNS,u'ole-draw-aspect'), None): cnv_string, ((DRAWNS,u'opacity'), None): cnv_string, ((DRAWNS,u'opacity-name'), None): cnv_string, ((DRAWNS,u'page-number'), None): cnv_positiveInteger, ((DRAWNS,u'parallel'), None): cnv_boolean, ((DRAWNS,u'path-stretchpoint-x'), None): cnv_double, ((DRAWNS,u'path-stretchpoint-y'), None): cnv_double, ((DRAWNS,u'placing'), None): cnv_string, ((DRAWNS,u'points'), None): cnv_points, ((DRAWNS,u'protected'), None): cnv_boolean, ((DRAWNS,u'recreate-on-edit'), None): cnv_boolean, ((DRAWNS,u'red'), None): cnv_string, ((DRAWNS,u'rotation'), None): 
cnv_integer, ((DRAWNS,u'secondary-fill-color'), None): cnv_string, ((DRAWNS,u'shadow'), None): cnv_string, ((DRAWNS,u'shadow-color'), None): cnv_string, ((DRAWNS,u'shadow-offset-x'), None): cnv_length, ((DRAWNS,u'shadow-offset-y'), None): cnv_length, ((DRAWNS,u'shadow-opacity'), None): cnv_string, ((DRAWNS,u'shape-id'), None): cnv_IDREF, ((DRAWNS,u'sharpness'), None): cnv_string, ((DRAWNS,u'show-unit'), None): cnv_boolean, ((DRAWNS,u'start-angle'), None): cnv_double, ((DRAWNS,u'start'), None): cnv_string, ((DRAWNS,u'start-color'), None): cnv_string, ((DRAWNS,u'start-glue-point'), None): cnv_nonNegativeInteger, ((DRAWNS,u'start-guide'), None): cnv_length, ((DRAWNS,u'start-intensity'), None): cnv_string, ((DRAWNS,u'start-line-spacing-horizontal'), None): cnv_string, ((DRAWNS,u'start-line-spacing-vertical'), None): cnv_string, ((DRAWNS,u'start-shape'), None): cnv_IDREF, ((DRAWNS,u'stroke'), None): cnv_string, ((DRAWNS,u'stroke-dash'), None): cnv_string, ((DRAWNS,u'stroke-dash-names'), None): cnv_string, ((DRAWNS,u'stroke-linejoin'), None): cnv_string, ((DRAWNS,u'style'), None): cnv_string, ((DRAWNS,u'style-name'), None): cnv_StyleNameRef, ((DRAWNS,u'symbol-color'), None): cnv_string, ((DRAWNS,u'text-areas'), None): cnv_string, ((DRAWNS,u'text-path-allowed'), None): cnv_boolean, ((DRAWNS,u'text-path'), None): cnv_boolean, ((DRAWNS,u'text-path-mode'), None): cnv_string, ((DRAWNS,u'text-path-same-letter-heights'), None): cnv_boolean, ((DRAWNS,u'text-path-scale'), None): cnv_string, ((DRAWNS,u'text-rotate-angle'), None): cnv_double, ((DRAWNS,u'text-style-name'), None): cnv_StyleNameRef, ((DRAWNS,u'textarea-horizontal-align'), None): cnv_string, ((DRAWNS,u'textarea-vertical-align'), None): cnv_string, ((DRAWNS,u'tile-repeat-offset'), None): cnv_string, ((DRAWNS,u'transform'), None): cnv_string, ((DRAWNS,u'type'), None): cnv_string, ((DRAWNS,u'unit'), None): cnv_string, ((DRAWNS,u'value'), None): cnv_string, ((DRAWNS,u'visible-area-height'), None): cnv_string, 
((DRAWNS,u'visible-area-left'), None): cnv_string, ((DRAWNS,u'visible-area-top'), None): cnv_string, ((DRAWNS,u'visible-area-width'), None): cnv_string, ((DRAWNS,u'wrap-influence-on-position'), None): cnv_string, ((DRAWNS,u'z-index'), None): cnv_nonNegativeInteger, ((FONS,u'background-color'), None): cnv_string, ((FONS,u'border-bottom'), None): cnv_string, ((FONS,u'border'), None): cnv_string, ((FONS,u'border-left'), None): cnv_string, ((FONS,u'border-right'), None): cnv_string, ((FONS,u'border-top'), None): cnv_string, ((FONS,u'break-after'), None): cnv_string, ((FONS,u'break-before'), None): cnv_string, ((FONS,u'clip'), None): cnv_string, ((FONS,u'color'), None): cnv_string, ((FONS,u'column-count'), None): cnv_positiveInteger, ((FONS,u'column-gap'), None): cnv_length, ((FONS,u'country'), None): cnv_token, ((FONS,u'end-indent'), None): cnv_length, ((FONS,u'font-family'), None): cnv_string, ((FONS,u'font-size'), None): cnv_string, ((FONS,u'font-style'), None): cnv_string, ((FONS,u'font-variant'), None): cnv_string, ((FONS,u'font-weight'), None): cnv_string, ((FONS,u'height'), None): cnv_string, ((FONS,u'hyphenate'), None): cnv_boolean, ((FONS,u'hyphenation-keep'), None): cnv_string, ((FONS,u'hyphenation-ladder-count'), None): cnv_string, ((FONS,u'hyphenation-push-char-count'), None): cnv_string, ((FONS,u'hyphenation-remain-char-count'), None): cnv_string, ((FONS,u'keep-together'), None): cnv_string, ((FONS,u'keep-with-next'), None): cnv_string, ((FONS,u'language'), None): cnv_token, ((FONS,u'letter-spacing'), None): cnv_string, ((FONS,u'line-height'), None): cnv_string, ((FONS,u'margin-bottom'), None): cnv_string, ((FONS,u'margin'), None): cnv_string, ((FONS,u'margin-left'), None): cnv_string, ((FONS,u'margin-right'), None): cnv_string, ((FONS,u'margin-top'), None): cnv_string, ((FONS,u'max-height'), None): cnv_string, ((FONS,u'max-width'), None): cnv_string, ((FONS,u'min-height'), None): cnv_length, ((FONS,u'min-width'), None): cnv_string, ((FONS,u'orphans'), 
None): cnv_string, ((FONS,u'padding-bottom'), None): cnv_string, ((FONS,u'padding'), None): cnv_string, ((FONS,u'padding-left'), None): cnv_string, ((FONS,u'padding-right'), None): cnv_string, ((FONS,u'padding-top'), None): cnv_string, ((FONS,u'page-height'), None): cnv_length, ((FONS,u'page-width'), None): cnv_length, ((FONS,u'space-after'), None): cnv_length, ((FONS,u'space-before'), None): cnv_length, ((FONS,u'start-indent'), None): cnv_length, ((FONS,u'text-align'), None): cnv_string, ((FONS,u'text-align-last'), None): cnv_string, ((FONS,u'text-indent'), None): cnv_string, ((FONS,u'text-shadow'), None): cnv_string, ((FONS,u'text-transform'), None): cnv_string, ((FONS,u'widows'), None): cnv_string, ((FONS,u'width'), None): cnv_string, ((FONS,u'wrap-option'), None): cnv_string, ((FORMNS,u'allow-deletes'), None): cnv_boolean, ((FORMNS,u'allow-inserts'), None): cnv_boolean, ((FORMNS,u'allow-updates'), None): cnv_boolean, ((FORMNS,u'apply-design-mode'), None): cnv_boolean, ((FORMNS,u'apply-filter'), None): cnv_boolean, ((FORMNS,u'auto-complete'), None): cnv_boolean, ((FORMNS,u'automatic-focus'), None): cnv_boolean, ((FORMNS,u'bound-column'), None): cnv_string, ((FORMNS,u'button-type'), None): cnv_string, ((FORMNS,u'command'), None): cnv_string, ((FORMNS,u'command-type'), None): cnv_string, ((FORMNS,u'control-implementation'), None): cnv_namespacedToken, ((FORMNS,u'convert-empty-to-null'), None): cnv_boolean, ((FORMNS,u'current-selected'), None): cnv_boolean, ((FORMNS,u'current-state'), None): cnv_string, # ((FORMNS,u'current-value'), None): cnv_date, # ((FORMNS,u'current-value'), None): cnv_double, ((FORMNS,u'current-value'), None): cnv_string, # ((FORMNS,u'current-value'), None): cnv_time, ((FORMNS,u'data-field'), None): cnv_string, ((FORMNS,u'datasource'), None): cnv_string, ((FORMNS,u'default-button'), None): cnv_boolean, ((FORMNS,u'delay-for-repeat'), None): cnv_duration, ((FORMNS,u'detail-fields'), None): cnv_string, ((FORMNS,u'disabled'), None): cnv_boolean, 
((FORMNS,u'dropdown'), None): cnv_boolean, ((FORMNS,u'echo-char'), None): cnv_string, ((FORMNS,u'enctype'), None): cnv_string, ((FORMNS,u'escape-processing'), None): cnv_boolean, ((FORMNS,u'filter'), None): cnv_string, ((FORMNS,u'focus-on-click'), None): cnv_boolean, ((FORMNS,u'for'), None): cnv_string, ((FORMNS,u'id'), None): cnv_ID, ((FORMNS,u'ignore-result'), None): cnv_boolean, ((FORMNS,u'image-align'), None): cnv_string, ((FORMNS,u'image-data'), None): cnv_anyURI, ((FORMNS,u'image-position'), None): cnv_string, ((FORMNS,u'is-tristate'), None): cnv_boolean, ((FORMNS,u'label'), None): cnv_string, ((FORMNS,u'list-source'), None): cnv_string, ((FORMNS,u'list-source-type'), None): cnv_string, ((FORMNS,u'master-fields'), None): cnv_string, ((FORMNS,u'max-length'), None): cnv_nonNegativeInteger, # ((FORMNS,u'max-value'), None): cnv_date, # ((FORMNS,u'max-value'), None): cnv_double, ((FORMNS,u'max-value'), None): cnv_string, # ((FORMNS,u'max-value'), None): cnv_time, ((FORMNS,u'method'), None): cnv_string, # ((FORMNS,u'min-value'), None): cnv_date, # ((FORMNS,u'min-value'), None): cnv_double, ((FORMNS,u'min-value'), None): cnv_string, # ((FORMNS,u'min-value'), None): cnv_time, ((FORMNS,u'multi-line'), None): cnv_boolean, ((FORMNS,u'multiple'), None): cnv_boolean, ((FORMNS,u'name'), None): cnv_string, ((FORMNS,u'navigation-mode'), None): cnv_string, ((FORMNS,u'order'), None): cnv_string, ((FORMNS,u'orientation'), None): cnv_string, ((FORMNS,u'page-step-size'), None): cnv_positiveInteger, ((FORMNS,u'printable'), None): cnv_boolean, ((FORMNS,u'property-name'), None): cnv_string, ((FORMNS,u'readonly'), None): cnv_boolean, ((FORMNS,u'selected'), None): cnv_boolean, ((FORMNS,u'size'), None): cnv_nonNegativeInteger, ((FORMNS,u'state'), None): cnv_string, ((FORMNS,u'step-size'), None): cnv_positiveInteger, ((FORMNS,u'tab-cycle'), None): cnv_string, ((FORMNS,u'tab-index'), None): cnv_nonNegativeInteger, ((FORMNS,u'tab-stop'), None): cnv_boolean, ((FORMNS,u'text-style-name'), 
None): cnv_StyleNameRef, ((FORMNS,u'title'), None): cnv_string, ((FORMNS,u'toggle'), None): cnv_boolean, ((FORMNS,u'validation'), None): cnv_boolean, # ((FORMNS,u'value'), None): cnv_date, # ((FORMNS,u'value'), None): cnv_double, ((FORMNS,u'value'), None): cnv_string, # ((FORMNS,u'value'), None): cnv_time, ((FORMNS,u'visual-effect'), None): cnv_string, ((FORMNS,u'xforms-list-source'), None): cnv_string, ((FORMNS,u'xforms-submission'), None): cnv_string, ((MANIFESTNS,'algorithm-name'), None): cnv_string, ((MANIFESTNS,'checksum'), None): cnv_string, ((MANIFESTNS,'checksum-type'), None): cnv_string, ((MANIFESTNS,'full-path'), None): cnv_string, ((MANIFESTNS,'initialisation-vector'), None): cnv_string, ((MANIFESTNS,'iteration-count'), None): cnv_nonNegativeInteger, ((MANIFESTNS,'key-derivation-name'), None): cnv_string, ((MANIFESTNS,'media-type'), None): cnv_string, ((MANIFESTNS,'salt'), None): cnv_string, ((MANIFESTNS,'size'), None): cnv_nonNegativeInteger, ((METANS,u'cell-count'), None): cnv_nonNegativeInteger, ((METANS,u'character-count'), None): cnv_nonNegativeInteger, ((METANS,u'date'), None): cnv_dateTime, ((METANS,u'delay'), None): cnv_duration, ((METANS,u'draw-count'), None): cnv_nonNegativeInteger, ((METANS,u'frame-count'), None): cnv_nonNegativeInteger, ((METANS,u'image-count'), None): cnv_nonNegativeInteger, ((METANS,u'name'), None): cnv_string, ((METANS,u'non-whitespace-character-count'), None): cnv_nonNegativeInteger, ((METANS,u'object-count'), None): cnv_nonNegativeInteger, ((METANS,u'ole-object-count'), None): cnv_nonNegativeInteger, ((METANS,u'page-count'), None): cnv_nonNegativeInteger, ((METANS,u'paragraph-count'), None): cnv_nonNegativeInteger, ((METANS,u'row-count'), None): cnv_nonNegativeInteger, ((METANS,u'sentence-count'), None): cnv_nonNegativeInteger, ((METANS,u'syllable-count'), None): cnv_nonNegativeInteger, ((METANS,u'table-count'), None): cnv_nonNegativeInteger, ((METANS,u'value-type'), None): cnv_metavaluetype, ((METANS,u'word-count'), 
None): cnv_nonNegativeInteger, ((NUMBERNS,u'automatic-order'), None): cnv_boolean, ((NUMBERNS,u'calendar'), None): cnv_string, ((NUMBERNS,u'country'), None): cnv_token, ((NUMBERNS,u'decimal-places'), None): cnv_integer, ((NUMBERNS,u'decimal-replacement'), None): cnv_string, ((NUMBERNS,u'denominator-value'), None): cnv_integer, ((NUMBERNS,u'display-factor'), None): cnv_double, ((NUMBERNS,u'format-source'), None): cnv_string, ((NUMBERNS,u'grouping'), None): cnv_boolean, ((NUMBERNS,u'language'), None): cnv_token, ((NUMBERNS,u'min-denominator-digits'), None): cnv_integer, ((NUMBERNS,u'min-exponent-digits'), None): cnv_integer, ((NUMBERNS,u'min-integer-digits'), None): cnv_integer, ((NUMBERNS,u'min-numerator-digits'), None): cnv_integer, ((NUMBERNS,u'position'), None): cnv_integer, ((NUMBERNS,u'possessive-form'), None): cnv_boolean, ((NUMBERNS,u'style'), None): cnv_string, ((NUMBERNS,u'textual'), None): cnv_boolean, ((NUMBERNS,u'title'), None): cnv_string, ((NUMBERNS,u'transliteration-country'), None): cnv_token, ((NUMBERNS,u'transliteration-format'), None): cnv_string, ((NUMBERNS,u'transliteration-language'), None): cnv_token, ((NUMBERNS,u'transliteration-style'), None): cnv_string, ((NUMBERNS,u'truncate-on-overflow'), None): cnv_boolean, ((OFFICENS,u'automatic-update'), None): cnv_boolean, ((OFFICENS,u'boolean-value'), None): cnv_boolean, ((OFFICENS,u'conversion-mode'), None): cnv_string, ((OFFICENS,u'currency'), None): cnv_string, ((OFFICENS,u'date-value'), None): cnv_dateTime, ((OFFICENS,u'dde-application'), None): cnv_string, ((OFFICENS,u'dde-item'), None): cnv_string, ((OFFICENS,u'dde-topic'), None): cnv_string, ((OFFICENS,u'display'), None): cnv_boolean, ((OFFICENS,u'mimetype'), None): cnv_string, ((OFFICENS,u'name'), None): cnv_string, ((OFFICENS,u'process-content'), None): cnv_boolean, ((OFFICENS,u'server-map'), None): cnv_boolean, ((OFFICENS,u'string-value'), None): cnv_string, ((OFFICENS,u'target-frame'), None): cnv_string, ((OFFICENS,u'target-frame-name'), 
None): cnv_string, ((OFFICENS,u'time-value'), None): cnv_duration, ((OFFICENS,u'title'), None): cnv_string, ((OFFICENS,u'value'), None): cnv_double, ((OFFICENS,u'value-type'), None): cnv_string, ((OFFICENS,u'version'), None): cnv_string, ((PRESENTATIONNS,u'action'), None): cnv_string, ((PRESENTATIONNS,u'animations'), None): cnv_string, ((PRESENTATIONNS,u'background-objects-visible'), None): cnv_boolean, ((PRESENTATIONNS,u'background-visible'), None): cnv_boolean, ((PRESENTATIONNS,u'class'), None): cnv_string, ((PRESENTATIONNS,u'class-names'), None): cnv_NCNames, ((PRESENTATIONNS,u'delay'), None): cnv_duration, ((PRESENTATIONNS,u'direction'), None): cnv_string, ((PRESENTATIONNS,u'display-date-time'), None): cnv_boolean, ((PRESENTATIONNS,u'display-footer'), None): cnv_boolean, ((PRESENTATIONNS,u'display-header'), None): cnv_boolean, ((PRESENTATIONNS,u'display-page-number'), None): cnv_boolean, ((PRESENTATIONNS,u'duration'), None): cnv_string, ((PRESENTATIONNS,u'effect'), None): cnv_string, ((PRESENTATIONNS,u'endless'), None): cnv_boolean, ((PRESENTATIONNS,u'force-manual'), None): cnv_boolean, ((PRESENTATIONNS,u'full-screen'), None): cnv_boolean, ((PRESENTATIONNS,u'group-id'), None): cnv_string, ((PRESENTATIONNS,u'master-element'), None): cnv_IDREF, ((PRESENTATIONNS,u'mouse-as-pen'), None): cnv_boolean, ((PRESENTATIONNS,u'mouse-visible'), None): cnv_boolean, ((PRESENTATIONNS,u'name'), None): cnv_string, ((PRESENTATIONNS,u'node-type'), None): cnv_string, ((PRESENTATIONNS,u'object'), None): cnv_string, ((PRESENTATIONNS,u'pages'), None): cnv_string, ((PRESENTATIONNS,u'path-id'), None): cnv_string, ((PRESENTATIONNS,u'pause'), None): cnv_duration, ((PRESENTATIONNS,u'placeholder'), None): cnv_boolean, ((PRESENTATIONNS,u'play-full'), None): cnv_boolean, ((PRESENTATIONNS,u'presentation-page-layout-name'), None): cnv_StyleNameRef, ((PRESENTATIONNS,u'preset-class'), None): cnv_string, ((PRESENTATIONNS,u'preset-id'), None): cnv_string, ((PRESENTATIONNS,u'preset-sub-type'), 
None): cnv_string, ((PRESENTATIONNS,u'show'), None): cnv_string, ((PRESENTATIONNS,u'show-end-of-presentation-slide'), None): cnv_boolean, ((PRESENTATIONNS,u'show-logo'), None): cnv_boolean, ((PRESENTATIONNS,u'source'), None): cnv_string, ((PRESENTATIONNS,u'speed'), None): cnv_string, ((PRESENTATIONNS,u'start-page'), None): cnv_string, ((PRESENTATIONNS,u'start-scale'), None): cnv_string, ((PRESENTATIONNS,u'start-with-navigator'), None): cnv_boolean, ((PRESENTATIONNS,u'stay-on-top'), None): cnv_boolean, ((PRESENTATIONNS,u'style-name'), None): cnv_StyleNameRef, ((PRESENTATIONNS,u'transition-on-click'), None): cnv_string, ((PRESENTATIONNS,u'transition-speed'), None): cnv_string, ((PRESENTATIONNS,u'transition-style'), None): cnv_string, ((PRESENTATIONNS,u'transition-type'), None): cnv_string, ((PRESENTATIONNS,u'use-date-time-name'), None): cnv_string, ((PRESENTATIONNS,u'use-footer-name'), None): cnv_string, ((PRESENTATIONNS,u'use-header-name'), None): cnv_string, ((PRESENTATIONNS,u'user-transformed'), None): cnv_boolean, ((PRESENTATIONNS,u'verb'), None): cnv_nonNegativeInteger, ((PRESENTATIONNS,u'visibility'), None): cnv_string, ((SCRIPTNS,u'event-name'), None): cnv_formula, ((SCRIPTNS,u'language'), None): cnv_formula, ((SCRIPTNS,u'macro-name'), None): cnv_string, ((SMILNS,u'accelerate'), None): cnv_double, ((SMILNS,u'accumulate'), None): cnv_string, ((SMILNS,u'additive'), None): cnv_string, ((SMILNS,u'attributeName'), None): cnv_string, ((SMILNS,u'autoReverse'), None): cnv_boolean, ((SMILNS,u'begin'), None): cnv_string, ((SMILNS,u'by'), None): cnv_string, ((SMILNS,u'calcMode'), None): cnv_string, ((SMILNS,u'decelerate'), None): cnv_double, ((SMILNS,u'direction'), None): cnv_string, ((SMILNS,u'dur'), None): cnv_string, ((SMILNS,u'end'), None): cnv_string, ((SMILNS,u'endsync'), None): cnv_string, ((SMILNS,u'fadeColor'), None): cnv_string, ((SMILNS,u'fill'), None): cnv_string, ((SMILNS,u'fillDefault'), None): cnv_string, ((SMILNS,u'from'), None): cnv_string, 
((SMILNS,u'keySplines'), None): cnv_string, ((SMILNS,u'keyTimes'), None): cnv_string, ((SMILNS,u'mode'), None): cnv_string, ((SMILNS,u'repeatCount'), None): cnv_nonNegativeInteger, ((SMILNS,u'repeatDur'), None): cnv_string, ((SMILNS,u'restart'), None): cnv_string, ((SMILNS,u'restartDefault'), None): cnv_string, ((SMILNS,u'subtype'), None): cnv_string, ((SMILNS,u'targetElement'), None): cnv_IDREF, ((SMILNS,u'to'), None): cnv_string, ((SMILNS,u'type'), None): cnv_string, ((SMILNS,u'values'), None): cnv_string, ((STYLENS,u'adjustment'), None): cnv_string, ((STYLENS,u'apply-style-name'), None): cnv_StyleNameRef, ((STYLENS,u'auto-text-indent'), None): cnv_boolean, ((STYLENS,u'auto-update'), None): cnv_boolean, ((STYLENS,u'background-transparency'), None): cnv_string, ((STYLENS,u'base-cell-address'), None): cnv_string, ((STYLENS,u'border-line-width-bottom'), None): cnv_string, ((STYLENS,u'border-line-width'), None): cnv_string, ((STYLENS,u'border-line-width-left'), None): cnv_string, ((STYLENS,u'border-line-width-right'), None): cnv_string, ((STYLENS,u'border-line-width-top'), None): cnv_string, ((STYLENS,u'cell-protect'), None): cnv_string, ((STYLENS,u'char'), None): cnv_string, ((STYLENS,u'class'), None): cnv_string, ((STYLENS,u'color'), None): cnv_string, ((STYLENS,u'column-width'), None): cnv_string, ((STYLENS,u'condition'), None): cnv_string, ((STYLENS,u'country-asian'), None): cnv_string, ((STYLENS,u'country-complex'), None): cnv_string, ((STYLENS,u'data-style-name'), None): cnv_StyleNameRef, ((STYLENS,u'decimal-places'), None): cnv_string, ((STYLENS,u'default-outline-level'), None): cnv_positiveInteger, ((STYLENS,u'diagonal-bl-tr'), None): cnv_string, ((STYLENS,u'diagonal-bl-tr-widths'), None): cnv_string, ((STYLENS,u'diagonal-tl-br'), None): cnv_string, ((STYLENS,u'diagonal-tl-br-widths'), None): cnv_string, ((STYLENS,u'direction'), None): cnv_string, ((STYLENS,u'display'), None): cnv_boolean, ((STYLENS,u'display-name'), None): cnv_string, 
((STYLENS,u'distance-after-sep'), None): cnv_length, ((STYLENS,u'distance-before-sep'), None): cnv_length, ((STYLENS,u'distance'), None): cnv_length, ((STYLENS,u'dynamic-spacing'), None): cnv_boolean, ((STYLENS,u'editable'), None): cnv_boolean, ((STYLENS,u'family'), None): cnv_family, ((STYLENS,u'filter-name'), None): cnv_string, ((STYLENS,u'first-page-number'), None): cnv_string, ((STYLENS,u'flow-with-text'), None): cnv_boolean, ((STYLENS,u'font-adornments'), None): cnv_string, ((STYLENS,u'font-charset'), None): cnv_string, ((STYLENS,u'font-charset-asian'), None): cnv_string, ((STYLENS,u'font-charset-complex'), None): cnv_string, ((STYLENS,u'font-family-asian'), None): cnv_string, ((STYLENS,u'font-family-complex'), None): cnv_string, ((STYLENS,u'font-family-generic-asian'), None): cnv_string, ((STYLENS,u'font-family-generic'), None): cnv_string, ((STYLENS,u'font-family-generic-complex'), None): cnv_string, ((STYLENS,u'font-independent-line-spacing'), None): cnv_boolean, ((STYLENS,u'font-name-asian'), None): cnv_string, ((STYLENS,u'font-name'), None): cnv_string, ((STYLENS,u'font-name-complex'), None): cnv_string, ((STYLENS,u'font-pitch-asian'), None): cnv_string, ((STYLENS,u'font-pitch'), None): cnv_string, ((STYLENS,u'font-pitch-complex'), None): cnv_string, ((STYLENS,u'font-relief'), None): cnv_string, ((STYLENS,u'font-size-asian'), None): cnv_string, ((STYLENS,u'font-size-complex'), None): cnv_string, ((STYLENS,u'font-size-rel-asian'), None): cnv_length, ((STYLENS,u'font-size-rel'), None): cnv_length, ((STYLENS,u'font-size-rel-complex'), None): cnv_length, ((STYLENS,u'font-style-asian'), None): cnv_string, ((STYLENS,u'font-style-complex'), None): cnv_string, ((STYLENS,u'font-style-name-asian'), None): cnv_string, ((STYLENS,u'font-style-name'), None): cnv_string, ((STYLENS,u'font-style-name-complex'), None): cnv_string, ((STYLENS,u'font-weight-asian'), None): cnv_string, ((STYLENS,u'font-weight-complex'), None): cnv_string, ((STYLENS,u'footnote-max-height'), 
None): cnv_length, ((STYLENS,u'glyph-orientation-vertical'), None): cnv_string, ((STYLENS,u'height'), None): cnv_string, ((STYLENS,u'horizontal-pos'), None): cnv_string, ((STYLENS,u'horizontal-rel'), None): cnv_string, ((STYLENS,u'justify-single-word'), None): cnv_boolean, ((STYLENS,u'language-asian'), None): cnv_string, ((STYLENS,u'language-complex'), None): cnv_string, ((STYLENS,u'layout-grid-base-height'), None): cnv_length, ((STYLENS,u'layout-grid-color'), None): cnv_string, ((STYLENS,u'layout-grid-display'), None): cnv_boolean, ((STYLENS,u'layout-grid-lines'), None): cnv_string, ((STYLENS,u'layout-grid-mode'), None): cnv_string, ((STYLENS,u'layout-grid-print'), None): cnv_boolean, ((STYLENS,u'layout-grid-ruby-below'), None): cnv_boolean, ((STYLENS,u'layout-grid-ruby-height'), None): cnv_length, ((STYLENS,u'leader-char'), None): cnv_string, ((STYLENS,u'leader-color'), None): cnv_string, ((STYLENS,u'leader-style'), None): cnv_string, ((STYLENS,u'leader-text'), None): cnv_string, ((STYLENS,u'leader-text-style'), None): cnv_StyleNameRef, ((STYLENS,u'leader-type'), None): cnv_string, ((STYLENS,u'leader-width'), None): cnv_string, ((STYLENS,u'legend-expansion-aspect-ratio'), None): cnv_double, ((STYLENS,u'legend-expansion'), None): cnv_string, ((STYLENS,u'length'), None): cnv_positiveInteger, ((STYLENS,u'letter-kerning'), None): cnv_boolean, ((STYLENS,u'line-break'), None): cnv_string, ((STYLENS,u'line-height-at-least'), None): cnv_string, ((STYLENS,u'line-spacing'), None): cnv_length, ((STYLENS,u'line-style'), None): cnv_string, ((STYLENS,u'lines'), None): cnv_positiveInteger, ((STYLENS,u'list-style-name'), None): cnv_StyleNameRef, ((STYLENS,u'master-page-name'), None): cnv_StyleNameRef, ((STYLENS,u'may-break-between-rows'), None): cnv_boolean, ((STYLENS,u'min-row-height'), None): cnv_string, ((STYLENS,u'mirror'), None): cnv_string, ((STYLENS,u'name'), None): cnv_NCName, ((STYLENS,u'name'), (STYLENS,u'font-face')): cnv_string, ((STYLENS,u'next-style-name'), None): 
cnv_StyleNameRef, ((STYLENS,u'num-format'), None): cnv_string, ((STYLENS,u'num-letter-sync'), None): cnv_boolean, ((STYLENS,u'num-prefix'), None): cnv_string, ((STYLENS,u'num-suffix'), None): cnv_string, ((STYLENS,u'number-wrapped-paragraphs'), None): cnv_string, ((STYLENS,u'overflow-behavior'), None): cnv_string, ((STYLENS,u'page-layout-name'), None): cnv_StyleNameRef, ((STYLENS,u'page-number'), None): cnv_string, ((STYLENS,u'page-usage'), None): cnv_string, ((STYLENS,u'paper-tray-name'), None): cnv_string, ((STYLENS,u'parent-style-name'), None): cnv_StyleNameRef, ((STYLENS,u'position'), (STYLENS,u'tab-stop')): cnv_length, ((STYLENS,u'position'), None): cnv_string, ((STYLENS,u'print'), None): cnv_string, ((STYLENS,u'print-content'), None): cnv_boolean, ((STYLENS,u'print-orientation'), None): cnv_string, ((STYLENS,u'print-page-order'), None): cnv_string, ((STYLENS,u'protect'), None): cnv_boolean, ((STYLENS,u'punctuation-wrap'), None): cnv_string, ((STYLENS,u'register-true'), None): cnv_boolean, ((STYLENS,u'register-truth-ref-style-name'), None): cnv_string, ((STYLENS,u'rel-column-width'), None): cnv_string, ((STYLENS,u'rel-height'), None): cnv_string, ((STYLENS,u'rel-width'), None): cnv_string, ((STYLENS,u'repeat'), None): cnv_string, ((STYLENS,u'repeat-content'), None): cnv_boolean, ((STYLENS,u'rotation-align'), None): cnv_string, ((STYLENS,u'rotation-angle'), None): cnv_string, ((STYLENS,u'row-height'), None): cnv_string, ((STYLENS,u'ruby-align'), None): cnv_string, ((STYLENS,u'ruby-position'), None): cnv_string, ((STYLENS,u'run-through'), None): cnv_string, ((STYLENS,u'scale-to'), None): cnv_string, ((STYLENS,u'scale-to-pages'), None): cnv_string, ((STYLENS,u'script-type'), None): cnv_string, ((STYLENS,u'shadow'), None): cnv_string, ((STYLENS,u'shrink-to-fit'), None): cnv_boolean, ((STYLENS,u'snap-to-layout-grid'), None): cnv_boolean, ((STYLENS,u'style'), None): cnv_string, ((STYLENS,u'style-name'), None): cnv_StyleNameRef, ((STYLENS,u'tab-stop-distance'), 
None): cnv_string, ((STYLENS,u'table-centering'), None): cnv_string, ((STYLENS,u'text-align-source'), None): cnv_string, ((STYLENS,u'text-autospace'), None): cnv_string, ((STYLENS,u'text-blinking'), None): cnv_boolean, ((STYLENS,u'text-combine'), None): cnv_string, ((STYLENS,u'text-combine-end-char'), None): cnv_string, ((STYLENS,u'text-combine-start-char'), None): cnv_string, ((STYLENS,u'text-emphasize'), None): cnv_string, ((STYLENS,u'text-line-through-color'), None): cnv_string, ((STYLENS,u'text-line-through-mode'), None): cnv_string, ((STYLENS,u'text-line-through-style'), None): cnv_string, ((STYLENS,u'text-line-through-text'), None): cnv_string, ((STYLENS,u'text-line-through-text-style'), None): cnv_string, ((STYLENS,u'text-line-through-type'), None): cnv_string, ((STYLENS,u'text-line-through-width'), None): cnv_string, ((STYLENS,u'text-outline'), None): cnv_boolean, ((STYLENS,u'text-position'), None): cnv_string, ((STYLENS,u'text-rotation-angle'), None): cnv_string, ((STYLENS,u'text-rotation-scale'), None): cnv_string, ((STYLENS,u'text-scale'), None): cnv_string, ((STYLENS,u'text-underline-color'), None): cnv_string, ((STYLENS,u'text-underline-mode'), None): cnv_string, ((STYLENS,u'text-underline-style'), None): cnv_string, ((STYLENS,u'text-underline-type'), None): cnv_string, ((STYLENS,u'text-underline-width'), None): cnv_string, ((STYLENS,u'type'), None): cnv_string, ((STYLENS,u'use-optimal-column-width'), None): cnv_boolean, ((STYLENS,u'use-optimal-row-height'), None): cnv_boolean, ((STYLENS,u'use-window-font-color'), None): cnv_boolean, ((STYLENS,u'vertical-align'), None): cnv_string, ((STYLENS,u'vertical-pos'), None): cnv_string, ((STYLENS,u'vertical-rel'), None): cnv_string, ((STYLENS,u'volatile'), None): cnv_boolean, ((STYLENS,u'width'), None): cnv_string, ((STYLENS,u'wrap'), None): cnv_string, ((STYLENS,u'wrap-contour'), None): cnv_boolean, ((STYLENS,u'wrap-contour-mode'), None): cnv_string, ((STYLENS,u'wrap-dynamic-threshold'), None): cnv_length, 
((STYLENS,u'writing-mode-automatic'), None): cnv_boolean, ((STYLENS,u'writing-mode'), None): cnv_string, ((SVGNS,u'accent-height'), None): cnv_integer, ((SVGNS,u'alphabetic'), None): cnv_integer, ((SVGNS,u'ascent'), None): cnv_integer, ((SVGNS,u'bbox'), None): cnv_string, ((SVGNS,u'cap-height'), None): cnv_integer, ((SVGNS,u'cx'), None): cnv_string, ((SVGNS,u'cy'), None): cnv_string, ((SVGNS,u'd'), None): cnv_string, ((SVGNS,u'descent'), None): cnv_integer, ((SVGNS,u'fill-rule'), None): cnv_string, ((SVGNS,u'font-family'), None): cnv_string, ((SVGNS,u'font-size'), None): cnv_string, ((SVGNS,u'font-stretch'), None): cnv_string, ((SVGNS,u'font-style'), None): cnv_string, ((SVGNS,u'font-variant'), None): cnv_string, ((SVGNS,u'font-weight'), None): cnv_string, ((SVGNS,u'fx'), None): cnv_string, ((SVGNS,u'fy'), None): cnv_string, ((SVGNS,u'gradientTransform'), None): cnv_string, ((SVGNS,u'gradientUnits'), None): cnv_string, ((SVGNS,u'hanging'), None): cnv_integer, ((SVGNS,u'height'), None): cnv_length, ((SVGNS,u'ideographic'), None): cnv_integer, ((SVGNS,u'mathematical'), None): cnv_integer, ((SVGNS,u'name'), None): cnv_string, ((SVGNS,u'offset'), None): cnv_string, ((SVGNS,u'origin'), None): cnv_string, ((SVGNS,u'overline-position'), None): cnv_integer, ((SVGNS,u'overline-thickness'), None): cnv_integer, ((SVGNS,u'panose-1'), None): cnv_string, ((SVGNS,u'path'), None): cnv_string, ((SVGNS,u'r'), None): cnv_length, ((SVGNS,u'rx'), None): cnv_length, ((SVGNS,u'ry'), None): cnv_length, ((SVGNS,u'slope'), None): cnv_integer, ((SVGNS,u'spreadMethod'), None): cnv_string, ((SVGNS,u'stemh'), None): cnv_integer, ((SVGNS,u'stemv'), None): cnv_integer, ((SVGNS,u'stop-color'), None): cnv_string, ((SVGNS,u'stop-opacity'), None): cnv_double, ((SVGNS,u'strikethrough-position'), None): cnv_integer, ((SVGNS,u'strikethrough-thickness'), None): cnv_integer, ((SVGNS,u'string'), None): cnv_string, ((SVGNS,u'stroke-color'), None): cnv_string, ((SVGNS,u'stroke-opacity'), None): cnv_string, 
((SVGNS,u'stroke-width'), None): cnv_length, ((SVGNS,u'type'), None): cnv_string, ((SVGNS,u'underline-position'), None): cnv_integer, ((SVGNS,u'underline-thickness'), None): cnv_integer, ((SVGNS,u'unicode-range'), None): cnv_string, ((SVGNS,u'units-per-em'), None): cnv_integer, ((SVGNS,u'v-alphabetic'), None): cnv_integer, ((SVGNS,u'v-hanging'), None): cnv_integer, ((SVGNS,u'v-ideographic'), None): cnv_integer, ((SVGNS,u'v-mathematical'), None): cnv_integer, ((SVGNS,u'viewBox'), None): cnv_viewbox, ((SVGNS,u'width'), None): cnv_length, ((SVGNS,u'widths'), None): cnv_string, ((SVGNS,u'x'), None): cnv_length, ((SVGNS,u'x-height'), None): cnv_integer, ((SVGNS,u'x1'), None): cnv_lengthorpercent, ((SVGNS,u'x2'), None): cnv_lengthorpercent, ((SVGNS,u'y'), None): cnv_length, ((SVGNS,u'y1'), None): cnv_lengthorpercent, ((SVGNS,u'y2'), None): cnv_lengthorpercent, ((TABLENS,u'acceptance-state'), None): cnv_string, ((TABLENS,u'add-empty-lines'), None): cnv_boolean, ((TABLENS,u'algorithm'), None): cnv_formula, ((TABLENS,u'align'), None): cnv_string, ((TABLENS,u'allow-empty-cell'), None): cnv_boolean, ((TABLENS,u'application-data'), None): cnv_string, ((TABLENS,u'automatic-find-labels'), None): cnv_boolean, ((TABLENS,u'base-cell-address'), None): cnv_string, ((TABLENS,u'bind-styles-to-content'), None): cnv_boolean, ((TABLENS,u'border-color'), None): cnv_string, ((TABLENS,u'border-model'), None): cnv_string, ((TABLENS,u'buttons'), None): cnv_string, ((TABLENS,u'buttons'), None): cnv_string, ((TABLENS,u'case-sensitive'), None): cnv_boolean, ((TABLENS,u'case-sensitive'), None): cnv_string, ((TABLENS,u'cell-address'), None): cnv_string, ((TABLENS,u'cell-range-address'), None): cnv_string, ((TABLENS,u'cell-range-address'), None): cnv_string, ((TABLENS,u'cell-range'), None): cnv_string, ((TABLENS,u'column'), None): cnv_integer, ((TABLENS,u'comment'), None): cnv_string, ((TABLENS,u'condition'), None): cnv_formula, ((TABLENS,u'condition-source'), None): cnv_string, 
((TABLENS,u'condition-source-range-address'), None): cnv_string, ((TABLENS,u'contains-error'), None): cnv_boolean, ((TABLENS,u'contains-header'), None): cnv_boolean, ((TABLENS,u'content-validation-name'), None): cnv_string, ((TABLENS,u'copy-back'), None): cnv_boolean, ((TABLENS,u'copy-formulas'), None): cnv_boolean, ((TABLENS,u'copy-styles'), None): cnv_boolean, ((TABLENS,u'count'), None): cnv_positiveInteger, ((TABLENS,u'country'), None): cnv_token, ((TABLENS,u'data-cell-range-address'), None): cnv_string, ((TABLENS,u'data-field'), None): cnv_string, ((TABLENS,u'data-type'), None): cnv_string, ((TABLENS,u'database-name'), None): cnv_string, ((TABLENS,u'database-table-name'), None): cnv_string, ((TABLENS,u'date-end'), None): cnv_string, ((TABLENS,u'date-start'), None): cnv_string, ((TABLENS,u'date-value'), None): cnv_date, ((TABLENS,u'default-cell-style-name'), None): cnv_StyleNameRef, ((TABLENS,u'direction'), None): cnv_string, ((TABLENS,u'display-border'), None): cnv_boolean, ((TABLENS,u'display'), None): cnv_boolean, ((TABLENS,u'display-duplicates'), None): cnv_boolean, ((TABLENS,u'display-filter-buttons'), None): cnv_boolean, ((TABLENS,u'display-list'), None): cnv_string, ((TABLENS,u'display-member-mode'), None): cnv_string, ((TABLENS,u'drill-down-on-double-click'), None): cnv_boolean, ((TABLENS,u'enabled'), None): cnv_boolean, ((TABLENS,u'end-cell-address'), None): cnv_string, ((TABLENS,u'end'), None): cnv_string, ((TABLENS,u'end-column'), None): cnv_integer, ((TABLENS,u'end-position'), None): cnv_integer, ((TABLENS,u'end-row'), None): cnv_integer, ((TABLENS,u'end-table'), None): cnv_integer, ((TABLENS,u'end-x'), None): cnv_length, ((TABLENS,u'end-y'), None): cnv_length, ((TABLENS,u'execute'), None): cnv_boolean, ((TABLENS,u'expression'), None): cnv_formula, ((TABLENS,u'field-name'), None): cnv_string, ((TABLENS,u'field-number'), None): cnv_nonNegativeInteger, ((TABLENS,u'field-number'), None): cnv_string, ((TABLENS,u'filter-name'), None): cnv_string, 
((TABLENS,u'filter-options'), None): cnv_string, ((TABLENS,u'formula'), None): cnv_formula, ((TABLENS,u'function'), None): cnv_string, ((TABLENS,u'function'), None): cnv_string, ((TABLENS,u'grand-total'), None): cnv_string, ((TABLENS,u'group-by-field-number'), None): cnv_nonNegativeInteger, ((TABLENS,u'grouped-by'), None): cnv_string, ((TABLENS,u'has-persistent-data'), None): cnv_boolean, ((TABLENS,u'id'), None): cnv_string, ((TABLENS,u'identify-categories'), None): cnv_boolean, ((TABLENS,u'ignore-empty-rows'), None): cnv_boolean, ((TABLENS,u'index'), None): cnv_nonNegativeInteger, ((TABLENS,u'is-active'), None): cnv_boolean, ((TABLENS,u'is-data-layout-field'), None): cnv_string, ((TABLENS,u'is-selection'), None): cnv_boolean, ((TABLENS,u'is-sub-table'), None): cnv_boolean, ((TABLENS,u'label-cell-range-address'), None): cnv_string, ((TABLENS,u'language'), None): cnv_token, ((TABLENS,u'language'), None): cnv_token, ((TABLENS,u'last-column-spanned'), None): cnv_positiveInteger, ((TABLENS,u'last-row-spanned'), None): cnv_positiveInteger, ((TABLENS,u'layout-mode'), None): cnv_string, ((TABLENS,u'link-to-source-data'), None): cnv_boolean, ((TABLENS,u'marked-invalid'), None): cnv_boolean, ((TABLENS,u'matrix-covered'), None): cnv_boolean, ((TABLENS,u'maximum-difference'), None): cnv_double, ((TABLENS,u'member-count'), None): cnv_nonNegativeInteger, ((TABLENS,u'member-name'), None): cnv_string, ((TABLENS,u'member-type'), None): cnv_string, ((TABLENS,u'message-type'), None): cnv_string, ((TABLENS,u'mode'), None): cnv_string, ((TABLENS,u'multi-deletion-spanned'), None): cnv_integer, ((TABLENS,u'name'), None): cnv_string, ((TABLENS,u'name'), None): cnv_string, ((TABLENS,u'null-year'), None): cnv_positiveInteger, ((TABLENS,u'number-columns-repeated'), None): cnv_positiveInteger, ((TABLENS,u'number-columns-spanned'), None): cnv_positiveInteger, ((TABLENS,u'number-matrix-columns-spanned'), None): cnv_positiveInteger, ((TABLENS,u'number-matrix-rows-spanned'), None): 
cnv_positiveInteger, ((TABLENS,u'number-rows-repeated'), None): cnv_positiveInteger, ((TABLENS,u'number-rows-spanned'), None): cnv_positiveInteger, ((TABLENS,u'object-name'), None): cnv_string, ((TABLENS,u'on-update-keep-size'), None): cnv_boolean, ((TABLENS,u'on-update-keep-styles'), None): cnv_boolean, ((TABLENS,u'operator'), None): cnv_string, ((TABLENS,u'operator'), None): cnv_string, ((TABLENS,u'order'), None): cnv_string, ((TABLENS,u'orientation'), None): cnv_string, ((TABLENS,u'orientation'), None): cnv_string, ((TABLENS,u'page-breaks-on-group-change'), None): cnv_boolean, ((TABLENS,u'parse-sql-statement'), None): cnv_boolean, ((TABLENS,u'password'), None): cnv_string, ((TABLENS,u'position'), None): cnv_integer, ((TABLENS,u'precision-as-shown'), None): cnv_boolean, ((TABLENS,u'print'), None): cnv_boolean, ((TABLENS,u'print-ranges'), None): cnv_string, ((TABLENS,u'protect'), None): cnv_boolean, ((TABLENS,u'protected'), None): cnv_boolean, ((TABLENS,u'protection-key'), None): cnv_string, ((TABLENS,u'query-name'), None): cnv_string, ((TABLENS,u'range-usable-as'), None): cnv_string, ((TABLENS,u'refresh-delay'), None): cnv_boolean, ((TABLENS,u'refresh-delay'), None): cnv_duration, ((TABLENS,u'rejecting-change-id'), None): cnv_string, ((TABLENS,u'row'), None): cnv_integer, ((TABLENS,u'scenario-ranges'), None): cnv_string, ((TABLENS,u'search-criteria-must-apply-to-whole-cell'), None): cnv_boolean, ((TABLENS,u'selected-page'), None): cnv_string, ((TABLENS,u'show-details'), None): cnv_boolean, ((TABLENS,u'show-empty'), None): cnv_boolean, ((TABLENS,u'show-empty'), None): cnv_string, ((TABLENS,u'show-filter-button'), None): cnv_boolean, ((TABLENS,u'sort-mode'), None): cnv_string, ((TABLENS,u'source-cell-range-addresses'), None): cnv_string, ((TABLENS,u'source-cell-range-addresses'), None): cnv_string, ((TABLENS,u'source-field-name'), None): cnv_string, ((TABLENS,u'source-field-name'), None): cnv_string, ((TABLENS,u'source-name'), None): cnv_string, 
((TABLENS,u'sql-statement'), None): cnv_string, ((TABLENS,u'start'), None): cnv_string, ((TABLENS,u'start-column'), None): cnv_integer, ((TABLENS,u'start-position'), None): cnv_integer, ((TABLENS,u'start-row'), None): cnv_integer, ((TABLENS,u'start-table'), None): cnv_integer, ((TABLENS,u'status'), None): cnv_string, ((TABLENS,u'step'), None): cnv_double, ((TABLENS,u'steps'), None): cnv_positiveInteger, ((TABLENS,u'structure-protected'), None): cnv_boolean, ((TABLENS,u'style-name'), None): cnv_StyleNameRef, ((TABLENS,u'table-background'), None): cnv_boolean, ((TABLENS,u'table'), None): cnv_integer, ((TABLENS,u'table-name'), None): cnv_string, ((TABLENS,u'target-cell-address'), None): cnv_string, ((TABLENS,u'target-cell-address'), None): cnv_string, ((TABLENS,u'target-range-address'), None): cnv_string, ((TABLENS,u'target-range-address'), None): cnv_string, ((TABLENS,u'title'), None): cnv_string, ((TABLENS,u'track-changes'), None): cnv_boolean, ((TABLENS,u'type'), None): cnv_string, ((TABLENS,u'use-labels'), None): cnv_string, ((TABLENS,u'use-regular-expressions'), None): cnv_boolean, ((TABLENS,u'used-hierarchy'), None): cnv_integer, ((TABLENS,u'user-name'), None): cnv_string, ((TABLENS,u'value'), None): cnv_string, ((TABLENS,u'value'), None): cnv_string, ((TABLENS,u'value-type'), None): cnv_string, ((TABLENS,u'visibility'), None): cnv_string, ((TEXTNS,u'active'), None): cnv_boolean, ((TEXTNS,u'address'), None): cnv_string, ((TEXTNS,u'alphabetical-separators'), None): cnv_boolean, ((TEXTNS,u'anchor-page-number'), None): cnv_positiveInteger, ((TEXTNS,u'anchor-type'), None): cnv_string, ((TEXTNS,u'animation'), None): cnv_string, ((TEXTNS,u'animation-delay'), None): cnv_string, ((TEXTNS,u'animation-direction'), None): cnv_string, ((TEXTNS,u'animation-repeat'), None): cnv_string, ((TEXTNS,u'animation-start-inside'), None): cnv_boolean, ((TEXTNS,u'animation-steps'), None): cnv_length, ((TEXTNS,u'animation-stop-inside'), None): cnv_boolean, ((TEXTNS,u'annote'), None): 
cnv_string, ((TEXTNS,u'author'), None): cnv_string, ((TEXTNS,u'bibliography-data-field'), None): cnv_string, ((TEXTNS,u'bibliography-type'), None): cnv_string, ((TEXTNS,u'booktitle'), None): cnv_string, ((TEXTNS,u'bullet-char'), None): cnv_string, ((TEXTNS,u'bullet-relative-size'), None): cnv_string, ((TEXTNS,u'c'), None): cnv_nonNegativeInteger, ((TEXTNS,u'capitalize-entries'), None): cnv_boolean, ((TEXTNS,u'caption-sequence-format'), None): cnv_string, ((TEXTNS,u'caption-sequence-name'), None): cnv_string, ((TEXTNS,u'change-id'), None): cnv_IDREF, ((TEXTNS,u'chapter'), None): cnv_string, ((TEXTNS,u'citation-body-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'citation-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'class-names'), None): cnv_NCNames, ((TEXTNS,u'column-name'), None): cnv_string, ((TEXTNS,u'combine-entries'), None): cnv_boolean, ((TEXTNS,u'combine-entries-with-dash'), None): cnv_boolean, ((TEXTNS,u'combine-entries-with-pp'), None): cnv_boolean, ((TEXTNS,u'comma-separated'), None): cnv_boolean, ((TEXTNS,u'cond-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'condition'), None): cnv_formula, ((TEXTNS,u'connection-name'), None): cnv_string, ((TEXTNS,u'consecutive-numbering'), None): cnv_boolean, ((TEXTNS,u'continue-numbering'), None): cnv_boolean, ((TEXTNS,u'copy-outline-levels'), None): cnv_boolean, ((TEXTNS,u'count-empty-lines'), None): cnv_boolean, ((TEXTNS,u'count-in-text-boxes'), None): cnv_boolean, ((TEXTNS,u'current-value'), None): cnv_boolean, ((TEXTNS,u'custom1'), None): cnv_string, ((TEXTNS,u'custom2'), None): cnv_string, ((TEXTNS,u'custom3'), None): cnv_string, ((TEXTNS,u'custom4'), None): cnv_string, ((TEXTNS,u'custom5'), None): cnv_string, ((TEXTNS,u'database-name'), None): cnv_string, ((TEXTNS,u'date-adjust'), None): cnv_duration, ((TEXTNS,u'date-value'), None): cnv_date, # ((TEXTNS,u'date-value'), None): cnv_dateTime, ((TEXTNS,u'default-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'description'), None): cnv_string, 
((TEXTNS,u'display'), None): cnv_string, ((TEXTNS,u'display-levels'), None): cnv_positiveInteger, ((TEXTNS,u'display-outline-level'), None): cnv_nonNegativeInteger, ((TEXTNS,u'dont-balance-text-columns'), None): cnv_boolean, ((TEXTNS,u'duration'), None): cnv_duration, ((TEXTNS,u'edition'), None): cnv_string, ((TEXTNS,u'editor'), None): cnv_string, ((TEXTNS,u'filter-name'), None): cnv_string, ((TEXTNS,u'first-row-end-column'), None): cnv_string, ((TEXTNS,u'first-row-start-column'), None): cnv_string, ((TEXTNS,u'fixed'), None): cnv_boolean, ((TEXTNS,u'footnotes-position'), None): cnv_string, ((TEXTNS,u'formula'), None): cnv_formula, ((TEXTNS,u'global'), None): cnv_boolean, ((TEXTNS,u'howpublished'), None): cnv_string, ((TEXTNS,u'id'), None): cnv_ID, # ((TEXTNS,u'id'), None): cnv_string, ((TEXTNS,u'identifier'), None): cnv_string, ((TEXTNS,u'ignore-case'), None): cnv_boolean, ((TEXTNS,u'increment'), None): cnv_nonNegativeInteger, ((TEXTNS,u'index-name'), None): cnv_string, ((TEXTNS,u'index-scope'), None): cnv_string, ((TEXTNS,u'institution'), None): cnv_string, ((TEXTNS,u'is-hidden'), None): cnv_boolean, ((TEXTNS,u'is-list-header'), None): cnv_boolean, ((TEXTNS,u'isbn'), None): cnv_string, ((TEXTNS,u'issn'), None): cnv_string, ((TEXTNS,u'issn'), None): cnv_string, ((TEXTNS,u'journal'), None): cnv_string, ((TEXTNS,u'key'), None): cnv_string, ((TEXTNS,u'key1'), None): cnv_string, ((TEXTNS,u'key1-phonetic'), None): cnv_string, ((TEXTNS,u'key2'), None): cnv_string, ((TEXTNS,u'key2-phonetic'), None): cnv_string, ((TEXTNS,u'kind'), None): cnv_string, ((TEXTNS,u'label'), None): cnv_string, ((TEXTNS,u'last-row-end-column'), None): cnv_string, ((TEXTNS,u'last-row-start-column'), None): cnv_string, ((TEXTNS,u'level'), None): cnv_positiveInteger, ((TEXTNS,u'line-break'), None): cnv_boolean, ((TEXTNS,u'line-number'), None): cnv_string, ((TEXTNS,u'main-entry'), None): cnv_boolean, ((TEXTNS,u'main-entry-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'master-page-name'), None): 
cnv_StyleNameRef, ((TEXTNS,u'min-label-distance'), None): cnv_string, ((TEXTNS,u'min-label-width'), None): cnv_string, ((TEXTNS,u'month'), None): cnv_string, ((TEXTNS,u'name'), None): cnv_string, ((TEXTNS,u'note-class'), None): cnv_textnoteclass, ((TEXTNS,u'note'), None): cnv_string, ((TEXTNS,u'number'), None): cnv_string, ((TEXTNS,u'number-lines'), None): cnv_boolean, ((TEXTNS,u'number-position'), None): cnv_string, ((TEXTNS,u'numbered-entries'), None): cnv_boolean, ((TEXTNS,u'offset'), None): cnv_string, ((TEXTNS,u'organizations'), None): cnv_string, ((TEXTNS,u'outline-level'), None): cnv_string, ((TEXTNS,u'page-adjust'), None): cnv_integer, ((TEXTNS,u'pages'), None): cnv_string, ((TEXTNS,u'paragraph-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'placeholder-type'), None): cnv_string, ((TEXTNS,u'prefix'), None): cnv_string, ((TEXTNS,u'protected'), None): cnv_boolean, ((TEXTNS,u'protection-key'), None): cnv_string, ((TEXTNS,u'publisher'), None): cnv_string, ((TEXTNS,u'ref-name'), None): cnv_string, ((TEXTNS,u'reference-format'), None): cnv_string, ((TEXTNS,u'relative-tab-stop-position'), None): cnv_boolean, ((TEXTNS,u'report-type'), None): cnv_string, ((TEXTNS,u'restart-numbering'), None): cnv_boolean, ((TEXTNS,u'restart-on-page'), None): cnv_boolean, ((TEXTNS,u'row-number'), None): cnv_nonNegativeInteger, ((TEXTNS,u'school'), None): cnv_string, ((TEXTNS,u'section-name'), None): cnv_string, ((TEXTNS,u'select-page'), None): cnv_string, ((TEXTNS,u'separation-character'), None): cnv_string, ((TEXTNS,u'series'), None): cnv_string, ((TEXTNS,u'sort-algorithm'), None): cnv_string, ((TEXTNS,u'sort-ascending'), None): cnv_boolean, ((TEXTNS,u'sort-by-position'), None): cnv_boolean, ((TEXTNS,u'space-before'), None): cnv_string, ((TEXTNS,u'start-numbering-at'), None): cnv_string, ((TEXTNS,u'start-value'), None): cnv_nonNegativeInteger, ((TEXTNS,u'start-value'), None): cnv_positiveInteger, ((TEXTNS,u'string-value'), None): cnv_string, ((TEXTNS,u'string-value-if-false'), 
None): cnv_string, ((TEXTNS,u'string-value-if-true'), None): cnv_string, ((TEXTNS,u'string-value-phonetic'), None): cnv_string, ((TEXTNS,u'style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'suffix'), None): cnv_string, ((TEXTNS,u'tab-ref'), None): cnv_nonNegativeInteger, ((TEXTNS,u'table-name'), None): cnv_string, ((TEXTNS,u'table-type'), None): cnv_string, ((TEXTNS,u'time-adjust'), None): cnv_duration, ((TEXTNS,u'time-value'), None): cnv_dateTime, ((TEXTNS,u'time-value'), None): cnv_time, ((TEXTNS,u'title'), None): cnv_string, ((TEXTNS,u'track-changes'), None): cnv_boolean, ((TEXTNS,u'url'), None): cnv_string, ((TEXTNS,u'use-caption'), None): cnv_boolean, ((TEXTNS,u'use-chart-objects'), None): cnv_boolean, ((TEXTNS,u'use-draw-objects'), None): cnv_boolean, ((TEXTNS,u'use-floating-frames'), None): cnv_boolean, ((TEXTNS,u'use-graphics'), None): cnv_boolean, ((TEXTNS,u'use-index-marks'), None): cnv_boolean, ((TEXTNS,u'use-index-source-styles'), None): cnv_boolean, ((TEXTNS,u'use-keys-as-entries'), None): cnv_boolean, ((TEXTNS,u'use-math-objects'), None): cnv_boolean, ((TEXTNS,u'use-objects'), None): cnv_boolean, ((TEXTNS,u'use-other-objects'), None): cnv_boolean, ((TEXTNS,u'use-outline-level'), None): cnv_boolean, ((TEXTNS,u'use-soft-page-breaks'), None): cnv_boolean, ((TEXTNS,u'use-spreadsheet-objects'), None): cnv_boolean, ((TEXTNS,u'use-tables'), None): cnv_boolean, ((TEXTNS,u'value'), None): cnv_nonNegativeInteger, ((TEXTNS,u'visited-style-name'), None): cnv_StyleNameRef, ((TEXTNS,u'volume'), None): cnv_string, ((TEXTNS,u'year'), None): cnv_string, ((XFORMSNS,u'bind'), None): cnv_string, ((XLINKNS,u'actuate'), None): cnv_string, ((XLINKNS,u'href'), None): cnv_anyURI, ((XLINKNS,u'show'), None): cnv_xlinkshow, ((XLINKNS,u'title'), None): cnv_string, ((XLINKNS,u'type'), None): cnv_string, } class AttrConverters: def convert(self, attribute, value, element): """ Based on the element, figures out how to check/convert the attribute value All values are converted to 
string """ conversion = attrconverters.get((attribute, element.qname), None) if conversion is not None: return conversion(attribute, value, element) else: conversion = attrconverters.get((attribute, None), None) if conversion is not None: return conversion(attribute, value, element) return unicode(value)
rsvip/Django
refs/heads/master
tests/postgres_tests/models.py
50
from django.db import connection, models from .fields import ( ArrayField, BigIntegerRangeField, DateRangeField, DateTimeRangeField, FloatRangeField, HStoreField, IntegerRangeField, JSONField, ) class PostgreSQLModel(models.Model): class Meta: abstract = True required_db_vendor = 'postgresql' class IntegerArrayModel(PostgreSQLModel): field = ArrayField(models.IntegerField()) class NullableIntegerArrayModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), blank=True, null=True) class CharArrayModel(PostgreSQLModel): field = ArrayField(models.CharField(max_length=10)) class DateTimeArrayModel(PostgreSQLModel): datetimes = ArrayField(models.DateTimeField()) dates = ArrayField(models.DateField()) times = ArrayField(models.TimeField()) class NestedIntegerArrayModel(PostgreSQLModel): field = ArrayField(ArrayField(models.IntegerField())) class OtherTypesArrayModel(PostgreSQLModel): ips = ArrayField(models.GenericIPAddressField()) uuids = ArrayField(models.UUIDField()) decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2)) class HStoreModel(PostgreSQLModel): field = HStoreField(blank=True, null=True) class CharFieldModel(models.Model): field = models.CharField(max_length=16) class TextFieldModel(models.Model): field = models.TextField() # Only create this model for postgres >= 9.2 if connection.vendor == 'postgresql' and connection.pg_version >= 90200: class RangesModel(PostgreSQLModel): ints = IntegerRangeField(blank=True, null=True) bigints = BigIntegerRangeField(blank=True, null=True) floats = FloatRangeField(blank=True, null=True) timestamps = DateTimeRangeField(blank=True, null=True) dates = DateRangeField(blank=True, null=True) class RangeLookupsModel(PostgreSQLModel): parent = models.ForeignKey(RangesModel, blank=True, null=True) integer = models.IntegerField(blank=True, null=True) big_integer = models.BigIntegerField(blank=True, null=True) float = models.FloatField(blank=True, null=True) timestamp = models.DateTimeField(blank=True, 
null=True) date = models.DateField(blank=True, null=True) else: # create an object with this name so we don't have failing imports class RangesModel(object): pass class RangeLookupsModel(object): pass # Only create this model for postgres >= 9.4 if connection.vendor == 'postgresql' and connection.pg_version >= 90400: class JSONModel(models.Model): field = JSONField(blank=True, null=True) else: # create an object with this name so we don't have failing imports class JSONModel(object): pass class ArrayFieldSubclass(ArrayField): def __init__(self, *args, **kwargs): super(ArrayFieldSubclass, self).__init__(models.IntegerField()) class AggregateTestModel(models.Model): """ To test postgres-specific general aggregation functions """ char_field = models.CharField(max_length=30, blank=True) integer_field = models.IntegerField(null=True) boolean_field = models.NullBooleanField() class StatTestModel(models.Model): """ To test postgres-specific aggregation functions for statistics """ int1 = models.IntegerField() int2 = models.IntegerField() related_field = models.ForeignKey(AggregateTestModel, null=True) class NowTestModel(models.Model): when = models.DateTimeField(null=True, default=None)
0359xiaodong/libgdx
refs/heads/master
extensions/gdx-freetype/jni/freetype-2.4.10/src/tools/docmaker/utils.py
515
# Utils (c) 2002, 2004, 2007, 2008 David Turner <david@freetype.org> # import string, sys, os, glob # current output directory # output_dir = None # This function is used to sort the index. It is a simple lexicographical # sort, except that it places capital letters before lowercase ones. # def index_sort( s1, s2 ): if not s1: return -1 if not s2: return 1 l1 = len( s1 ) l2 = len( s2 ) m1 = string.lower( s1 ) m2 = string.lower( s2 ) for i in range( l1 ): if i >= l2 or m1[i] > m2[i]: return 1 if m1[i] < m2[i]: return -1 if s1[i] < s2[i]: return -1 if s1[i] > s2[i]: return 1 if l2 > l1: return -1 return 0 # Sort input_list, placing the elements of order_list in front. # def sort_order_list( input_list, order_list ): new_list = order_list[:] for id in input_list: if not id in order_list: new_list.append( id ) return new_list # Open the standard output to a given project documentation file. Use # "output_dir" to determine the filename location if necessary and save the # old stdout in a tuple that is returned by this function. # def open_output( filename ): global output_dir if output_dir and output_dir != "": filename = output_dir + os.sep + filename old_stdout = sys.stdout new_file = open( filename, "w" ) sys.stdout = new_file return ( new_file, old_stdout ) # Close the output that was returned by "close_output". # def close_output( output ): output[0].close() sys.stdout = output[1] # Check output directory. 
# def check_output(): global output_dir if output_dir: if output_dir != "": if not os.path.isdir( output_dir ): sys.stderr.write( "argument" + " '" + output_dir + "' " + \ "is not a valid directory" ) sys.exit( 2 ) else: output_dir = None def file_exists( pathname ): """checks that a given file exists""" result = 1 try: file = open( pathname, "r" ) file.close() except: result = None sys.stderr.write( pathname + " couldn't be accessed\n" ) return result def make_file_list( args = None ): """builds a list of input files from command-line arguments""" file_list = [] # sys.stderr.write( repr( sys.argv[1 :] ) + '\n' ) if not args: args = sys.argv[1 :] for pathname in args: if string.find( pathname, '*' ) >= 0: newpath = glob.glob( pathname ) newpath.sort() # sort files -- this is important because # of the order of files else: newpath = [pathname] file_list.extend( newpath ) if len( file_list ) == 0: file_list = None else: # now filter the file list to remove non-existing ones file_list = filter( file_exists, file_list ) return file_list # eof