repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
BruceDai/crosswalk-test-suite
refs/heads/master
webapi/tct-csp-w3c-tests/csp-py/csp_media-src_corss-origin_audio_blocked_int.py
30
def main(request, response): import simplejson as json f = file('config.json') source = f.read() s = json.JSONDecoder().decode(source) url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1]) url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0]) _CSP = "media-src http://www.w3.org; script-src 'self' 'unsafe-inline'" response.headers.set("Content-Security-Policy", _CSP) response.headers.set("X-Content-Security-Policy", _CSP) response.headers.set("X-WebKit-CSP", _CSP) return """<!DOCTYPE html> <!-- Copyright (c) 2013 Intel Corporation. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of works must retain the original copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the original copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this work without specific prior written permission. THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
Authors: Zhang, Zhiqiang <zhiqiang.zhang@intel.com> --> <html> <head> <title>CSP Test: csp_media-src_cross-origin_audio_blocked_int</title> <link rel="author" title="Intel" href="http://www.intel.com"/> <link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#media-src"/> <meta name="flags" content=""/> <meta name="assert" content="media-src http://www.w3.org; script-src 'self' 'unsafe-inline'"/> <meta charset="utf-8"/> <script src="../resources/testharness.js"></script> <script src="../resources/testharnessreport.js"></script> </head> <body> <div id="log"></div> <audio id="m"></audio> <script> var t = async_test(document.title); var m = document.getElementById("m"); m.src = "support/khronos/red-green.theora.ogv"; window.setTimeout(function() { t.step(function() { assert_true(m.currentSrc == "", "audio.currentSrc should be empty after setting src attribute"); }); t.done(); }, 0); </script> </body> </html> """
dylanlesko/youtube-dl
refs/heads/master
test/test_netrc.py
168
# coding: utf-8 from __future__ import unicode_literals import os import sys import unittest sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from youtube_dl.extractor import ( gen_extractors, ) class TestNetRc(unittest.TestCase): def test_netrc_present(self): for ie in gen_extractors(): if not hasattr(ie, '_login'): continue self.assertTrue( hasattr(ie, '_NETRC_MACHINE'), 'Extractor %s supports login, but is missing a _NETRC_MACHINE property' % ie.IE_NAME) if __name__ == '__main__': unittest.main()
semonte/intellij-community
refs/heads/master
python/testData/quickFixes/PyAddCallSuperQuickFixTest/kwargs_after.py
80
class A(object): def __init__(self): pass class B(A): def __init__(self, **kwargs): super(B, self).__init__()
9and3r/RPi-InfoScreen-Kivy
refs/heads/master
screens/clock/screen.py
4
from datetime import datetime from kivy.properties import DictProperty from kivy.clock import Clock from kivy.uix.screenmanager import Screen class ClockScreen(Screen): """Simple plugin screen to show digital clock of current time.""" # String Property to hold time timedata = DictProperty(None) def __init__(self, **kwargs): self.get_time() super(ClockScreen, self).__init__(**kwargs) self.timer = None def get_time(self): """Sets self.timedata to current time.""" n = datetime.now() self.timedata["h"] = n.hour self.timedata["m"] = n.minute self.timedata["s"] = n.second def update(self, dt): self.get_time() def on_enter(self): # We only need to update the clock every second. self.timer = Clock.schedule_interval(self.update, 1) def on_pre_enter(self): self.get_time() def on_pre_leave(self): # Save resource by unscheduling the updates. Clock.unschedule(self.timer)
smartforceplus/SmartForceplus
refs/heads/master
.local/share/Odoo/addons/8.0/builder/models/demo/normal_distribution.py
1
import random from openerp import models, api, fields __author__ = 'one' class NormalDistributionGenerator(models.Model): _name = 'builder.ir.model.demo.generator.normal_distribution' _description = 'Normal Distribution Generator' _inherits = { 'builder.ir.model.demo.generator': 'base_id' } _inherit = ['ir.mixin.polymorphism.subclass', 'builder.ir.model.demo.generator.base'] _target_type = 'integer' base_id = fields.Many2one( comodel_name='builder.ir.model.demo.generator', string='Base', ondelete='cascade', required=True ) mean = fields.Float('Mean') stdev = fields.Float('Std Deviation') _defaults = { 'subclass_model': lambda s, c, u, cxt=None: s._name } @api.multi def get_generator(self, field): while True: yield self.format_value(field, random.gauss(self.mean, self.stdev)) @api.model def format_value(self, field, value): if field.ttype == 'integer': return int(value) else: return value
dannyboi104/SickRage
refs/heads/master
sickbeard/name_parser/parser.py
3
# Author: Nic Wolfe <nic@wolfeden.ca> # URL: http://code.google.com/p/sickbeard/ # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. from __future__ import with_statement import os import time import re import os.path import regexes import sickbeard from sickbeard import logger, helpers, scene_numbering, common, scene_exceptions, encodingKludge as ek, db from sickbeard.exceptions import ex from dateutil import parser class NameParser(object): ALL_REGEX = 0 NORMAL_REGEX = 1 ANIME_REGEX = 2 def __init__(self, file_name=True, showObj=None, tryIndexers=False, trySceneExceptions=False, naming_pattern=False): self.file_name = file_name self.showObj = showObj self.tryIndexers = tryIndexers self.trySceneExceptions = trySceneExceptions self.naming_pattern = naming_pattern if self.showObj and not self.showObj.is_anime: self._compile_regexes(self.NORMAL_REGEX) elif self.showObj and self.showObj.is_anime: self._compile_regexes(self.ANIME_REGEX) else: self._compile_regexes(self.ALL_REGEX) def clean_series_name(self, series_name): """Cleans up series name by removing any . and _ characters, along with any trailing hyphens. Is basically equivalent to replacing all _ and . 
with a space, but handles decimal numbers in string, for example: >>> cleanRegexedSeriesName("an.example.1.0.test") 'an example 1.0 test' >>> cleanRegexedSeriesName("an_example_1.0_test") 'an example 1.0 test' Stolen from dbr's tvnamer """ series_name = re.sub("(\D)\.(?!\s)(\D)", "\\1 \\2", series_name) series_name = re.sub("(\d)\.(\d{4})", "\\1 \\2", series_name) # if it ends in a year then don't keep the dot series_name = re.sub("(\D)\.(?!\s)", "\\1 ", series_name) series_name = re.sub("\.(?!\s)(\D)", " \\1", series_name) series_name = series_name.replace("_", " ") series_name = re.sub("-$", "", series_name) series_name = re.sub("^\[.*\]", "", series_name) return series_name.strip() def _compile_regexes(self, regexMode): if regexMode == self.ANIME_REGEX: dbg_str = u"ANIME" uncompiled_regex = [regexes.anime_regexes, regexes.normal_regexes] elif regexMode == self.NORMAL_REGEX: dbg_str = u"NORMAL" uncompiled_regex = [regexes.normal_regexes] else: dbg_str = u"ALL" uncompiled_regex = [regexes.normal_regexes, regexes.anime_regexes] self.compiled_regexes = [] for regexItem in uncompiled_regex: for cur_pattern_num, (cur_pattern_name, cur_pattern) in enumerate(regexItem): try: cur_regex = re.compile(cur_pattern, re.VERBOSE | re.IGNORECASE) except re.error, errormsg: logger.log(u"WARNING: Invalid episode_pattern using %s regexs, %s. 
%s" % (dbg_str, errormsg, cur_pattern)) else: self.compiled_regexes.append((cur_pattern_num, cur_pattern_name, cur_regex)) def _parse_string(self, name): if not name: return matches = [] bestResult = None for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes: match = cur_regex.match(name) if not match: continue result = ParseResult(name) result.which_regex = [cur_regex_name] result.score = 0 - cur_regex_num named_groups = match.groupdict().keys() if 'series_name' in named_groups: result.series_name = match.group('series_name') if result.series_name: result.series_name = self.clean_series_name(result.series_name) result.score += 1 if 'series_num' in named_groups and match.group('series_num'): result.score += 1 if 'season_num' in named_groups: tmp_season = int(match.group('season_num')) if cur_regex_name == 'bare' and tmp_season in (19, 20): continue result.season_number = tmp_season result.score += 1 if 'ep_num' in named_groups: ep_num = self._convert_number(match.group('ep_num')) if 'extra_ep_num' in named_groups and match.group('extra_ep_num'): result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1) result.score += 1 else: result.episode_numbers = [ep_num] result.score += 1 if 'ep_ab_num' in named_groups: ep_ab_num = self._convert_number(match.group('ep_ab_num')) if 'extra_ab_ep_num' in named_groups and match.group('extra_ab_ep_num'): result.ab_episode_numbers = range(ep_ab_num, self._convert_number(match.group('extra_ab_ep_num')) + 1) result.score += 1 else: result.ab_episode_numbers = [ep_ab_num] result.score += 1 if 'air_date' in named_groups: air_date = match.group('air_date') try: result.air_date = parser.parse(air_date, fuzzy=True).date() result.score += 1 except: continue if 'extra_info' in named_groups: tmp_extra_info = match.group('extra_info') # Show.S04.Special or Show.S05.Part.2.Extras is almost certainly not every episode in the season if tmp_extra_info and cur_regex_name == 'season_only' and 
re.search( r'([. _-]|^)(special|extra)s?\w*([. _-]|$)', tmp_extra_info, re.I): continue result.extra_info = tmp_extra_info result.score += 1 if 'release_group' in named_groups: result.release_group = match.group('release_group') result.score += 1 if 'version' in named_groups: # assigns version to anime file if detected using anime regex. Non-anime regex receives -1 version = match.group('version') if version: result.version = version else: result.version = 1 else: result.version = -1 matches.append(result) if len(matches): # pick best match with highest score based on placement bestResult = max(sorted(matches, reverse=True, key=lambda x: x.which_regex), key=lambda x: x.score) show = None if not self.naming_pattern: # try and create a show object for this result show = helpers.get_show(bestResult.series_name, self.tryIndexers, self.trySceneExceptions) # confirm passed in show object indexer id matches result show object indexer id if show: if self.showObj and show.indexerid != self.showObj.indexerid: show = None bestResult.show = show elif not show and self.showObj: bestResult.show = self.showObj # if this is a naming pattern test or result doesn't have a show object then return best result if not bestResult.show or self.naming_pattern: return bestResult # get quality bestResult.quality = common.Quality.nameQuality(name, bestResult.show.is_anime) new_episode_numbers = [] new_season_numbers = [] new_absolute_numbers = [] # if we have an air-by-date show then get the real season/episode numbers if bestResult.is_air_by_date: airdate = bestResult.air_date.toordinal() myDB = db.DBConnection() sql_result = myDB.select( "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? 
and airdate = ?", [bestResult.show.indexerid, bestResult.show.indexer, airdate]) season_number = None episode_numbers = [] if sql_result: season_number = int(sql_result[0][0]) episode_numbers = [int(sql_result[0][1])] if not season_number or not len(episode_numbers): try: lINDEXER_API_PARMS = sickbeard.indexerApi(bestResult.show.indexer).api_params.copy() if bestResult.show.lang: lINDEXER_API_PARMS['language'] = bestResult.show.lang t = sickbeard.indexerApi(bestResult.show.indexer).indexer(**lINDEXER_API_PARMS) epObj = t[bestResult.show.indexerid].airedOn(bestResult.air_date)[0] season_number = int(epObj["seasonnumber"]) episode_numbers = [int(epObj["episodenumber"])] except sickbeard.indexer_episodenotfound: logger.log(u"Unable to find episode with date " + str(bestResult.air_date) + " for show " + bestResult.show.name + ", skipping", logger.WARNING) episode_numbers = [] except sickbeard.indexer_error, e: logger.log(u"Unable to contact " + sickbeard.indexerApi(bestResult.show.indexer).name + ": " + ex(e), logger.WARNING) episode_numbers = [] for epNo in episode_numbers: s = season_number e = epNo if bestResult.show.is_scene: (s, e) = scene_numbering.get_indexer_numbering(bestResult.show.indexerid, bestResult.show.indexer, season_number, epNo) new_episode_numbers.append(e) new_season_numbers.append(s) elif bestResult.show.is_anime and len(bestResult.ab_episode_numbers): scene_season = scene_exceptions.get_scene_exception_by_name(bestResult.series_name)[1] for epAbsNo in bestResult.ab_episode_numbers: a = epAbsNo if bestResult.show.is_scene: a = scene_numbering.get_indexer_absolute_numbering(bestResult.show.indexerid, bestResult.show.indexer, epAbsNo, True, scene_season) (s, e) = helpers.get_all_episodes_from_absolute_number(bestResult.show, [a]) new_absolute_numbers.append(a) new_episode_numbers.extend(e) new_season_numbers.append(s) elif bestResult.season_number and len(bestResult.episode_numbers): for epNo in bestResult.episode_numbers: s = 
bestResult.season_number e = epNo if bestResult.show.is_scene: (s, e) = scene_numbering.get_indexer_numbering(bestResult.show.indexerid, bestResult.show.indexer, bestResult.season_number, epNo) if bestResult.show.is_anime: a = helpers.get_absolute_number_from_season_and_episode(bestResult.show, s, e) if a: new_absolute_numbers.append(a) new_episode_numbers.append(e) new_season_numbers.append(s) # need to do a quick sanity check heregex. It's possible that we now have episodes # from more than one season (by tvdb numbering), and this is just too much # for sickbeard, so we'd need to flag it. new_season_numbers = list(set(new_season_numbers)) # remove duplicates if len(new_season_numbers) > 1: raise InvalidNameException("Scene numbering results episodes from " "seasons %s, (i.e. more than one) and " "sickrage does not support this. " "Sorry." % (str(new_season_numbers))) # I guess it's possible that we'd have duplicate episodes too, so lets # eliminate them new_episode_numbers = list(set(new_episode_numbers)) new_episode_numbers.sort() # maybe even duplicate absolute numbers so why not do them as well new_absolute_numbers = list(set(new_absolute_numbers)) new_absolute_numbers.sort() if len(new_absolute_numbers): bestResult.ab_episode_numbers = new_absolute_numbers if len(new_season_numbers) and len(new_episode_numbers): bestResult.episode_numbers = new_episode_numbers bestResult.season_number = new_season_numbers[0] if bestResult.show.is_scene: logger.log( u"Converted parsed result " + bestResult.original_name + " into " + str(bestResult).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG) # CPU sleep time.sleep(0.02) return bestResult def _combine_results(self, first, second, attr): # if the first doesn't exist then return the second or nothing if not first: if not second: return None else: return getattr(second, attr) # if the second doesn't exist then return the first if not second: return getattr(first, attr) a = getattr(first, attr) b = getattr(second, attr) # 
if a is good use it if a != None or (type(a) == list and len(a)): return a # if not use b (if b isn't set it'll just be default) else: return b def _unicodify(self, obj, encoding="utf-8"): if isinstance(obj, basestring): if not isinstance(obj, unicode): obj = unicode(obj, encoding, 'replace') return obj def _convert_number(self, org_number): """ Convert org_number into an integer org_number: integer or representation of a number: string or unicode Try force converting to int first, on error try converting from Roman numerals returns integer or 0 """ try: # try forcing to int if org_number: number = int(org_number) else: number = 0 except: # on error try converting from Roman numerals roman_to_int_map = (('M', 1000), ('CM', 900), ('D', 500), ('CD', 400), ('C', 100), ('XC', 90), ('L', 50), ('XL', 40), ('X', 10), ('IX', 9), ('V', 5), ('IV', 4), ('I', 1) ) roman_numeral = str(org_number).upper() number = 0 index = 0 for numeral, integer in roman_to_int_map: while roman_numeral[index:index + len(numeral)] == numeral: number += integer index += len(numeral) return number def parse(self, name, cache_result=True): name = self._unicodify(name) if self.naming_pattern: cache_result = False cached = name_parser_cache.get(name) if cached: return cached # break it into parts if there are any (dirname, file name, extension) dir_name, file_name = ek.ek(os.path.split, name) if self.file_name: base_file_name = helpers.remove_extension(file_name) else: base_file_name = file_name # set up a result to use final_result = ParseResult(name) # try parsing the file name file_name_result = self._parse_string(base_file_name) # use only the direct parent dir dir_name = os.path.basename(dir_name) # parse the dirname for extra info if needed dir_name_result = self._parse_string(dir_name) # build the ParseResult object final_result.air_date = self._combine_results(file_name_result, dir_name_result, 'air_date') # anime absolute numbers final_result.ab_episode_numbers = 
self._combine_results(file_name_result, dir_name_result, 'ab_episode_numbers') # season and episode numbers final_result.season_number = self._combine_results(file_name_result, dir_name_result, 'season_number') final_result.episode_numbers = self._combine_results(file_name_result, dir_name_result, 'episode_numbers') # if the dirname has a release group/show name I believe it over the filename final_result.series_name = self._combine_results(dir_name_result, file_name_result, 'series_name') final_result.extra_info = self._combine_results(dir_name_result, file_name_result, 'extra_info') final_result.release_group = self._combine_results(dir_name_result, file_name_result, 'release_group') final_result.version = self._combine_results(dir_name_result, file_name_result, 'version') final_result.which_regex = [] if final_result == file_name_result: final_result.which_regex = file_name_result.which_regex elif final_result == dir_name_result: final_result.which_regex = dir_name_result.which_regex else: if file_name_result: final_result.which_regex += file_name_result.which_regex if dir_name_result: final_result.which_regex += dir_name_result.which_regex final_result.show = self._combine_results(file_name_result, dir_name_result, 'show') final_result.quality = self._combine_results(file_name_result, dir_name_result, 'quality') if not final_result.show: raise InvalidShowException( "Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace')) # if there's no useful info in it then raise an exception if final_result.season_number == None and not final_result.episode_numbers and final_result.air_date == None and not final_result.ab_episode_numbers and not final_result.series_name: raise InvalidNameException("Unable to parse " + name.encode(sickbeard.SYS_ENCODING, 'xmlcharrefreplace')) if cache_result: name_parser_cache.add(name, final_result) logger.log(u"Parsed " + name + " into " + str(final_result).decode('utf-8', 'xmlcharrefreplace'), logger.DEBUG) return 
final_result class ParseResult(object): def __init__(self, original_name, series_name=None, season_number=None, episode_numbers=None, extra_info=None, release_group=None, air_date=None, ab_episode_numbers=None, show=None, score=None, quality=None, version=None ): self.original_name = original_name self.series_name = series_name self.season_number = season_number if not episode_numbers: self.episode_numbers = [] else: self.episode_numbers = episode_numbers if not ab_episode_numbers: self.ab_episode_numbers = [] else: self.ab_episode_numbers = ab_episode_numbers if not quality: self.quality = common.Quality.UNKNOWN else: self.quality = quality self.extra_info = extra_info self.release_group = release_group self.air_date = air_date self.which_regex = [] self.show = show self.score = score self.version = version def __eq__(self, other): if not other: return False if self.series_name != other.series_name: return False if self.season_number != other.season_number: return False if self.episode_numbers != other.episode_numbers: return False if self.extra_info != other.extra_info: return False if self.release_group != other.release_group: return False if self.air_date != other.air_date: return False if self.ab_episode_numbers != other.ab_episode_numbers: return False if self.show != other.show: return False if self.score != other.score: return False if self.quality != other.quality: return False if self.version != other.version: return False return True def __str__(self): if self.series_name != None: to_return = self.series_name + u' - ' else: to_return = u'' if self.season_number != None: to_return += 'S' + str(self.season_number).zfill(2) if self.episode_numbers and len(self.episode_numbers): for e in self.episode_numbers: to_return += 'E' + str(e).zfill(2) if self.is_air_by_date: to_return += str(self.air_date) if self.ab_episode_numbers: to_return += ' [ABS: ' + str(self.ab_episode_numbers) + ']' if self.version and self.is_anime is True: to_return += ' [ANIME VER: ' + 
str(self.version) + ']' if self.release_group: to_return += ' [GROUP: ' + self.release_group + ']' to_return += ' [ABD: ' + str(self.is_air_by_date) + ']' to_return += ' [ANIME: ' + str(self.is_anime) + ']' to_return += ' [whichReg: ' + str(self.which_regex) + ']' return to_return.encode('utf-8') @property def is_air_by_date(self): if self.air_date: return True return False @property def is_anime(self): if len(self.ab_episode_numbers): return True return False class NameParserCache(object): _previous_parsed = {} _cache_size = 100 def add(self, name, parse_result): self._previous_parsed[name] = parse_result _current_cache_size = len(self._previous_parsed) if _current_cache_size > self._cache_size: for i in range(_current_cache_size - self._cache_size): del self._previous_parsed[self._previous_parsed.keys()[0]] def get(self, name): if name in self._previous_parsed: logger.log("Using cached parse result for: " + name, logger.DEBUG) return self._previous_parsed[name] name_parser_cache = NameParserCache() class InvalidNameException(Exception): "The given release name is not valid" class InvalidShowException(Exception): "The given show name is not valid"
yanheven/glance
refs/heads/master
glance/image_cache/__init__.py
7
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ LRU Cache for Image Data """ import hashlib from oslo_config import cfg from oslo_log import log as logging from oslo_utils import excutils from oslo_utils import importutils from oslo_utils import units from glance.common import exception from glance.common import utils from glance import i18n LOG = logging.getLogger(__name__) _ = i18n._ _LE = i18n._LE _LI = i18n._LI _LW = i18n._LW image_cache_opts = [ cfg.StrOpt('image_cache_driver', default='sqlite', help=_('The driver to use for image cache management.')), cfg.IntOpt('image_cache_max_size', default=10 * units.Gi, # 10 GB help=_('The upper limit (the maximum size of accumulated ' 'cache in bytes) beyond which pruner, if running, ' 'starts cleaning the images cache.')), cfg.IntOpt('image_cache_stall_time', default=86400, # 24 hours help=_('The amount of time to let an image remain in the ' 'cache without being accessed.')), cfg.StrOpt('image_cache_dir', help=_('Base directory that the Image Cache uses.')), ] CONF = cfg.CONF CONF.register_opts(image_cache_opts) class ImageCache(object): """Provides an LRU cache for image data.""" def __init__(self): self.init_driver() def init_driver(self): """ Create the driver for the cache """ driver_name = CONF.image_cache_driver driver_module = (__name__ + '.drivers.' 
+ driver_name + '.Driver') try: self.driver_class = importutils.import_class(driver_module) LOG.info(_LI("Image cache loaded driver '%s'.") % driver_name) except ImportError as import_err: LOG.warn(_LW("Image cache driver " "'%(driver_name)s' failed to load. " "Got error: '%(import_err)s."), {'driver_name': driver_name, 'import_err': import_err}) driver_module = __name__ + '.drivers.sqlite.Driver' LOG.info(_LI("Defaulting to SQLite driver.")) self.driver_class = importutils.import_class(driver_module) self.configure_driver() def configure_driver(self): """ Configure the driver for the cache and, if it fails to configure, fall back to using the SQLite driver which has no odd dependencies """ try: self.driver = self.driver_class() self.driver.configure() except exception.BadDriverConfiguration as config_err: driver_module = self.driver_class.__module__ LOG.warn(_LW("Image cache driver " "'%(driver_module)s' failed to configure. " "Got error: '%(config_err)s"), {'driver_module': driver_module, 'config_err': config_err}) LOG.info(_LI("Defaulting to SQLite driver.")) default_module = __name__ + '.drivers.sqlite.Driver' self.driver_class = importutils.import_class(default_module) self.driver = self.driver_class() self.driver.configure() def is_cached(self, image_id): """ Returns True if the image with the supplied ID has its image file cached. :param image_id: Image ID """ return self.driver.is_cached(image_id) def is_queued(self, image_id): """ Returns True if the image identifier is in our cache queue. :param image_id: Image ID """ return self.driver.is_queued(image_id) def get_cache_size(self): """ Returns the total size in bytes of the image cache. """ return self.driver.get_cache_size() def get_hit_count(self, image_id): """ Return the number of hits that an image has :param image_id: Opaque image identifier """ return self.driver.get_hit_count(image_id) def get_cached_images(self): """ Returns a list of records about cached images. 
""" return self.driver.get_cached_images() def delete_all_cached_images(self): """ Removes all cached image files and any attributes about the images and returns the number of cached image files that were deleted. """ return self.driver.delete_all_cached_images() def delete_cached_image(self, image_id): """ Removes a specific cached image file and any attributes about the image :param image_id: Image ID """ self.driver.delete_cached_image(image_id) def delete_all_queued_images(self): """ Removes all queued image files and any attributes about the images and returns the number of queued image files that were deleted. """ return self.driver.delete_all_queued_images() def delete_queued_image(self, image_id): """ Removes a specific queued image file and any attributes about the image :param image_id: Image ID """ self.driver.delete_queued_image(image_id) def prune(self): """ Removes all cached image files above the cache's maximum size. Returns a tuple containing the total number of cached files removed and the total size of all pruned image files. """ max_size = CONF.image_cache_max_size current_size = self.driver.get_cache_size() if max_size > current_size: LOG.debug("Image cache has free space, skipping prune...") return (0, 0) overage = current_size - max_size LOG.debug("Image cache currently %(overage)d bytes over max " "size. Starting prune to max size of %(max_size)d ", {'overage': overage, 'max_size': max_size}) total_bytes_pruned = 0 total_files_pruned = 0 entry = self.driver.get_least_recently_accessed() while entry and current_size > max_size: image_id, size = entry LOG.debug("Pruning '%(image_id)s' to free %(size)d bytes", {'image_id': image_id, 'size': size}) self.driver.delete_cached_image(image_id) total_bytes_pruned = total_bytes_pruned + size total_files_pruned = total_files_pruned + 1 current_size = current_size - size entry = self.driver.get_least_recently_accessed() LOG.debug("Pruning finished pruning. 
" "Pruned %(total_files_pruned)d and " "%(total_bytes_pruned)d.", {'total_files_pruned': total_files_pruned, 'total_bytes_pruned': total_bytes_pruned}) return total_files_pruned, total_bytes_pruned def clean(self, stall_time=None): """ Cleans up any invalid or incomplete cached images. The cache driver decides what that means... """ self.driver.clean(stall_time) def queue_image(self, image_id): """ This adds a image to be cache to the queue. If the image already exists in the queue or has already been cached, we return False, True otherwise :param image_id: Image ID """ return self.driver.queue_image(image_id) def get_caching_iter(self, image_id, image_checksum, image_iter): """ Returns an iterator that caches the contents of an image while the image contents are read through the supplied iterator. :param image_id: Image ID :param image_checksum: checksum expected to be generated while iterating over image data :param image_iter: Iterator that will read image contents """ if not self.driver.is_cacheable(image_id): return image_iter LOG.debug("Tee'ing image '%s' into cache", image_id) return self.cache_tee_iter(image_id, image_iter, image_checksum) def cache_tee_iter(self, image_id, image_iter, image_checksum): try: current_checksum = hashlib.md5() with self.driver.open_for_write(image_id) as cache_file: for chunk in image_iter: try: cache_file.write(chunk) finally: current_checksum.update(chunk) yield chunk cache_file.flush() if (image_checksum and image_checksum != current_checksum.hexdigest()): msg = _("Checksum verification failed. Aborted " "caching of image '%s'.") % image_id raise exception.GlanceException(msg) except exception.GlanceException as e: with excutils.save_and_reraise_exception(): # image_iter has given us bad, (size_checked_iter has found a # bad length), or corrupt data (checksum is wrong). 
LOG.exception(utils.exception_to_str(e)) except Exception as e: LOG.exception(_LE("Exception encountered while tee'ing " "image '%(image_id)s' into cache: %(error)s. " "Continuing with response.") % {'image_id': image_id, 'error': utils.exception_to_str(e)}) # If no checksum provided continue responding even if # caching failed. for chunk in image_iter: yield chunk def cache_image_iter(self, image_id, image_iter, image_checksum=None): """ Cache an image with supplied iterator. :param image_id: Image ID :param image_file: Iterator retrieving image chunks :param image_checksum: Checksum of image :retval True if image file was cached, False otherwise """ if not self.driver.is_cacheable(image_id): return False for chunk in self.get_caching_iter(image_id, image_checksum, image_iter): pass return True def cache_image_file(self, image_id, image_file): """ Cache an image file. :param image_id: Image ID :param image_file: Image file to cache :retval True if image file was cached, False otherwise """ CHUNKSIZE = 64 * units.Mi return self.cache_image_iter(image_id, utils.chunkiter(image_file, CHUNKSIZE)) def open_for_read(self, image_id): """ Open and yield file for reading the image file for an image with supplied identifier. :note Upon successful reading of the image file, the image's hit count will be incremented. :param image_id: Image ID """ return self.driver.open_for_read(image_id) def get_image_size(self, image_id): """ Return the size of the image file for an image with supplied identifier. :param image_id: Image ID """ return self.driver.get_image_size(image_id) def get_queued_images(self): """ Returns a list of image IDs that are in the queue. The list should be sorted by the time the image ID was inserted into the queue. """ return self.driver.get_queued_images()
y-asano/primecloud-controller
refs/heads/master
iaas-gw/src/DeleteSnapshot.py
5
# coding: UTF-8 # # Copyright 2014 by SCSK Corporation. # # This file is part of PrimeCloud Controller(TM). # # PrimeCloud Controller(TM) is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # PrimeCloud Controller(TM) is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with PrimeCloud Controller(TM). If not, see <http://www.gnu.org/licenses/>. # from iaasgw.log.log import IaasLogger from iaasgw.utils.iaasSelecter import iaasSelect import os import sys import traceback if __name__ == '__main__': param = sys.argv logger = IaasLogger() #ログ用パラメータ logparam = ["DeleteSnapshot",os.getpid(), "スナップショットNo:%s" %str(param[3])] logger.start(logparam) #実行 try: #パラメータ解説 # 0.ファイル名 # 1.ユーザー名 # 2.プラットフォームNo # 3.スナップショットNo # # 例:param = [None, "1", "6", "1", "1"] iaasController = iaasSelect(param[1], param[2]) if iaasController == None: sys.exit() res = iaasController.deleteSnapshot(param[3]) print res except: logger.error(traceback.format_exc()) raise logger.end(logparam)
alexschiller/osf.io
refs/heads/develop
addons/osfstorage/tests/test_models.py
4
from __future__ import unicode_literals import mock import unittest import pytest import pytz from django.core.exceptions import ObjectDoesNotExist from django.utils import timezone from nose.tools import * # noqa from addons.osfstorage.models import OsfStorageFile, OsfStorageFileNode, OsfStorageFolder from osf.exceptions import ValidationError from osf.models import Contributor from tests.factories import ProjectFactory from addons.osfstorage.tests import factories from addons.osfstorage.tests.utils import StorageTestCase import datetime from osf import models from addons.osfstorage import utils from addons.osfstorage import settings from website.files.exceptions import FileNodeCheckedOutError @pytest.mark.django_db class TestOsfstorageFileNode(StorageTestCase): def test_root_node_exists(self): assert_true(self.node_settings.root_node is not None) def test_root_node_has_no_parent(self): assert_true(self.node_settings.root_node.parent is None) def test_node_reference(self): assert_equal(self.project, self.node_settings.root_node.node) # def test_get_folder(self): # file = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=True, node=self.node) # folder = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=False, node=self.node) # _id = folder._id # file.save() # folder.save() # assert_equal(folder, models.OsfStorageFileNode.get_folder(_id, self.node_settings)) # def test_get_file(self): # file = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=True, node=self.node) # folder = models.OsfStorageFileNode(name='MOAR PYLONS', is_file=False, node=self.node) # file.save() # folder.save() # _id = file._id # assert_equal(file, models.OsfStorageFileNode.get_file(_id, self.node_settings)) def test_serialize(self): file = OsfStorageFile(name='MOAR PYLONS', node=self.node_settings.owner) file.save() assert_equals(file.serialize(), { u'id': file._id, u'path': file.path, u'created': None, u'name': u'MOAR PYLONS', u'kind': 'file', u'version': 0, u'downloads': 0, u'size': 
None, u'modified': None, u'contentType': None, u'checkout': None, u'md5': None, u'sha256': None, }) version = file.create_version( self.user, { 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': '06d80e', }, { 'size': 1234, 'contentType': 'text/plain' }) assert_equals(file.serialize(), { 'id': file._id, 'path': file.path, 'created': None, 'name': 'MOAR PYLONS', 'kind': 'file', 'version': 1, 'downloads': 0, 'size': 1234L, 'modified': None, 'contentType': 'text/plain', 'checkout': None, 'md5': None, 'sha256': None, }) date = timezone.now() version.update_metadata({ 'modified': date.isoformat() }) assert_equals(file.serialize(), { 'id': file._id, 'path': file.path, 'created': date.isoformat(), 'name': 'MOAR PYLONS', 'kind': 'file', 'version': 1, 'downloads': 0, 'size': 1234L, 'modified': date.isoformat(), 'contentType': 'text/plain', 'checkout': None, 'md5': None, 'sha256': None, }) def test_get_child_by_name(self): child = self.node_settings.get_root().append_file('Test') assert_equal(child, self.node_settings.get_root().find_child_by_name('Test')) def test_root_node_path(self): assert_equal(self.node_settings.get_root().name, '') def test_folder_path(self): path = '/{}/'.format(self.node_settings.root_node._id) assert_equal(self.node_settings.get_root().path, path) def test_file_path(self): file = OsfStorageFileNode(name='MOAR PYLONS', is_file=True, node=self.node) file.save() assert_equal(file.name, 'MOAR PYLONS') assert_equal(file.path, '/{}'.format(file._id)) def test_append_folder(self): child = self.node_settings.get_root().append_folder('Test') children = self.node_settings.get_root().children assert_equal(child.kind, 'folder') assert_equal([child], list(children)) def test_append_file(self): child = self.node_settings.get_root().append_file('Test') children = self.node_settings.get_root().children assert_equal(child.kind, 'file') assert_equal([child], list(children)) def test_append_to_file(self): child = 
self.node_settings.get_root().append_file('Test') with assert_raises(AttributeError): child.append_file('Cant') def test_children(self): assert_equals([ self.node_settings.get_root().append_file('Foo{}Bar'.format(x)) for x in xrange(100) ], list(self.node_settings.get_root().children)) def test_download_count_file_defaults(self): child = self.node_settings.get_root().append_file('Test') assert_equals(child.get_download_count(), 0) @mock.patch('framework.sessions.session') def test_download_count_file(self, mock_session): mock_session.data = {} child = self.node_settings.get_root().append_file('Test') utils.update_analytics(self.project, child._id, 0) utils.update_analytics(self.project, child._id, 1) utils.update_analytics(self.project, child._id, 2) assert_equals(child.get_download_count(), 3) assert_equals(child.get_download_count(0), 1) assert_equals(child.get_download_count(1), 1) assert_equals(child.get_download_count(2), 1) @unittest.skip def test_create_version(self): pass @unittest.skip def test_update_version_metadata(self): pass def test_delete_folder(self): parent = self.node_settings.get_root().append_folder('Test') kids = [] for x in range(10): kid = parent.append_file(str(x)) kid.save() kids.append(kid) count = OsfStorageFileNode.find().count() tcount = models.TrashedFileNode.find().count() parent.delete() assert_is(OsfStorageFileNode.load(parent._id), None) assert_equals(count - 11, OsfStorageFileNode.find().count()) assert_equals(tcount + 11, models.TrashedFileNode.find().count()) for kid in kids: assert_is( OsfStorageFileNode.load(kid._id), None ) # def test_delete_file(self): # child = self.node_settings.get_root().append_file('Test') # child.delete() # # assert_is(models.OsfStorageFileNode.load(child._id), None) # trashed = models.TrashedFileNode.load(child._id) # child_storage = child.to_storage() # trashed_storage = trashed.to_storage() # trashed_storage['parent'] = trashed_storage['parent'][0] # child_storage['materialized_path'] = 
child.materialized_path # trashed_storage.pop('deleted_by') # trashed_storage.pop('deleted_on') # trashed_storage.pop('suspended') # assert_equal(child_storage.pop('path'), '') # assert_equal(trashed_storage.pop('path'), '/' + child._id) # assert_equal(trashed_storage, child_storage) def test_delete_file(self): child = self.node_settings.get_root().append_file('Test') field_names = [f.name for f in child._meta.get_fields() if not f.is_relation and f.name not in ['id', 'guid_string', 'content_type_pk']] child_data = {f: getattr(child, f) for f in field_names} child.delete() assert_raises(ObjectDoesNotExist, child.reload) assert_is(OsfStorageFileNode.load(child._id), None) trashed = models.TrashedFileNode.load(child._id) child_storage = dict() trashed_storage = dict() trashed_storage['parent'] = trashed.parent._id child_storage['materialized_path'] = child.materialized_path assert_equal(trashed.path, '/' + child._id) trashed_field_names = [f.name for f in trashed._meta.get_fields() if not f.is_relation and f.name not in ['id', 'guid_string', 'path', '_materialized_path', 'content_type_pk']] for f, value in child_data.iteritems(): if f in trashed_field_names: assert_equal(getattr(trashed, f), value) def test_materialized_path(self): child = self.node_settings.get_root().append_file('Test') assert_equals('/Test', child.materialized_path) def test_materialized_path_folder(self): child = self.node_settings.get_root().append_folder('Test') assert_equals('/Test/', child.materialized_path) def test_materialized_path_nested(self): child = self.node_settings.get_root().append_folder('Cloud').append_file('Carp') assert_equals('/Cloud/Carp', child.materialized_path) def test_copy(self): to_copy = self.node_settings.get_root().append_file('Carp') copy_to = self.node_settings.get_root().append_folder('Cloud') copied = to_copy.copy_under(copy_to) assert_not_equal(copied, to_copy) assert_equal(copied.parent, copy_to) assert_equal(to_copy.parent, self.node_settings.get_root()) def 
test_move_nested(self): new_project = ProjectFactory() other_node_settings = new_project.get_addon('osfstorage') move_to = other_node_settings.get_root().append_folder('Cloud') to_move = self.node_settings.get_root().append_folder('Carp') child = to_move.append_file('A dee um') moved = to_move.move_under(move_to) child.reload() assert_equal(moved, to_move) assert_equal(new_project, to_move.node) assert_equal(new_project, move_to.node) assert_equal(new_project, child.node) def test_copy_rename(self): to_copy = self.node_settings.get_root().append_file('Carp') copy_to = self.node_settings.get_root().append_folder('Cloud') copied = to_copy.copy_under(copy_to, name='But') assert_equal(copied.name, 'But') assert_not_equal(copied, to_copy) assert_equal(to_copy.name, 'Carp') assert_equal(copied.parent, copy_to) assert_equal(to_copy.parent, self.node_settings.get_root()) def test_move(self): to_move = self.node_settings.get_root().append_file('Carp') move_to = self.node_settings.get_root().append_folder('Cloud') moved = to_move.move_under(move_to) assert_equal(to_move, moved) assert_equal(moved.parent, move_to) def test_move_and_rename(self): to_move = self.node_settings.get_root().append_file('Carp') move_to = self.node_settings.get_root().append_folder('Cloud') moved = to_move.move_under(move_to, name='Tuna') assert_equal(to_move, moved) assert_equal(to_move.name, 'Tuna') assert_equal(moved.parent, move_to) @unittest.skip def test_move_folder(self): pass @unittest.skip def test_move_folder_and_rename(self): pass @unittest.skip def test_rename_folder(self): pass @unittest.skip def test_rename_file(self): pass @unittest.skip def test_move_across_nodes(self): pass @unittest.skip def test_move_folder_across_nodes(self): pass @unittest.skip def test_copy_across_nodes(self): pass @unittest.skip def test_copy_folder_across_nodes(self): pass def test_get_file_guids_for_live_file(self): node = self.node_settings.owner file = OsfStorageFile(name='foo', node=node) file.save() 
file.get_guid(create=True) guid = file.get_guid()._id assert guid is not None assert guid in OsfStorageFileNode.get_file_guids( '/' + file._id, provider='osfstorage', node=node) def test_get_file_guids_for_live_folder(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) files[-1].get_guid(create=True) guids = [file.get_guid()._id for file in files] assert len(guids) == len(files) all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert sorted(guids) == sorted(all_guids) def test_get_file_guids_for_trashed_file(self): node = self.node_settings.owner file = OsfStorageFile(name='foo', node=node) file.save() file.get_guid(create=True) guid = file.get_guid()._id file.delete() assert guid is not None assert guid in OsfStorageFileNode.get_file_guids( '/' + file._id, provider='osfstorage', node=node) def test_get_file_guids_for_trashed_folder(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) files[-1].get_guid(create=True) guids = [file.get_guid()._id for file in files] assert len(guids) == len(files) folder.delete() all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert sorted(guids) == sorted(all_guids) def test_get_file_guids_live_file_wo_guid(self): node = self.node_settings.owner file = OsfStorageFile(name='foo', node=node) file.save() assert [] == OsfStorageFileNode.get_file_guids( '/' + file._id, provider='osfstorage', node=node) def test_get_file_guids_for_live_folder_wo_guids(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) all_guids = 
OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert [] == all_guids def test_get_file_guids_trashed_file_wo_guid(self): node = self.node_settings.owner file = OsfStorageFile(name='foo', node=node) file.save() file.delete() assert [] == OsfStorageFileNode.get_file_guids( '/' + file._id, provider='osfstorage', node=node) def test_get_file_guids_for_trashed_folder_wo_guids(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) folder.delete() all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert [] == all_guids def test_get_file_guids_for_live_folder_recursive(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) files[-1].get_guid(create=True) subfolder = folder.append_folder('subfoo') for i in range(1, 4): files.append(subfolder.append_file('subfoo.{}'.format(i))) files[-1].get_guid(create=True) guids = [file.get_guid()._id for file in files] assert len(guids) == len(files) all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert sorted(guids) == sorted(all_guids) def test_get_file_guids_for_trashed_folder_recursive(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) files[-1].get_guid(create=True) subfolder = folder.append_folder('subfoo') for i in range(1, 4): files.append(subfolder.append_file('subfoo.{}'.format(i))) files[-1].get_guid(create=True) guids = [file.get_guid()._id for file in files] assert len(guids) == len(files) folder.delete() all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, 
provider='osfstorage', node=node) assert sorted(guids) == sorted(all_guids) def test_get_file_guids_for_live_folder_recursive_wo_guids(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) subfolder = folder.append_folder('subfoo') for i in range(1, 4): files.append(subfolder.append_file('subfoo.{}'.format(i))) all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert [] == all_guids def test_get_file_guids_for_trashed_folder_recursive_wo_guids(self): node = self.node_settings.owner folder = OsfStorageFolder(name='foofolder', node=node) folder.save() files = [] for i in range(1, 4): files.append(folder.append_file('foo.{}'.format(i))) subfolder = folder.append_folder('subfoo') for i in range(1, 4): files.append(subfolder.append_file('subfoo.{}'.format(i))) folder.delete() all_guids = OsfStorageFileNode.get_file_guids( '/' + folder._id, provider='osfstorage', node=node) assert [] == all_guids @pytest.mark.django_db class TestNodeSettingsModel(StorageTestCase): def test_fields(self): assert_true(self.node_settings._id) assert_is(self.node_settings.has_auth, True) assert_is(self.node_settings.complete, True) def test_after_fork_copies_versions(self): num_versions = 5 path = 'jazz/dreamers-ball.mp3' record = self.node_settings.get_root().append_file(path) for _ in range(num_versions): version = factories.FileVersionFactory() record.versions.add(version) fork = self.project.fork_node(self.auth_obj) fork_node_settings = fork.get_addon('osfstorage') fork_node_settings.reload() cloned_record = fork_node_settings.get_root().find_child_by_name(path) assert_equal(list(cloned_record.versions.all()), list(record.versions.all())) assert_true(fork_node_settings.root_node) @pytest.mark.django_db class TestOsfStorageFileVersion(StorageTestCase): def setUp(self): super(TestOsfStorageFileVersion, 
self).setUp() self.user = factories.AuthUserFactory() self.mock_date = datetime.datetime(1991, 10, 31, tzinfo=pytz.UTC) def test_fields(self): version = factories.FileVersionFactory( size=1024, content_type='application/json', date_modified=timezone.now(), ) retrieved = models.FileVersion.load(version._id) assert_true(retrieved.creator) assert_true(retrieved.location) assert_true(retrieved.size) # sometimes identifiers are strings, so this always has to be a string, sql is funny about that. assert_equal(retrieved.identifier, u"0") assert_true(retrieved.content_type) assert_true(retrieved.date_modified) def test_is_duplicate_true(self): version1 = factories.FileVersionFactory() version2 = factories.FileVersionFactory() assert_true(version1.is_duplicate(version2)) assert_true(version2.is_duplicate(version1)) def test_is_duplicate_false(self): version1 = factories.FileVersionFactory( location={ 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': 'd077f2', }, ) version2 = factories.FileVersionFactory( location={ 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': '06d80e', }, ) assert_false(version1.is_duplicate(version2)) assert_false(version2.is_duplicate(version1)) def test_validate_location(self): version = factories.FileVersionFactory.build(location={'invalid': True}) with assert_raises(ValidationError): version.save() version.location = { 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': 'object', } version.save() def test_update_metadata(self): version = factories.FileVersionFactory() version.update_metadata( {'archive': 'glacier', 'size': 123, 'modified': 'Mon, 16 Feb 2015 18:45:34 GMT'}) version.reload() assert_in('archive', version.metadata) assert_equal(version.metadata['archive'], 'glacier') def test_matching_archive(self): version = factories.FileVersionFactory( location={ 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': 'd077f2', }, metadata={'sha256': 'existing'} ) 
factories.FileVersionFactory( location={ 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': '06d80e', }, metadata={ 'sha256': 'existing', 'vault': 'the cloud', 'archive': 'erchiv' } ) assert_is(version._find_matching_archive(), True) assert_is_not(version.archive, None) assert_equal(version.metadata['vault'], 'the cloud') assert_equal(version.metadata['archive'], 'erchiv') def test_archive_exits(self): node_addon = self.project.get_addon('osfstorage') fnode = node_addon.get_root().append_file('MyCoolTestFile') version = fnode.create_version( self.user, { 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': '06d80e', }, { 'sha256': 'existing', 'vault': 'the cloud', 'archive': 'erchiv' }) assert_equal(version.archive, 'erchiv') version2 = fnode.create_version( self.user, { 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': '07d80a', }, { 'sha256': 'existing', }) assert_equal(version2.archive, 'erchiv') def test_no_matching_archive(self): models.FileVersion.remove() assert_is(False, factories.FileVersionFactory( location={ 'service': 'cloud', settings.WATERBUTLER_RESOURCE: 'osf', 'object': 'd077f2', }, metadata={'sha256': 'existing'} )._find_matching_archive()) @pytest.mark.django_db class TestOsfStorageCheckout(StorageTestCase): def setUp(self): super(TestOsfStorageCheckout, self).setUp() self.user = factories.AuthUserFactory() self.node = ProjectFactory(creator=self.user) self.osfstorage = self.node.get_addon('osfstorage') self.root_node = self.osfstorage.get_root() self.file = self.root_node.append_file('3005') def test_checkout_logs(self): non_admin = factories.AuthUserFactory() self.node.add_contributor(non_admin, permissions=['read', 'write']) self.node.save() self.file.check_in_or_out(non_admin, non_admin, save=True) self.file.reload() self.node.reload() assert_equal(self.file.checkout, non_admin) assert_equal(self.node.logs.latest().action, 'checked_out') assert_equal(self.node.logs.latest().user, non_admin) 
self.file.check_in_or_out(self.user, None, save=True) self.file.reload() self.node.reload() assert_equal(self.file.checkout, None) assert_equal(self.node.logs.latest().action, 'checked_in') assert_equal(self.node.logs.latest().user, self.user) self.file.check_in_or_out(self.user, self.user, save=True) self.file.reload() self.node.reload() assert_equal(self.file.checkout, self.user) assert_equal(self.node.logs.latest().action, 'checked_out') assert_equal(self.node.logs.latest().user, self.user) with assert_raises(FileNodeCheckedOutError): self.file.check_in_or_out(non_admin, None, save=True) with assert_raises(FileNodeCheckedOutError): self.file.check_in_or_out(non_admin, non_admin, save=True) def test_delete_checked_out_file(self): self.file.check_in_or_out(self.user, self.user, save=True) self.file.reload() assert_equal(self.file.checkout, self.user) with assert_raises(FileNodeCheckedOutError): self.file.delete() def test_delete_folder_with_checked_out_file(self): folder = self.root_node.append_folder('folder') self.file.move_under(folder) self.file.check_in_or_out(self.user, self.user, save=True) self.file.reload() assert_equal(self.file.checkout, self.user) with assert_raises(FileNodeCheckedOutError): folder.delete() def test_move_checked_out_file(self): self.file.check_in_or_out(self.user, self.user, save=True) self.file.reload() assert_equal(self.file.checkout, self.user) folder = self.root_node.append_folder('folder') with assert_raises(FileNodeCheckedOutError): self.file.move_under(folder) def test_checked_out_merge(self): user = factories.AuthUserFactory() node = ProjectFactory(creator=user) osfstorage = node.get_addon('osfstorage') root_node = osfstorage.get_root() file = root_node.append_file('test_file') user_merge_target = factories.AuthUserFactory() file.check_in_or_out(user, user, save=True) file.reload() assert_equal(file.checkout, user) user_merge_target.merge_user(user) file.reload() assert_equal(user_merge_target.id, file.checkout.id) def 
test_remove_contributor_with_checked_file(self): user = factories.AuthUserFactory() Contributor.objects.create( node=self.node, user=user, admin=True, write=True, read=True, visible=True ) self.file.check_in_or_out(self.user, self.user, save=True) self.file.reload() assert_equal(self.file.checkout, self.user) self.file.node.remove_contributors([self.user], save=True) self.file.reload() assert_equal(self.file.checkout, None)
anrl/gini3
refs/heads/master
backend/src/gloader/xml/parsers/xmlproc/namespace.py
15
""" A parser filter for namespace support. Placed externally to the parser for efficiency reasons. $Id: namespace.py,v 1.5 2001/12/30 12:09:14 loewis Exp $ """ import string import xmlapp # --- ParserFilter class ParserFilter(xmlapp.Application): "A generic parser filter class." def __init__(self): xmlapp.Application.__init__(self) self.app=xmlapp.Application() def set_application(self,app): "Sets the application to report events to." self.app=app # --- Methods inherited from xmlapp.Application def set_locator(self,locator): xmlapp.Application.set_locator(self,locator) self.app.set_locator(locator) def doc_start(self): self.app.doc_start() def doc_end(self): self.app.doc_end() def handle_comment(self,data): self.app.handle_comment(data) def handle_start_tag(self,name,attrs): self.app.handle_start_tag(name,attrs) def handle_end_tag(self,name): self.app.handle_end_tag(name) def handle_data(self,data,start,end): self.app.handle_data(data,start,end) def handle_ignorable_data(self,data,start,end): self.app.handle_ignorable_data(data,start,end) def handle_pi(self,target,data): self.app.handle_pi(target,data) def handle_doctype(self,root,pubID,sysID): self.app.handle_doctype(root,pubID,sysID) def set_entity_info(self,xmlver,enc,sddecl): self.app.set_entity_info(xmlver,enc,sddecl) # --- NamespaceFilter class NamespaceFilter(ParserFilter): """An xmlproc application that processes qualified names and reports them as 'URI local-part' names. It reports errors through the error reporting mechanisms of the parser.""" def __init__(self,parser): ParserFilter.__init__(self) self.ns_map={} # Current prefix -> URI map self.ns_stack=[] # Pushed for each element, used to maint ns_map self.rep_ns_attrs=0 # Report xmlns-attributes? self.parser=parser def set_report_ns_attributes(self,action): "Tells the filter whether to report or delete xmlns-attributes." 
self.rep_ns_attrs=action # --- Overridden event methods def handle_start_tag(self,name,attrs): old_ns={} # Reset ns_map to these values when we leave this element del_ns=[] # Delete these prefixes from ns_map when we leave element # attrs=attrs.copy() Will have to do this if more filters are made # Find declarations, update self.ns_map and self.ns_stack for (a,v) in attrs.items(): if a[:6]=="xmlns:": prefix=a[6:] if string.find(prefix,":")!=-1: self.parser.report_error(1900) if v=="": self.parser.report_error(1901) elif a=="xmlns": prefix="" else: continue if self.ns_map.has_key(prefix): old_ns[prefix]=self.ns_map[prefix] else: del_ns.append(prefix) if prefix=="" and v=="": del self.ns_map[prefix] else: self.ns_map[prefix]=v if not self.rep_ns_attrs: del attrs[a] self.ns_stack.append((old_ns,del_ns)) # Process elem and attr names name=self.__process_name(name) parts=string.split(name) if len(parts)>1: ns=parts[0] else: ns=None for (a,v) in attrs.items(): del attrs[a] aname=self.__process_name(a,ns) if attrs.has_key(aname): self.parser.report_error(1903) attrs[aname]=v # Report event self.app.handle_start_tag(name,attrs) def handle_end_tag(self,name): name=self.__process_name(name) # Clean up self.ns_map and self.ns_stack (old_ns,del_ns)=self.ns_stack[-1] del self.ns_stack[-1] self.ns_map.update(old_ns) for prefix in del_ns: del self.ns_map[prefix] self.app.handle_end_tag(name) # --- Internal methods def __process_name(self,name,default_to=None): n=string.split(name,":") if len(n)>2: self.parser.report_error(1900) return name elif len(n)==2: if n[0]=="xmlns": return name try: return "%s %s" % (self.ns_map[n[0]],n[1]) except KeyError: self.parser.report_error(1902) return name elif default_to!=None: return "%s %s" % (default_to,name) elif self.ns_map.has_key("") and name!="xmlns": return "%s %s" % (self.ns_map[""],name) else: return name
arth-co/shoop
refs/heads/master
shoop_workbench/settings/utils.py
15
# -*- coding: utf-8 -*- class DisableMigrations(object): # See https://gist.github.com/NotSqrt/5f3c76cd15e40ef62d09 def __contains__(self, item): return True def __getitem__(self, item): return "notmigrations"
pwnieexpress/raspberry_pwn
refs/heads/master
src/pentest/voiper/sulley/sulley/pgraph/edge.py
25
# # pGRAPH # Copyright (C) 2006 Pedram Amini <pedram.amini@gmail.com> # # This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied # warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with this program; if not, write to the Free # Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # ''' @author: Pedram Amini @license: GNU General Public License 2.0 or later @contact: pedram.amini@gmail.com @organization: www.openrce.org ''' class edge (object): ''' ''' id = None src = None dst = None # general graph attributes. color = 0x000000 label = "" # gml relevant attributes. gml_arrow = "none" gml_stipple = 1 gml_line_width = 1.0 #################################################################################################################### def __init__ (self, src, dst): ''' Class constructor. @type src: Mixed @param src: Edge source @type dst: Mixed @param dst: Edge destination ''' # the unique id for any edge (provided that duplicates are not allowed) is the combination of the source and # the destination stored as a long long. self.id = (src << 32) + dst self.src = src self.dst = dst # general graph attributes. self.color = 0x000000 self.label = "" # gml relevant attributes. self.gml_arrow = "none" self.gml_stipple = 1 self.gml_line_width = 1.0 #################################################################################################################### def render_edge_gml (self, graph): ''' Render an edge description suitable for use in a GML file using the set internal attributes. 
@type graph: pgraph.graph @param graph: Top level graph object containing the current edge @rtype: String @return: GML edge description ''' src = graph.find_node("id", self.src) dst = graph.find_node("id", self.dst) # ensure nodes exist at the source and destination of this edge. if not src or not dst: return "" edge = ' edge [\n' edge += ' source %d\n' % src.number edge += ' target %d\n' % dst.number edge += ' generalization 0\n' edge += ' graphics [\n' edge += ' type "line"\n' edge += ' arrow "%s"\n' % self.gml_arrow edge += ' stipple %d\n' % self.gml_stipple edge += ' lineWidth %f\n' % self.gml_line_width edge += ' fill "#%06x"\n' % self.color edge += ' ]\n' edge += ' ]\n' return edge #################################################################################################################### def render_edge_graphviz (self, graph): ''' Render an edge suitable for use in a Pydot graph using the set internal attributes. @type graph: pgraph.graph @param graph: Top level graph object containing the current edge @rtype: pydot.Edge() @return: Pydot object representing edge ''' import pydot # no need to validate if nodes exist for src/dst. graphviz takes care of that for us transparently. dot_edge = pydot.Edge(self.src, self.dst) if self.label: dot_edge.label = self.label dot_edge.color = "#%06x" % self.color return dot_edge #################################################################################################################### def render_edge_udraw (self, graph): ''' Render an edge description suitable for use in a GML file using the set internal attributes. @type graph: pgraph.graph @param graph: Top level graph object containing the current edge @rtype: String @return: GML edge description ''' src = graph.find_node("id", self.src) dst = graph.find_node("id", self.dst) # ensure nodes exist at the source and destination of this edge. if not src or not dst: return "" # translate newlines for uDraw. 
self.label = self.label.replace("\n", "\\n") udraw = 'l("%08x->%08x",' % (self.src, self.dst) udraw += 'e("",' # open edge udraw += '[' # open attributes udraw += 'a("EDGECOLOR","#%06x"),' % self.color udraw += 'a("OBJECT","%s")' % self.label udraw += '],' # close attributes udraw += 'r("%08x")' % self.dst udraw += ')' # close edge udraw += ')' # close element return udraw #################################################################################################################### def render_edge_udraw_update (self): ''' Render an edge update description suitable for use in a GML file using the set internal attributes. @rtype: String @return: GML edge update description ''' # translate newlines for uDraw. self.label = self.label.replace("\n", "\\n") udraw = 'new_edge("%08x->%08x","",' % (self.src, self.dst) udraw += '[' udraw += 'a("EDGECOLOR","#%06x"),' % self.color udraw += 'a("OBJECT","%s")' % self.label udraw += '],' udraw += '"%08x","%08x"' % (self.src, self.dst) udraw += ')' return udraw
mahak/ansible
refs/heads/devel
lib/ansible/galaxy/dependency_resolution/dataclasses.py
18
# -*- coding: utf-8 -*- # Copyright: (c) 2020-2021, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) """Dependency structs.""" # FIXME: add caching all over the place from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json import os from collections import namedtuple from glob import iglob try: from typing import TYPE_CHECKING except ImportError: TYPE_CHECKING = False if TYPE_CHECKING: from typing import Tuple, Type, TypeVar from ansible.galaxy.collection.concrete_artifact_manager import ( ConcreteArtifactsManager, ) Collection = TypeVar( 'Collection', 'Candidate', 'Requirement', '_ComputedReqKindsMixin', ) import yaml from ansible.errors import AnsibleError from ansible.galaxy.api import GalaxyAPI from ansible.module_utils._text import to_bytes, to_native, to_text from ansible.module_utils.six.moves.urllib.parse import urlparse from ansible.module_utils.six import raise_from from ansible.utils.collection_loader import AnsibleCollectionRef from ansible.utils.display import Display _ALLOW_CONCRETE_POINTER_IN_SOURCE = False # NOTE: This is a feature flag _GALAXY_YAML = b'galaxy.yml' _MANIFEST_JSON = b'MANIFEST.json' display = Display() def _is_collection_src_dir(dir_path): b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') return os.path.isfile(os.path.join(b_dir_path, _GALAXY_YAML)) def _is_installed_collection_dir(dir_path): b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') return os.path.isfile(os.path.join(b_dir_path, _MANIFEST_JSON)) def _is_collection_dir(dir_path): return ( _is_installed_collection_dir(dir_path) or _is_collection_src_dir(dir_path) ) def _find_collections_in_subdirs(dir_path): b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') subdir_glob_pattern = os.path.join( b_dir_path, # b'*', # namespace is supposed to be top-level per spec b'*', # collection name ) for subdir in iglob(subdir_glob_pattern): if 
os.path.isfile(os.path.join(subdir, _MANIFEST_JSON)): yield subdir elif os.path.isfile(os.path.join(subdir, _GALAXY_YAML)): yield subdir def _is_collection_namespace_dir(tested_str): return any(_find_collections_in_subdirs(tested_str)) def _is_file_path(tested_str): return os.path.isfile(to_bytes(tested_str, errors='surrogate_or_strict')) def _is_http_url(tested_str): return urlparse(tested_str).scheme.lower() in {'http', 'https'} def _is_git_url(tested_str): return tested_str.startswith(('git+', 'git@')) def _is_concrete_artifact_pointer(tested_str): return any( predicate(tested_str) for predicate in ( # NOTE: Maintain the checks to be sorted from light to heavy: _is_git_url, _is_http_url, _is_file_path, _is_collection_dir, _is_collection_namespace_dir, ) ) class _ComputedReqKindsMixin: @classmethod def from_dir_path_as_unknown( # type: ignore[misc] cls, # type: Type[Collection] dir_path, # type: bytes art_mgr, # type: ConcreteArtifactsManager ): # type: (...) -> Collection """Make collection from an unspecified dir type. This alternative constructor attempts to grab metadata from the given path if it's a directory. If there's no metadata, it falls back to guessing the FQCN based on the directory path and sets the version to "*". It raises a ValueError immediatelly if the input is not an existing directory path. """ if not os.path.isdir(dir_path): raise ValueError( "The collection directory '{path!s}' doesn't exist". format(path=to_native(dir_path)), ) try: return cls.from_dir_path(dir_path, art_mgr) except ValueError: return cls.from_dir_path_implicit(dir_path) @classmethod def from_dir_path(cls, dir_path, art_mgr): """Make collection from an directory with metadata.""" b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict') if not _is_collection_dir(b_dir_path): display.warning( u"Collection at '{path!s}' does not have a {manifest_json!s} " u'file, nor has it {galaxy_yml!s}: cannot detect version.'. 
format( galaxy_yml=to_text(_GALAXY_YAML), manifest_json=to_text(_MANIFEST_JSON), path=to_text(dir_path, errors='surrogate_or_strict'), ), ) raise ValueError( '`dir_path` argument must be an installed or a source' ' collection directory.', ) tmp_inst_req = cls(None, None, dir_path, 'dir') req_name = art_mgr.get_direct_collection_fqcn(tmp_inst_req) req_version = art_mgr.get_direct_collection_version(tmp_inst_req) return cls(req_name, req_version, dir_path, 'dir') @classmethod def from_dir_path_implicit( # type: ignore[misc] cls, # type: Type[Collection] dir_path, # type: bytes ): # type: (...) -> Collection """Construct a collection instance based on an arbitrary dir. This alternative constructor infers the FQCN based on the parent and current directory names. It also sets the version to "*" regardless of whether any of known metadata files are present. """ # There is no metadata, but it isn't required for a functional collection. Determine the namespace.name from the path. u_dir_path = to_text(dir_path, errors='surrogate_or_strict') path_list = u_dir_path.split(os.path.sep) req_name = '.'.join(path_list[-2:]) return cls(req_name, '*', dir_path, 'dir') # type: ignore[call-arg] @classmethod def from_string(cls, collection_input, artifacts_manager): req = {} if _is_concrete_artifact_pointer(collection_input): # Arg is a file path or URL to a collection req['name'] = collection_input else: req['name'], _sep, req['version'] = collection_input.partition(':') if not req['version']: del req['version'] return cls.from_requirement_dict(req, artifacts_manager) @classmethod def from_requirement_dict(cls, collection_req, art_mgr): req_name = collection_req.get('name', None) req_version = collection_req.get('version', '*') req_type = collection_req.get('type') # TODO: decide how to deprecate the old src API behavior req_source = collection_req.get('source', None) if req_type is None: if ( # FIXME: decide on the future behavior: _ALLOW_CONCRETE_POINTER_IN_SOURCE and req_source is 
not None and _is_concrete_artifact_pointer(req_source) ): src_path = req_source elif ( req_name is not None and AnsibleCollectionRef.is_valid_collection_name(req_name) ): req_type = 'galaxy' elif ( req_name is not None and _is_concrete_artifact_pointer(req_name) ): src_path, req_name = req_name, None else: dir_tip_tmpl = ( # NOTE: leading LFs are for concat '\n\nTip: Make sure you are pointing to the right ' 'subdirectory — `{src!s}` looks like a directory ' 'but it is neither a collection, nor a namespace ' 'dir.' ) if req_source is not None and os.path.isdir(req_source): tip = dir_tip_tmpl.format(src=req_source) elif req_name is not None and os.path.isdir(req_name): tip = dir_tip_tmpl.format(src=req_name) elif req_name: tip = '\n\nCould not find {0}.'.format(req_name) else: tip = '' raise AnsibleError( # NOTE: I'd prefer a ValueError instead 'Neither the collection requirement entry key ' "'name', nor 'source' point to a concrete " "resolvable collection artifact. Also 'name' is " 'not an FQCN. A valid collection name must be in ' 'the format <namespace>.<collection>. Please make ' 'sure that the namespace and the collection name ' ' contain characters from [a-zA-Z0-9_] only.' '{extra_tip!s}'.format(extra_tip=tip), ) if req_type is None: if _is_git_url(src_path): req_type = 'git' req_source = src_path elif _is_http_url(src_path): req_type = 'url' req_source = src_path elif _is_file_path(src_path): req_type = 'file' req_source = src_path elif _is_collection_dir(src_path): if _is_installed_collection_dir(src_path) and _is_collection_src_dir(src_path): # Note that ``download`` requires a dir with a ``galaxy.yml`` and fails if it # doesn't exist, but if a ``MANIFEST.json`` also exists, it would be used # instead of the ``galaxy.yml``. raise AnsibleError( u"Collection requirement at '{path!s}' has both a {manifest_json!s} " u"file and a {galaxy_yml!s}.\nThe requirement must either be an installed " u"collection directory or a source collection directory, not both.". 
format( path=to_text(src_path, errors='surrogate_or_strict'), manifest_json=to_text(_MANIFEST_JSON), galaxy_yml=to_text(_GALAXY_YAML), ) ) req_type = 'dir' req_source = src_path elif _is_collection_namespace_dir(src_path): req_name = None # No name for a virtual req or "namespace."? req_type = 'subdirs' req_source = src_path else: raise AnsibleError( # NOTE: this is never supposed to be hit 'Failed to automatically detect the collection ' 'requirement type.', ) if req_type not in {'file', 'galaxy', 'git', 'url', 'dir', 'subdirs'}: raise AnsibleError( "The collection requirement entry key 'type' must be " 'one of file, galaxy, git, dir, subdirs, or url.' ) if req_name is None and req_type == 'galaxy': raise AnsibleError( 'Collections requirement entry should contain ' "the key 'name' if it's requested from a Galaxy-like " 'index server.', ) if req_type != 'galaxy' and req_source is None: req_source, req_name = req_name, None if ( req_type == 'galaxy' and isinstance(req_source, GalaxyAPI) and not _is_http_url(req_source.api_server) ): raise AnsibleError( "Collections requirement 'source' entry should contain " 'a valid Galaxy API URL but it does not: {not_url!s} ' 'is not an HTTP URL.'. format(not_url=req_source.api_server), ) tmp_inst_req = cls(req_name, req_version, req_source, req_type) if req_type not in {'galaxy', 'subdirs'} and req_name is None: req_name = art_mgr.get_direct_collection_fqcn(tmp_inst_req) # TODO: fix the cache key in artifacts manager? if req_type not in {'galaxy', 'subdirs'} and req_version == '*': req_version = art_mgr.get_direct_collection_version(tmp_inst_req) return cls( req_name, req_version, req_source, req_type, ) def __repr__(self): return ( '<{self!s} of type {coll_type!r} from {src!s}>'. 
format(self=self, coll_type=self.type, src=self.src or 'Galaxy') ) def __str__(self): return to_native(self.__unicode__()) def __unicode__(self): if self.fqcn is None: return ( u'"virtual collection Git repo"' if self.is_scm else u'"virtual collection namespace"' ) return ( u'{fqcn!s}:{ver!s}'. format(fqcn=to_text(self.fqcn), ver=to_text(self.ver)) ) def _get_separate_ns_n_name(self): # FIXME: use LRU cache return self.fqcn.split('.') @property def namespace(self): if self.is_virtual: raise TypeError('Virtual collections do not have a namespace') return self._get_separate_ns_n_name()[0] @property def name(self): if self.is_virtual: raise TypeError('Virtual collections do not have a name') return self._get_separate_ns_n_name()[-1] @property def canonical_package_id(self): if not self.is_virtual: return to_native(self.fqcn) return ( '<virtual namespace from {src!s} of type {src_type!s}>'. format(src=to_native(self.src), src_type=to_native(self.type)) ) @property def is_virtual(self): return self.is_scm or self.is_subdirs @property def is_file(self): return self.type == 'file' @property def is_dir(self): return self.type == 'dir' @property def namespace_collection_paths(self): return [ to_native(path) for path in _find_collections_in_subdirs(self.src) ] @property def is_subdirs(self): return self.type == 'subdirs' @property def is_url(self): return self.type == 'url' @property def is_scm(self): return self.type == 'git' @property def is_concrete_artifact(self): return self.type in {'git', 'url', 'file', 'dir', 'subdirs'} @property def is_online_index_pointer(self): return not self.is_concrete_artifact class Requirement( _ComputedReqKindsMixin, namedtuple('Requirement', ('fqcn', 'ver', 'src', 'type')), ): """An abstract requirement request.""" class Candidate( _ComputedReqKindsMixin, namedtuple('Candidate', ('fqcn', 'ver', 'src', 'type')) ): """A concrete collection candidate with its version resolved."""
qiqian/shadowsocks
refs/heads/master
shadowsocks/server.py
8
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014 clowwindy # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from __future__ import absolute_import, division, print_function, \ with_statement import sys import os import logging import signal sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../')) from shadowsocks import utils, encrypt, eventloop, tcprelay, udprelay, asyncdns def main(): utils.check_python() config = utils.get_config(False) utils.print_shadowsocks() if config['port_password']: if config['password']: logging.warn('warning: port_password should not be used with ' 'server_port and password. 
server_port and password ' 'will be ignored') else: config['port_password'] = {} server_port = config['server_port'] if type(server_port) == list: for a_server_port in server_port: config['port_password'][a_server_port] = config['password'] else: config['port_password'][str(server_port)] = config['password'] encrypt.try_cipher(config['password'], config['method']) tcp_servers = [] udp_servers = [] dns_resolver = asyncdns.DNSResolver() for port, password in config['port_password'].items(): a_config = config.copy() a_config['server_port'] = int(port) a_config['password'] = password logging.info("starting server at %s:%d" % (a_config['server'], int(port))) tcp_servers.append(tcprelay.TCPRelay(a_config, dns_resolver, False)) udp_servers.append(udprelay.UDPRelay(a_config, dns_resolver, False)) def run_server(): def child_handler(signum, _): logging.warn('received SIGQUIT, doing graceful shutting down..') list(map(lambda s: s.close(next_tick=True), tcp_servers + udp_servers)) signal.signal(getattr(signal, 'SIGQUIT', signal.SIGTERM), child_handler) try: loop = eventloop.EventLoop() dns_resolver.add_to_loop(loop) list(map(lambda s: s.add_to_loop(loop), tcp_servers + udp_servers)) loop.run() except (KeyboardInterrupt, IOError, OSError) as e: logging.error(e) if config['verbose']: import traceback traceback.print_exc() os._exit(1) if int(config['workers']) > 1: if os.name == 'posix': children = [] is_child = False for i in range(0, int(config['workers'])): r = os.fork() if r == 0: logging.info('worker started') is_child = True run_server() break else: children.append(r) if not is_child: def handler(signum, _): for pid in children: try: os.kill(pid, signum) except OSError: # child may already exited pass sys.exit() signal.signal(signal.SIGTERM, handler) signal.signal(signal.SIGQUIT, handler) # master for a_tcp_server in tcp_servers: a_tcp_server.close() for a_udp_server in udp_servers: a_udp_server.close() dns_resolver.close() for child in children: os.waitpid(child, 0) else: 
logging.warn('worker is only available on Unix/Linux') run_server() else: run_server() if __name__ == '__main__': main()
seomoz/qless-py
refs/heads/master
qless/util.py
1
'''Some utility functions''' def import_class(klass): '''Import the named class and return that class''' mod = __import__(klass.rpartition('.')[0]) for segment in klass.split('.')[1:-1]: mod = getattr(mod, segment) return getattr(mod, klass.rpartition('.')[2])
pmaunz/pyqtgraph
refs/heads/develop
examples/Arrow.py
27
# -*- coding: utf-8 -*- """ Display an animated arrowhead following a curve. This example uses the CurveArrow class, which is a combination of ArrowItem and CurvePoint. To place a static arrow anywhere in a scene, use ArrowItem. To attach other types of item to a curve, use CurvePoint. """ import initExample ## Add path to library (just for examples; you do not need this) import numpy as np from pyqtgraph.Qt import QtGui, QtCore import pyqtgraph as pg app = QtGui.QApplication([]) w = QtGui.QMainWindow() cw = pg.GraphicsLayoutWidget() w.show() w.resize(400,600) w.setCentralWidget(cw) w.setWindowTitle('pyqtgraph example: Arrow') p = cw.addPlot(row=0, col=0) p2 = cw.addPlot(row=1, col=0) ## variety of arrow shapes a1 = pg.ArrowItem(angle=-160, tipAngle=60, headLen=40, tailLen=40, tailWidth=20, pen={'color': 'w', 'width': 3}) a2 = pg.ArrowItem(angle=-120, tipAngle=30, baseAngle=20, headLen=40, tailLen=40, tailWidth=8, pen=None, brush='y') a3 = pg.ArrowItem(angle=-60, tipAngle=30, baseAngle=20, headLen=40, tailLen=None, brush=None) a4 = pg.ArrowItem(angle=-20, tipAngle=30, baseAngle=-30, headLen=40, tailLen=None) a2.setPos(10,0) a3.setPos(20,0) a4.setPos(30,0) p.addItem(a1) p.addItem(a2) p.addItem(a3) p.addItem(a4) p.setRange(QtCore.QRectF(-20, -10, 60, 20)) ## Animated arrow following curve c = p2.plot(x=np.sin(np.linspace(0, 2*np.pi, 1000)), y=np.cos(np.linspace(0, 6*np.pi, 1000))) a = pg.CurveArrow(c) a.setStyle(headLen=40) p2.addItem(a) anim = a.makeAnimation(loop=-1) anim.start() ## Start Qt event loop unless running in interactive mode or using pyside. if __name__ == '__main__': import sys if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): QtGui.QApplication.instance().exec_()
ahb0327/intellij-community
refs/heads/master
python/testData/inspections/RedundantParentheses_after.py
167
if True: pass
huang4fstudio/django
refs/heads/master
tests/forms_tests/tests/test_input_formats.py
313
from datetime import date, datetime, time from django import forms from django.test import SimpleTestCase, override_settings from django.utils.translation import activate, deactivate @override_settings(TIME_INPUT_FORMATS=["%I:%M:%S %p", "%I:%M %p"], USE_L10N=True) class LocalizedTimeTests(SimpleTestCase): def setUp(self): # nl/formats.py has customized TIME_INPUT_FORMATS: # ['%H:%M:%S', '%H.%M:%S', '%H.%M', '%H:%M'] activate('nl') def tearDown(self): deactivate() def test_timeField(self): "TimeFields can parse dates in the default format" f = forms.TimeField() # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') # Parse a time in a valid format, get a parsed result result = f.clean('13:30:05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip text = f.widget._format_value(result) self.assertEqual(text, '13:30:05') # Parse a time in a valid, but non-default format, get a parsed result result = f.clean('13:30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") # ISO formats are accepted, even if not specified in formats.py result = f.clean('13:30:05.000155') self.assertEqual(result, time(13, 30, 5, 155)) def test_localized_timeField(self): "Localized TimeFields act as unlocalized widgets" f = forms.TimeField(localize=True) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') # Parse a time in a valid format, get a parsed result result = f.clean('13:30:05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('13:30') self.assertEqual(result, time(13, 30, 0)) # Check that the 
parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") def test_timeField_with_inputformat(self): "TimeFields with manually specified input formats can accept those formats" f = forms.TimeField(input_formats=["%H.%M.%S", "%H.%M"]) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('13.30.05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "13:30:05") # Parse a time in a valid format, get a parsed result result = f.clean('13.30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") def test_localized_timeField_with_inputformat(self): "Localized TimeFields with manually specified input formats can accept those formats" f = forms.TimeField(input_formats=["%H.%M.%S", "%H.%M"], localize=True) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('13.30.05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "13:30:05") # Parse a time in a valid format, get a parsed result result = f.clean('13.30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") @override_settings(TIME_INPUT_FORMATS=["%I:%M:%S %p", "%I:%M %p"]) class 
CustomTimeInputFormatsTests(SimpleTestCase): def test_timeField(self): "TimeFields can parse dates in the default format" f = forms.TimeField() # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('1:30:05 PM') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip text = f.widget._format_value(result) self.assertEqual(text, '01:30:05 PM') # Parse a time in a valid, but non-default format, get a parsed result result = f.clean('1:30 PM') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM") def test_localized_timeField(self): "Localized TimeFields act as unlocalized widgets" f = forms.TimeField(localize=True) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('1:30:05 PM') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, '01:30:05 PM') # Parse a time in a valid format, get a parsed result result = f.clean('01:30 PM') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM") def test_timeField_with_inputformat(self): "TimeFields with manually specified input formats can accept those formats" f = forms.TimeField(input_formats=["%H.%M.%S", "%H.%M"]) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = 
f.clean('13.30.05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "01:30:05 PM") # Parse a time in a valid format, get a parsed result result = f.clean('13.30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM") def test_localized_timeField_with_inputformat(self): "Localized TimeFields with manually specified input formats can accept those formats" f = forms.TimeField(input_formats=["%H.%M.%S", "%H.%M"], localize=True) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('13.30.05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "01:30:05 PM") # Parse a time in a valid format, get a parsed result result = f.clean('13.30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM") class SimpleTimeFormatTests(SimpleTestCase): def test_timeField(self): "TimeFields can parse dates in the default format" f = forms.TimeField() # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') # Parse a time in a valid format, get a parsed result result = f.clean('13:30:05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "13:30:05") # Parse a time in a valid, but non-default format, get a parsed result result 
= f.clean('13:30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") def test_localized_timeField(self): "Localized TimeFields in a non-localized environment act as unlocalized widgets" f = forms.TimeField() # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM') # Parse a time in a valid format, get a parsed result result = f.clean('13:30:05') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "13:30:05") # Parse a time in a valid format, get a parsed result result = f.clean('13:30') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") def test_timeField_with_inputformat(self): "TimeFields with manually specified input formats can accept those formats" f = forms.TimeField(input_formats=["%I:%M:%S %p", "%I:%M %p"]) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('1:30:05 PM') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "13:30:05") # Parse a time in a valid format, get a parsed result result = f.clean('1:30 PM') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") def test_localized_timeField_with_inputformat(self): "Localized TimeFields with manually specified input formats can accept those formats" f = forms.TimeField(input_formats=["%I:%M:%S 
%p", "%I:%M %p"], localize=True) # Parse a time in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05') # Parse a time in a valid format, get a parsed result result = f.clean('1:30:05 PM') self.assertEqual(result, time(13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "13:30:05") # Parse a time in a valid format, get a parsed result result = f.clean('1:30 PM') self.assertEqual(result, time(13, 30, 0)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "13:30:00") @override_settings(DATE_INPUT_FORMATS=["%d/%m/%Y", "%d-%m-%Y"], USE_L10N=True) class LocalizedDateTests(SimpleTestCase): def setUp(self): activate('de') def tearDown(self): deactivate() def test_dateField(self): "DateFields can parse dates in the default format" f = forms.DateField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '21/12/2010') # ISO formats are accepted, even if not specified in formats.py self.assertEqual(f.clean('2010-12-21'), date(2010, 12, 21)) # Parse a date in a valid format, get a parsed result result = f.clean('21.12.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip text = f.widget._format_value(result) self.assertEqual(text, '21.12.2010') # Parse a date in a valid, but non-default format, get a parsed result result = f.clean('21.12.10') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") def test_localized_dateField(self): "Localized DateFields act as unlocalized widgets" f = forms.DateField(localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '21/12/2010') # Parse a date 
in a valid format, get a parsed result result = f.clean('21.12.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, '21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('21.12.10') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") def test_dateField_with_inputformat(self): "DateFields with manually specified input formats can accept those formats" f = forms.DateField(input_formats=["%m.%d.%Y", "%m-%d-%Y"]) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') self.assertRaises(forms.ValidationError, f.clean, '21/12/2010') self.assertRaises(forms.ValidationError, f.clean, '21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('12.21.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") # Parse a date in a valid format, get a parsed result result = f.clean('12-21-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") def test_localized_dateField_with_inputformat(self): "Localized DateFields with manually specified input formats can accept those formats" f = forms.DateField(input_formats=["%m.%d.%Y", "%m-%d-%Y"], localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') self.assertRaises(forms.ValidationError, f.clean, '21/12/2010') self.assertRaises(forms.ValidationError, f.clean, '21.12.2010') # Parse a date in a valid format, get a 
parsed result result = f.clean('12.21.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") # Parse a date in a valid format, get a parsed result result = f.clean('12-21-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") @override_settings(DATE_INPUT_FORMATS=["%d.%m.%Y", "%d-%m-%Y"]) class CustomDateInputFormatsTests(SimpleTestCase): def test_dateField(self): "DateFields can parse dates in the default format" f = forms.DateField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') # Parse a date in a valid format, get a parsed result result = f.clean('21.12.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip text = f.widget._format_value(result) self.assertEqual(text, '21.12.2010') # Parse a date in a valid, but non-default format, get a parsed result result = f.clean('21-12-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") def test_localized_dateField(self): "Localized DateFields act as unlocalized widgets" f = forms.DateField(localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') # Parse a date in a valid format, get a parsed result result = f.clean('21.12.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, '21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('21-12-2010') self.assertEqual(result, 
date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") def test_dateField_with_inputformat(self): "DateFields with manually specified input formats can accept those formats" f = forms.DateField(input_formats=["%m.%d.%Y", "%m-%d-%Y"]) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '21.12.2010') self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') # Parse a date in a valid format, get a parsed result result = f.clean('12.21.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") # Parse a date in a valid format, get a parsed result result = f.clean('12-21-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") def test_localized_dateField_with_inputformat(self): "Localized DateFields with manually specified input formats can accept those formats" f = forms.DateField(input_formats=["%m.%d.%Y", "%m-%d-%Y"], localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '21.12.2010') self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') # Parse a date in a valid format, get a parsed result result = f.clean('12.21.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010") # Parse a date in a valid format, get a parsed result result = f.clean('12-21-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, 
"21.12.2010") class SimpleDateFormatTests(SimpleTestCase): def test_dateField(self): "DateFields can parse dates in the default format" f = forms.DateField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('2010-12-21') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") # Parse a date in a valid, but non-default format, get a parsed result result = f.clean('12/21/2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") def test_localized_dateField(self): "Localized DateFields in a non-localized environment act as unlocalized widgets" f = forms.DateField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('2010-12-21') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") # Parse a date in a valid format, get a parsed result result = f.clean('12/21/2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") def test_dateField_with_inputformat(self): "DateFields with manually specified input formats can accept those formats" f = forms.DateField(input_formats=["%d.%m.%Y", "%d-%m-%Y"]) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') # Parse a date in a valid format, get a parsed result result = 
f.clean('21.12.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") # Parse a date in a valid format, get a parsed result result = f.clean('21-12-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") def test_localized_dateField_with_inputformat(self): "Localized DateFields with manually specified input formats can accept those formats" f = forms.DateField(input_formats=["%d.%m.%Y", "%d-%m-%Y"], localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21') # Parse a date in a valid format, get a parsed result result = f.clean('21.12.2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") # Parse a date in a valid format, get a parsed result result = f.clean('21-12-2010') self.assertEqual(result, date(2010, 12, 21)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21") @override_settings(DATETIME_INPUT_FORMATS=["%I:%M:%S %p %d/%m/%Y", "%I:%M %p %d-%m-%Y"], USE_L10N=True) class LocalizedDateTimeTests(SimpleTestCase): def setUp(self): activate('de') def tearDown(self): deactivate() def test_dateTimeField(self): "DateTimeFields can parse dates in the default format" f = forms.DateTimeField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM 21/12/2010') # ISO formats are accepted, even if not specified in formats.py self.assertEqual(f.clean('2010-12-21 13:30:05'), datetime(2010, 12, 21, 13, 30, 5)) # Parse a date in a valid format, get a 
parsed result result = f.clean('21.12.2010 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip text = f.widget._format_value(result) self.assertEqual(text, '21.12.2010 13:30:05') # Parse a date in a valid, but non-default format, get a parsed result result = f.clean('21.12.2010 13:30') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010 13:30:00") def test_localized_dateTimeField(self): "Localized DateTimeFields act as unlocalized widgets" f = forms.DateTimeField(localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM 21/12/2010') # Parse a date in a valid format, get a parsed result result = f.clean('21.12.2010 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, '21.12.2010 13:30:05') # Parse a date in a valid format, get a parsed result result = f.clean('21.12.2010 13:30') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010 13:30:00") def test_dateTimeField_with_inputformat(self): "DateTimeFields with manually specified input formats can accept those formats" f = forms.DateTimeField(input_formats=["%H.%M.%S %m.%d.%Y", "%H.%M %m-%d-%Y"]) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05 13:30:05') self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM 21/12/2010') self.assertRaises(forms.ValidationError, f.clean, '13:30:05 21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('13.30.05 
12.21.2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010 13:30:05") # Parse a date in a valid format, get a parsed result result = f.clean('13.30 12-21-2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010 13:30:00") def test_localized_dateTimeField_with_inputformat(self): "Localized DateTimeFields with manually specified input formats can accept those formats" f = forms.DateTimeField(input_formats=["%H.%M.%S %m.%d.%Y", "%H.%M %m-%d-%Y"], localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05') self.assertRaises(forms.ValidationError, f.clean, '1:30:05 PM 21/12/2010') self.assertRaises(forms.ValidationError, f.clean, '13:30:05 21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('13.30.05 12.21.2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010 13:30:05") # Parse a date in a valid format, get a parsed result result = f.clean('13.30 12-21-2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "21.12.2010 13:30:00") @override_settings(DATETIME_INPUT_FORMATS=["%I:%M:%S %p %d/%m/%Y", "%I:%M %p %d-%m-%Y"]) class CustomDateTimeInputFormatsTests(SimpleTestCase): def test_dateTimeField(self): "DateTimeFields can parse dates in the default format" f = forms.DateTimeField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, 
f.clean, '2010-12-21 13:30:05') # Parse a date in a valid format, get a parsed result result = f.clean('1:30:05 PM 21/12/2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip text = f.widget._format_value(result) self.assertEqual(text, '01:30:05 PM 21/12/2010') # Parse a date in a valid, but non-default format, get a parsed result result = f.clean('1:30 PM 21-12-2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM 21/12/2010") def test_localized_dateTimeField(self): "Localized DateTimeFields act as unlocalized widgets" f = forms.DateTimeField(localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05') # Parse a date in a valid format, get a parsed result result = f.clean('1:30:05 PM 21/12/2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, '01:30:05 PM 21/12/2010') # Parse a date in a valid format, get a parsed result result = f.clean('1:30 PM 21-12-2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM 21/12/2010") def test_dateTimeField_with_inputformat(self): "DateTimeFields with manually specified input formats can accept those formats" f = forms.DateTimeField(input_formats=["%m.%d.%Y %H:%M:%S", "%m-%d-%Y %H:%M"]) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05 21.12.2010') self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05') # Parse a date in a valid format, get a parsed result result = 
f.clean('12.21.2010 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "01:30:05 PM 21/12/2010") # Parse a date in a valid format, get a parsed result result = f.clean('12-21-2010 13:30') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM 21/12/2010") def test_localized_dateTimeField_with_inputformat(self): "Localized DateTimeFields with manually specified input formats can accept those formats" f = forms.DateTimeField(input_formats=["%m.%d.%Y %H:%M:%S", "%m-%d-%Y %H:%M"], localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05 21.12.2010') self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05') # Parse a date in a valid format, get a parsed result result = f.clean('12.21.2010 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "01:30:05 PM 21/12/2010") # Parse a date in a valid format, get a parsed result result = f.clean('12-21-2010 13:30') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "01:30:00 PM 21/12/2010") class SimpleDateTimeFormatTests(SimpleTestCase): def test_dateTimeField(self): "DateTimeFields can parse dates in the default format" f = forms.DateTimeField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05 21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('2010-12-21 13:30:05') 
self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:05") # Parse a date in a valid, but non-default format, get a parsed result result = f.clean('12/21/2010 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:05") def test_localized_dateTimeField(self): "Localized DateTimeFields in a non-localized environment act as unlocalized widgets" f = forms.DateTimeField() # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '13:30:05 21.12.2010') # Parse a date in a valid format, get a parsed result result = f.clean('2010-12-21 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:05") # Parse a date in a valid format, get a parsed result result = f.clean('12/21/2010 13:30:05') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:05") def test_dateTimeField_with_inputformat(self): "DateTimeFields with manually specified input formats can accept those formats" f = forms.DateTimeField(input_formats=["%I:%M:%S %p %d.%m.%Y", "%I:%M %p %d-%m-%Y"]) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05') # Parse a date in a valid format, get a parsed result result = f.clean('1:30:05 PM 21.12.2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = 
f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:05") # Parse a date in a valid format, get a parsed result result = f.clean('1:30 PM 21-12-2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:00") def test_localized_dateTimeField_with_inputformat(self): "Localized DateTimeFields with manually specified input formats can accept those formats" f = forms.DateTimeField(input_formats=["%I:%M:%S %p %d.%m.%Y", "%I:%M %p %d-%m-%Y"], localize=True) # Parse a date in an unaccepted format; get an error self.assertRaises(forms.ValidationError, f.clean, '2010-12-21 13:30:05') # Parse a date in a valid format, get a parsed result result = f.clean('1:30:05 PM 21.12.2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30, 5)) # Check that the parsed result does a round trip to the same format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:05") # Parse a date in a valid format, get a parsed result result = f.clean('1:30 PM 21-12-2010') self.assertEqual(result, datetime(2010, 12, 21, 13, 30)) # Check that the parsed result does a round trip to default format text = f.widget._format_value(result) self.assertEqual(text, "2010-12-21 13:30:00")
xforce/diorama-native-modding
refs/heads/master
tools/gyp/test/escaping/gyptest-colon.py
58
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Tests that filenames that contain colons are handled correctly. (This is important for absolute paths on Windows.) """ import os import sys import TestGyp # TODO: Make colons in filenames work with make, if required. test = TestGyp.TestGyp(formats=['!make']) CHDIR = 'colon' source_name = 'colon/a:b.c' copies_name = 'colon/a:b.c-d' if sys.platform == 'win32': # Windows uses : as drive separator and doesn't allow it in regular filenames. # Use abspath() to create a path that contains a colon instead. abs_source = os.path.abspath('colon/file.c') test.write('colon/test.gyp', test.read('colon/test.gyp').replace("'a:b.c'", repr(abs_source))) source_name = abs_source abs_copies = os.path.abspath('colon/file.txt') test.write('colon/test.gyp', test.read('colon/test.gyp').replace("'a:b.c-d'", repr(abs_copies))) copies_name = abs_copies # Create the file dynamically, Windows is unhappy if a file with a colon in # its name is checked in. test.write(source_name, 'int main() {}') test.write(copies_name, 'foo') test.run_gyp('test.gyp', chdir=CHDIR) test.build('test.gyp', test.ALL, chdir=CHDIR) test.built_file_must_exist(os.path.basename(copies_name), chdir=CHDIR) test.pass_test()
evamwangi/bc-7-Todo_List
refs/heads/master
venv/Lib/site-packages/wtforms/ext/i18n/utils.py
81
""" Module is just here for compatibility reasons, and will be removed in a future release. Importing this will cause a DeprecationWarning. """ from wtforms.i18n import (messages_path, get_builtin_gnu_translations, get_translations, DefaultTranslations) import warnings __all__ = ('messages_path', 'get_builtin_gnu_translations', 'get_translations', 'DefaultTranslations') warnings.warn( 'i18n utils have been merged into core, and this module will go away in WTForms 3.0', category=DeprecationWarning, stacklevel=2 )
eeshangarg/oh-mainline
refs/heads/master
vendor/packages/python-social-auth/social/tests/backends/test_dribbble.py
76
import json

from social.tests.backends.oauth import OAuth2Test


class DribbbleOAuth2Test(OAuth2Test):
    """Exercise the Dribbble OAuth2 backend against canned HTTP responses."""

    backend_path = 'social.backends.dribbble.DribbbleOAuth2'
    user_data_url = 'https://api.dribbble.com/v1/user'
    expected_username = 'foobar'
    # Body the mocked token endpoint returns during the handshake.
    access_token_body = json.dumps(
        dict(access_token='foobar', token_type='bearer'))
    # Profile payload served from user_data_url after authentication.
    user_data_body = json.dumps(
        dict(id='foobar', username='foobar', name='Foo Bar'))

    def test_login(self):
        """The complete login flow succeeds."""
        self.do_login()

    def test_partial_pipeline(self):
        """An interrupted auth pipeline can be resumed and completed."""
        self.do_partial_pipeline()
FFMG/myoddweb.piger
refs/heads/master
monitor/api/python/Python-3.7.2/Lib/tkinter/__main__.py
169
"""Main entry point""" import sys if sys.argv[0].endswith("__main__.py"): sys.argv[0] = "python -m tkinter" from . import _test as main main()
MackZxh/OCA-Choice
refs/heads/8.0
partner-contact/partner_firstname/tests/test_empty.py
7
# -*- coding: utf-8 -*-

# Odoo, Open Source Management Solution
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""Test situations where names are empty.

To have more accurate results, remove the ``mail`` module before testing.
"""

from openerp.tests.common import TransactionCase
from .base import MailInstalled
from .. import exceptions as ex


class CompanyCase(TransactionCase):
    """Test ``res.partner`` when it is a company."""
    model = "res.partner"
    context = {"default_is_company": True}

    def tearDown(self):
        # Unusual pattern: each test_* method only sets ``self.name``; the
        # actual record creation and the EmptyNamesError assertion happen
        # here, after the test body has run.
        try:
            data = {"name": self.name}
            with self.assertRaises(ex.EmptyNamesError):
                self.env[self.model].with_context(**self.context).create(data)
        finally:
            super(CompanyCase, self).tearDown()

    def test_name_empty_string(self):
        """Test what happens when the name is an empty string."""
        self.name = ""

    def test_name_false(self):
        """Test what happens when the name is ``False``."""
        self.name = False


class PersonCase(CompanyCase):
    """Test ``res.partner`` when it is a person."""
    # Inherits model and tearDown assertion; only the company flag differs.
    context = {"default_is_company": False}


class UserCase(CompanyCase, MailInstalled):
    """Test ``res.users``."""
    model = "res.users"
    context = {"default_login": "user@example.com"}

    def tearDown(self):
        # Cannot create users if ``mail`` is installed
        if self.mail_installed():
            # Skip tests: super(CompanyCase, ...) deliberately bypasses
            # CompanyCase.tearDown (and its create/assert) and calls
            # TransactionCase.tearDown directly.
            super(CompanyCase, self).tearDown()
        else:
            # Run tests: normal MRO, including CompanyCase.tearDown.
            super(UserCase, self).tearDown()
cloudspace/mockups
refs/heads/master
test/sikuli/operatingsystem.sikuli/operatingsystem.py
1
from sikuli.Sikuli import *

# Detect the host OS once at import time via Sikuli's Env API.
myOS = str(Env.getOS())
myOSVer = str(Env.getOSVersion())
# Parenthesized print works identically under Python 2 (Sikuli's Jython)
# and Python 3: print("a" + "b") prints the single concatenated string.
# The original Python-2-only `print "..."` statement is a SyntaxError on
# any Python 3 interpreter.
print("OS: " + myOS + " (" + myOSVer + ")")


def getBrowsers():
    """Return the list of browser profile names for the detected OS.

    Exits the script with an error message when the OS is unsupported
    (only "MAC" is handled here).
    """
    if myOS == "MAC":
        return ["chrome-mac", "firefox-mac", "safari-mac"]
    exit("Operating System unknown")
beedesk/django-pipeline
refs/heads/master
tests/tests/test_collector.py
3
import os

from django.contrib.staticfiles import finders
from django.core.files.storage import FileSystemStorage
from django.test import TestCase

from pipeline.collector import default_collector
from pipeline.finders import PipelineFinder


def local_path(path):
    """Resolve *path* relative to the test package's parent directory."""
    return os.path.abspath(os.path.join(os.path.dirname(__file__), '..', path))


class CollectorTest(TestCase):
    """Tests for pipeline's default_collector (staticfiles collection)."""

    def tearDown(self):
        super(CollectorTest, self).tearDown()
        # default_collector is module-global shared state; reset between tests.
        default_collector.clear()

    def test_collect(self):
        # Collecting with no argument gathers everything the non-pipeline
        # staticfiles finders can see.
        self.assertEqual(
            set(default_collector.collect()),
            set(self._get_collectable_files()))

    def test_collect_with_files(self):
        # An explicit file list restricts collection to exactly those paths.
        self.assertEqual(
            set(default_collector.collect(files=[
                'pipeline/js/first.js',
                'pipeline/js/second.js',
            ])),
            set([
                'pipeline/js/first.js',
                'pipeline/js/second.js',
            ]))

    def test_delete_file_with_modified(self):
        list(default_collector.collect())

        storage = FileSystemStorage(local_path('assets'))
        # Back-date the collected copy so it looks older than the source,
        # which should make delete_file report it as stale (True).
        new_mtime = os.path.getmtime(storage.path('js/first.js')) - 1000
        os.utime(default_collector.storage.path('pipeline/js/first.js'),
                 (new_mtime, new_mtime))

        self.assertTrue(default_collector.delete_file(
            'js/first.js', 'pipeline/js/first.js', storage))

    def test_delete_file_with_unmodified(self):
        # Freshly collected copy matches the source: nothing to delete (False).
        list(default_collector.collect(files=['pipeline/js/first.js']))

        self.assertFalse(default_collector.delete_file(
            'js/first.js', 'pipeline/js/first.js',
            FileSystemStorage(local_path('assets'))))

    def _get_collectable_files(self):
        """Yield every staticfile visible to non-Pipeline finders.

        Mirrors what collect() should pick up; 'CVS', dotfiles and backup
        files are passed as ignore patterns.
        """
        for finder in finders.get_finders():
            if not isinstance(finder, PipelineFinder):
                for path, storage in finder.list(['CVS', '.*', '*~']):
                    if getattr(storage, 'prefix', None):
                        yield os.path.join(storage.prefix, path)
                    else:
                        yield path
NervanaSystems/coach
refs/heads/master
rl_coach/tests/architectures/mxnet_components/test_utils.py
1
"""Unit tests for rl_coach.architectures.mxnet_components.utils."""
import pytest
import mxnet as mx
from mxnet import nd
import numpy as np

from rl_coach.architectures.mxnet_components.utils import *


@pytest.mark.unit_test
def test_to_mx_ndarray():
    """to_mx_ndarray converts nested scalars/lists/tuples/numpy to NDArray."""
    # NOTE(review): `ndarray == ndarray` is element-wise in mxnet; these
    # asserts rely on the truthiness of the resulting array — confirm.
    # scalar
    assert to_mx_ndarray(1.2) == nd.array([1.2])
    # list of one scalar
    assert to_mx_ndarray([1.2]) == [nd.array([1.2])]
    # list of multiple scalars
    assert to_mx_ndarray([1.2, 3.4]) == [nd.array([1.2]), nd.array([3.4])]
    # list of lists of scalars
    assert to_mx_ndarray([[1.2], [3.4]]) == [[nd.array([1.2])], [nd.array([3.4])]]
    # numpy
    assert np.array_equal(to_mx_ndarray(np.array([[1.2], [3.4]])).asnumpy(),
                          nd.array([[1.2], [3.4]]).asnumpy())
    # tuple
    assert to_mx_ndarray(((1.2,), (3.4,))) == ((nd.array([1.2]),), (nd.array([3.4]),))


@pytest.mark.unit_test
def test_asnumpy_or_asscalar():
    """Inverse of to_mx_ndarray: NDArrays back to numpy arrays/scalars."""
    # scalar float32
    assert asnumpy_or_asscalar(nd.array([1.2])) == np.float32(1.2)
    # scalar int32
    assert asnumpy_or_asscalar(nd.array([2], dtype=np.int32)) == np.int32(2)
    # list of one scalar
    assert asnumpy_or_asscalar([nd.array([1.2])]) == [np.float32(1.2)]
    # list of multiple scalars
    assert asnumpy_or_asscalar([nd.array([1.2]), nd.array([3.4])]) == [np.float32([1.2]),
                                                                       np.float32([3.4])]
    # list of lists of scalars
    assert asnumpy_or_asscalar([[nd.array([1.2])], [nd.array([3.4])]]) == [[np.float32([1.2])],
                                                                           [np.float32([3.4])]]
    # tensor
    assert np.array_equal(asnumpy_or_asscalar(nd.array([[1.2], [3.4]])),
                          np.array([[1.2], [3.4]], dtype=np.float32))
    # tuple
    assert (asnumpy_or_asscalar(((nd.array([1.2]),), (nd.array([3.4]),))) ==
            ((np.array([1.2], dtype=np.float32),), (np.array([3.4], dtype=np.float32),)))


@pytest.mark.unit_test
def test_global_norm():
    """global_norm matches a numpy reference: sqrt of summed squared elements."""
    data = list()
    for i in range(1, 6):
        data.append(np.ones((i * 10, i * 10)) * i)
    gnorm = np.asscalar(np.sqrt(sum([np.sum(np.square(d)) for d in data])))
    assert np.isclose(gnorm, global_norm([nd.array(d) for d in data]).asscalar())


@pytest.mark.unit_test
def test_split_outputs_per_head():
    """Flat output tuple is partitioned by each head's num_outputs."""
    class TestHead:
        def __init__(self, num_outputs):
            self.num_outputs = num_outputs

    assert split_outputs_per_head((1, 2, 3, 4), [TestHead(2), TestHead(1), TestHead(1)]) == \
        [[1, 2], [3], [4]]


class DummySchema:
    # Minimal stand-in for a loss input schema: just the three name lists.
    def __init__(self, num_head_outputs, num_agent_inputs, num_targets):
        self.head_outputs = ['head_output_{}'.format(i) for i in range(num_head_outputs)]
        self.agent_inputs = ['agent_input_{}'.format(i) for i in range(num_agent_inputs)]
        self.targets = ['target_{}'.format(i) for i in range(num_targets)]


class DummyLoss:
    # Minimal stand-in for a loss: only exposes input_schema.
    def __init__(self, num_head_outputs, num_agent_inputs, num_targets):
        self.input_schema = DummySchema(num_head_outputs, num_agent_inputs, num_targets)


@pytest.mark.unit_test
def test_split_targets_per_loss():
    """Flat target list is partitioned by each loss's number of targets."""
    assert split_targets_per_loss([1, 2, 3, 4], [DummyLoss(10, 100, 2), DummyLoss(20, 200, 1),
                                                 DummyLoss(30, 300, 1)]) == \
        [[1, 2], [3], [4]]


@pytest.mark.unit_test
def test_get_loss_agent_inputs():
    """Inputs are selected by 'output_<loss_idx>_<input_idx>' key convention."""
    input_dict = {'output_0_0': [1, 2], 'output_0_1': [3, 4], 'output_1_0': [5]}
    assert get_loss_agent_inputs(input_dict, 0, DummyLoss(10, 2, 100)) == [[1, 2], [3, 4]]
    assert get_loss_agent_inputs(input_dict, 1, DummyLoss(20, 1, 200)) == [[5]]


@pytest.mark.unit_test
def test_align_loss_args():
    """Arguments are reordered to match loss_forward's parameter names."""
    class TestLossFwd(DummyLoss):
        def __init__(self, num_targets, num_agent_inputs, num_head_outputs):
            super(TestLossFwd, self).__init__(num_targets, num_agent_inputs, num_head_outputs)

        # Deliberately scrambled parameter order; extra params are ignored.
        def loss_forward(self, F, head_output_2, head_output_1, agent_input_2, target_0,
                         agent_input_1, param1, param2):
            pass

    assert align_loss_args([1, 2, 3], [4, 5, 6, 7], [8, 9], TestLossFwd(3, 4, 2)) == [3, 2, 6, 8, 5]


@pytest.mark.unit_test
def test_to_tuple():
    """Scalars are wrapped; tuples pass through; lists are converted."""
    assert to_tuple(123) == (123,)
    assert to_tuple((1, 2, 3)) == (1, 2, 3)
    assert to_tuple([1, 2, 3]) == (1, 2, 3)


@pytest.mark.unit_test
def test_to_list():
    """Scalars are wrapped; tuples are converted; lists pass through."""
    assert to_list(123) == [123]
    assert to_list((1, 2, 3)) == [1, 2, 3]
    assert to_list([1, 2, 3]) == [1, 2, 3]


@pytest.mark.unit_test
def test_loss_output_dict():
    """Outputs sharing a name are grouped into one dict entry, in order."""
    assert loss_output_dict([1, 2, 3], ['loss', 'loss', 'reg']) == {'loss': [1, 2], 'reg': [3]}


@pytest.mark.unit_test
def test_clip_grad():
    """clip_grad matches numpy references for all three clipping methods."""
    a = np.array([1, 2, -3])
    b = np.array([4, 5, -6])
    clip = 2
    # Global-norm scaling factor computed over both arrays jointly.
    gscale = np.minimum(1.0, clip / np.sqrt(np.sum(np.square(a)) + np.sum(np.square(b))))
    for lhs, rhs in zip(clip_grad([nd.array(a), nd.array(b)],
                                  GradientClippingMethod.ClipByGlobalNorm,
                                  clip_val=clip), [a, b]):
        assert np.allclose(lhs.asnumpy(), rhs * gscale)
    for lhs, rhs in zip(clip_grad([nd.array(a), nd.array(b)],
                                  GradientClippingMethod.ClipByValue,
                                  clip_val=clip), [a, b]):
        assert np.allclose(lhs.asnumpy(), np.clip(rhs, -clip, clip))
    for lhs, rhs in zip(clip_grad([nd.array(a), nd.array(b)],
                                  GradientClippingMethod.ClipByNorm,
                                  clip_val=clip), [a, b]):
        # Per-array norm scaling, unlike the global-norm case above.
        scale = np.minimum(1.0, clip / np.sqrt(np.sum(np.square(rhs))))
        assert np.allclose(lhs.asnumpy(), rhs * scale)


@pytest.mark.unit_test
def test_hybrid_clip():
    """hybrid_clip bounds each element between clip_lower and clip_upper."""
    x = mx.nd.array((0.5, 1.5, 2.5))
    a = mx.nd.array((1,))
    b = mx.nd.array((2,))
    clipped = hybrid_clip(F=mx.nd, x=x, clip_lower=a, clip_upper=b)
    assert (np.isclose(a=clipped.asnumpy(), b=(1, 1.5, 2))).all()


@pytest.mark.unit_test
def test_broadcast_like():
    """broadcast_like agrees with NDArray.broadcast_like."""
    x = nd.ones((1, 2)) * 10
    y = nd.ones((100, 100, 2)) * 20
    assert mx.test_utils.almost_equal(x.broadcast_like(y).asnumpy(),
                                      broadcast_like(nd, x, y).asnumpy())


@pytest.mark.unit_test
def test_scoped_onxx_enable():
    """ScopedOnnxEnable flips _onnx on every OnnxHandlerBlock in the net."""
    class Counter(object):
        def __init__(self):
            self._count = 0

        def increment(self):
            self._count += 1

        @property
        def count(self):
            return self._count

    class TempBlock(gluon.HybridBlock, OnnxHandlerBlock):
        # Counts forward passes that see the ONNX flag enabled.
        def __init__(self, counter: Counter):
            super(TempBlock, self).__init__()
            OnnxHandlerBlock.__init__(self)
            self._counter = counter

        def hybrid_forward(self, F, x, *args, **kwargs):
            if self._onnx:
                self._counter.increment()
            return x

    counter = Counter()
    net = gluon.nn.HybridSequential()
    for _ in range(10):
        net.add(TempBlock(counter))

    # ONNX disabled: no block should count.
    net(nd.zeros((1,)))
    assert counter.count == 0

    # ONNX enabled: all 10 blocks count exactly once.
    with ScopedOnnxEnable(net):
        net(nd.zeros((1,)))
    assert counter.count == 10
mosbasik/buzhug
refs/heads/master
javasrc/lib/Jython/Lib/test/test_evalorder.py
23
# Python 2 / Jython test module: verifies the interpreter evaluates
# expressions, assignments and call arguments in left-to-right order.
# (Uses `reduce` and `xrange` builtins — Python 2 only.)
from unittest import TestCase
from test import test_support


class Bucket(object):
    # Holds one value; assigning any *different* value raises AssertionError.
    def __init__(self, value):
        self.__value = value

    def _get(self):
        return self.__value

    def _set(self, value):
        assert self.__value == value, "Value changed!"
    value = property(_get, _set)


class PropBucket(object):
    # Every attribute read returns a Bucket whose value increments per
    # access, so reading the same attribute twice yields different values.
    def __init__(self):
        # Bypass __setattr__ (which expects a list) to seed the counter dict.
        self.__dict__['_d'] = {}

    def __getattr__(self, attr):
        value = self._d.setdefault(attr, 0)
        self._d[attr] = value + 1
        return Bucket(value)

    def __setattr__(self, attr, value):
        # Records assignment order by appending the attribute name to the
        # assigned list (see test_AssignOrder).
        value.append(attr)


class EvaluationOrder(TestCase):
    def test_TestFunctionality(self):
        # Sanity-check the fixture itself: two reads of bucket.prop yield
        # different values, so the property-set assertion must fire.
        bucket = PropBucket()
        try:
            bucket.prop.value = bucket.prop.value + 0
        except AssertionError:
            pass
        else:
            assert False, "PropBucket is not working"

    def test_augassign(self):
        # Augmented assignment must read and write the *same* target object,
        # so the counter-bumping fixture must not trip the assertion.
        bucket = PropBucket()
        bucket.prop.value += 0

    def test_AssignOrder(self):
        # Chained assignment targets are assigned left to right.
        bucket = PropBucket()
        expected = ['one', 'two', 'three']
        result = []
        bucket.one = bucket.two = bucket.three = result
        assert result == expected, "expected %s, got %s" % (expected, result)

    def test_operands(self):
        # Binary operands evaluate left to right: first pop gives (1,).
        m = [(2,), (1,)].pop
        assert m() + m() == (1, 2), "faulty operand order"

    def test_arguments(self):
        # Call arguments evaluate left to right in source order, including
        # keyword, *args and **kwargs forms; m() yields descending tuples.
        def one(a, b, c, d, *extra):
            return reduce(lambda r, x: r + x, extra, a + b + c + d)
        m = list((x,) for x in xrange(100, 0, -1)).pop
        value = one(m(), m(), m(), m())
        assert value == (1, 2, 3, 4), "simple call, got: %s " % (value,)
        value = one(m(), m(), d=m(), c=m())
        assert value == (5, 6, 8, 7), "call with keywords, got: %s" % (value,)
        value = one(m(), m(), m(), m(), m(), m())
        assert value == (9, 10, 11, 12, 13, 14), "long call, got: %s" % (value,)
        value = one(m(), m(), *[m(), m(), m(), m()])
        assert value == (15, 16, 17, 18, 19, 20), "varcalls, got: %s" % (value,)
        value = one(m(), m(), **dict(c=m(), d=m()))
        assert value == (21, 22, 23, 24), "varkeywordcall, got: %s" % (value,)
        value = one(*[m(), m()], **dict(c=m(), d=m()))
        assert value == (25, 26, 27, 28), "bothvarcalls, got: %s" % (value,)


def test_main():
    test_support.run_unittest(EvaluationOrder)


if __name__ == '__main__':
    test_main()
joshainglis/ansible
refs/heads/devel
samples/lookup_pipe.py
255
# Sample playbook for the `pipe` lookup plugin: runs a shell command on the
# ansible control machine and interpolates its stdout into the task.
- hosts: localhost
  gather_facts: no
  tasks:
    - debug: msg="the date is {{ lookup('pipe', 'date') }}"
mkaluza/external_chromium_org
refs/heads/kk44
google_apis/build/check_internal.py
174
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""google_api's auto-internal gyp integration.

Takes one argument, a path.  Prints 1 if the path exists, 0 if not.
"""

import os
import sys


if __name__ == '__main__':
  # Parenthesized print works under both Python 2 (where (1) is just the
  # int 1) and Python 3; the original `print 1` statement is a SyntaxError
  # on Python 3 interpreters. Output is identical either way.
  if os.path.exists(sys.argv[1]):
    print(1)
  else:
    print(0)
smarkwell/asuswrt-merlin
refs/heads/master
release/src/router/samba36/lib/subunit/python/subunit/chunked.py
21
#
#  subunit: extensions to python unittest to get test results from subprocesses.
#  Copyright (C) 2005  Robert Collins <robertc@robertcollins.net>
#
#  Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
#  license at the users choice. A copy of both licenses are available in the
#  project source as Apache-2.0 and BSD. You may not use this file except in
#  compliance with one of these two licences.
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
#  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#  license you chose for the specific language governing permissions and
#  limitations under that license.
#

"""Encoder/decoder for http style chunked encoding."""

# Framing note: each chunk is "<hex length>\r\n" followed by exactly that
# many bytes of data (no trailing CRLF after the data, unlike RFC HTTP
# chunked coding); "0\r\n" terminates the stream.


class Decoder(object):
    """Decode chunked content to a byte stream."""

    def __init__(self, output):
        """Create a decoder decoding to output.

        :param output: A file-like object. Bytes written to the Decoder are
            decoded to strip off the chunking and written to the output.
            Up to a full write worth of data or a single control line may be
            buffered (whichever is larger). The close method should be called
            when no more data is available, to detect short streams; the
            write method will return none-None when the end of a stream is
            detected.
        """
        self.output = output
        self.buffered_bytes = []
        # State machine: ``state`` is a bound method (_read_length,
        # _read_body or _finished); calling it advances the parse over
        # whatever is in buffered_bytes.
        self.state = self._read_length
        # Bytes of the current chunk's body still to be forwarded.
        self.body_length = 0

    def close(self):
        """Close the decoder.

        :raises ValueError: If the stream is incomplete ValueError is raised.
        """
        # Only valid to close once the "0" terminator chunk has been seen.
        if self.state != self._finished:
            raise ValueError("incomplete stream")

    def _finished(self):
        """Finished reading, return any remaining bytes."""
        # Bytes arriving after the end-of-stream marker are handed back to
        # the caller exactly once; a second call with nothing buffered is
        # an error.
        if self.buffered_bytes:
            buffered_bytes = self.buffered_bytes
            self.buffered_bytes = []
            return ''.join(buffered_bytes)
        else:
            raise ValueError("stream is finished")

    def _read_body(self):
        """Pass body bytes to the output."""
        while self.body_length and self.buffered_bytes:
            if self.body_length >= len(self.buffered_bytes[0]):
                # Whole buffered segment belongs to the current chunk body.
                self.output.write(self.buffered_bytes[0])
                self.body_length -= len(self.buffered_bytes[0])
                del self.buffered_bytes[0]
                # No more data available.
                if not self.body_length:
                    self.state = self._read_length
            else:
                # Segment straddles the chunk boundary: forward the body
                # part, keep the remainder for the next length line.
                self.output.write(self.buffered_bytes[0][:self.body_length])
                self.buffered_bytes[0] = \
                    self.buffered_bytes[0][self.body_length:]
                self.body_length = 0
                self.state = self._read_length
                return self.state()

    def _read_length(self):
        """Try to decode a length from the bytes."""
        # NOTE(review): ``count`` is never used.
        count = -1
        match_chars = "0123456789abcdefABCDEF\r\n"
        count_chars = []
        for bytes in self.buffered_bytes:
            for byte in bytes:
                if byte not in match_chars:
                    break
                count_chars.append(byte)
                if byte == '\n':
                    break
        if not count_chars:
            return
        # count_chars holds single-character strings, so [-1][-1] is the
        # last character seen; no newline yet means the line is incomplete.
        if count_chars[-1][-1] != '\n':
            return
        count_str = ''.join(count_chars)
        # [:-2] strips the trailing "\r\n" from the length line.
        self.body_length = int(count_str[:-2], 16)
        # Drop the consumed length-line characters from the buffer.
        excess_bytes = len(count_str)
        while excess_bytes:
            if excess_bytes >= len(self.buffered_bytes[0]):
                excess_bytes -= len(self.buffered_bytes[0])
                del self.buffered_bytes[0]
            else:
                self.buffered_bytes[0] = self.buffered_bytes[0][excess_bytes:]
                excess_bytes = 0
        if not self.body_length:
            # A zero-length chunk is the end-of-stream marker.
            self.state = self._finished
            if not self.buffered_bytes:
                # May not call into self._finished with no buffered data.
                return ''
        else:
            self.state = self._read_body
        return self.state()

    def write(self, bytes):
        """Decode bytes to the output stream.

        :raises ValueError: If the stream has already seen the end of file
            marker.
        :returns: None, or the excess bytes beyond the end of file marker.
        """
        if bytes:
            self.buffered_bytes.append(bytes)
        return self.state()


class Encoder(object):
    """Encode content to a stream using HTTP Chunked coding."""

    def __init__(self, output):
        """Create an encoder encoding to output.

        :param output: A file-like object. Bytes written to the Encoder
            will be encoded using HTTP chunking. Small writes may be buffered
            and the ``close`` method must be called to finish the stream.
        """
        self.output = output
        self.buffered_bytes = []
        self.buffer_size = 0

    def flush(self, extra_len=0):
        """Flush the encoder to the output stream.

        :param extra_len: Increase the size of the chunk by this many bytes
            to allow for a subsequent write.
        """
        if not self.buffer_size and not extra_len:
            return
        buffered_bytes = self.buffered_bytes
        buffer_size = self.buffer_size
        self.buffered_bytes = []
        self.buffer_size = 0
        # Length line covers buffered data plus the caller's pending write.
        self.output.write("%X\r\n" % (buffer_size + extra_len))
        if buffer_size:
            self.output.write(''.join(buffered_bytes))
        return True

    def write(self, bytes):
        """Encode bytes to the output stream."""
        bytes_len = len(bytes)
        if self.buffer_size + bytes_len >= 65536:
            # Large enough: emit one chunk for buffer + this write, and send
            # this write straight through without re-buffering it.
            self.flush(bytes_len)
            self.output.write(bytes)
        else:
            self.buffered_bytes.append(bytes)
            self.buffer_size += bytes_len

    def close(self):
        """Finish the stream. This does not close the output stream."""
        self.flush()
        # Zero-length chunk marks end of stream for the Decoder.
        self.output.write("0\r\n")
Spindletop16/namebench
refs/heads/master
nb_third_party/jinja2/ext.py
199
# -*- coding: utf-8 -*- """ jinja2.ext ~~~~~~~~~~ Jinja extensions allow to add custom tags similar to the way django custom tags work. By default two example extensions exist: an i18n and a cache extension. :copyright: (c) 2010 by the Jinja Team. :license: BSD. """ from collections import deque from jinja2 import nodes from jinja2.defaults import * from jinja2.environment import get_spontaneous_environment from jinja2.runtime import Undefined, concat from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError from jinja2.utils import contextfunction, import_string, Markup, next # the only real useful gettext functions for a Jinja template. Note # that ugettext must be assigned to gettext as Jinja doesn't support # non unicode strings. GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext') class ExtensionRegistry(type): """Gives the extension an unique identifier.""" def __new__(cls, name, bases, d): rv = type.__new__(cls, name, bases, d) rv.identifier = rv.__module__ + '.' + rv.__name__ return rv class Extension(object): """Extensions can be used to add extra functionality to the Jinja template system at the parser level. Custom extensions are bound to an environment but may not store environment specific data on `self`. The reason for this is that an extension can be bound to another environment (for overlays) by creating a copy and reassigning the `environment` attribute. As extensions are created by the environment they cannot accept any arguments for configuration. One may want to work around that by using a factory function, but that is not possible as extensions are identified by their import name. The correct way to configure the extension is storing the configuration values on the environment. Because this way the environment ends up acting as central configuration storage the attributes may clash which is why extensions have to ensure that the names they choose for configuration are not too generic. 
``prefix`` for example is a terrible name, ``fragment_cache_prefix`` on the other hand is a good name as includes the name of the extension (fragment cache). """ __metaclass__ = ExtensionRegistry #: if this extension parses this is the list of tags it's listening to. tags = set() #: the priority of that extension. This is especially useful for #: extensions that preprocess values. A lower value means higher #: priority. #: #: .. versionadded:: 2.4 priority = 100 def __init__(self, environment): self.environment = environment def bind(self, environment): """Create a copy of this extension bound to another environment.""" rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.environment = environment return rv def preprocess(self, source, name, filename=None): """This method is called before the actual lexing and can be used to preprocess the source. The `filename` is optional. The return value must be the preprocessed source. """ return source def filter_stream(self, stream): """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used to filter tokens returned. This method has to return an iterable of :class:`~jinja2.lexer.Token`\s, but it doesn't have to return a :class:`~jinja2.lexer.TokenStream`. In the `ext` folder of the Jinja2 source distribution there is a file called `inlinegettext.py` which implements a filter that utilizes this method. """ return stream def parse(self, parser): """If any of the :attr:`tags` matched this method is called with the parser as first argument. The token the parser stream is pointing at is the name token that matched. This method has to return one or a list of multiple nodes. """ raise NotImplementedError() def attr(self, name, lineno=None): """Return an attribute node for the current extension. 
This is useful to pass constants on extensions to generated template code:: self.attr('_my_attribute', lineno=lineno) """ return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno) def call_method(self, name, args=None, kwargs=None, dyn_args=None, dyn_kwargs=None, lineno=None): """Call a method of the extension. This is a shortcut for :meth:`attr` + :class:`jinja2.nodes.Call`. """ if args is None: args = [] if kwargs is None: kwargs = [] return nodes.Call(self.attr(name, lineno=lineno), args, kwargs, dyn_args, dyn_kwargs, lineno=lineno) @contextfunction def _gettext_alias(context, string): return context.resolve('gettext')(string) class InternationalizationExtension(Extension): """This extension adds gettext support to Jinja2.""" tags = set(['trans']) # TODO: the i18n extension is currently reevaluating values in a few # situations. Take this example: # {% trans count=something() %}{{ count }} foo{% pluralize # %}{{ count }} fooss{% endtrans %} # something is called twice here. One time for the gettext value and # the other time for the n-parameter of the ngettext function. 
def __init__(self, environment): Extension.__init__(self, environment) environment.globals['_'] = _gettext_alias environment.extend( install_gettext_translations=self._install, install_null_translations=self._install_null, uninstall_gettext_translations=self._uninstall, extract_translations=self._extract ) def _install(self, translations): gettext = getattr(translations, 'ugettext', None) if gettext is None: gettext = translations.gettext ngettext = getattr(translations, 'ungettext', None) if ngettext is None: ngettext = translations.ngettext self.environment.globals.update(gettext=gettext, ngettext=ngettext) def _install_null(self): self.environment.globals.update( gettext=lambda x: x, ngettext=lambda s, p, n: (n != 1 and (p,) or (s,))[0] ) def _uninstall(self, translations): for key in 'gettext', 'ngettext': self.environment.globals.pop(key, None) def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS): if isinstance(source, basestring): source = self.environment.parse(source) return extract_from_ast(source, gettext_functions) def parse(self, parser): """Parse a translatable tag.""" lineno = next(parser.stream).lineno # find all the variables referenced. Additionally a variable can be # defined in the body of the trans block too, but this is checked at # a later state. plural_expr = None variables = {} while parser.stream.current.type != 'block_end': if variables: parser.stream.expect('comma') # skip colon for python compatibility if parser.stream.skip_if('colon'): break name = parser.stream.expect('name') if name.value in variables: parser.fail('translatable variable %r defined twice.' 
% name.value, name.lineno, exc=TemplateAssertionError) # expressions if parser.stream.current.type == 'assign': next(parser.stream) variables[name.value] = var = parser.parse_expression() else: variables[name.value] = var = nodes.Name(name.value, 'load') if plural_expr is None: plural_expr = var parser.stream.expect('block_end') plural = plural_names = None have_plural = False referenced = set() # now parse until endtrans or pluralize singular_names, singular = self._parse_block(parser, True) if singular_names: referenced.update(singular_names) if plural_expr is None: plural_expr = nodes.Name(singular_names[0], 'load') # if we have a pluralize block, we parse that too if parser.stream.current.test('name:pluralize'): have_plural = True next(parser.stream) if parser.stream.current.type != 'block_end': name = parser.stream.expect('name') if name.value not in variables: parser.fail('unknown variable %r for pluralization' % name.value, name.lineno, exc=TemplateAssertionError) plural_expr = variables[name.value] parser.stream.expect('block_end') plural_names, plural = self._parse_block(parser, False) next(parser.stream) referenced.update(plural_names) else: next(parser.stream) # register free names as simple name expressions for var in referenced: if var not in variables: variables[var] = nodes.Name(var, 'load') # no variables referenced? 
no need to escape if not referenced: singular = singular.replace('%%', '%') if plural: plural = plural.replace('%%', '%') if not have_plural: plural_expr = None elif plural_expr is None: parser.fail('pluralize without variables', lineno) if variables: variables = nodes.Dict([nodes.Pair(nodes.Const(x, lineno=lineno), y) for x, y in variables.items()]) else: variables = None node = self._make_node(singular, plural, variables, plural_expr) node.set_lineno(lineno) return node def _parse_block(self, parser, allow_pluralize): """Parse until the next block tag with a given name.""" referenced = [] buf = [] while 1: if parser.stream.current.type == 'data': buf.append(parser.stream.current.value.replace('%', '%%')) next(parser.stream) elif parser.stream.current.type == 'variable_begin': next(parser.stream) name = parser.stream.expect('name').value referenced.append(name) buf.append('%%(%s)s' % name) parser.stream.expect('variable_end') elif parser.stream.current.type == 'block_begin': next(parser.stream) if parser.stream.current.test('name:endtrans'): break elif parser.stream.current.test('name:pluralize'): if allow_pluralize: break parser.fail('a translatable section can have only one ' 'pluralize section') parser.fail('control structures in translatable sections are ' 'not allowed') elif parser.stream.eos: parser.fail('unclosed translation block') else: assert False, 'internal parser error' return referenced, concat(buf) def _make_node(self, singular, plural, variables, plural_expr): """Generates a useful node from the data provided.""" # singular only: if plural_expr is None: gettext = nodes.Name('gettext', 'load') node = nodes.Call(gettext, [nodes.Const(singular)], [], None, None) # singular and plural else: ngettext = nodes.Name('ngettext', 'load') node = nodes.Call(ngettext, [ nodes.Const(singular), nodes.Const(plural), plural_expr ], [], None, None) # mark the return value as safe if we are in an # environment with autoescaping turned on if 
self.environment.autoescape: node = nodes.MarkSafe(node) if variables: node = nodes.Mod(node, variables) return nodes.Output([node]) class ExprStmtExtension(Extension): """Adds a `do` tag to Jinja2 that works like the print statement just that it doesn't print the return value. """ tags = set(['do']) def parse(self, parser): node = nodes.ExprStmt(lineno=next(parser.stream).lineno) node.node = parser.parse_tuple() return node class LoopControlExtension(Extension): """Adds break and continue to the template engine.""" tags = set(['break', 'continue']) def parse(self, parser): token = next(parser.stream) if token.value == 'break': return nodes.Break(lineno=token.lineno) return nodes.Continue(lineno=token.lineno) class WithExtension(Extension): """Adds support for a django-like with block.""" tags = set(['with']) def parse(self, parser): node = nodes.Scope(lineno=next(parser.stream).lineno) assignments = [] while parser.stream.current.type != 'block_end': lineno = parser.stream.current.lineno if assignments: parser.stream.expect('comma') target = parser.parse_assign_target() parser.stream.expect('assign') expr = parser.parse_expression() assignments.append(nodes.Assign(target, expr, lineno=lineno)) node.body = assignments + \ list(parser.parse_statements(('name:endwith',), drop_needle=True)) return node class AutoEscapeExtension(Extension): """Changes auto escape rules for a scope.""" tags = set(['autoescape']) def parse(self, parser): node = nodes.ScopedEvalContextModifier(lineno=next(parser.stream).lineno) node.options = [ nodes.Keyword('autoescape', parser.parse_expression()) ] node.body = parser.parse_statements(('name:endautoescape',), drop_needle=True) return nodes.Scope([node]) def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS, babel_style=True): """Extract localizable strings from the given template node. Per default this function returns matches in babel style that means non string parameters as well as keyword arguments are returned as `None`. 
This allows Babel to figure out what you really meant if you are using gettext functions that allow keyword arguments for placeholder expansion. If you don't want that behavior set the `babel_style` parameter to `False` which causes only strings to be returned and parameters are always stored in tuples. As a consequence invalid gettext calls (calls without a single string parameter or string parameters after non-string parameters) are skipped. This example explains the behavior: >>> from jinja2 import Environment >>> env = Environment() >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}') >>> list(extract_from_ast(node)) [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))] >>> list(extract_from_ast(node, babel_style=False)) [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))] For every string found this function yields a ``(lineno, function, message)`` tuple, where: * ``lineno`` is the number of the line on which the string was found, * ``function`` is the name of the ``gettext`` function used (if the string was extracted from embedded Python code), and * ``message`` is the string itself (a ``unicode`` object, or a tuple of ``unicode`` objects for functions with multiple string arguments). This extraction function operates on the AST and is because of that unable to extract any comments. For comment support you have to use the babel extraction interface or extract comments yourself. 
""" for node in node.find_all(nodes.Call): if not isinstance(node.node, nodes.Name) or \ node.node.name not in gettext_functions: continue strings = [] for arg in node.args: if isinstance(arg, nodes.Const) and \ isinstance(arg.value, basestring): strings.append(arg.value) else: strings.append(None) for arg in node.kwargs: strings.append(None) if node.dyn_args is not None: strings.append(None) if node.dyn_kwargs is not None: strings.append(None) if not babel_style: strings = tuple(x for x in strings if x is not None) if not strings: continue else: if len(strings) == 1: strings = strings[0] else: strings = tuple(strings) yield node.lineno, node.node.name, strings class _CommentFinder(object): """Helper class to find comments in a token stream. Can only find comments for gettext calls forwards. Once the comment from line 4 is found, a comment for line 1 will not return a usable value. """ def __init__(self, tokens, comment_tags): self.tokens = tokens self.comment_tags = comment_tags self.offset = 0 self.last_lineno = 0 def find_backwards(self, offset): try: for _, token_type, token_value in \ reversed(self.tokens[self.offset:offset]): if token_type in ('comment', 'linecomment'): try: prefix, comment = token_value.split(None, 1) except ValueError: continue if prefix in self.comment_tags: return [comment.rstrip()] return [] finally: self.offset = offset def find_comments(self, lineno): if not self.comment_tags or self.last_lineno > lineno: return [] for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]): if token_lineno > lineno: return self.find_backwards(self.offset + idx) return self.find_backwards(len(self.tokens)) def babel_extract(fileobj, keywords, comment_tags, options): """Babel extraction method for Jinja templates. .. versionchanged:: 2.3 Basic support for translation comments was added. 
If `comment_tags` is now set to a list of keywords for extraction, the extractor will try to find the best preceeding comment that begins with one of the keywords. For best results, make sure to not have more than one gettext call in one line of code and the matching comment in the same line or the line before. :param fileobj: the file-like object the messages should be extracted from :param keywords: a list of keywords (i.e. function names) that should be recognized as translation functions :param comment_tags: a list of translator tags to search for and include in the results. :param options: a dictionary of additional options (optional) :return: an iterator over ``(lineno, funcname, message, comments)`` tuples. (comments will be empty currently) """ extensions = set() for extension in options.get('extensions', '').split(','): extension = extension.strip() if not extension: continue extensions.add(import_string(extension)) if InternationalizationExtension not in extensions: extensions.add(InternationalizationExtension) environment = get_spontaneous_environment( options.get('block_start_string', BLOCK_START_STRING), options.get('block_end_string', BLOCK_END_STRING), options.get('variable_start_string', VARIABLE_START_STRING), options.get('variable_end_string', VARIABLE_END_STRING), options.get('comment_start_string', COMMENT_START_STRING), options.get('comment_end_string', COMMENT_END_STRING), options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX, options.get('line_comment_prefix') or LINE_COMMENT_PREFIX, str(options.get('trim_blocks', TRIM_BLOCKS)).lower() in \ ('1', 'on', 'yes', 'true'), NEWLINE_SEQUENCE, frozenset(extensions), # fill with defaults so that environments are shared # with other spontaneus environments. 
The rest of the # arguments are optimizer, undefined, finalize, autoescape, # loader, cache size, auto reloading setting and the # bytecode cache True, Undefined, None, False, None, 0, False, None ) source = fileobj.read().decode(options.get('encoding', 'utf-8')) try: node = environment.parse(source) tokens = list(environment.lex(environment.preprocess(source))) except TemplateSyntaxError, e: # skip templates with syntax errors return finder = _CommentFinder(tokens, comment_tags) for lineno, func, message in extract_from_ast(node, keywords): yield lineno, func, message, finder.find_comments(lineno) #: nicer import names i18n = InternationalizationExtension do = ExprStmtExtension loopcontrols = LoopControlExtension with_ = WithExtension autoescape = AutoEscapeExtension
louyihua/edx-platform
refs/heads/master
common/djangoapps/track/management/__init__.py
12133432
jamesyli/solum
refs/heads/master
solum/tests/builder/v1/__init__.py
12133432
keyurpatel076/MissionPlannerGit
refs/heads/master
packages/IronPython.StdLib.2.7.5-beta1/content/Lib/encodings/zlib_codec.py
533
""" Python 'zlib_codec' Codec - zlib compression encoding Unlike most of the other codecs which target Unicode, this codec will return Python string objects for both encode and decode. Written by Marc-Andre Lemburg (mal@lemburg.com). """ import codecs import zlib # this codec needs the optional zlib module ! ### Codec APIs def zlib_encode(input,errors='strict'): """ Encodes the object input and returns a tuple (output object, length consumed). errors defines the error handling to apply. It defaults to 'strict' handling which is the only currently supported error handling for this codec. """ assert errors == 'strict' output = zlib.compress(input) return (output, len(input)) def zlib_decode(input,errors='strict'): """ Decodes the object input and returns a tuple (output object, length consumed). input must be an object which provides the bf_getreadbuf buffer slot. Python strings, buffer objects and memory mapped files are examples of objects providing this slot. errors defines the error handling to apply. It defaults to 'strict' handling which is the only currently supported error handling for this codec. 
""" assert errors == 'strict' output = zlib.decompress(input) return (output, len(input)) class Codec(codecs.Codec): def encode(self, input, errors='strict'): return zlib_encode(input, errors) def decode(self, input, errors='strict'): return zlib_decode(input, errors) class IncrementalEncoder(codecs.IncrementalEncoder): def __init__(self, errors='strict'): assert errors == 'strict' self.errors = errors self.compressobj = zlib.compressobj() def encode(self, input, final=False): if final: c = self.compressobj.compress(input) return c + self.compressobj.flush() else: return self.compressobj.compress(input) def reset(self): self.compressobj = zlib.compressobj() class IncrementalDecoder(codecs.IncrementalDecoder): def __init__(self, errors='strict'): assert errors == 'strict' self.errors = errors self.decompressobj = zlib.decompressobj() def decode(self, input, final=False): if final: c = self.decompressobj.decompress(input) return c + self.decompressobj.flush() else: return self.decompressobj.decompress(input) def reset(self): self.decompressobj = zlib.decompressobj() class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='zlib', encode=zlib_encode, decode=zlib_decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
dexterx17/nodoSocket
refs/heads/master
clients/Python-2.7.6/Doc/includes/sqlite3/execute_3.py
44
import sqlite3 con = sqlite3.connect("mydb") cur = con.cursor() who = "Yeltsin" age = 72 cur.execute("select name_last, age from people where name_last=:who and age=:age", locals()) print cur.fetchone()
alex-march/micropython
refs/heads/master
tests/basics/fun_calldblstar2.py
40
# test passing a string object as the key for a keyword argument # they key in this dict is a string object and is not interned args = {'thisisaverylongargumentname': 123} # when this string is executed it will intern the keyword argument exec("def foo(*,thisisaverylongargumentname=1):\n print(thisisaverylongargumentname)") # test default arg foo() # the string from the dict should match the interned keyword argument foo(**args)
pombredanne/kunai-1
refs/heads/master
etc/packs/linux/collectors/collector_loadaverage.py
1
import os from kunai.log import logger from kunai.collector import Collector class LoadAverage(Collector): def launch(self): logger.debug('getLoadAvrgs: start') # Get the triplet from the python function try: loadAvrgs_1, loadAvrgs_5, loadAvrgs_15 = os.getloadavg() except OSError: # If not available, return nothing return False logger.debug('getLoadAvrgs: parsing') loadavrgs = {'load1': loadAvrgs_1, 'load5': loadAvrgs_5, 'load15': loadAvrgs_15} logger.debug('getLoadAvrgs: completed, returning') return loadavrgs
Livit/Livit.Learn.EdX
refs/heads/labster/develop
common/lib/chem/chem/tests.py
44
import codecs from fractions import Fraction import unittest from .chemcalc import ( compare_chemical_expression, divide_chemical_expression, render_to_html, chemical_equations_equal, ) import chem.miller LOCAL_DEBUG = None def log(msg, output_type=None): """Logging function for tests""" if LOCAL_DEBUG: print msg if output_type == 'html': f.write(msg + '\n<br>\n') class Test_Compare_Equations(unittest.TestCase): def test_simple_equation(self): self.assertTrue(chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 -> H2O2')) # left sides don't match self.assertFalse(chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + 2H2 -> H2O2')) # right sides don't match self.assertFalse(chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 -> H2O')) # factors don't match self.assertFalse(chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 -> 2H2O2')) def test_different_factor(self): self.assertTrue(chemical_equations_equal('H2 + O2 -> H2O2', '2O2 + 2H2 -> 2H2O2')) self.assertFalse( chemical_equations_equal( '2H2 + O2 -> H2O2', '2O2 + 2H2 -> 2H2O2', ) ) def test_different_arrows(self): self.assertTrue(chemical_equations_equal('H2 + O2 -> H2O2', '2O2 + 2H2 -> 2H2O2')) self.assertFalse(chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 <-> 2H2O2')) def test_exact_match(self): self.assertTrue(chemical_equations_equal('H2 + O2 -> H2O2', '2O2 + 2H2 -> 2H2O2')) self.assertFalse( chemical_equations_equal( 'H2 + O2 -> H2O2', '2O2 + 2H2 -> 2H2O2', exact=True, ) ) # order still doesn't matter self.assertTrue(chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 -> H2O2', exact=True)) def test_syntax_errors(self): self.assertFalse(chemical_equations_equal('H2 + O2 a-> H2O2', '2O2 + 2H2 -> 2H2O2')) self.assertFalse(chemical_equations_equal('H2O( -> H2O2', 'H2O -> H2O2')) self.assertFalse(chemical_equations_equal('H2 + O2 ==> H2O2', # strange arrow '2O2 + 2H2 -> 2H2O2')) class Test_Compare_Expressions(unittest.TestCase): def test_compare_incorrect_order_of_atoms_in_molecule(self): 
self.assertFalse(compare_chemical_expression("H2O + CO2", "O2C + OH2")) def test_compare_same_order_no_phases_no_factors_no_ions(self): self.assertTrue(compare_chemical_expression("H2O + CO2", "CO2+H2O")) def test_compare_different_order_no_phases_no_factors_no_ions(self): self.assertTrue(compare_chemical_expression("H2O + CO2", "CO2 + H2O")) def test_compare_different_order_three_multimolecule(self): self.assertTrue(compare_chemical_expression("H2O + Fe(OH)3 + CO2", "CO2 + H2O + Fe(OH)3")) def test_compare_same_factors(self): self.assertTrue(compare_chemical_expression("3H2O + 2CO2", "2CO2 + 3H2O ")) def test_compare_different_factors(self): self.assertFalse(compare_chemical_expression("2H2O + 3CO2", "2CO2 + 3H2O ")) def test_compare_correct_ions(self): self.assertTrue(compare_chemical_expression("H^+ + OH^-", " OH^- + H^+ ")) def test_compare_wrong_ions(self): self.assertFalse(compare_chemical_expression("H^+ + OH^-", " OH^- + H^- ")) def test_compare_parent_groups_ions(self): self.assertTrue(compare_chemical_expression("Fe(OH)^2- + (OH)^-", " (OH)^- + Fe(OH)^2- ")) def test_compare_correct_factors_ions_and_one(self): self.assertTrue(compare_chemical_expression("3H^+ + 2OH^-", " 2OH^- + 3H^+ ")) def test_compare_wrong_factors_ions(self): self.assertFalse(compare_chemical_expression("2H^+ + 3OH^-", " 2OH^- + 3H^+ ")) def test_compare_float_factors(self): self.assertTrue(compare_chemical_expression("7/2H^+ + 3/5OH^-", " 3/5OH^- + 7/2H^+ ")) # Phases tests def test_compare_phases_ignored(self): self.assertTrue(compare_chemical_expression( "H2O(s) + CO2", "H2O+CO2", ignore_state=True)) def test_compare_phases_not_ignored_explicitly(self): self.assertFalse(compare_chemical_expression( "H2O(s) + CO2", "H2O+CO2", ignore_state=False)) def test_compare_phases_not_ignored(self): # same as previous self.assertFalse(compare_chemical_expression( "H2O(s) + CO2", "H2O+CO2")) # all in one cases def test_complex_additivity(self): self.assertTrue(compare_chemical_expression( 
"5(H1H212)^70010- + 2H20 + 7/2HCl + H2O", "7/2HCl + 2H20 + H2O + 5(H1H212)^70010-")) def test_complex_additivity_wrong(self): self.assertFalse(compare_chemical_expression( "5(H1H212)^70010- + 2H20 + 7/2HCl + H2O", "2H20 + 7/2HCl + H2O + 5(H1H212)^70011-")) def test_complex_all_grammar(self): self.assertTrue(compare_chemical_expression( "5[Ni(NH3)4]^2+ + 5/2SO4^2-", "5/2SO4^2- + 5[Ni(NH3)4]^2+")) # special cases def test_compare_one_superscript_explicitly_set(self): self.assertTrue(compare_chemical_expression("H^+ + OH^1-", " OH^- + H^+ ")) def test_compare_equal_factors_differently_set(self): self.assertTrue(compare_chemical_expression("6/2H^+ + OH^-", " OH^- + 3H^+ ")) def test_compare_one_subscript_explicitly_set(self): self.assertFalse(compare_chemical_expression("H2 + CO2", "H2 + C102")) class Test_Divide_Expressions(unittest.TestCase): ''' as compare_ use divide_, tests here must consider different division (not equality) cases ''' def test_divide_by_zero(self): self.assertFalse(divide_chemical_expression( "0H2O", "H2O")) def test_divide_wrong_factors(self): self.assertFalse(divide_chemical_expression( "5(H1H212)^70010- + 10H2O", "5H2O + 10(H1H212)^70010-")) def test_divide_right(self): self.assertEqual(divide_chemical_expression( "5(H1H212)^70010- + 10H2O", "10H2O + 5(H1H212)^70010-"), 1) def test_divide_wrong_reagents(self): self.assertFalse(divide_chemical_expression( "H2O + CO2", "CO2")) def test_divide_right_simple(self): self.assertEqual(divide_chemical_expression( "H2O + CO2", "H2O+CO2"), 1) def test_divide_right_phases(self): self.assertEqual(divide_chemical_expression( "H2O(s) + CO2", "2H2O(s)+2CO2"), Fraction(1, 2)) def test_divide_right_phases_other_order(self): self.assertEqual(divide_chemical_expression( "2H2O(s) + 2CO2", "H2O(s)+CO2"), 2) def test_divide_wrong_phases(self): self.assertFalse(divide_chemical_expression( "H2O(s) + CO2", "2H2O+2CO2(s)")) def test_divide_wrong_phases_but_phases_ignored(self): 
self.assertEqual(divide_chemical_expression( "H2O(s) + CO2", "2H2O+2CO2(s)", ignore_state=True), Fraction(1, 2)) def test_divide_order(self): self.assertEqual(divide_chemical_expression( "2CO2 + H2O", "2H2O+4CO2"), Fraction(1, 2)) def test_divide_fract_to_int(self): self.assertEqual(divide_chemical_expression( "3/2CO2 + H2O", "2H2O+3CO2"), Fraction(1, 2)) def test_divide_fract_to_frac(self): self.assertEqual(divide_chemical_expression( "3/4CO2 + H2O", "2H2O+9/6CO2"), Fraction(1, 2)) def test_divide_fract_to_frac_wrog(self): self.assertFalse(divide_chemical_expression( "6/2CO2 + H2O", "2H2O+9/6CO2"), 2) class Test_Render_Equations(unittest.TestCase): """ Tests to validate the HTML rendering of plaintext (input) equations """ # pylint: disable=line-too-long def test_render1(self): test_string = "H2O + CO2" out = render_to_html(test_string) correct = u'<span class="math">H<sub>2</sub>O+CO<sub>2</sub></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render_uncorrect_reaction(self): test_string = "O2C + OH2" out = render_to_html(test_string) correct = u'<span class="math">O<sub>2</sub>C+OH<sub>2</sub></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render2(self): test_string = "CO2 + H2O + Fe(OH)3" out = render_to_html(test_string) correct = u'<span class="math">CO<sub>2</sub>+H<sub>2</sub>O+Fe(OH)<sub>3</sub></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render3(self): test_string = "3H2O + 2CO2" out = render_to_html(test_string) correct = u'<span class="math">3H<sub>2</sub>O+2CO<sub>2</sub></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render4(self): test_string = "H^+ + OH^-" out = render_to_html(test_string) correct = u'<span class="math">H<sup>+</sup>+OH<sup>-</sup></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render5(self): test_string = "Fe(OH)^2- + 
(OH)^-" out = render_to_html(test_string) correct = u'<span class="math">Fe(OH)<sup>2-</sup>+(OH)<sup>-</sup></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render6(self): test_string = "7/2H^+ + 3/5OH^-" out = render_to_html(test_string) correct = u'<span class="math"><sup>7</sup>&frasl;<sub>2</sub>H<sup>+</sup>+<sup>3</sup>&frasl;<sub>5</sub>OH<sup>-</sup></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render7(self): test_string = "5(H1H212)^70010- + 2H2O + 7/2HCl + H2O" out = render_to_html(test_string) correct = u'<span class="math">5(H<sub>1</sub>H<sub>212</sub>)<sup>70010-</sup>+2H<sub>2</sub>O+<sup>7</sup>&frasl;<sub>2</sub>HCl+H<sub>2</sub>O</span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render8(self): test_string = "H2O(s) + CO2" out = render_to_html(test_string) correct = u'<span class="math">H<sub>2</sub>O(s)+CO<sub>2</sub></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render9(self): test_string = "5[Ni(NH3)4]^2+ + 5/2SO4^2-" out = render_to_html(test_string) correct = u'<span class="math">5[Ni(NH<sub>3</sub>)<sub>4</sub>]<sup>2+</sup>+<sup>5</sup>&frasl;<sub>2</sub>SO<sub>4</sub><sup>2-</sup></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render_error(self): test_string = "5.2H20" out = render_to_html(test_string) correct = u'<span class="math"><span class="inline-error inline">5.2H20</span></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render_simple_brackets(self): test_string = "(Ar)" out = render_to_html(test_string) correct = u'<span class="math">(Ar)</span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render_eq1(self): test_string = "H^+ + OH^- -> H2O" out = render_to_html(test_string) correct = u'<span 
class="math">H<sup>+</sup>+OH<sup>-</sup>\u2192H<sub>2</sub>O</span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render_eq2(self): test_string = "H^+ + OH^- <-> H2O" out = render_to_html(test_string) correct = u'<span class="math">H<sup>+</sup>+OH<sup>-</sup>\u2194H<sub>2</sub>O</span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) def test_render_eq3(self): test_string = "H^+ + OH^- <= H2O" # unsupported arrow out = render_to_html(test_string) correct = u'<span class="math"><span class="inline-error inline">H^+ + OH^- <= H2O</span></span>' log(out + ' ------- ' + correct, 'html') self.assertEqual(out, correct) class Test_Crystallography_Miller(unittest.TestCase): """Tests for crystallography grade function.""" # pylint: disable=line-too-long def test_empty_points(self): user_input = '{"lattice": "bcc", "points": []}' self.assertFalse(chem.miller.grade(user_input, {'miller': '(2,2,2)', 'lattice': 'bcc'})) def test_only_one_point(self): user_input = '{"lattice": "bcc", "points": [["0.50", "0.00", "0.00"]]}' self.assertFalse(chem.miller.grade(user_input, {'miller': '(2,2,2)', 'lattice': 'bcc'})) def test_only_two_points(self): user_input = '{"lattice": "bcc", "points": [["0.50", "0.00", "0.00"], ["0.00", "0.50", "0.00"]]}' self.assertFalse(chem.miller.grade(user_input, {'miller': '(2,2,2)', 'lattice': 'bcc'})) def test_1(self): user_input = '{"lattice": "bcc", "points": [["0.50", "0.00", "0.00"], ["0.00", "0.50", "0.00"], ["0.00", "0.00", "0.50"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(2,2,2)', 'lattice': 'bcc'})) def test_2(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.00", "0.00"], ["0.00", "1.00", "0.00"], ["0.00", "0.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,1,1)', 'lattice': 'bcc'})) def test_3(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.50", "1.00"], ["1.00", "1.00", "0.50"], ["0.50", "1.00", "1.00"]]}' 
self.assertTrue(chem.miller.grade(user_input, {'miller': '(2,2,2)', 'lattice': 'bcc'})) def test_4(self): user_input = '{"lattice": "bcc", "points": [["0.33", "1.00", "0.00"], ["0.00", "0.664", "0.00"], ["0.00", "1.00", "0.33"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(-3, 3, -3)', 'lattice': 'bcc'})) def test_5(self): """ return true only in case points coordinates are exact. But if they transform to closest 0.05 value it is not true""" user_input = '{"lattice": "bcc", "points": [["0.33", "1.00", "0.00"], ["0.00", "0.33", "0.00"], ["0.00", "1.00", "0.33"]]}' self.assertFalse(chem.miller.grade(user_input, {'miller': '(-6,3,-6)', 'lattice': 'bcc'})) def test_6(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.25", "0.00"], ["0.25", "0.00", "0.00"], ["0.00", "0.00", "0.25"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(4,4,4)', 'lattice': 'bcc'})) def test_7(self): # goes throug origin user_input = '{"lattice": "bcc", "points": [["0.00", "1.00", "0.00"], ["1.00", "0.00", "0.00"], ["0.50", "1.00", "0.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,0,-1)', 'lattice': 'bcc'})) def test_8(self): user_input = '{"lattice": "bcc", "points": [["0.00", "1.00", "0.50"], ["1.00", "0.00", "0.50"], ["0.50", "1.00", "0.50"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,0,2)', 'lattice': 'bcc'})) def test_9(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.00", "1.00"], ["0.00", "1.00", "1.00"], ["1.00", "0.00", "0.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,1,0)', 'lattice': 'bcc'})) def test_10(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.00", "1.00"], ["0.00", "0.00", "0.00"], ["0.00", "1.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,1,-1)', 'lattice': 'bcc'})) def test_11(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.00", "0.50"], ["1.00", "1.00", "0.00"], ["0.00", "1.00", "0.00"]]}' 
self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,1,2)', 'lattice': 'bcc'})) def test_12(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.00", "0.50"], ["0.00", "0.00", "0.50"], ["1.00", "1.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,1,-2)', 'lattice': 'bcc'})) def test_13(self): user_input = '{"lattice": "bcc", "points": [["0.50", "0.00", "0.00"], ["0.50", "1.00", "0.00"], ["0.00", "0.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(2,0,1)', 'lattice': 'bcc'})) def test_14(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["0.00", "0.00", "1.00"], ["0.50", "1.00", "0.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(2,-1,0)', 'lattice': 'bcc'})) def test_15(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["1.00", "1.00", "0.00"], ["0.00", "1.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,-1,1)', 'lattice': 'bcc'})) def test_16(self): user_input = '{"lattice": "bcc", "points": [["1.00", "0.00", "0.00"], ["0.00", "1.00", "0.00"], ["1.00", "1.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,1,-1)', 'lattice': 'bcc'})) def test_17(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["1.00", "0.00", "1.00"], ["1.00", "1.00", "0.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(-1,1,1)', 'lattice': 'bcc'})) def test_18(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["1.00", "1.00", "0.00"], ["0.00", "1.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,-1,1)', 'lattice': 'bcc'})) def test_19(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["1.00", "1.00", "0.00"], ["0.00", "0.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(-1,1,0)', 'lattice': 'bcc'})) def test_20(self): user_input = '{"lattice": "bcc", 
"points": [["1.00", "0.00", "0.00"], ["1.00", "1.00", "0.00"], ["0.00", "0.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,0,1)', 'lattice': 'bcc'})) def test_21(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["0.00", "1.00", "0.00"], ["1.00", "0.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(-1,0,1)', 'lattice': 'bcc'})) def test_22(self): user_input = '{"lattice": "bcc", "points": [["0.00", "1.00", "0.00"], ["1.00", "1.00", "0.00"], ["0.00", "0.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,1,1)', 'lattice': 'bcc'})) def test_23(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["1.00", "0.00", "0.00"], ["1.00", "1.00", "1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,-1,1)', 'lattice': 'bcc'})) def test_24(self): user_input = '{"lattice": "bcc", "points": [["0.66", "0.00", "0.00"], ["0.00", "0.66", "0.00"], ["0.00", "0.00", "0.66"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(3,3,3)', 'lattice': 'bcc'})) def test_25(self): user_input = u'{"lattice":"","points":[["0.00","0.00","0.01"],["1.00","1.00","0.01"],["0.00","1.00","1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(1,-1,1)', 'lattice': ''})) def test_26(self): user_input = u'{"lattice":"","points":[["0.00","0.01","0.00"],["1.00","0.00","0.00"],["0.00","0.00","1.00"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(0,-1,0)', 'lattice': ''})) def test_27(self): """ rounding to 0.35""" user_input = u'{"lattice":"","points":[["0.33","0.00","0.00"],["0.00","0.33","0.00"],["0.00","0.00","0.33"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': '(3,3,3)', 'lattice': ''})) def test_28(self): """ rounding to 0.30""" user_input = u'{"lattice":"","points":[["0.30","0.00","0.00"],["0.00","0.30","0.00"],["0.00","0.00","0.30"]]}' self.assertTrue(chem.miller.grade(user_input, {'miller': 
'(10,10,10)', 'lattice': ''})) def test_wrong_lattice(self): user_input = '{"lattice": "bcc", "points": [["0.00", "0.00", "0.00"], ["1.00", "0.00", "0.00"], ["1.00", "1.00", "1.00"]]}' self.assertFalse(chem.miller.grade(user_input, {'miller': '(3,3,3)', 'lattice': 'fcc'})) def suite(): testcases = [Test_Compare_Expressions, Test_Divide_Expressions, Test_Render_Equations, Test_Crystallography_Miller] suites = [] for testcase in testcases: suites.append(unittest.TestLoader().loadTestsFromTestCase(testcase)) return unittest.TestSuite(suites) if __name__ == "__main__": LOCAL_DEBUG = True with codecs.open('render.html', 'w', encoding='utf-8') as f: unittest.TextTestRunner(verbosity=2).run(suite()) # open render.html to look at rendered equations
sanjeevtripurari/hue
refs/heads/master
desktop/core/ext-py/Paste-2.0.1/paste/request.py
33
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org) # Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php # (c) 2005 Ian Bicking and contributors # This module is part of the Python Paste Project and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """ This module provides helper routines with work directly on a WSGI environment to solve common requirements. * get_cookies(environ) * parse_querystring(environ) * parse_formvars(environ, include_get_vars=True) * construct_url(environ, with_query_string=True, with_path_info=True, script_name=None, path_info=None, querystring=None) * path_info_split(path_info) * path_info_pop(environ) * resolve_relative_url(url, environ) """ import cgi from six.moves.urllib import parse as urlparse from six.moves.urllib.parse import quote try: # Python 3 from http.cookies import SimpleCookie, CookieError except ImportError: # Python 2 from Cookie import SimpleCookie, CookieError try: from UserDict import DictMixin except ImportError: from collections import MutableMapping as DictMixin import six from paste.util.multidict import MultiDict __all__ = ['get_cookies', 'get_cookie_dict', 'parse_querystring', 'parse_formvars', 'construct_url', 'path_info_split', 'path_info_pop', 'resolve_relative_url', 'EnvironHeaders'] def get_cookies(environ): """ Gets a cookie object (which is a dictionary-like object) from the request environment; caches this value in case get_cookies is called again for the same request. """ header = environ.get('HTTP_COOKIE', '') if 'paste.cookies' in environ: cookies, check_header = environ['paste.cookies'] if check_header == header: return cookies cookies = SimpleCookie() try: cookies.load(header) except CookieError: pass environ['paste.cookies'] = (cookies, header) return cookies def get_cookie_dict(environ): """Return a *plain* dictionary of cookies as found in the request. 
Unlike ``get_cookies`` this returns a dictionary, not a ``SimpleCookie`` object. For incoming cookies a dictionary fully represents the information. Like ``get_cookies`` this caches and checks the cache. """ header = environ.get('HTTP_COOKIE') if not header: return {} if 'paste.cookies.dict' in environ: cookies, check_header = environ['paste.cookies.dict'] if check_header == header: return cookies cookies = SimpleCookie() try: cookies.load(header) except CookieError: pass result = {} for name in cookies: result[name] = cookies[name].value environ['paste.cookies.dict'] = (result, header) return result def parse_querystring(environ): """ Parses a query string into a list like ``[(name, value)]``. Caches this value in case parse_querystring is called again for the same request. You can pass the result to ``dict()``, but be aware that keys that appear multiple times will be lost (only the last value will be preserved). """ source = environ.get('QUERY_STRING', '') if not source: return [] if 'paste.parsed_querystring' in environ: parsed, check_source = environ['paste.parsed_querystring'] if check_source == source: return parsed parsed = cgi.parse_qsl(source, keep_blank_values=True, strict_parsing=False) environ['paste.parsed_querystring'] = (parsed, source) return parsed def parse_dict_querystring(environ): """Parses a query string like parse_querystring, but returns a MultiDict Caches this value in case parse_dict_querystring is called again for the same request. 
Example:: >>> environ = {'QUERY_STRING': 'day=Monday&user=fred&user=jane'} >>> parsed = parse_dict_querystring(environ) >>> parsed['day'] 'Monday' >>> parsed['user'] 'fred' >>> parsed.getall('user') ['fred', 'jane'] """ source = environ.get('QUERY_STRING', '') if not source: return MultiDict() if 'paste.parsed_dict_querystring' in environ: parsed, check_source = environ['paste.parsed_dict_querystring'] if check_source == source: return parsed parsed = cgi.parse_qsl(source, keep_blank_values=True, strict_parsing=False) multi = MultiDict(parsed) environ['paste.parsed_dict_querystring'] = (multi, source) return multi def parse_formvars(environ, include_get_vars=True): """Parses the request, returning a MultiDict of form variables. If ``include_get_vars`` is true then GET (query string) variables will also be folded into the MultiDict. All values should be strings, except for file uploads which are left as ``FieldStorage`` instances. If the request was not a normal form request (e.g., a POST with an XML body) then ``environ['wsgi.input']`` won't be read. 
""" source = environ['wsgi.input'] if 'paste.parsed_formvars' in environ: parsed, check_source = environ['paste.parsed_formvars'] if check_source == source: if include_get_vars: parsed.update(parse_querystring(environ)) return parsed # @@: Shouldn't bother FieldStorage parsing during GET/HEAD and # fake_out_cgi requests type = environ.get('CONTENT_TYPE', '').lower() if ';' in type: type = type.split(';', 1)[0] fake_out_cgi = type not in ('', 'application/x-www-form-urlencoded', 'multipart/form-data') # FieldStorage assumes a default CONTENT_LENGTH of -1, but a # default of 0 is better: if not environ.get('CONTENT_LENGTH'): environ['CONTENT_LENGTH'] = '0' # Prevent FieldStorage from parsing QUERY_STRING during GET/HEAD # requests old_query_string = environ.get('QUERY_STRING','') environ['QUERY_STRING'] = '' if fake_out_cgi: input = six.BytesIO(b'') old_content_type = environ.get('CONTENT_TYPE') old_content_length = environ.get('CONTENT_LENGTH') environ['CONTENT_LENGTH'] = '0' environ['CONTENT_TYPE'] = '' else: input = environ['wsgi.input'] fs = cgi.FieldStorage(fp=input, environ=environ, keep_blank_values=1) environ['QUERY_STRING'] = old_query_string if fake_out_cgi: environ['CONTENT_TYPE'] = old_content_type environ['CONTENT_LENGTH'] = old_content_length formvars = MultiDict() if isinstance(fs.value, list): for name in fs.keys(): values = fs[name] if not isinstance(values, list): values = [values] for value in values: if not value.filename: value = value.value formvars.add(name, value) environ['paste.parsed_formvars'] = (formvars, source) if include_get_vars: formvars.update(parse_querystring(environ)) return formvars def construct_url(environ, with_query_string=True, with_path_info=True, script_name=None, path_info=None, querystring=None): """Reconstructs the URL from the WSGI environment. You may override SCRIPT_NAME, PATH_INFO, and QUERYSTRING with the keyword arguments. 
""" url = environ['wsgi.url_scheme']+'://' if environ.get('HTTP_HOST'): host = environ['HTTP_HOST'] port = None if ':' in host: host, port = host.split(':', 1) if environ['wsgi.url_scheme'] == 'https': if port == '443': port = None elif environ['wsgi.url_scheme'] == 'http': if port == '80': port = None url += host if port: url += ':%s' % port else: url += environ['SERVER_NAME'] if environ['wsgi.url_scheme'] == 'https': if environ['SERVER_PORT'] != '443': url += ':' + environ['SERVER_PORT'] else: if environ['SERVER_PORT'] != '80': url += ':' + environ['SERVER_PORT'] if script_name is None: url += quote(environ.get('SCRIPT_NAME','')) else: url += quote(script_name) if with_path_info: if path_info is None: url += quote(environ.get('PATH_INFO','')) else: url += quote(path_info) if with_query_string: if querystring is None: if environ.get('QUERY_STRING'): url += '?' + environ['QUERY_STRING'] elif querystring: url += '?' + querystring return url def resolve_relative_url(url, environ): """ Resolve the given relative URL as being relative to the location represented by the environment. This can be used for redirecting to a relative path. Note: if url is already absolute, this function will (intentionally) have no effect on it. """ cur_url = construct_url(environ, with_query_string=False) return urlparse.urljoin(cur_url, url) def path_info_split(path_info): """ Splits off the first segment of the path. Returns (first_part, rest_of_path). first_part can be None (if PATH_INFO is empty), '' (if PATH_INFO is '/'), or a name without any /'s. rest_of_path can be '' or a string starting with /. 
""" if not path_info: return None, '' assert path_info.startswith('/'), ( "PATH_INFO should start with /: %r" % path_info) path_info = path_info.lstrip('/') if '/' in path_info: first, rest = path_info.split('/', 1) return first, '/' + rest else: return path_info, '' def path_info_pop(environ): """ 'Pops' off the next segment of PATH_INFO, pushing it onto SCRIPT_NAME, and returning that segment. For instance:: >>> def call_it(script_name, path_info): ... env = {'SCRIPT_NAME': script_name, 'PATH_INFO': path_info} ... result = path_info_pop(env) ... print('SCRIPT_NAME=%r; PATH_INFO=%r; returns=%r' % ( ... env['SCRIPT_NAME'], env['PATH_INFO'], result)) >>> call_it('/foo', '/bar') SCRIPT_NAME='/foo/bar'; PATH_INFO=''; returns='bar' >>> call_it('/foo/bar', '') SCRIPT_NAME='/foo/bar'; PATH_INFO=''; returns=None >>> call_it('/foo/bar', '/') SCRIPT_NAME='/foo/bar/'; PATH_INFO=''; returns='' >>> call_it('', '/1/2/3') SCRIPT_NAME='/1'; PATH_INFO='/2/3'; returns='1' >>> call_it('', '//1/2') SCRIPT_NAME='//1'; PATH_INFO='/2'; returns='1' """ path = environ.get('PATH_INFO', '') if not path: return None while path.startswith('/'): environ['SCRIPT_NAME'] += '/' path = path[1:] if '/' not in path: environ['SCRIPT_NAME'] += path environ['PATH_INFO'] = '' return path else: segment, path = path.split('/', 1) environ['PATH_INFO'] = '/' + path environ['SCRIPT_NAME'] += segment return segment _parse_headers_special = { # This is a Zope convention, but we'll allow it here: 'HTTP_CGI_AUTHORIZATION': 'Authorization', 'CONTENT_LENGTH': 'Content-Length', 'CONTENT_TYPE': 'Content-Type', } def parse_headers(environ): """ Parse the headers in the environment (like ``HTTP_HOST``) and yield a sequence of those (header_name, value) tuples. """ # @@: Maybe should parse out comma-separated headers? 
for cgi_var, value in environ.iteritems(): if cgi_var in _parse_headers_special: yield _parse_headers_special[cgi_var], value elif cgi_var.startswith('HTTP_'): yield cgi_var[5:].title().replace('_', '-'), value class EnvironHeaders(DictMixin): """An object that represents the headers as present in a WSGI environment. This object is a wrapper (with no internal state) for a WSGI request object, representing the CGI-style HTTP_* keys as a dictionary. Because a CGI environment can only hold one value for each key, this dictionary is single-valued (unlike outgoing headers). """ def __init__(self, environ): self.environ = environ def _trans_name(self, name): key = 'HTTP_'+name.replace('-', '_').upper() if key == 'HTTP_CONTENT_LENGTH': key = 'CONTENT_LENGTH' elif key == 'HTTP_CONTENT_TYPE': key = 'CONTENT_TYPE' return key def _trans_key(self, key): if key == 'CONTENT_TYPE': return 'Content-Type' elif key == 'CONTENT_LENGTH': return 'Content-Length' elif key.startswith('HTTP_'): return key[5:].replace('_', '-').title() else: return None def __len__(self): return len(self.environ) def __getitem__(self, item): return self.environ[self._trans_name(item)] def __setitem__(self, item, value): # @@: Should this dictionary be writable at all? self.environ[self._trans_name(item)] = value def __delitem__(self, item): del self.environ[self._trans_name(item)] def __iter__(self): for key in self.environ: name = self._trans_key(key) if name is not None: yield name def keys(self): return list(iter(self)) def __contains__(self, item): return self._trans_name(item) in self.environ def _cgi_FieldStorage__repr__patch(self): """ monkey patch for FieldStorage.__repr__ Unbelievely, the default __repr__ on FieldStorage reads the entire file content instead of being sane about it. 
This is a simple replacement that doesn't do that """ if self.file: return "FieldStorage(%r, %r)" % ( self.name, self.filename) return "FieldStorage(%r, %r, %r)" % ( self.name, self.filename, self.value) cgi.FieldStorage.__repr__ = _cgi_FieldStorage__repr__patch if __name__ == '__main__': import doctest doctest.testmod()
MrP01/PyLatein
refs/heads/master
Trainer/templatetags/tools.py
1
from django import template import importlib register = template.Library() @register.filter def isinst(value, class_str): split = class_str.split(".") return isinstance(value, getattr(importlib.import_module(".".join(split[:-1])), split[-1]))
ryangallen/django
refs/heads/master
django/core/management/commands/loaddata.py
294
from __future__ import unicode_literals import glob import gzip import os import warnings import zipfile from itertools import product from django.apps import apps from django.conf import settings from django.core import serializers from django.core.exceptions import ImproperlyConfigured from django.core.management.base import BaseCommand, CommandError from django.core.management.color import no_style from django.db import ( DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router, transaction, ) from django.utils import lru_cache from django.utils._os import upath from django.utils.encoding import force_text from django.utils.functional import cached_property from django.utils.glob import glob_escape try: import bz2 has_bz2 = True except ImportError: has_bz2 = False class Command(BaseCommand): help = 'Installs the named fixture(s) in the database.' missing_args_message = ("No database fixture specified. Please provide the " "path of at least one fixture in the command line.") def add_arguments(self, parser): parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture labels.') parser.add_argument('--database', action='store', dest='database', default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load ' 'fixtures into. 
Defaults to the "default" database.') parser.add_argument('--app', action='store', dest='app_label', default=None, help='Only look for fixtures in the specified app.') parser.add_argument('--ignorenonexistent', '-i', action='store_true', dest='ignore', default=False, help='Ignores entries in the serialized data for fields that do not ' 'currently exist on the model.') def handle(self, *fixture_labels, **options): self.ignore = options.get('ignore') self.using = options.get('database') self.app_label = options.get('app_label') self.hide_empty = options.get('hide_empty', False) self.verbosity = options.get('verbosity') with transaction.atomic(using=self.using): self.loaddata(fixture_labels) # Close the DB connection -- unless we're still in a transaction. This # is required as a workaround for an edge case in MySQL: if the same # connection is used to create tables, load data, and query, the query # can return incorrect results. See Django #7572, MySQL #37735. if transaction.get_autocommit(self.using): connections[self.using].close() def loaddata(self, fixture_labels): connection = connections[self.using] # Keep a count of the installed objects and fixtures self.fixture_count = 0 self.loaded_object_count = 0 self.fixture_object_count = 0 self.models = set() self.serialization_formats = serializers.get_public_serializer_formats() # Forcing binary mode may be revisited after dropping Python 2 support (see #22399) self.compression_formats = { None: (open, 'rb'), 'gz': (gzip.GzipFile, 'rb'), 'zip': (SingleZipReader, 'r'), } if has_bz2: self.compression_formats['bz2'] = (bz2.BZ2File, 'r') with connection.constraint_checks_disabled(): for fixture_label in fixture_labels: self.load_label(fixture_label) # Since we disabled constraint checks, we must manually check for # any invalid keys that might have been added table_names = [model._meta.db_table for model in self.models] try: connection.check_constraints(table_names=table_names) except Exception as e: e.args = ("Problem 
installing fixtures: %s" % e,) raise # If we found even one object in a fixture, we need to reset the # database sequences. if self.loaded_object_count > 0: sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models) if sequence_sql: if self.verbosity >= 2: self.stdout.write("Resetting sequences\n") with connection.cursor() as cursor: for line in sequence_sql: cursor.execute(line) if self.verbosity >= 1: if self.fixture_count == 0 and self.hide_empty: pass elif self.fixture_object_count == self.loaded_object_count: self.stdout.write("Installed %d object(s) from %d fixture(s)" % (self.loaded_object_count, self.fixture_count)) else: self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % (self.loaded_object_count, self.fixture_object_count, self.fixture_count)) def load_label(self, fixture_label): """ Loads fixtures files for a given label. """ show_progress = self.verbosity >= 3 for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label): _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file)) open_method, mode = self.compression_formats[cmp_fmt] fixture = open_method(fixture_file, mode) try: self.fixture_count += 1 objects_in_fixture = 0 loaded_objects_in_fixture = 0 if self.verbosity >= 2: self.stdout.write("Installing %s fixture '%s' from %s." % (ser_fmt, fixture_name, humanize(fixture_dir))) objects = serializers.deserialize(ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore) for obj in objects: objects_in_fixture += 1 if router.allow_migrate_model(self.using, obj.object.__class__): loaded_objects_in_fixture += 1 self.models.add(obj.object.__class__) try: obj.save(using=self.using) if show_progress: self.stdout.write( '\rProcessed %i object(s).' 
% loaded_objects_in_fixture, ending='' ) except (DatabaseError, IntegrityError) as e: e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % { 'app_label': obj.object._meta.app_label, 'object_name': obj.object._meta.object_name, 'pk': obj.object.pk, 'error_msg': force_text(e) },) raise if objects and show_progress: self.stdout.write('') # add a newline after progress indicator self.loaded_object_count += loaded_objects_in_fixture self.fixture_object_count += objects_in_fixture except Exception as e: if not isinstance(e, CommandError): e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),) raise finally: fixture.close() # Warn if the fixture we loaded contains 0 objects. if objects_in_fixture == 0: warnings.warn( "No fixture data found for '%s'. (File format may be " "invalid.)" % fixture_name, RuntimeWarning ) @lru_cache.lru_cache(maxsize=None) def find_fixtures(self, fixture_label): """ Finds fixture files for a given label. """ fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label) databases = [self.using, None] cmp_fmts = list(self.compression_formats.keys()) if cmp_fmt is None else [cmp_fmt] ser_fmts = serializers.get_public_serializer_formats() if ser_fmt is None else [ser_fmt] if self.verbosity >= 2: self.stdout.write("Loading '%s' fixtures..." % fixture_name) if os.path.isabs(fixture_name): fixture_dirs = [os.path.dirname(fixture_name)] fixture_name = os.path.basename(fixture_name) else: fixture_dirs = self.fixture_dirs if os.path.sep in os.path.normpath(fixture_name): fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name)) for dir_ in fixture_dirs] fixture_name = os.path.basename(fixture_name) suffixes = ('.'.join(ext for ext in combo if ext) for combo in product(databases, ser_fmts, cmp_fmts)) targets = set('.'.join((fixture_name, suffix)) for suffix in suffixes) fixture_files = [] for fixture_dir in fixture_dirs: if self.verbosity >= 2: self.stdout.write("Checking %s for fixtures..." 
% humanize(fixture_dir)) fixture_files_in_dir = [] path = os.path.join(fixture_dir, fixture_name) for candidate in glob.iglob(glob_escape(path) + '*'): if os.path.basename(candidate) in targets: # Save the fixture_dir and fixture_name for future error messages. fixture_files_in_dir.append((candidate, fixture_dir, fixture_name)) if self.verbosity >= 2 and not fixture_files_in_dir: self.stdout.write("No fixture '%s' in %s." % (fixture_name, humanize(fixture_dir))) # Check kept for backwards-compatibility; it isn't clear why # duplicates are only allowed in different directories. if len(fixture_files_in_dir) > 1: raise CommandError( "Multiple fixtures named '%s' in %s. Aborting." % (fixture_name, humanize(fixture_dir))) fixture_files.extend(fixture_files_in_dir) if not fixture_files: # Warning kept for backwards-compatibility; why not an exception? warnings.warn("No fixture named '%s' found." % fixture_name) return fixture_files @cached_property def fixture_dirs(self): """ Return a list of fixture directories. The list contains the 'fixtures' subdirectory of each installed application, if it exists, the directories in FIXTURE_DIRS, and the current directory. """ dirs = [] fixture_dirs = settings.FIXTURE_DIRS if len(fixture_dirs) != len(set(fixture_dirs)): raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.") for app_config in apps.get_app_configs(): app_label = app_config.label app_dir = os.path.join(app_config.path, 'fixtures') if app_dir in fixture_dirs: raise ImproperlyConfigured( "'%s' is a default fixture directory for the '%s' app " "and cannot be listed in settings.FIXTURE_DIRS." 
% (app_dir, app_label) ) if self.app_label and app_label != self.app_label: continue if os.path.isdir(app_dir): dirs.append(app_dir) dirs.extend(list(fixture_dirs)) dirs.append('') dirs = [upath(os.path.abspath(os.path.realpath(d))) for d in dirs] return dirs def parse_name(self, fixture_name): """ Splits fixture name in name, serialization format, compression format. """ parts = fixture_name.rsplit('.', 2) if len(parts) > 1 and parts[-1] in self.compression_formats: cmp_fmt = parts[-1] parts = parts[:-1] else: cmp_fmt = None if len(parts) > 1: if parts[-1] in self.serialization_formats: ser_fmt = parts[-1] parts = parts[:-1] else: raise CommandError( "Problem installing fixture '%s': %s is not a known " "serialization format." % (''.join(parts[:-1]), parts[-1])) else: ser_fmt = None name = '.'.join(parts) return name, ser_fmt, cmp_fmt class SingleZipReader(zipfile.ZipFile): def __init__(self, *args, **kwargs): zipfile.ZipFile.__init__(self, *args, **kwargs) if len(self.namelist()) != 1: raise ValueError("Zip-compressed fixtures must contain one file.") def read(self): return zipfile.ZipFile.read(self, self.namelist()[0]) def humanize(dirname): return "'%s'" % dirname if dirname else 'absolute path'
rooshilp/CMPUT410W15-project
refs/heads/master
testenv/lib/python2.7/site-packages/PIL/ImageEnhance.py
26
# # The Python Imaging Library. # $Id$ # # image enhancement classes # # For a background, see "Image Processing By Interpolation and # Extrapolation", Paul Haeberli and Douglas Voorhies. Available # at http://www.graficaobscura.com/interp/index.html # # History: # 1996-03-23 fl Created # 2009-06-16 fl Fixed mean calculation # # Copyright (c) Secret Labs AB 1997. # Copyright (c) Fredrik Lundh 1996. # # See the README file for information on usage and redistribution. # from PIL import Image, ImageFilter, ImageStat class _Enhance: def enhance(self, factor): """ Returns an enhanced image. :param factor: A floating point value controlling the enhancement. Factor 1.0 always returns a copy of the original image, lower factors mean less color (brightness, contrast, etc), and higher values more. There are no restrictions on this value. :rtype: :py:class:`~PIL.Image.Image` """ return Image.blend(self.degenerate, self.image, factor) class Color(_Enhance): """Adjust image color balance. This class can be used to adjust the colour balance of an image, in a manner similar to the controls on a colour TV set. An enhancement factor of 0.0 gives a black and white image. A factor of 1.0 gives the original image. """ def __init__(self, image): self.image = image self.intermediate_mode = 'L' if 'A' in image.getbands(): self.intermediate_mode = 'LA' self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) class Contrast(_Enhance): """Adjust image contrast. This class can be used to control the contrast of an image, similar to the contrast control on a TV set. An enhancement factor of 0.0 gives a solid grey image. A factor of 1.0 gives the original image. """ def __init__(self, image): self.image = image mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) self.degenerate = Image.new("L", image.size, mean).convert(image.mode) if 'A' in image.getbands(): self.degenerate.putalpha(image.split()[-1]) class Brightness(_Enhance): """Adjust image brightness. 
This class can be used to control the brighntess of an image. An enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the original image. """ def __init__(self, image): self.image = image self.degenerate = Image.new(image.mode, image.size, 0) if 'A' in image.getbands(): self.degenerate.putalpha(image.split()[-1]) class Sharpness(_Enhance): """Adjust image sharpness. This class can be used to adjust the sharpness of an image. An enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the original image, and a factor of 2.0 gives a sharpened image. """ def __init__(self, image): self.image = image self.degenerate = image.filter(ImageFilter.SMOOTH) if 'A' in image.getbands(): self.degenerate.putalpha(image.split()[-1])
redhat-openstack/swift
refs/heads/master-patches
swift/common/constraints.py
11
# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import functools import os import urllib import time from urllib import unquote from six.moves.configparser import ConfigParser, NoSectionError, NoOptionError from swift.common import utils, exceptions from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \ HTTPRequestEntityTooLarge, HTTPPreconditionFailed, HTTPNotImplemented, \ HTTPException MAX_FILE_SIZE = 5368709122 MAX_META_NAME_LENGTH = 128 MAX_META_VALUE_LENGTH = 256 MAX_META_COUNT = 90 MAX_META_OVERALL_SIZE = 4096 MAX_HEADER_SIZE = 8192 MAX_OBJECT_NAME_LENGTH = 1024 CONTAINER_LISTING_LIMIT = 10000 ACCOUNT_LISTING_LIMIT = 10000 MAX_ACCOUNT_NAME_LENGTH = 256 MAX_CONTAINER_NAME_LENGTH = 256 VALID_API_VERSIONS = ["v1", "v1.0"] EXTRA_HEADER_COUNT = 0 # If adding an entry to DEFAULT_CONSTRAINTS, note that # these constraints are automatically published by the # proxy server in responses to /info requests, with values # updated by reload_constraints() DEFAULT_CONSTRAINTS = { 'max_file_size': MAX_FILE_SIZE, 'max_meta_name_length': MAX_META_NAME_LENGTH, 'max_meta_value_length': MAX_META_VALUE_LENGTH, 'max_meta_count': MAX_META_COUNT, 'max_meta_overall_size': MAX_META_OVERALL_SIZE, 'max_header_size': MAX_HEADER_SIZE, 'max_object_name_length': MAX_OBJECT_NAME_LENGTH, 'container_listing_limit': CONTAINER_LISTING_LIMIT, 'account_listing_limit': ACCOUNT_LISTING_LIMIT, 'max_account_name_length': MAX_ACCOUNT_NAME_LENGTH, 
'max_container_name_length': MAX_CONTAINER_NAME_LENGTH, 'valid_api_versions': VALID_API_VERSIONS, 'extra_header_count': EXTRA_HEADER_COUNT, } SWIFT_CONSTRAINTS_LOADED = False OVERRIDE_CONSTRAINTS = {} # any constraints overridden by SWIFT_CONF_FILE EFFECTIVE_CONSTRAINTS = {} # populated by reload_constraints def reload_constraints(): """ Parse SWIFT_CONF_FILE and reset module level global contraint attrs, populating OVERRIDE_CONSTRAINTS AND EFFECTIVE_CONSTRAINTS along the way. """ global SWIFT_CONSTRAINTS_LOADED, OVERRIDE_CONSTRAINTS SWIFT_CONSTRAINTS_LOADED = False OVERRIDE_CONSTRAINTS = {} constraints_conf = ConfigParser() if constraints_conf.read(utils.SWIFT_CONF_FILE): SWIFT_CONSTRAINTS_LOADED = True for name in DEFAULT_CONSTRAINTS: try: value = constraints_conf.get('swift-constraints', name) except NoOptionError: pass except NoSectionError: # We are never going to find the section for another option break else: try: value = int(value) except ValueError: value = utils.list_from_csv(value) OVERRIDE_CONSTRAINTS[name] = value for name, default in DEFAULT_CONSTRAINTS.items(): value = OVERRIDE_CONSTRAINTS.get(name, default) EFFECTIVE_CONSTRAINTS[name] = value # "globals" in this context is module level globals, always. globals()[name.upper()] = value reload_constraints() # Maximum slo segments in buffer MAX_BUFFERED_SLO_SEGMENTS = 10000 #: Query string format= values to their corresponding content-type values FORMAT2CONTENT_TYPE = {'plain': 'text/plain', 'json': 'application/json', 'xml': 'application/xml'} # By default the maximum number of allowed headers depends on the number of max # allowed metadata settings plus a default value of 32 for regular http # headers. If for some reason this is not enough (custom middleware for # example) it can be increased with the extra_header_count constraint. MAX_HEADER_COUNT = MAX_META_COUNT + 32 + max(EXTRA_HEADER_COUNT, 0) def check_metadata(req, target_type): """ Check metadata sent in the request headers. 
This should only check that the metadata in the request given is valid. Checks against account/container overall metadata should be forwarded on to its respective server to be checked. :param req: request object :param target_type: str: one of: object, container, or account: indicates which type the target storage for the metadata is :returns: HTTPBadRequest with bad metadata otherwise None """ prefix = 'x-%s-meta-' % target_type.lower() meta_count = 0 meta_size = 0 for key, value in req.headers.items(): if isinstance(value, basestring) and len(value) > MAX_HEADER_SIZE: return HTTPBadRequest(body='Header value too long: %s' % key[:MAX_META_NAME_LENGTH], request=req, content_type='text/plain') if not key.lower().startswith(prefix): continue key = key[len(prefix):] if not key: return HTTPBadRequest(body='Metadata name cannot be empty', request=req, content_type='text/plain') meta_count += 1 meta_size += len(key) + len(value) if len(key) > MAX_META_NAME_LENGTH: return HTTPBadRequest( body='Metadata name too long: %s%s' % (prefix, key), request=req, content_type='text/plain') elif len(value) > MAX_META_VALUE_LENGTH: return HTTPBadRequest( body='Metadata value longer than %d: %s%s' % ( MAX_META_VALUE_LENGTH, prefix, key), request=req, content_type='text/plain') elif meta_count > MAX_META_COUNT: return HTTPBadRequest( body='Too many metadata items; max %d' % MAX_META_COUNT, request=req, content_type='text/plain') elif meta_size > MAX_META_OVERALL_SIZE: return HTTPBadRequest( body='Total metadata too large; max %d' % MAX_META_OVERALL_SIZE, request=req, content_type='text/plain') return None def check_object_creation(req, object_name): """ Check to ensure that everything is alright about an object to be created. 
:param req: HTTP request object :param object_name: name of object to be created :returns HTTPRequestEntityTooLarge: the object is too large :returns HTTPLengthRequired: missing content-length header and not a chunked request :returns HTTPBadRequest: missing or bad content-type header, or bad metadata :returns HTTPNotImplemented: unsupported transfer-encoding header value """ try: ml = req.message_length() except ValueError as e: return HTTPBadRequest(request=req, content_type='text/plain', body=str(e)) except AttributeError as e: return HTTPNotImplemented(request=req, content_type='text/plain', body=str(e)) if ml is not None and ml > MAX_FILE_SIZE: return HTTPRequestEntityTooLarge(body='Your request is too large.', request=req, content_type='text/plain') if req.content_length is None and \ req.headers.get('transfer-encoding') != 'chunked': return HTTPLengthRequired(body='Missing Content-Length header.', request=req, content_type='text/plain') if 'X-Copy-From' in req.headers and req.content_length: return HTTPBadRequest(body='Copy requests require a zero byte body', request=req, content_type='text/plain') if len(object_name) > MAX_OBJECT_NAME_LENGTH: return HTTPBadRequest(body='Object name length of %d longer than %d' % (len(object_name), MAX_OBJECT_NAME_LENGTH), request=req, content_type='text/plain') if 'Content-Type' not in req.headers: return HTTPBadRequest(request=req, content_type='text/plain', body='No content type') try: req = check_delete_headers(req) except HTTPException as e: return HTTPBadRequest(request=req, body=e.body, content_type='text/plain') if not check_utf8(req.headers['Content-Type']): return HTTPBadRequest(request=req, body='Invalid Content-Type', content_type='text/plain') return check_metadata(req, 'object') def check_dir(root, drive): """ Verify that the path to the device is a directory and is a lesser constraint that is enforced when a full mount_check isn't possible with, for instance, a VM using loopback or partitions. 
:param root: base path where the dir is :param drive: drive name to be checked :returns: True if it is a valid directoy, False otherwise """ return os.path.isdir(os.path.join(root, drive)) def check_mount(root, drive): """ Verify that the path to the device is a mount point and mounted. This allows us to fast fail on drives that have been unmounted because of issues, and also prevents us for accidentally filling up the root partition. :param root: base path where the devices are mounted :param drive: drive name to be checked :returns: True if it is a valid mounted device, False otherwise """ if not (urllib.quote_plus(drive) == drive): return False path = os.path.join(root, drive) return utils.ismount(path) def check_float(string): """ Helper function for checking if a string can be converted to a float. :param string: string to be verified as a float :returns: True if the string can be converted to a float, False otherwise """ try: float(string) return True except ValueError: return False def valid_timestamp(request): """ Helper function to extract a timestamp from requests that require one. :param request: the swob request object :returns: a valid Timestamp instance :raises: HTTPBadRequest on missing or invalid X-Timestamp """ try: return request.timestamp except exceptions.InvalidTimestamp as e: raise HTTPBadRequest(body=str(e), request=request, content_type='text/plain') def check_delete_headers(request): """ Validate if 'x-delete' headers are have correct values values should be positive integers and correspond to a time in the future. 
:param request: the swob request object :returns: HTTPBadRequest in case of invalid values or None if values are ok """ if 'x-delete-after' in request.headers: try: x_delete_after = int(request.headers['x-delete-after']) except ValueError: raise HTTPBadRequest(request=request, content_type='text/plain', body='Non-integer X-Delete-After') actual_del_time = time.time() + x_delete_after if actual_del_time < time.time(): raise HTTPBadRequest(request=request, content_type='text/plain', body='X-Delete-After in past') request.headers['x-delete-at'] = utils.normalize_delete_at_timestamp( actual_del_time) if 'x-delete-at' in request.headers: try: x_delete_at = int(utils.normalize_delete_at_timestamp( int(request.headers['x-delete-at']))) except ValueError: raise HTTPBadRequest(request=request, content_type='text/plain', body='Non-integer X-Delete-At') if x_delete_at < time.time(): raise HTTPBadRequest(request=request, content_type='text/plain', body='X-Delete-At in past') return request def check_utf8(string): """ Validate if a string is valid UTF-8 str or unicode and that it does not contain any null character. :param string: string to be validated :returns: True if the string is valid utf-8 str or unicode and contains no null characters, False otherwise """ if not string: return False try: if isinstance(string, unicode): string.encode('utf-8') else: decoded = string.decode('UTF-8') if decoded.encode('UTF-8') != string: return False # A UTF-8 string with surrogates in it is invalid. if any(0xD800 <= ord(codepoint) <= 0xDFFF for codepoint in decoded): return False return '\x00' not in string # If string is unicode, decode() will raise UnicodeEncodeError # So, we should catch both UnicodeDecodeError & UnicodeEncodeError except UnicodeError: return False def check_path_header(req, name, length, error_msg): """ Validate that the value of path-like header is well formatted. We assume the caller ensures that specific header is present in req.headers. 
:param req: HTTP request object :param name: header name :param length: length of path segment check :param error_msg: error message for client :returns: A tuple with path parts according to length :raise: HTTPPreconditionFailed if header value is not well formatted. """ src_header = unquote(req.headers.get(name)) if not src_header.startswith('/'): src_header = '/' + src_header try: return utils.split_path(src_header, length, length, True) except ValueError: raise HTTPPreconditionFailed( request=req, body=error_msg) def check_copy_from_header(req): """ Validate that the value from x-copy-from header is well formatted. We assume the caller ensures that x-copy-from header is present in req.headers. :param req: HTTP request object :returns: A tuple with container name and object name :raise: HTTPPreconditionFailed if x-copy-from value is not well formatted. """ return check_path_header(req, 'X-Copy-From', 2, 'X-Copy-From header must be of the form ' '<container name>/<object name>') def check_destination_header(req): """ Validate that the value from destination header is well formatted. We assume the caller ensures that destination header is present in req.headers. :param req: HTTP request object :returns: A tuple with container name and object name :raise: HTTPPreconditionFailed if destination value is not well formatted. """ return check_path_header(req, 'Destination', 2, 'Destination header must be of the form ' '<container name>/<object name>') def check_name_format(req, name, target_type): """ Validate that the header contains valid account or container name. :param req: HTTP request object :param name: header value to validate :param target_type: which header is being validated (Account or Container) :returns: A properly encoded account name or container name :raise: HTTPPreconditionFailed if account header is not well formatted. 
""" if not name: raise HTTPPreconditionFailed( request=req, body='%s name cannot be empty' % target_type) if isinstance(name, unicode): name = name.encode('utf-8') if '/' in name: raise HTTPPreconditionFailed( request=req, body='%s name cannot contain slashes' % target_type) return name check_account_format = functools.partial(check_name_format, target_type='Account') check_container_format = functools.partial(check_name_format, target_type='Container') def valid_api_version(version): """ Checks if the requested version is valid. Currently Swift only supports "v1" and "v1.0". """ global VALID_API_VERSIONS if not isinstance(VALID_API_VERSIONS, list): VALID_API_VERSIONS = [str(VALID_API_VERSIONS)] return version in VALID_API_VERSIONS
nicoboss/Floatmotion
refs/heads/master
OpenGL/GL/VERSION/GL_1_2.py
9
'''OpenGL extension VERSION.GL_1_2 This module customises the behaviour of the OpenGL.raw.GL.VERSION.GL_1_2 to provide a more Python-friendly API The official definition of this extension is available here: http://www.opengl.org/registry/specs/VERSION/GL_1_2.txt ''' from OpenGL import platform, constant, arrays from OpenGL import extensions, wrapper import ctypes from OpenGL.raw.GL import _types, _glgets from OpenGL.raw.GL.VERSION.GL_1_2 import * from OpenGL.raw.GL.VERSION.GL_1_2 import _EXTENSION_NAME def glInitGl12VERSION(): '''Return boolean indicating whether this extension is available''' from OpenGL import extensions return extensions.hasGLExtension( _EXTENSION_NAME ) # INPUT glDrawRangeElements.indices size not checked against 'count,type' glDrawRangeElements=wrapper.wrapper(glDrawRangeElements).setInputArraySize( 'indices', None ) # INPUT glTexImage3D.pixels size not checked against 'format,type,width,height,depth' glTexImage3D=wrapper.wrapper(glTexImage3D).setInputArraySize( 'pixels', None ) # INPUT glTexSubImage3D.pixels size not checked against 'format,type,width,height,depth' glTexSubImage3D=wrapper.wrapper(glTexSubImage3D).setInputArraySize( 'pixels', None ) ### END AUTOGENERATED SECTION from OpenGL.GL.ARB.imaging import * from OpenGL.GL.VERSION.GL_1_2_images import * GL_POINT_SIZE_GRANULARITY = GL_SMOOTH_POINT_SIZE_GRANULARITY # alias GL_POINT_SIZE_RANGE = GL_SMOOTH_POINT_SIZE_RANGE # alias GL_LINE_WIDTH_GRANULARITY = GL_SMOOTH_LINE_WIDTH_GRANULARITY # alias GL_LINE_WIDTH_RANGE = GL_SMOOTH_LINE_WIDTH_RANGE # alias glDrawRangeElements = wrapper.wrapper( glDrawRangeElements ).setPyConverter( 'indices', arrays.AsArrayOfType( 'indices', 'type' ), ).setReturnValues( wrapper.returnPyArgument( 'indices' ) )
chen0031/rekall
refs/heads/master
rekall-core/rekall/plugins/overlays/darwin/__init__.py
8
"""Profiles to support OSX specific data structures.""" from rekall.plugins.overlays.darwin import darwin from rekall.plugins.overlays.darwin import macho
repotvsupertuga/repo
refs/heads/master
script.module.dateutil/lib/dateutil/zoneinfo/__init__.py
97
# -*- coding: utf-8 -*- import logging import os import warnings import tempfile import shutil from subprocess import check_call from tarfile import TarFile from pkgutil import get_data from io import BytesIO from contextlib import closing from dateutil.tz import tzfile __all__ = ["gettz", "rebuild"] _ZONEFILENAME = "dateutil-zoneinfo.tar.gz" # python2.6 compatability. Note that TarFile.__exit__ != TarFile.close, but # it's close enough for python2.6 _tar_open = TarFile.open if not hasattr(TarFile, '__exit__'): def _tar_open(*args, **kwargs): return closing(TarFile.open(*args, **kwargs)) class tzfile(tzfile): def __reduce__(self): return (gettz, (self._filename,)) def getzoneinfofile_stream(): try: return BytesIO(get_data(__name__, _ZONEFILENAME)) except IOError as e: # TODO switch to FileNotFoundError? warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) return None class ZoneInfoFile(object): def __init__(self, zonefile_stream=None): if zonefile_stream is not None: with _tar_open(fileobj=zonefile_stream, mode='r') as tf: # dict comprehension does not work on python2.6 # TODO: get back to the nicer syntax when we ditch python2.6 # self.zones = {zf.name: tzfile(tf.extractfile(zf), # filename = zf.name) # for zf in tf.getmembers() if zf.isfile()} self.zones = dict((zf.name, tzfile(tf.extractfile(zf), filename=zf.name)) for zf in tf.getmembers() if zf.isfile()) # deal with links: They'll point to their parent object. Less # waste of memory # links = {zl.name: self.zones[zl.linkname] # for zl in tf.getmembers() if zl.islnk() or zl.issym()} links = dict((zl.name, self.zones[zl.linkname]) for zl in tf.getmembers() if zl.islnk() or zl.issym()) self.zones.update(links) else: self.zones = dict() # The current API has gettz as a module function, although in fact it taps into # a stateful class. So as a workaround for now, without changing the API, we # will create a new "global" class instance the first time a user requests a # timezone. 
Ugly, but adheres to the api. # # TODO: deprecate this. _CLASS_ZONE_INSTANCE = list() def gettz(name): if len(_CLASS_ZONE_INSTANCE) == 0: _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) return _CLASS_ZONE_INSTANCE[0].zones.get(name) def rebuild(filename, tag=None, format="gz", zonegroups=[]): """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* filename is the timezone tarball from ftp.iana.org/tz. """ tmpdir = tempfile.mkdtemp() zonedir = os.path.join(tmpdir, "zoneinfo") moduledir = os.path.dirname(__file__) try: with _tar_open(filename) as tf: for name in zonegroups: tf.extract(name, tmpdir) filepaths = [os.path.join(tmpdir, n) for n in zonegroups] try: check_call(["zic", "-d", zonedir] + filepaths) except OSError as e: if e.errno == 2: logging.error( "Could not find zic. Perhaps you need to install " "libc-bin or some other package that provides it, " "or it's not in your PATH?") raise target = os.path.join(moduledir, _ZONEFILENAME) with _tar_open(target, "w:%s" % format) as tf: for entry in os.listdir(zonedir): entrypath = os.path.join(zonedir, entry) tf.add(entrypath, entry) finally: shutil.rmtree(tmpdir)
campbe13/openhatch
refs/heads/master
vendor/packages/twill/twill/extensions/__init__.py
9480
#
mojones/Axelrod
refs/heads/master
axelrod/cooperation.py
2
from math import sqrt

from . import eigen
from axelrod import Actions

C, D = Actions.C, Actions.D


# As yet unused until RoundRobin returns interactions
def cooperation_matrix(interactions):
    """
    The cooperation matrix from a single round robin.

    Parameters
    ----------
    interactions : dictionary
        A dictionary of the form:

        e.g. for a round robin between Cooperator, Defector and Alternator
        with 2 turns per round:
        {
            (0, 0): [(C, C), (C, C)],
            (0, 1): [(C, D), (C, D)],
            (0, 2): [(C, C), (C, D)],
            (1, 1): [(D, D), (D, D)],
            (1, 2): [(D, C), (D, D)],
            (2, 2): [(C, C), (D, D)]
        }

        i.e. the key is a pair of player index numbers and the value, a list
        of plays. The list contains one pair per turn in the round robin.
        The dictionary contains one entry for each combination of players.

    Returns
    -------
    list
        The cooperation matrix (C) of the form:

            [
                [a, b, c],
                [d, e, f],
                [g, h, i],
            ]

        i.e. an n by n matrix where n is the number of players. Each row (i)
        and column (j) represents an individual player and the value Cij
        is the number of times player i cooperated against opponent j.
    """
    # The number of players is derived from the number of interaction pairs
    # rather than passed in.  The number of ways (c) to select groups of r
    # members from a set of n members is given by:
    #
    #   c = n! / r!(n - r)!
    #
    # In this case, we are selecting pairs of players (p) and thus r = 2,
    # giving:
    #
    #   p = n(n-1) / 2 or p = (n^2 - n) / 2
    #
    # However, we also have the case where each player plays itself giving:
    #
    #   p = (n^2 + n) / 2
    #
    # Using the quadratic equation to rearrange for n gives:
    #
    #   n = (-1 +- sqrt(1 + 8p)) / 2
    #
    # Taking only the real roots allows us to derive the number of players
    # given the number of pairs:
    #
    #   n = (sqrt(8p + 1) - 1) / 2
    nplayers = int((sqrt(len(interactions) * 8 + 1) - 1) / 2)
    cooperation = [[0 for i in range(nplayers)] for j in range(nplayers)]
    for players, actions in interactions.items():
        p1_actions, p2_actions = zip(*actions)
        p1_cooperation = p1_actions.count(C)
        p2_cooperation = p2_actions.count(C)
        cooperation[players[0]][players[1]] = p1_cooperation
        if players[0] != players[1]:
            cooperation[players[1]][players[0]] = p2_cooperation
    return cooperation


def cooperation(results):
    """
    The total cooperation matrix from a tournament of multiple repetitions.

    Parameters
    ----------
    results : list
        A matrix of the form:

            [
                [[a, j], [b, k], [c, l]],
                [[d, m], [e, n], [f, o]],
                [[g, p], [h, q], [i, r]],
            ]

        i.e. one row per player, containing one element per opponent (in
        order of player index) which lists cooperation values for each
        repetition.

    Returns
    -------
    list
        The cooperation matrix (C) of the form:

            [
                [[a + j], [b + k], [c + l]],
                [[d + m], [e + n], [f + o]],
                [[g + p], [h + q], [i + r]],
            ]

        i.e. an n by n matrix where n is the number of players. Each row (i)
        and column (j) represents an individual player and the value Cij
        is the number of times player i cooperated against opponent j.
    """
    return [[sum(element) for element in row] for row in results]


def normalised_cooperation(cooperation, turns, repetitions):
    """
    The per-turn normalised cooperation matrix for a tournament of n
    repetitions.

    Parameters
    ----------
    cooperation : list
        The cooperation matrix (C)
    turns : integer
        The number of turns in each round robin.
    repetitions : integer
        The number of repetitions in the tournament.

    Returns
    -------
    list
        A matrix (N) such that:

            N = C / t

        where t is the total number of turns played in the tournament.
    """
    turns = turns * repetitions
    return [
        [1.0 * element / turns for element in row] for row in cooperation]


def vengeful_cooperation(cooperation):
    """
    The vengeful cooperation matrix derived from the cooperation matrix.

    Parameters
    ----------
    cooperation : list
        A cooperation matrix (C)

    Returns
    -------
    list
        A matrix (D) such that:

            Dij = 2(Cij - 0.5)
    """
    return [[2 * (element - 0.5) for element in row] for row in cooperation]


def cooperating_rating(cooperation, nplayers, turns, repetitions):
    """
    A list of cooperation ratings for each player

    Parameters
    ----------
    cooperation : list
        The cooperation matrix
    nplayers : integer
        The number of players in the tournament.
    turns : integer
        The number of turns in each round robin.
    repetitions : integer
        The number of repetitions in the tournament.

    Returns
    -------
    list
        a list of cooperation rates ordered by player index
    """
    # Each player participates in nplayers interactions per repetition
    # (including self-interaction), hence the normalisation below.
    total_turns = turns * repetitions * nplayers
    return [1.0 * sum(row) / total_turns for row in cooperation]


def null_matrix(nplayers):
    """
    A null n by n matrix for n players

    Parameters
    ----------
    nplayers : integer
        The number of players in the tournament.

    Returns
    -------
    list
        A null n by n matrix where n is the number of players.
    """
    plist = list(range(nplayers))
    return [[0 for j in plist] for i in plist]


def good_partner_matrix(results, nplayers, repetitions):
    """
    An n by n matrix of good partner ratings for n players

    Parameters
    ----------
    results : list
        A cooperation results matrix of the form:

            [
                [[a, j], [b, k], [c, l]],
                [[d, m], [e, n], [f, o]],
                [[g, p], [h, q], [i, r]],
            ]

        i.e. one row per player, containing one element per opponent (in
        order of player index) which lists cooperation values for each
        repetition.
    nplayers : integer
        The number of players in the tournament.
    repetitions : integer
        The number of repetitions in the tournament.

    Returns
    -------
    list
        The good partner matrix (P) of the form:

            [
                [0, 0 + (1 if b >= d) + (1 if k >= m),
                    0 + (1 if c >= g) + (1 if l >= p)],
                [0 + (1 if e >= g) + (1 if n >= p), 0,
                    0 + (1 if f >= h) + (1 if o >= q)],
                [0 + (1 if g >= c) + (1 if p >= l),
                    0 + (1 if h >= f) + (1 if q >= o), 0]
            ]

        i.e. an n by n matrix where n is the number of players. Each row (i)
        and column (j) represents an individual player and the value Pij
        is the sum of the number of repetitions where player i cooperated as
        often or more than opponent j.
    """
    matrix = null_matrix(nplayers)
    for r in range(repetitions):
        for i in range(nplayers):
            for j in range(nplayers):
                # Diagonal (self-play) is deliberately left at zero.
                if i != j and results[i][j][r] >= results[j][i][r]:
                    matrix[i][j] += 1
    return matrix


def n_interactions(nplayers, repetitions):
    """
    The number of interactions between n players

    Parameters
    ----------
    nplayers : integer
        The number of players in the tournament.
    repetitions : integer
        The number of repetitions in the tournament.

    Returns
    -------
    integer
        The number of interactions each player has with other players,
        excluding self-interactions (i.e. repetitions * (nplayers - 1)).
    """
    return repetitions * (nplayers - 1)


def good_partner_rating(good_partner_matrix, nplayers, repetitions):
    """
    A list of good partner ratings for n players

    Parameters
    ----------
    good_partner_matrix : list
        The good partner matrix
    nplayers : integer
        The number of players in the tournament.
    repetitions : integer
        The number of repetitions in the tournament.

    Returns
    -------
    list
        A list of good partner ratings ordered by player index.
    """
    return [1.0 * sum(row) / n_interactions(nplayers, repetitions)
            for row in good_partner_matrix]


def eigenvector(cooperation_matrix):
    """
    The principal eigenvector of the cooperation matrix

    Parameters
    ----------
    cooperation_matrix : list
        A cooperation matrix

    Returns
    -------
    list
        The principal eigenvector of the cooperation matrix.
    """
    eigenvector, eigenvalue = eigen.principal_eigenvector(
        cooperation_matrix, 1000, 1e-3
    )
    return eigenvector.tolist()
iEngage/python-sdk
refs/heads/master
test/test_verve_response_interaction_category.py
1
# coding: utf-8 """ Stakeholder engagement API This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers. OpenAPI spec version: 1.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import os import sys import unittest import iengage_client from iengage_client.rest import ApiException from iengage_client.models.verve_response_interaction_category import VerveResponseInteractionCategory class TestVerveResponseInteractionCategory(unittest.TestCase): """ VerveResponseInteractionCategory unit test stubs """ def setUp(self): pass def tearDown(self): pass def testVerveResponseInteractionCategory(self): """ Test VerveResponseInteractionCategory """ model = iengage_client.models.verve_response_interaction_category.VerveResponseInteractionCategory() if __name__ == '__main__': unittest.main()
boberfly/gaffer
refs/heads/master
doc/source/WorkingWithTheNodeGraph/BoxNode/screengrab.py
4
# BuildTarget: images/interfaceUIEditor.png

# Documentation screengrab script for the BoxNode chapter.  Runs inside a
# Gaffer session: `script` is provided by the host application.  Most
# sections below are commented out pending screengrab-automation tooling
# (see the per-section TODOs); only the final two sections execute.

import os

import Gaffer
import GafferScene
import GafferUI

scriptWindow = GafferUI.ScriptWindow.acquire( script )
graphEditor = scriptWindow.getLayout().editors( GafferUI.GraphEditor )[0]

# Illustration of the basics of a Box
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxBasics.gfr" ) )
# TODO: Automate `images/illustrationBoxBasics.svg` when these tools become available:
# - Borders around images
# - Simple color-to-alpha-channel conversion
# - 2D curves with end caps

# Illustration of the generic uses of a Box
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxUses1.gfr" ) )
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxUses2.gfr" ) )
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxUses3.gfr" ) )
# TODO: Automate `images/illustrationBoxUses.png` when these tools become available:
# - 2D curves with end caps

# Illustration of Box nesting
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxNesting.gfr" ) )
# TODO: Automate `images/illustrationBoxNesting.svg` when these tools become available:
# - Borders around images
# - Simple color-to-alpha-channel conversion
# - 2D curves with end caps

# Illustration of importing a saved Box reference, using the example mySurfaceShaders Box
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxReferences.gfr" ) )
# TODO: Automate `images/illustrationBoxReferences.svg` when these tools become available:
# - Borders around images
# - 2D curves with end caps
# - Screengrabs containing the window frame

# Illustration of promoted plugs
# script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxBasics.gfr" ) )
# TODO: Automate `images/illustrationPromotedPlug.svg` when these tools become available:
# - 2D curves with end caps
# - Embed a saved window layout to script

# Task animation of boxing up a bunch of nodes
# script["fileName"].setValue( os.path.abspath( "scripts/taskBoxUpNodesBefore.gfr" ) )
# TODO: Automate `images/taskBoxUpNodes.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Task result of boxing up nodes
# script["fileName"].setValue( os.path.abspath( "scripts/taskBoxUpNodesAfter.gfr" ) )
# script.load()
# graphEditor.frame( script.children( Gaffer.Node ) )
# GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/taskBoxUpNodesResult.png" )

# Task animation of the quickest way to connect a Box
# script["fileName"].setValue( os.path.abspath( "scripts/taskConnectBox.gfr" ) )
# TODO: Automate `images/taskConnectBox.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Task animation of an alternate way of connecting a Box
# script["fileName"].setValue( os.path.abspath( "scripts/taskConnectBox.gfr" ) )
# TODO: Automate `images/taskConnectBoxAlt.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Interface capture of the passThrough plug on a BoxOut node
# script.addChild( Gaffer.BoxOut() )
# script["BoxOut"]["in"] = GafferScene.ScenePlug()
# script["BoxOut"]["passThrough"] = GafferScene.ScenePlug()
# TODO: Automate `images/interfacePassthroughPlug.png` when these tools become available:
# - Easily trigger plug tooltips to appear

# Task animation of connecting the passThrough plug
# script["fileName"].setValue( os.path.abspath( "scripts/taskConnectPassthroughPlug.gfr" ) )
# TODO: Automate `images/taskConnectPassthroughPlug.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Task animation of promoting a plug
# script["fileName"].setValue( os.path.abspath( "scripts/taskConnectPromotePlug.gfr" ) )
# TODO: Automate `images/taskPromotePlug.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Task animation of demoting a plug
# script["fileName"].setValue( os.path.abspath( "scripts/taskDemotePlug.gfr" ) )
# TODO: Automate `images/taskDemotePlug.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Task animation of adjusting plug position
# script["fileName"].setValue( os.path.abspath( "scripts/taskAdjustPlugPosition.gfr" ) )
# TODO: Automate `images/taskAdjustPlugPosition.gif` when these tools become available:
# - KB/M recording and simulated playback
# - On-screen keystroke monitor

# Interface capture of the UI Editor, using the example mySurfaceShaders Box
script["fileName"].setValue( os.path.abspath( "scripts/illustrationBoxReferences.gfr" ) )
script.load()
UIEditorWindow = GafferUI.UIEditor.acquire( script["mySurfaceShaders"], floating=True )
GafferUI.WidgetAlgo.grab( widget = UIEditorWindow, imagePath = "images/interfaceUIEditor.png" )
del UIEditorWindow

# Box Node Basics example
script["fileName"].setValue( os.path.abspath( "../../../examples/boxBasics.gfr" ) )
script.load()
# The Backdrop is removed so the framed graph matches the documentation image.
script.removeChild( script[ "Backdrop" ] )
graphEditor.frame( script.children( Gaffer.Node ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/exampleBoxBasics.png" )
safwanrahman/mozillians
refs/heads/master
vendor-local/lib/python/rest_framework/runtests/runcoverage.py
22
#!/usr/bin/env python """ Useful tool to run the test suite for rest_framework and generate a coverage report. """ # http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/ # http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/ # http://code.djangoproject.com/svn/django/trunk/tests/runtests.py import os import sys # fix sys path so we don't need to setup PYTHONPATH sys.path.append(os.path.join(os.path.dirname(__file__), "../..")) os.environ['DJANGO_SETTINGS_MODULE'] = 'rest_framework.runtests.settings' from coverage import coverage def main(): """Run the tests for rest_framework and generate a coverage report.""" cov = coverage() cov.erase() cov.start() from django.conf import settings from django.test.utils import get_runner TestRunner = get_runner(settings) if hasattr(TestRunner, 'func_name'): # Pre 1.2 test runners were just functions, # and did not support the 'failfast' option. import warnings warnings.warn( 'Function-based test runners are deprecated. Test runners should be classes with a run_tests() method.', DeprecationWarning ) failures = TestRunner(['tests']) else: test_runner = TestRunner() failures = test_runner.run_tests(['tests']) cov.stop() # Discover the list of all modules that we should test coverage for import rest_framework project_dir = os.path.dirname(rest_framework.__file__) cov_files = [] for (path, dirs, files) in os.walk(project_dir): # Drop tests and runtests directories from the test coverage report if os.path.basename(path) in ['tests', 'runtests', 'migrations']: continue # Drop the compat and six modules from coverage, since we're not interested in the coverage # of modules which are specifically for resolving environment dependant imports. # (Because we'll end up getting different coverage reports for it for each environment) if 'compat.py' in files: files.remove('compat.py') if 'six.py' in files: files.remove('six.py') # Same applies to template tags module. 
# This module has to include branching on Django versions, # so it's never possible for it to have full coverage. if 'rest_framework.py' in files: files.remove('rest_framework.py') cov_files.extend([os.path.join(path, file) for file in files if file.endswith('.py')]) cov.report(cov_files) if '--html' in sys.argv: cov.html_report(cov_files, directory='coverage') sys.exit(failures) if __name__ == '__main__': main()
coderbone/SickRage-alt
refs/heads/master
lib/sqlalchemy/event/__init__.py
79
# event/__init__.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from .api import CANCEL, NO_RETVAL, listen, listens_for, remove, contains from .base import Events, dispatcher from .attr import RefCollection from .legacy import _legacy_signature
Piasy/proxy-searcher
refs/heads/master
site-packages/django/contrib/flatpages/tests/middleware.py
77
import os
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.flatpages.models import FlatPage
from django.test import TestCase


class FlatpageMiddlewareTests(TestCase):
    """Tests serving flatpages both through the flatpage view and through
    the FlatpageFallbackMiddleware (added to MIDDLEWARE_CLASSES in setUp)."""
    fixtures = ['sample_flatpages']
    urls = 'django.contrib.flatpages.tests.urls'

    def setUp(self):
        # Patch settings by hand, saving the originals for tearDown
        # (this predates django.test.utils.override_settings).
        self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
        flatpage_middleware_class = 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
        if flatpage_middleware_class not in settings.MIDDLEWARE_CLASSES:
            settings.MIDDLEWARE_CLASSES += (flatpage_middleware_class,)
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.TEMPLATE_DIRS = (
            os.path.join(
                os.path.dirname(__file__),
                'templates'
            ),
        )
        self.old_LOGIN_URL = settings.LOGIN_URL
        settings.LOGIN_URL = '/accounts/login/'

    def tearDown(self):
        # Restore every setting patched in setUp.
        settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
        settings.LOGIN_URL = self.old_LOGIN_URL

    def test_view_flatpage(self):
        "A flatpage can be served through a view, even when the middleware is in use"
        response = self.client.get('/flatpage_root/flatpage/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it flat!</p>")

    def test_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view, even when the middleware is in use"
        response = self.client.get('/flatpage_root/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)

    def test_view_authenticated_flatpage(self):
        "A flatpage served through a view can require authentication"
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertRedirects(response, '/accounts/login/?next=/flatpage_root/sekrit/')
        User.objects.create_user('testuser', 'test@example.com', 's3krit')
        self.client.login(username='testuser',password='s3krit')
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it sekrit!</p>")

    def test_fallback_flatpage(self):
        "A flatpage can be served by the fallback middleware"
        response = self.client.get('/flatpage/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it flat!</p>")

    def test_fallback_non_existent_flatpage(self):
        "A non-existent flatpage raises a 404 when served by the fallback middleware"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)

    def test_fallback_authenticated_flatpage(self):
        "A flatpage served by the middleware can require authentication"
        response = self.client.get('/sekrit/')
        self.assertRedirects(response, '/accounts/login/?next=/sekrit/')
        User.objects.create_user('testuser', 'test@example.com', 's3krit')
        self.client.login(username='testuser',password='s3krit')
        response = self.client.get('/sekrit/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it sekrit!</p>")

    def test_fallback_flatpage_special_chars(self):
        "A flatpage with special chars in the URL can be served by the fallback middleware"
        fp = FlatPage.objects.create(
            url="/some.very_special~chars-here/",
            title="A very special page",
            content="Isn't it special!",
            enable_comments=False,
            registration_required=False,
        )
        fp.sites.add(1)

        response = self.client.get('/some.very_special~chars-here/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it special!</p>")


class FlatpageMiddlewareAppendSlashTests(TestCase):
    """Tests for the interaction of the fallback middleware with
    APPEND_SLASH redirects."""
    fixtures = ['sample_flatpages']
    urls = 'django.contrib.flatpages.tests.urls'

    def setUp(self):
        # Patch settings by hand, saving the originals for tearDown;
        # additionally forces APPEND_SLASH on for these tests.
        self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
        flatpage_middleware_class = 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
        if flatpage_middleware_class not in settings.MIDDLEWARE_CLASSES:
            settings.MIDDLEWARE_CLASSES += (flatpage_middleware_class,)
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.TEMPLATE_DIRS = (
            os.path.join(
                os.path.dirname(__file__),
                'templates'
            ),
        )
        self.old_LOGIN_URL = settings.LOGIN_URL
        settings.LOGIN_URL = '/accounts/login/'
        self.old_APPEND_SLASH = settings.APPEND_SLASH
        settings.APPEND_SLASH = True

    def tearDown(self):
        # Restore every setting patched in setUp.
        settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
        settings.LOGIN_URL = self.old_LOGIN_URL
        settings.APPEND_SLASH = self.old_APPEND_SLASH

    def test_redirect_view_flatpage(self):
        "A flatpage can be served through a view and should add a slash"
        response = self.client.get('/flatpage_root/flatpage')
        self.assertRedirects(response, '/flatpage_root/flatpage/', status_code=301)

    def test_redirect_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view and should not add a slash"
        response = self.client.get('/flatpage_root/no_such_flatpage')
        self.assertEqual(response.status_code, 404)

    def test_redirect_fallback_flatpage(self):
        "A flatpage can be served by the fallback middleware and should add a slash"
        response = self.client.get('/flatpage')
        self.assertRedirects(response, '/flatpage/', status_code=301)

    def test_redirect_fallback_non_existent_flatpage(self):
        "A non-existent flatpage raises a 404 when served by the fallback middleware and should not add a slash"
        response = self.client.get('/no_such_flatpage')
        self.assertEqual(response.status_code, 404)

    def test_redirect_fallback_flatpage_special_chars(self):
        "A flatpage with special chars in the URL can be served by the fallback middleware and should add a slash"
        fp = FlatPage.objects.create(
            url="/some.very_special~chars-here/",
            title="A very special page",
            content="Isn't it special!",
            enable_comments=False,
            registration_required=False,
        )
        fp.sites.add(1)

        response = self.client.get('/some.very_special~chars-here')
        self.assertRedirects(response, '/some.very_special~chars-here/', status_code=301)

    def test_redirect_fallback_flatpage_root(self):
        "A flatpage at / should not cause a redirect loop when APPEND_SLASH is set"
        fp = FlatPage.objects.create(
            url="/",
            title="Root",
            content="Root",
            enable_comments=False,
            registration_required=False,
        )
        fp.sites.add(1)

        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Root</p>")
Phuehvk/gyp
refs/heads/master
test/win/gyptest-cl-enable-enhanced-instruction-set.py
34
#!/usr/bin/env python # Copyright (c) 2014 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Test VCCLCompilerTool EnableEnhancedInstructionSet setting. """ import TestGyp import os import sys if sys.platform == 'win32': print "This test is currently disabled: https://crbug.com/483696." sys.exit(0) test = TestGyp.TestGyp() CHDIR = 'compiler-flags' test.run_gyp('enable-enhanced-instruction-set.gyp', chdir=CHDIR) test.build('enable-enhanced-instruction-set.gyp', test.ALL, chdir=CHDIR) test.run_built_executable('sse_extensions', chdir=CHDIR, stdout='/arch:SSE\n') test.run_built_executable('sse2_extensions', chdir=CHDIR, stdout='/arch:SSE2\n') # /arch:AVX introduced in VS2010, but MSBuild support lagged until 2012. if os.path.exists(test.built_file_path('avx_extensions')): test.run_built_executable('avx_extensions', chdir=CHDIR, stdout='/arch:AVX\n') # /arch:IA32 introduced in VS2012. if os.path.exists(test.built_file_path('no_extensions')): test.run_built_executable('no_extensions', chdir=CHDIR, stdout='/arch:IA32\n') # /arch:AVX2 introduced in VS2013r2. if os.path.exists(test.built_file_path('avx2_extensions')): test.run_built_executable('avx2_extensions', chdir=CHDIR, stdout='/arch:AVX2\n') test.pass_test()
TathagataChakraborti/resource-conflicts
refs/heads/master
PLANROB-2015/py2.5/lib/python2.5/rfc822.py
89
"""RFC 2822 message manipulation. Note: This is only a very rough sketch of a full RFC-822 parser; in particular the tokenizing of addresses does not adhere to all the quoting rules. Note: RFC 2822 is a long awaited update to RFC 822. This module should conform to RFC 2822, and is thus mis-named (it's not worth renaming it). Some effort at RFC 2822 updates have been made, but a thorough audit has not been performed. Consider any RFC 2822 non-conformance to be a bug. RFC 2822: http://www.faqs.org/rfcs/rfc2822.html RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete) Directions for use: To create a Message object: first open a file, e.g.: fp = open(file, 'r') You can use any other legal way of getting an open file object, e.g. use sys.stdin or call os.popen(). Then pass the open file object to the Message() constructor: m = Message(fp) This class can work with any input object that supports a readline method. If the input object has seek and tell capability, the rewindbody method will work; also illegal lines will be pushed back onto the input stream. If the input object lacks seek but has an `unread' method that can push back a line of input, Message will use that to push back illegal lines. Thus this class can be used to parse messages coming from a buffered stream. The optional `seekable' argument is provided as a workaround for certain stdio libraries in which tell() discards buffered data before discovering that the lseek() system call doesn't work. For maximum portability, you should set the seekable argument to zero to prevent that initial \code{tell} when passing in an unseekable object such as a a file object created from a socket object. If it is 1 on entry -- which it is by default -- the tell() method of the open file object is called once; if this raises an exception, seekable is reset to 0. For other nonzero values of seekable, this test is not made. 
To get the text of a particular header there are several methods: str = m.getheader(name) str = m.getrawheader(name) where name is the name of the header, e.g. 'Subject'. The difference is that getheader() strips the leading and trailing whitespace, while getrawheader() doesn't. Both functions retain embedded whitespace (including newlines) exactly as they are specified in the header, and leave the case of the text unchanged. For addresses and address lists there are functions realname, mailaddress = m.getaddr(name) list = m.getaddrlist(name) where the latter returns a list of (realname, mailaddr) tuples. There is also a method time = m.getdate(name) which parses a Date-like field and returns a time-compatible tuple, i.e. a tuple such as returned by time.localtime() or accepted by time.mktime(). See the class definition for lower level access methods. There are also some utility functions here. """ # Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com> import time __all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"] _blanklines = ('\r\n', '\n') # Optimization for islast() class Message: """Represents a single RFC 2822-compliant message.""" def __init__(self, fp, seekable = 1): """Initialize the class instance and read the headers.""" if seekable == 1: # Exercise tell() to make sure it works # (and then assume seek() works, too) try: fp.tell() except (AttributeError, IOError): seekable = 0 self.fp = fp self.seekable = seekable self.startofheaders = None self.startofbody = None # if self.seekable: try: self.startofheaders = self.fp.tell() except IOError: self.seekable = 0 # self.readheaders() # if self.seekable: try: self.startofbody = self.fp.tell() except IOError: self.seekable = 0 def rewindbody(self): """Rewind the file to the start of the body (if seekable).""" if not self.seekable: raise IOError, "unseekable file" self.fp.seek(self.startofbody) def readheaders(self): """Read header lines. 
Read header lines up to the entirely blank line that terminates them. The (normally blank) line that ends the headers is skipped, but not included in the returned list. If a non-header line ends the headers, (which is an error), an attempt is made to backspace over it; it is never included in the returned list. The variable self.status is set to the empty string if all went well, otherwise it is an error message. The variable self.headers is a completely uninterpreted list of lines contained in the header (so printing them will reproduce the header exactly as it appears in the file). """ self.dict = {} self.unixfrom = '' self.headers = lst = [] self.status = '' headerseen = "" firstline = 1 startofline = unread = tell = None if hasattr(self.fp, 'unread'): unread = self.fp.unread elif self.seekable: tell = self.fp.tell while 1: if tell: try: startofline = tell() except IOError: startofline = tell = None self.seekable = 0 line = self.fp.readline() if not line: self.status = 'EOF in headers' break # Skip unix From name time lines if firstline and line.startswith('From '): self.unixfrom = self.unixfrom + line continue firstline = 0 if headerseen and line[0] in ' \t': # It's a continuation line. lst.append(line) x = (self.dict[headerseen] + "\n " + line.strip()) self.dict[headerseen] = x.strip() continue elif self.iscomment(line): # It's a comment. Ignore it. continue elif self.islast(line): # Note! No pushback here! The delimiter line gets eaten. break headerseen = self.isheader(line) if headerseen: # It's a legal header line, save it. lst.append(line) self.dict[headerseen] = line[len(headerseen)+1:].strip() continue else: # It's not a header line; throw it back and stop here. if not self.dict: self.status = 'No headers' else: self.status = 'Non-header line where header expected' # Try to undo the read. 
if unread: unread(line) elif tell: self.fp.seek(startofline) else: self.status = self.status + '; bad seek' break def isheader(self, line): """Determine whether a given line is a legal header. This method should return the header name, suitably canonicalized. You may override this method in order to use Message parsing on tagged data in RFC 2822-like formats with special header formats. """ i = line.find(':') if i > 0: return line[:i].lower() return None def islast(self, line): """Determine whether a line is a legal end of RFC 2822 headers. You may override this method if your application wants to bend the rules, e.g. to strip trailing whitespace, or to recognize MH template separators ('--------'). For convenience (e.g. for code reading from sockets) a line consisting of \r\n also matches. """ return line in _blanklines def iscomment(self, line): """Determine whether a line should be skipped entirely. You may override this method in order to use Message parsing on tagged data in RFC 2822-like formats that support embedded comments or free-text data. """ return False def getallmatchingheaders(self, name): """Find all header lines matching a given header name. Look through the list of headers and find all lines matching a given header name (and their continuation lines). A list of the lines is returned, without interpretation. If the header does not occur, an empty list is returned. If the header occurs multiple times, all occurrences are returned. Case is not important in the header name. """ name = name.lower() + ':' n = len(name) lst = [] hit = 0 for line in self.headers: if line[:n].lower() == name: hit = 1 elif not line[:1].isspace(): hit = 0 if hit: lst.append(line) return lst def getfirstmatchingheader(self, name): """Get the first header line matching name. This is similar to getallmatchingheaders, but it returns only the first matching header (and its continuation lines). 
""" name = name.lower() + ':' n = len(name) lst = [] hit = 0 for line in self.headers: if hit: if not line[:1].isspace(): break elif line[:n].lower() == name: hit = 1 if hit: lst.append(line) return lst def getrawheader(self, name): """A higher-level interface to getfirstmatchingheader(). Return a string containing the literal text of the header but with the keyword stripped. All leading, trailing and embedded whitespace is kept in the string, however. Return None if the header does not occur. """ lst = self.getfirstmatchingheader(name) if not lst: return None lst[0] = lst[0][len(name) + 1:] return ''.join(lst) def getheader(self, name, default=None): """Get the header value for a name. This is the normal interface: it returns a stripped version of the header value for a given header name, or None if it doesn't exist. This uses the dictionary version which finds the *last* such header. """ return self.dict.get(name.lower(), default) get = getheader def getheaders(self, name): """Get all values for a header. This returns a list of values for headers given more than once; each value in the result list is stripped in the same way as the result of getheader(). If the header is not given, return an empty list. """ result = [] current = '' have_header = 0 for s in self.getallmatchingheaders(name): if s[0].isspace(): if current: current = "%s\n %s" % (current, s.strip()) else: current = s.strip() else: if have_header: result.append(current) current = s[s.find(":") + 1:].strip() have_header = 1 if have_header: result.append(current) return result def getaddr(self, name): """Get a single address from a header, as a tuple. An example return value: ('Guido van Rossum', 'guido@cwi.nl') """ # New, by Ben Escoto alist = self.getaddrlist(name) if alist: return alist[0] else: return (None, None) def getaddrlist(self, name): """Get a list of addresses from a header. Retrieves a list of addresses from a header, where each address is a tuple as returned by getaddr(). 
Scans all named headers, so it works properly with multiple To: or Cc: headers for example. """ raw = [] for h in self.getallmatchingheaders(name): if h[0] in ' \t': raw.append(h) else: if raw: raw.append(', ') i = h.find(':') if i > 0: addr = h[i+1:] raw.append(addr) alladdrs = ''.join(raw) a = AddressList(alladdrs) return a.addresslist def getdate(self, name): """Retrieve a date field from a header. Retrieves a date field from the named header, returning a tuple compatible with time.mktime(). """ try: data = self[name] except KeyError: return None return parsedate(data) def getdate_tz(self, name): """Retrieve a date field from a header as a 10-tuple. The first 9 elements make up a tuple compatible with time.mktime(), and the 10th is the offset of the poster's time zone from GMT/UTC. """ try: data = self[name] except KeyError: return None return parsedate_tz(data) # Access as a dictionary (only finds *last* header of each type): def __len__(self): """Get the number of headers in a message.""" return len(self.dict) def __getitem__(self, name): """Get a specific header, as from a dictionary.""" return self.dict[name.lower()] def __setitem__(self, name, value): """Set the value of a header. Note: This is not a perfect inversion of __getitem__, because any changed headers get stuck at the end of the raw-headers list rather than where the altered header was. 
""" del self[name] # Won't fail if it doesn't exist self.dict[name.lower()] = value text = name + ": " + value for line in text.split("\n"): self.headers.append(line + "\n") def __delitem__(self, name): """Delete all occurrences of a specific header, if it is present.""" name = name.lower() if not name in self.dict: return del self.dict[name] name = name + ':' n = len(name) lst = [] hit = 0 for i in range(len(self.headers)): line = self.headers[i] if line[:n].lower() == name: hit = 1 elif not line[:1].isspace(): hit = 0 if hit: lst.append(i) for i in reversed(lst): del self.headers[i] def setdefault(self, name, default=""): lowername = name.lower() if lowername in self.dict: return self.dict[lowername] else: text = name + ": " + default for line in text.split("\n"): self.headers.append(line + "\n") self.dict[lowername] = default return default def has_key(self, name): """Determine whether a message contains the named header.""" return name.lower() in self.dict def __contains__(self, name): """Determine whether a message contains the named header.""" return name.lower() in self.dict def __iter__(self): return iter(self.dict) def keys(self): """Get all of a message's header field names.""" return self.dict.keys() def values(self): """Get all of a message's header field values.""" return self.dict.values() def items(self): """Get all of a message's headers. Returns a list of name, value tuples. """ return self.dict.items() def __str__(self): return ''.join(self.headers) # Utility functions # ----------------- # XXX Should fix unquote() and quote() to be really conformant. # XXX The inverses of the parse functions may also be useful. 
def unquote(s): """Remove quotes from a string.""" if len(s) > 1: if s.startswith('"') and s.endswith('"'): return s[1:-1].replace('\\\\', '\\').replace('\\"', '"') if s.startswith('<') and s.endswith('>'): return s[1:-1] return s def quote(s): """Add quotes around a string.""" return s.replace('\\', '\\\\').replace('"', '\\"') def parseaddr(address): """Parse an address into a (realname, mailaddr) tuple.""" a = AddressList(address) lst = a.addresslist if not lst: return (None, None) return lst[0] class AddrlistClass: """Address parser class by Ben Escoto. To understand what this class does, it helps to have a copy of RFC 2822 in front of you. http://www.faqs.org/rfcs/rfc2822.html Note: this class interface is deprecated and may be removed in the future. Use rfc822.AddressList instead. """ def __init__(self, field): """Initialize a new instance. `field' is an unparsed address header field, containing one or more addresses. """ self.specials = '()<>@,:;.\"[]' self.pos = 0 self.LWS = ' \t' self.CR = '\r\n' self.atomends = self.specials + self.LWS + self.CR # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it # is obsolete syntax. RFC 2822 requires that we recognize obsolete # syntax, so allow dots in phrases. self.phraseends = self.atomends.replace('.', '') self.field = field self.commentlist = [] def gotonext(self): """Parse up to the start of the next address.""" while self.pos < len(self.field): if self.field[self.pos] in self.LWS + '\n\r': self.pos = self.pos + 1 elif self.field[self.pos] == '(': self.commentlist.append(self.getcomment()) else: break def getaddrlist(self): """Parse all addresses. Returns a list containing all of the addresses. 
""" result = [] ad = self.getaddress() while ad: result += ad ad = self.getaddress() return result def getaddress(self): """Parse the next address.""" self.commentlist = [] self.gotonext() oldpos = self.pos oldcl = self.commentlist plist = self.getphraselist() self.gotonext() returnlist = [] if self.pos >= len(self.field): # Bad email address technically, no domain. if plist: returnlist = [(' '.join(self.commentlist), plist[0])] elif self.field[self.pos] in '.@': # email address is just an addrspec # this isn't very efficient since we start over self.pos = oldpos self.commentlist = oldcl addrspec = self.getaddrspec() returnlist = [(' '.join(self.commentlist), addrspec)] elif self.field[self.pos] == ':': # address is a group returnlist = [] fieldlen = len(self.field) self.pos += 1 while self.pos < len(self.field): self.gotonext() if self.pos < fieldlen and self.field[self.pos] == ';': self.pos += 1 break returnlist = returnlist + self.getaddress() elif self.field[self.pos] == '<': # Address is a phrase then a route addr routeaddr = self.getrouteaddr() if self.commentlist: returnlist = [(' '.join(plist) + ' (' + \ ' '.join(self.commentlist) + ')', routeaddr)] else: returnlist = [(' '.join(plist), routeaddr)] else: if plist: returnlist = [(' '.join(self.commentlist), plist[0])] elif self.field[self.pos] in self.specials: self.pos += 1 self.gotonext() if self.pos < len(self.field) and self.field[self.pos] == ',': self.pos += 1 return returnlist def getrouteaddr(self): """Parse a route address (Return-path value). This method just skips all the route stuff and returns the addrspec. 
""" if self.field[self.pos] != '<': return expectroute = 0 self.pos += 1 self.gotonext() adlist = "" while self.pos < len(self.field): if expectroute: self.getdomain() expectroute = 0 elif self.field[self.pos] == '>': self.pos += 1 break elif self.field[self.pos] == '@': self.pos += 1 expectroute = 1 elif self.field[self.pos] == ':': self.pos += 1 else: adlist = self.getaddrspec() self.pos += 1 break self.gotonext() return adlist def getaddrspec(self): """Parse an RFC 2822 addr-spec.""" aslist = [] self.gotonext() while self.pos < len(self.field): if self.field[self.pos] == '.': aslist.append('.') self.pos += 1 elif self.field[self.pos] == '"': aslist.append('"%s"' % self.getquote()) elif self.field[self.pos] in self.atomends: break else: aslist.append(self.getatom()) self.gotonext() if self.pos >= len(self.field) or self.field[self.pos] != '@': return ''.join(aslist) aslist.append('@') self.pos += 1 self.gotonext() return ''.join(aslist) + self.getdomain() def getdomain(self): """Get the complete domain name from an address.""" sdlist = [] while self.pos < len(self.field): if self.field[self.pos] in self.LWS: self.pos += 1 elif self.field[self.pos] == '(': self.commentlist.append(self.getcomment()) elif self.field[self.pos] == '[': sdlist.append(self.getdomainliteral()) elif self.field[self.pos] == '.': self.pos += 1 sdlist.append('.') elif self.field[self.pos] in self.atomends: break else: sdlist.append(self.getatom()) return ''.join(sdlist) def getdelimited(self, beginchar, endchars, allowcomments = 1): """Parse a header fragment delimited by special characters. `beginchar' is the start character for the fragment. If self is not looking at an instance of `beginchar' then getdelimited returns the empty string. `endchars' is a sequence of allowable end-delimiting characters. Parsing stops when one of these is encountered. If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed within the parsed fragment. 
""" if self.field[self.pos] != beginchar: return '' slist = [''] quote = 0 self.pos += 1 while self.pos < len(self.field): if quote == 1: slist.append(self.field[self.pos]) quote = 0 elif self.field[self.pos] in endchars: self.pos += 1 break elif allowcomments and self.field[self.pos] == '(': slist.append(self.getcomment()) continue # have already advanced pos from getcomment elif self.field[self.pos] == '\\': quote = 1 else: slist.append(self.field[self.pos]) self.pos += 1 return ''.join(slist) def getquote(self): """Get a quote-delimited fragment from self's field.""" return self.getdelimited('"', '"\r', 0) def getcomment(self): """Get a parenthesis-delimited fragment from self's field.""" return self.getdelimited('(', ')\r', 1) def getdomainliteral(self): """Parse an RFC 2822 domain-literal.""" return '[%s]' % self.getdelimited('[', ']\r', 0) def getatom(self, atomends=None): """Parse an RFC 2822 atom. Optional atomends specifies a different set of end token delimiters (the default is to use self.atomends). This is used e.g. in getphraselist() since phrase endings must not include the `.' (which is legal in phrases).""" atomlist = [''] if atomends is None: atomends = self.atomends while self.pos < len(self.field): if self.field[self.pos] in atomends: break else: atomlist.append(self.field[self.pos]) self.pos += 1 return ''.join(atomlist) def getphraselist(self): """Parse a sequence of RFC 2822 phrases. A phrase is a sequence of words, which are in turn either RFC 2822 atoms or quoted-strings. Phrases are canonicalized by squeezing all runs of continuous whitespace into one space. 
""" plist = [] while self.pos < len(self.field): if self.field[self.pos] in self.LWS: self.pos += 1 elif self.field[self.pos] == '"': plist.append(self.getquote()) elif self.field[self.pos] == '(': self.commentlist.append(self.getcomment()) elif self.field[self.pos] in self.phraseends: break else: plist.append(self.getatom(self.phraseends)) return plist class AddressList(AddrlistClass): """An AddressList encapsulates a list of parsed RFC 2822 addresses.""" def __init__(self, field): AddrlistClass.__init__(self, field) if field: self.addresslist = self.getaddrlist() else: self.addresslist = [] def __len__(self): return len(self.addresslist) def __str__(self): return ", ".join(map(dump_address_pair, self.addresslist)) def __add__(self, other): # Set union newaddr = AddressList(None) newaddr.addresslist = self.addresslist[:] for x in other.addresslist: if not x in self.addresslist: newaddr.addresslist.append(x) return newaddr def __iadd__(self, other): # Set union, in-place for x in other.addresslist: if not x in self.addresslist: self.addresslist.append(x) return self def __sub__(self, other): # Set difference newaddr = AddressList(None) for x in self.addresslist: if not x in other.addresslist: newaddr.addresslist.append(x) return newaddr def __isub__(self, other): # Set difference, in-place for x in other.addresslist: if x in self.addresslist: self.addresslist.remove(x) return self def __getitem__(self, index): # Make indexing, slices, and 'in' work return self.addresslist[index] def dump_address_pair(pair): """Dump a (name, address) pair in a canonicalized form.""" if pair[0]: return '"' + pair[0] + '" <' + pair[1] + '>' else: return pair[1] # Parse a date field _monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec', 'january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october', 'november', 'december'] _daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] # The timezone table does 
not include the military time zones defined # in RFC822, other than Z. According to RFC1123, the description in # RFC822 gets the signs wrong, so we can't rely on any such time # zones. RFC1123 recommends that numeric timezone indicators be used # instead of timezone names. _timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0, 'AST': -400, 'ADT': -300, # Atlantic (used in Canada) 'EST': -500, 'EDT': -400, # Eastern 'CST': -600, 'CDT': -500, # Central 'MST': -700, 'MDT': -600, # Mountain 'PST': -800, 'PDT': -700 # Pacific } def parsedate_tz(data): """Convert a date string to a time tuple. Accounts for military timezones. """ if not data: return None data = data.split() if data[0][-1] in (',', '.') or data[0].lower() in _daynames: # There's a dayname here. Skip it del data[0] else: # no space after the "weekday,"? i = data[0].rfind(',') if i >= 0: data[0] = data[0][i+1:] if len(data) == 3: # RFC 850 date, deprecated stuff = data[0].split('-') if len(stuff) == 3: data = stuff + data[1:] if len(data) == 4: s = data[3] i = s.find('+') if i > 0: data[3:] = [s[:i], s[i+1:]] else: data.append('') # Dummy tz if len(data) < 5: return None data = data[:5] [dd, mm, yy, tm, tz] = data mm = mm.lower() if not mm in _monthnames: dd, mm = mm, dd.lower() if not mm in _monthnames: return None mm = _monthnames.index(mm)+1 if mm > 12: mm = mm - 12 if dd[-1] == ',': dd = dd[:-1] i = yy.find(':') if i > 0: yy, tm = tm, yy if yy[-1] == ',': yy = yy[:-1] if not yy[0].isdigit(): yy, tz = tz, yy if tm[-1] == ',': tm = tm[:-1] tm = tm.split(':') if len(tm) == 2: [thh, tmm] = tm tss = '0' elif len(tm) == 3: [thh, tmm, tss] = tm else: return None try: yy = int(yy) dd = int(dd) thh = int(thh) tmm = int(tmm) tss = int(tss) except ValueError: return None tzoffset = None tz = tz.upper() if tz in _timezones: tzoffset = _timezones[tz] else: try: tzoffset = int(tz) except ValueError: pass # Convert a timezone offset into seconds ; -0500 -> -18000 if tzoffset: if tzoffset < 0: tzsign = -1 tzoffset = -tzoffset 
else: tzsign = 1 tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60) return (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset) def parsedate(data): """Convert a time string to a time tuple.""" t = parsedate_tz(data) if t is None: return t return t[:9] def mktime_tz(data): """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp.""" if data[9] is None: # No zone info, so localtime is better assumption than GMT return time.mktime(data[:8] + (-1,)) else: t = time.mktime(data[:8] + (0,)) return t - data[9] - time.timezone def formatdate(timeval=None): """Returns time format preferred for Internet standards. Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123 According to RFC 1123, day and month names must always be in English. If not for that, this code could use strftime(). It can't because strftime() honors the locale and could generated non-English names. """ if timeval is None: timeval = time.time() timeval = time.gmtime(timeval) return "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")[timeval[6]], timeval[2], ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")[timeval[1]-1], timeval[0], timeval[3], timeval[4], timeval[5]) # When used as script, run a small test program. # The first command line argument must be a filename containing one # message in RFC-822 format. 
if __name__ == '__main__': import sys, os file = os.path.join(os.environ['HOME'], 'Mail/inbox/1') if sys.argv[1:]: file = sys.argv[1] f = open(file, 'r') m = Message(f) print 'From:', m.getaddr('from') print 'To:', m.getaddrlist('to') print 'Subject:', m.getheader('subject') print 'Date:', m.getheader('date') date = m.getdate_tz('date') tz = date[-1] date = time.localtime(mktime_tz(date)) if date: print 'ParsedDate:', time.asctime(date), hhmmss = tz hhmm, ss = divmod(hhmmss, 60) hh, mm = divmod(hhmm, 60) print "%+03d%02d" % (hh, mm), if ss: print ".%02d" % ss, print else: print 'ParsedDate:', None m.rewindbody() n = 0 while f.readline(): n += 1 print 'Lines:', n print '-'*70 print 'len =', len(m) if 'Date' in m: print 'Date =', m['Date'] if 'X-Nonsense' in m: pass print 'keys =', m.keys() print 'values =', m.values() print 'items =', m.items()
shaistaansari/django
refs/heads/master
tests/i18n/other/locale/de/__init__.py
12133432
aitgon/wopmars
refs/heads/master
wopmars/tests/resource/__init__.py
12133432
saurabh6790/medsynaptic1-app
refs/heads/master
clinical/page/clinical_home/__init__.py
12133432
mick-d/nipype_source
refs/heads/master
nipype/fixes/numpy/testing/utils.py
29
# Allow numpy fixes noseclasses to do local import of utils from numpy.testing.utils import *
googleapis/java-notification
refs/heads/master
.github/readme/synth.py
128
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script is used to synthesize generated the README for this library.""" from synthtool.languages import java java.custom_templates(["java_library/README.md"])
metaml/nupic
refs/heads/master
tests/unit/nupic/algorithms/nab_detector_test.py
31
#! /usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2015, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- """ Test the NuPIC imports run as expected in nab/detectors/numenta/numenta_detector.py. They are nupic/algorithms/anomaly_likelihood and nupic/frameworks/opf/modelfactory.ModelFactory. The intent here is not to test functionality but rather that the functions are able to run in NAB. NAB repo: https://github.com/numenta/NAB """ import copy import csv import datetime import os import unittest from nupic.algorithms import anomaly_likelihood as an from nupic.frameworks.opf.modelfactory import ModelFactory from nupic.frameworks.opf.clamodel import CLAModel from nupic.support.unittesthelpers.testcasebase import TestCaseBase def _getDateList(numSamples, startDatetime): """ Generate a sequence of sample dates starting at startDatetime and incrementing every 5 minutes. 
@param numSamples (int) number of datetimes to generate @param startDatetime (datetime) the start (first) datetime @return dateList (list) generated sequence of datetimes """ dateList = [] td = datetime.timedelta(minutes=5) currentDate = startDatetime + td for _ in xrange(numSamples): dateList.append(currentDate) currentDate = currentDate + td return dateList def _addSampleData(numSamples=20, spikeValue=1.0, spikePeriod=10): """ Add sample anomaly data to the existing/new data list. Data is constant 0.0, where anomalies are spikes to 1.0 at an interval set by spikePeriod. The test data is trivial, as explicit testing of functions is done in other unit tests. @param numSamples (int) number of data entries to produce @param spikeValue (float) value of the anomaly spikes @param spikePeriod (int) periodicity of anomaly spikes, where one will occur for every spikePeriod data entries @return data (list) list of generated data entries """ # Generate datetimes lastDate = datetime.datetime(2015, 4, 1) dateList = _getDateList(numSamples, lastDate) # Generate data with anomaly spikes data = [] for idx, date in enumerate(dateList): if (spikePeriod > 0) and ( (idx + 1) % spikePeriod == 0): data.append([date, idx, spikeValue]) else: data.append([date, idx, 0.0]) return data def _writeToCSV(data, headers, fileName): """ Write list of data to CSV. 
@param data (list) list of data entries, where each row is a list @param headers (list) column headers, where each entry in list is a string """ with open(fileName, "wb") as f: writer = csv.writer(f, delimiter=",", lineterminator="\n") writer.writerow(headers) writer.writerows(data) class NABTest(TestCaseBase): def setUp(self): # Generate sample data, save to CSV (not used now, but put in place # for future NAB tests) self.data = _addSampleData() self.dataFileName = "temp_data.csv" _writeToCSV(self.data, ["datetime", "index", "value"], self.dataFileName) def tearDown(self): os.remove(self.dataFileName) def testModelCreator(self): """ Tests the ModelFactory.create() method in "nupic/frameworks/opf/modelfactory.py" by creating a new model object, as in "NAB/detectors/numenta/numenta_detector.py". Model paramaters are same as in NAB v0.8. """ # Create model as in NAB/.../numenta_detector.py modelParams = { "aggregationInfo": { "days": 0, "fields": [], "hours": 0, "microseconds": 0, "milliseconds": 0, "minutes": 0, "months": 0, "seconds": 0, "weeks": 0, "years": 0 }, "model": "CLA", "modelParams": { "anomalyParams": { "anomalyCacheRecords": None, "autoDetectThreshold": None, "autoDetectWaitRecords": 5030 }, "clEnable": False, "clParams": { "alpha": 0.035828933612158, "clVerbosity": 0, "regionName": "CLAClassifierRegion", "steps": "1" }, "inferenceType": "TemporalAnomaly", "sensorParams": { "encoders": { "timestamp_timeOfDay": { "fieldname": "timestamp", "name": "timestamp_timeOfDay", "timeOfDay": [ 21, 9.49122334747737 ], "type": "DateEncoder" }, "timestamp_dayOfWeek": None, "timestamp_weekend": None, "value": { "name": "value", "fieldname": "value", "numBuckets": 94.0, "seed": 42, "type": "RandomDistributedScalarEncoder" } }, "sensorAutoReset": None, "verbosity": 0 }, "spEnable": True, "spParams": { "potentialPct": 0.8, "columnCount": 2048, "globalInhibition": 1, "inputWidth": 0, "maxBoost": 1.0, "numActiveColumnsPerInhArea": 40, "seed": 1956, "spVerbosity": 0, 
"spatialImp": "cpp", "synPermActiveInc": 0.0015, "synPermConnected": 0.1, "synPermInactiveDec": 0.0005 }, "tpEnable": True, "tpParams": { "activationThreshold": 13, "cellsPerColumn": 32, "columnCount": 2048, "globalDecay": 0.0, "initialPerm": 0.21, "inputWidth": 2048, "maxAge": 0, "maxSegmentsPerCell": 128, "maxSynapsesPerSegment": 32, "minThreshold": 10, "newSynapseCount": 20, "outputType": "normal", "pamLength": 3, "permanenceDec": 0.1, "permanenceInc": 0.1, "seed": 1960, "temporalImp": "cpp", "verbosity": 0 }, "trainSPNetOnlyIfRequested": False }, "predictAheadTime": None, "version": 1 } sensorParams = (modelParams["modelParams"]["sensorParams"] ["encoders"]["value"]) sensorParams["resolution"] = max(0.001, (1.2 - 0.2) / sensorParams.pop("numBuckets")) model = ModelFactory.create(modelParams) self.assertIs(type(model), CLAModel, msg="The created model is not a" "CLAModel, but rather is of type %s" % type(model)) def testNABAnomalyLikelihood(self): """ Tests the specific calls to nupic/algorithms/anomaly_likelihood as they"re made in "NAB/detectors/numenta/numenta_detector.py". Note "NAB/.../numenta_detector.py" has its own class AnomalyLikelihood, different from nupic/algorithms/anomaly_likelihood.AnomalyLikelihood, but which calls the functions estimateAnomalyLikelihoods() and updateAnomalyLikelihoods() from "nupic/algorithms/anomaly_likelihood.py". 
""" # AnomalyLikelihood object initial values iteration = 0 probationaryPeriod = 4 historicalScores = [] likelihoodList = [] for dataPoint in self.data: # Ignore the first probationaryPeriod data points if len(historicalScores) < probationaryPeriod: likelihood = 0.5 else: if iteration % 4 == 0: _, _, distribution = an.estimateAnomalyLikelihoods( historicalScores, skipRecords = probationaryPeriod) likelihoods, _, distribution = an.updateAnomalyLikelihoods( [dataPoint], distribution) likelihood = 1.0 - likelihoods[0] historicalScores.append(dataPoint) iteration += 1 likelihoodList.append(likelihood) truthLikelihoodList = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.044565462999999972, 0.044565462999999972, 0.044565462999999972, 0.044565462999999972, 0.90319951499999995, 0.90319951499999995, 0.90319951499999995, 0.90319951499999995, 0.78814460099999994, 0.78814460099999994, 0.78814460099999994, 0.78814460099999994] for i in xrange(len(likelihoodList)): self.assertAlmostEqual(likelihoodList[i], truthLikelihoodList[i], msg="unequal values are at index %i" % i) if __name__ == "__main__": unittest.main()
encukou/freeipa
refs/heads/master
ipaclient/remote_plugins/2_156/passwd.py
16
# # Copyright (C) 2016 FreeIPA Contributors see COPYING for license # # pylint: disable=unused-import import six from . import Command, Method, Object from ipalib import api, parameters, output from ipalib.parameters import DefaultFrom from ipalib.plugable import Registry from ipalib.text import _ from ipapython.dn import DN from ipapython.dnsutil import DNSName if six.PY3: unicode = str __doc__ = _(""" Set a user's password If someone other than a user changes that user's password (e.g., Helpdesk resets it) then the password will need to be changed the first time it is used. This is so the end-user is the only one who knows the password. The IPA password policy controls how often a password may be changed, what strength requirements exist, and the length of the password history. EXAMPLES: To reset your own password: ipa passwd To change another user's password: ipa passwd tuser1 """) register = Registry() @register() class passwd(Command): __doc__ = _("Set a user's password.") takes_args = ( parameters.Str( 'principal', cli_name='user', label=_(u'User name'), default_from=DefaultFrom(lambda : None), # FIXME: # lambda: util.get_current_principal() autofill=True, no_convert=True, ), parameters.Password( 'password', label=_(u'New Password'), confirm=True, ), parameters.Password( 'current_password', label=_(u'Current Password'), default_from=DefaultFrom(lambda principal: None, 'principal'), # FIXME: # lambda principal: get_current_password(principal) autofill=True, ), ) takes_options = ( parameters.Password( 'otp', required=False, label=_(u'OTP'), doc=_(u'One Time Password'), ), ) has_output = ( output.Output( 'summary', (unicode, type(None)), doc=_(u'User-friendly description of action performed'), ), output.Output( 'result', bool, doc=_(u'True means the operation was successful'), ), output.PrimaryKey( 'value', doc=_(u"The primary_key value of the entry, e.g. 'jdoe' for a user"), ), )
AkizukiRyoko/mtasa-blue
refs/heads/master
vendor/google-breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/type_checkers.py
527
# Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # http://code.google.com/p/protobuf/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Provides type checking routines. This module defines type checking utilities in the forms of dictionaries: VALUE_CHECKERS: A dictionary of field types and a value validation object. TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing function. TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization function. 
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their coresponding wire types. TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization function. """ __author__ = 'robinson@google.com (Will Robinson)' from google.protobuf.internal import decoder from google.protobuf.internal import encoder from google.protobuf.internal import wire_format from google.protobuf import descriptor _FieldDescriptor = descriptor.FieldDescriptor def GetTypeChecker(cpp_type, field_type): """Returns a type checker for a message field of the specified types. Args: cpp_type: C++ type of the field (see descriptor.py). field_type: Protocol message field type (see descriptor.py). Returns: An instance of TypeChecker which can be used to verify the types of values assigned to a field of the specified type. """ if (cpp_type == _FieldDescriptor.CPPTYPE_STRING and field_type == _FieldDescriptor.TYPE_STRING): return UnicodeValueChecker() return _VALUE_CHECKERS[cpp_type] # None of the typecheckers below make any attempt to guard against people # subclassing builtin types and doing weird things. We're not trying to # protect against malicious clients here, just people accidentally shooting # themselves in the foot in obvious ways. class TypeChecker(object): """Type checker used to catch type errors as early as possible when the client is setting scalar fields in protocol messages. """ def __init__(self, *acceptable_types): self._acceptable_types = acceptable_types def CheckValue(self, proposed_value): if not isinstance(proposed_value, self._acceptable_types): message = ('%.1024r has type %s, but expected one of: %s' % (proposed_value, type(proposed_value), self._acceptable_types)) raise TypeError(message) # IntValueChecker and its subclasses perform integer type-checks # and bounds-checks. class IntValueChecker(object): """Checker used for integer fields. 
Performs type-check and range check.""" def CheckValue(self, proposed_value): if not isinstance(proposed_value, (int, long)): message = ('%.1024r has type %s, but expected one of: %s' % (proposed_value, type(proposed_value), (int, long))) raise TypeError(message) if not self._MIN <= proposed_value <= self._MAX: raise ValueError('Value out of range: %d' % proposed_value) class UnicodeValueChecker(object): """Checker used for string fields.""" def CheckValue(self, proposed_value): if not isinstance(proposed_value, (str, unicode)): message = ('%.1024r has type %s, but expected one of: %s' % (proposed_value, type(proposed_value), (str, unicode))) raise TypeError(message) # If the value is of type 'str' make sure that it is in 7-bit ASCII # encoding. if isinstance(proposed_value, str): try: unicode(proposed_value, 'ascii') except UnicodeDecodeError: raise ValueError('%.1024r has type str, but isn\'t in 7-bit ASCII ' 'encoding. Non-ASCII strings must be converted to ' 'unicode objects before being added.' % (proposed_value)) class Int32ValueChecker(IntValueChecker): # We're sure to use ints instead of longs here since comparison may be more # efficient. _MIN = -2147483648 _MAX = 2147483647 class Uint32ValueChecker(IntValueChecker): _MIN = 0 _MAX = (1 << 32) - 1 class Int64ValueChecker(IntValueChecker): _MIN = -(1 << 63) _MAX = (1 << 63) - 1 class Uint64ValueChecker(IntValueChecker): _MIN = 0 _MAX = (1 << 64) - 1 # Type-checkers for all scalar CPPTYPEs. 
_VALUE_CHECKERS = { _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker( float, int, long), _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker( float, int, long), _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int), _FieldDescriptor.CPPTYPE_ENUM: Int32ValueChecker(), _FieldDescriptor.CPPTYPE_STRING: TypeChecker(str), } # Map from field type to a function F, such that F(field_num, value) # gives the total byte size for a value of the given type. This # byte size includes tag information and any other additional space # associated with serializing "value". TYPE_TO_BYTE_SIZE_FN = { _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize } # Maps from field types to encoder constructors. 
# Keyed by _FieldDescriptor.TYPE_*; values are the encoder constructors
# from the encoder module (see encoder.py for the constructor signature).
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
    }


# Maps from field types to sizer constructors.
TYPE_TO_SIZER = { _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, _FieldDescriptor.TYPE_STRING: encoder.StringSizer, _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, } # Maps from field type to a decoder constructor. TYPE_TO_DECODER = { _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, } # Maps from field type to expected wiretype. 
# Keyed by _FieldDescriptor.TYPE_*; values are wire_format.WIRETYPE_*
# constants.  Note TYPE_GROUP maps to the start-group wire type only.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
    }
koyuawsmbrtn/eclock
refs/heads/master
windows/Python27/Lib/site-packages/pip/_vendor/requests/packages/charade/euckrfreq.py
3120
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # 128 --> 0.79 # 256 --> 0.92 # 512 --> 0.986 # 1024 --> 0.99944 # 2048 --> 0.99999 # # Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 # Random Distribution Ration = 512 / (2350-512) = 0.279. 
# # Typical Distribution Ratio EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 EUCKR_TABLE_SIZE = 2352 # Char to FreqOrder table , EUCKRCharToFreqOrder = ( \ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, 1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, 1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, 1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, 1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, 1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, 1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, 1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, 1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, 1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, 1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, 1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, 1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, 1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, 
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, 1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, 1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, 1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, 1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, 1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, 1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, 1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, 1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, 1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, 1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, 1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, 1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, 1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, 1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, 423, 
277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, 2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, 2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, 2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, 2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, 2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, 1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, 2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, 1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, 2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, 2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, 1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, 2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, 2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, 1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, 
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, 2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, 2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, 2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, 2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, 2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, 1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, 2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, 2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, 2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, 2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, 2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, 1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, 1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, 2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, 1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, 2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, 1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, 2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, 2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, 2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, 2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, 501, 
380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, 2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, 1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, 1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, 2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, 1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, 2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, 2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, 1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, 2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, 1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, 2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, 1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, 2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, 2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, 1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, 1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, 2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, 2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 
684,2485,2486, 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, 2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, 2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, 2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, 2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, 1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, 2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, 2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, 2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, 2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, 2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, 2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, 1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, 2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 #Everything below is of no interest for detection 
purpose 2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658, 2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674, 2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690, 2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704, 2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720, 2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734, 2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750, 2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765, 2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779, 2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793, 2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809, 2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824, 2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840, 2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856, 1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869, 2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883, 2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899, 2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915, 2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331, 2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945, 2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961, 2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976, 2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992, 2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008, 
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021, 3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037, 3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052, 3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066, 3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080, 3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095, 3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110, 3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124, 3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140, 3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156, 3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172, 3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187, 3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201, 3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217, 3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233, 3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248, 3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264, 3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279, 3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295, 3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311, 3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327, 3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343, 3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359, 3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374, 
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389, 3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405, 3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338, 3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432, 3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446, 3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191, 3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471, 3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486, 1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499, 1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513, 3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525, 3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541, 3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557, 3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573, 3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587, 3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603, 3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618, 3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632, 3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648, 3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663, 3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679, 3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695, 3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583, 1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722, 
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738, 3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753, 3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767, 3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782, 3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796, 3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810, 3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591, 1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836, 3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851, 3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866, 3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880, 3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895, 1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905, 3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921, 3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934, 3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603, 3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964, 3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978, 3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993, 3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009, 4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024, 4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040, 1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055, 4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069, 
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083, 4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098, 4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113, 4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610, 4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142, 4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157, 4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173, 4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189, 4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205, 4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220, 4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234, 4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249, 4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265, 4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279, 4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294, 4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310, 4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326, 4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341, 4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357, 4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371, 4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387, 4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403, 4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418, 4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432, 
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446, 4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461, 4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476, 4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491, 4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507, 4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623, 4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536, 4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551, 4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567, 4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581, 4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627, 4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611, 4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626, 4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642, 4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657, 4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672, 4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687, 1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700, 4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715, 4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731, 4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633, 4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758, 4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773, 4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788, 
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803, 4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817, 4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832, 4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847, 4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863, 4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879, 4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893, 4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909, 4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923, 4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938, 4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954, 4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970, 4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645, 4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999, 5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078, 5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028, 1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042, 5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056, 5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072, 5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087, 5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103, 5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118, 1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132, 5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148, 
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161, 5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177, 5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192, 5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206, 1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218, 5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234, 5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249, 5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262, 5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278, 5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293, 5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308, 5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323, 5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338, 5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353, 5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369, 5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385, 5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400, 5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415, 5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430, 5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445, 5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461, 5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477, 5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491, 5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507, 
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523, 5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539, 5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554, 5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570, 1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585, 5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600, 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615, 5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631, 5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646, 5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660, 1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673, 5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688, 5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703, 5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716, 5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729, 5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744, 1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758, 5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773, 1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786, 5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801, 5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815, 5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831, 5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847, 5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862, 
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876, 5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889, 5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905, 5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687, 5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951, 5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963, 5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979, 5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993, 5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009, 6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025, 6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039, 6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055, 6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071, 6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086, 6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102, 6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118, 6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133, 6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147, 6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163, 6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179, 6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194, 6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210, 6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225, 
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241, 6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256, 6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024 6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287, 6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699, 6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317, 6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333, 6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347, 6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363, 6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379, 6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395, 6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411, 6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425, 6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440, 6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456, 6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472, 6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488, 6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266, 6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519, 6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535, 6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551, 1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565, 6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581, 6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597, 
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613, 6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629, 6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644, 1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659, 6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674, 1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689, 6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705, 6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721, 6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736, 1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748, 6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763, 6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779, 6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794, 6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711, 6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825, 6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840, 6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856, 6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872, 6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888, 6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903, 6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918, 6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934, 6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950, 6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966, 
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981, 6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996, 6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011, 7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027, 7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042, 7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058, 7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074, 7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090, 7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106, 7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122, 7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138, 7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154, 7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170, 7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186, 7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202, 7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216, 7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232, 7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248, 7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264, 7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280, 7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296, 7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312, 7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327, 7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343, 
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359, 7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375, 7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391, 7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407, 7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423, 7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439, 7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455, 7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471, 7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487, 7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503, 7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519, 7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535, 7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551, 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, 7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583, 7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599, 7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615, 7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631, 7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647, 7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663, 7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679, 7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695, 7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711, 7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727, 
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743, 7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759, 7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775, 7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791, 7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807, 7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823, 7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839, 7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855, 7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871, 7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887, 7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903, 7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919, 7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, 7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, 7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, 7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, 7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, 8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, 8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, 8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, 8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, 8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, 8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, 8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, 
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, 8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, 8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, 8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, 8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, 8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, 8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, 8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, 8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, 8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271, 8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287, 8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303, 8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319, 8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335, 8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351, 8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367, 8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383, 8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399, 8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415, 8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431, 8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447, 8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463, 8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479, 8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495, 
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511, 8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527, 8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543, 8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559, 8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575, 8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591, 8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607, 8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623, 8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639, 8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655, 8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671, 8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687, 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, 8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719, 8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735, 8736,8737,8738,8739,8740,8741) # flake8: noqa
commtrack/temp-aquatest
refs/heads/master
apps/hq/reporter/api_/resources.py
1
import operator
from datetime import datetime, timedelta

from django.http import HttpResponseBadRequest

from transformers.xml import xmlify
from transformers.http import responsify
from xformmanager.models import FormDefModel, Metadata
from hq.models import ReporterProfile
from hq.reporter.api_.reports import Report, DataSet, Values
from hq.reporter.metadata import get_username_count, get_timespan
from domain.decorators import login_and_domain_required

# TODO - clean up index/value once we hash out this spec more properly
# TODO - pull out authentication stuff into some generic wrapper


def report(request, ids=None, index='', value=None):
    """Parse GET parameters, run the matching report, and return it as XML.

    Recognised GET parameters:
      ids         comma-separated form ids
      index       x-axis title ('user' or 'day'); required
      value       comma-separated y-axis titles; required
      start-date  start of reporting period, YYYY-MM-DD; required
      end-date    end of reporting period, YYYY-MM-DD; required
      stats       optional comma-separated statistics to compute

    Returns an XML HTTP response, or HttpResponseBadRequest on any
    missing parameter or report-generation failure.
    """
    # NOTE(review): presence is tested on request.REQUEST (GET+POST) but the
    # value is read from request.GET throughout -- a POST-only parameter
    # would still KeyError.  Preserved as-is; confirm intent with callers.
    if not ids:
        if 'ids' in request.REQUEST:
            # BUG FIX: this previously read request.GET['id'], which raised
            # a KeyError whenever the checked 'ids' parameter was supplied.
            ids = [v.strip() for v in request.GET['ids'].split(',')]
    if not index:
        if 'index' in request.REQUEST:
            index = request.GET['index']
        else:
            return HttpResponseBadRequest("Must specify index (x-axis)")
    if not value:
        if 'value' in request.REQUEST:
            value = [v.strip() for v in request.GET['value'].split(',')]
        else:
            return HttpResponseBadRequest("Must specify value (y-axes)")
    start_date = None
    if 'start-date' in request.REQUEST:
        start_date = datetime.strptime(request.GET['start-date'], "%Y-%m-%d")
    if start_date is None:
        return HttpResponseBadRequest("Must specify start_date")
    end_date = None
    if 'end-date' in request.REQUEST:
        end_date = datetime.strptime(request.GET['end-date'], "%Y-%m-%d")
    if end_date is None:
        return HttpResponseBadRequest("Must specify end_date")
    stats = None
    if 'stats' in request.REQUEST:
        stats = [v.strip() for v in request.GET['stats'].split(',')]
    try:
        _report = get_report(request, ids, index, value, start_date,
                             end_date, stats)
    except Exception as e:
        # Report-generation failures are surfaced to the caller as a 400.
        return HttpResponseBadRequest(str(e))
    xml = xmlify(_report)
    response = responsify('xml', xml)
    return response


@login_and_domain_required
def get_report(request, ids, index, value, start_date, end_date, stats):
    """Dispatch to the report implementation matching *index*.

    There's probably a more generic way of handling this, but we do need
    to support fairly custom titles and names for some of these elements.
    Anyways, it's worth revisiting and refactoring this later.
    """
    if index.lower() == 'user':
        return get_user_activity_report(request, ids, index, value,
                                        start_date, end_date, stats)
    elif index.lower() == 'day':
        return get_daily_activity_report(request, ids, index, value,
                                         start_date, end_date, stats)
    raise Exception("Your request does not match any known reports.")


@login_and_domain_required
def get_user_activity_report(request, ids, index, value, start_date,
                             end_date, stats):
    """CHW Group Total Activity Report - submissions per user over time.

    ids: list of form id's
    index: title for the x-axis. something like, 'users', 'chws', etc.
    value: title for the y-axis. usually corresponds to the form name(s)
    start_date: start of reporting period
    end_date: end of reporting period
    stats: any requested stats.

    Returns a Report object populated with requested data.
    """
    domain = request.user.selected_domain
    if not ids:
        raise Exception("The requested form was not found")
    _report = Report("CHW Group Total Activity Report")
    _report.generating_url = request.path
    metadata = Metadata.objects.filter(timestart__gte=start_date)
    # the query below is used if you want to query by submission time
    # (instead of form completion time)
    # metadata = Metadata.objects.filter(attachment__submission__submit_time__gte=start_date)
    # since we are working at a granularity of 'days', we want to make sure
    # we include complete days in our queries, so we round up
    timespan = get_timespan(start_date, end_date)
    delta = timedelta(days=timespan.days + 1)
    metadata = metadata.filter(timeend__lt=start_date + delta)
    # the query below is used if you want to query by submission time
    # (instead of form completion time)
    # metadata = metadata.filter(attachment__submission__submit_time__lte=end_date)
    dataset = DataSet(unicode(value[0]) + " per " + unicode(index))
    dataset.indices = unicode(index)
    dataset.params = request.GET
    # when 'organization' is properly populated, we can start using that
    # member_list = utils.get_members(organization)
    # for now, just use domain
    member_list = [r.chw_username for r in
                   ReporterProfile.objects.filter(domain=domain)
                                          .order_by("chw_username")]
    # get a sum of all forms
    visits_per_member = Values("visits")
    for member in member_list:
        visits_per_member.append(
            (member, metadata.filter(username=member).count()))
    visits_per_member.run_stats(stats)
    visits_per_member.sort(key=operator.itemgetter(1), reverse=True)
    dataset.valuesets.append(visits_per_member)
    # this report only requires the first form. you can imagine other
    # reports doing this iteration:
    for form_id in ids:  # renamed from 'id' to avoid shadowing the builtin
        form_per_member = Values(unicode(value[0]))
        # NOTE(review): every iteration filters on ids[0], matching the
        # "only requires the first form" comment above -- confirm before
        # changing to form_id.
        form_metadata = metadata.filter(formdefmodel=ids[0])
        for member in member_list:
            # values are tuples of dates and counts
            form_per_member.append(
                (member, form_metadata.filter(username=member).count()))
        form_per_member.run_stats(stats)
        form_per_member.sort(key=operator.itemgetter(1), reverse=True)
        dataset.valuesets.append(form_per_member)
    _report.datasets.append(dataset)
    return _report


@login_and_domain_required
def get_daily_activity_report(request, ids, index, value, start_date,
                              end_date, stats):
    """CHW Daily Activity Report - submissions per day by user.

    ids: list of form id's. this report returns the sum of all ids listed.
    index: title for the x-axis. something like, 'day', 'session', etc.
    value: title for the y-axis. usually corresponds to the form name(s)
    start_date: start of reporting period
    end_date: end of reporting period
    stats: any requested stats.

    Returns a Report object populated with requested data.
    """
    domain = request.user.selected_domain
    if 'chw' in request.GET:
        chw = request.GET['chw']
    else:
        raise Exception("This reports requires a CHW parameter")
    if not ids:
        raise Exception("The requested form was not found")
    # TODO - this currently only tested for value lists of size 1. test.
    _report = Report("CHW Daily Activity Report")
    _report.generating_url = request.path
    # when 'organization' is properly populated, we can start using that
    # member_list = utils.get_members(organization)
    # for now, just use domain
    member_list = [r.chw_username for r in
                   ReporterProfile.objects.filter(domain=domain)]
    if chw not in member_list:
        raise Exception("No matching CHW could be identified")
    dataset = DataSet(unicode(value[0]) + " per " + unicode(index))
    dataset.indices = unicode(index)
    dataset.params = request.GET
    # first value set: total visits (all forms), second: the selected forms
    values = get_daily_activity_values('Visits', None, chw, member_list,
                                       start_date, end_date, stats, domain)
    dataset.valuesets.append(values)
    form_list = FormDefModel.objects.filter(pk__in=ids)
    values = get_daily_activity_values(unicode(value[0]), form_list, chw,
                                       member_list, start_date, end_date,
                                       stats, domain)
    dataset.valuesets.append(values)
    _report.datasets.append(dataset)
    return _report


def get_daily_activity_values(name, form_list, chw, member_list, start_date,
                              end_date, stats, domain):
    """Return a Values set of (date-string, count) pairs for *chw*.

    Produces one entry per day in [start_date, end_date]; days with no
    submissions yield zeros.
    """
    # get a sum of all the forms
    member_list = [r.chw_username for r in
                   ReporterProfile.objects.filter(domain=domain)]
    if chw not in member_list:
        raise Exception("No matching CHW could be identified")
    username_counts = get_username_count(form_list, member_list,
                                         start_date, end_date)
    date = start_date
    day = timedelta(days=1)
    values_per_day = Values(name)
    if chw in username_counts:
        for daily_count in username_counts[chw]:
            # values are tuples of dates and daily counts
            values_per_day.append((date.strftime("%Y-%m-%d"), daily_count))
            date = date + day
    else:
        # should return a set of '0s' even when no forms submitted
        timespan = get_timespan(start_date, end_date)
        for i in range(0, timespan.days + 1):
            values_per_day.append((date.strftime("%Y-%m-%d"), 0))
            date = date + day
    values_per_day.run_stats(stats)
    return values_per_day
codeinvain/object_detection
refs/heads/master
app/target.py
1
import cv2
import tracks
from ostruct import OpenStruct


class Target:
    """A candidate detection target wrapped around an OpenCV contour.

    Geometry (area, bounding rectangle, aspect ratio) is computed lazily
    from the contour and cached on first access.
    """

    def __init__(self, contour):
        self.contour = contour
        # BUG FIX: _area was never initialised, so the first call to
        # area() raised AttributeError.
        self._area = None
        self._ratio = None
        self._rect = None

    def area(self):
        """Return the contour area, computed once and cached."""
        if self._area is None:
            self._area = cv2.contourArea(self.contour)
        return self._area

    def rect(self):
        """Return the bounding rectangle as an OpenStruct with x/y/w/h."""
        if self._rect is None:
            self._rect = OpenStruct()
            # NOTE(review): cv2.boundingRect returns (x, y, w, h), but the
            # first two values are assigned to .y and .x respectively --
            # looks swapped; kept as-is in case callers rely on this
            # convention.  TODO confirm.
            self._rect.y, self._rect.x, self._rect.w, self._rect.h = \
                cv2.boundingRect(self.contour)
        return self._rect

    def ratio(self):
        """Return the width/height aspect ratio, computed once and cached."""
        if self._ratio is None:
            # float() guards against Python 2 integer division, which would
            # truncate ratios like 2/5 to 0.
            self._ratio = float(self.rect().w) / self.rect().h
        return self._ratio

    def isValid(self):
        """A target is currently considered valid iff its ratio is in range."""
        return self.isInRatio()

    # expecting ratio of 2/5 ~ 0.4
    def isInRatio(self):
        """Return True when the aspect ratio is inside the configured bounds."""
        min_ratio = tracks.config['target']['min_ratio']
        max_ratio = tracks.config['target']['max_ratio']
        return min_ratio < self.ratio() < max_ratio
thePortus/generator-djangular-gift
refs/heads/master
django-viewset/templates/_.tests.__init__.py
51
# -*- coding: UTF-8 -*-
thisismedium/md
refs/heads/master
md/__init__.py
12133432
group-policy/rally
refs/heads/master
tests/unit/plugins/openstack/scenarios/ec2/__init__.py
12133432
bikong2/django
refs/heads/master
tests/i18n/other/locale/__init__.py
12133432
xapharius/HadoopML
refs/heads/master
Engine/src/ensemble/regression/__init__.py
12133432
bunnyitvn/webptn
refs/heads/master
django/conf/locale/mn/__init__.py
12133432
mknz/furiganasan
refs/heads/master
write2odt.py
1
# -*- coding: utf-8 -*-
import re

from odf.opendocument import OpenDocumentText
from odf import teletype
from odf.text import P
from odf.text import Ruby
from odf.text import RubyBase
from odf.text import RubyText

# Matches one or more consecutive kanji characters.
TOKENS_KANJI = re.compile(u'[一-龠]+')  # kanji


def add_str(paragraph_element, string):
    """Append *string* to *paragraph_element* as plain text."""
    teletype.addTextToElement(paragraph_element, string)


def add_rubi(paragraph_element, str_base, str_rubi):
    """Append *str_base* annotated with the ruby reading *str_rubi*."""
    ruby = Ruby(stylename="Rubi1")
    ruby.addElement(RubyBase(text=str_base))
    ruby.addElement(RubyText(text=str_rubi))
    paragraph_element.addElement(ruby)


def create_paragraph(string):
    """Build a paragraph element from *string*, turning each
    'kanji(reading)' occurrence into ruby-annotated text."""
    paragraph = P()
    while True:
        kanji = re.search(TOKENS_KANJI, string)  # detect kanji start position
        if kanji is not None:
            # Flush the non-kanji prefix as plain text, then continue
            # from the kanji run itself.
            add_str(paragraph, string[:kanji.start()])
            string = string[kanji.start():]  # truncate non-kanji
        reading = re.match(r'(.*?)\((.*?)\)(.*)', string)
        if reading is None:
            # No '(reading)' group left: emit the remainder and stop.
            add_str(paragraph, string)
            break
        add_rubi(paragraph, reading.group(1), reading.group(2))
        string = reading.group(3)
    return paragraph


def convert_and_save(text, filename):
    """Convert *text* to an ODT document, one paragraph per line, and save it."""
    textdoc = OpenDocumentText()
    # Useless: This style setting is somehow overwritten by LibreOffice
    ''' rubistyle = Style(name="Rubi1", family="ruby") rubistyle.addElement(RubyProperties(attributes={"rubyalign": "center", "rubyposition": "above"})) textdoc.styles.addElement(rubistyle) '''
    for line in text.splitlines():
        textdoc.text.addElement(create_paragraph(line))
    textdoc.save(filename)
JamesJGarner/cms
refs/heads/master
src/cms/apps/links/tests.py
1
from django.test import TestCase from django.contrib.contenttypes.models import ContentType from cms.apps.pages.models import Page from cms.apps.links.models import Link class TestLinks(TestCase): def setUp(self): page = Page.objects.create( title = "Homepage", content_type = ContentType.objects.get_for_model(Link), ) Link.objects.create( page = page, link_url = "http://www.example.com/", ) def testLinkRedirect(self): response = self.client.get("/") self.assertEquals(response.status_code, 302) self.assertEquals(response["Location"], "http://www.example.com/")
KangHsi/youtube-8m
refs/heads/master
youtube-8m/readers.py
9
# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Provides readers configured for different datasets.""" import tensorflow as tf import utils from tensorflow import logging def resize_axis(tensor, axis, new_size, fill_value=0): """Truncates or pads a tensor to new_size on on a given axis. Truncate or extend tensor such that tensor.shape[axis] == new_size. If the size increases, the padding will be performed at the end, using fill_value. Args: tensor: The tensor to be resized. axis: An integer representing the dimension to be sliced. new_size: An integer or 0d tensor representing the new value for tensor.shape[axis]. fill_value: Value to use to fill any new entries in the tensor. Will be cast to the type of tensor. Returns: The resized tensor. """ tensor = tf.convert_to_tensor(tensor) shape = tf.unstack(tf.shape(tensor)) pad_shape = shape[:] pad_shape[axis] = tf.maximum(0, new_size - shape[axis]) shape[axis] = tf.minimum(shape[axis], new_size) shape = tf.stack(shape) resized = tf.concat([ tf.slice(tensor, tf.zeros_like(shape), shape), tf.fill(tf.stack(pad_shape), tf.cast(fill_value, tensor.dtype)) ], axis) # Update shape. new_shape = tensor.get_shape().as_list() # A copy is being made. 
new_shape[axis] = new_size resized.set_shape(new_shape) return resized class BaseReader(object): """Inherit from this class when implementing new readers.""" def prepare_reader(self, unused_filename_queue): """Create a thread for generating prediction and label tensors.""" raise NotImplementedError() class YT8MAggregatedFeatureReader(BaseReader): """Reads TFRecords of pre-aggregated Examples. The TFRecords must contain Examples with a sparse int64 'labels' feature and a fixed length float32 feature, obtained from the features in 'feature_name'. The float features are assumed to be an average of dequantized values. """ def __init__(self, num_classes=4716, feature_sizes=[1024], feature_names=["mean_inc3"]): """Construct a YT8MAggregatedFeatureReader. Args: num_classes: a positive integer for the number of classes. feature_sizes: positive integer(s) for the feature dimensions as a list. feature_names: the feature name(s) in the tensorflow record as a list. """ assert len(feature_names) == len(feature_sizes), \ "length of feature_names (={}) != length of feature_sizes (={})".format( \ len(feature_names), len(feature_sizes)) self.num_classes = num_classes self.feature_sizes = feature_sizes self.feature_names = feature_names def prepare_reader(self, filename_queue, batch_size=1024): """Creates a single reader thread for pre-aggregated YouTube 8M Examples. Args: filename_queue: A tensorflow queue of filename locations. Returns: A tuple of video indexes, features, labels, and padding data. """ reader = tf.TFRecordReader() _, serialized_examples = reader.read_up_to(filename_queue, batch_size) tf.add_to_collection("serialized_examples", serialized_examples) return self.prepare_serialized_examples(serialized_examples) def prepare_serialized_examples(self, serialized_examples): # set the mapping from the fields to data types in the proto num_features = len(self.feature_names) assert num_features > 0, "self.feature_names is empty!" 
assert len(self.feature_names) == len(self.feature_sizes), \ "length of feature_names (={}) != length of feature_sizes (={})".format( \ len(self.feature_names), len(self.feature_sizes)) feature_map = {"video_id": tf.FixedLenFeature([], tf.string), "labels": tf.VarLenFeature(tf.int64)} for feature_index in range(num_features): feature_map[self.feature_names[feature_index]] = tf.FixedLenFeature( [self.feature_sizes[feature_index]], tf.float32) features = tf.parse_example(serialized_examples, features=feature_map) labels = tf.sparse_to_indicator(features["labels"], self.num_classes) labels.set_shape([None, self.num_classes]) concatenated_features = tf.concat([ features[feature_name] for feature_name in self.feature_names], 1) return features["video_id"], concatenated_features, labels, tf.ones([tf.shape(serialized_examples)[0]]) class YT8MFrameFeatureReader(BaseReader): """Reads TFRecords of SequenceExamples. The TFRecords must contain SequenceExamples with the sparse in64 'labels' context feature and a fixed length byte-quantized feature vector, obtained from the features in 'feature_names'. The quantized features will be mapped back into a range between min_quantized_value and max_quantized_value. """ def __init__(self, num_classes=4716, feature_sizes=[1024], feature_names=["inc3"], max_frames=300): """Construct a YT8MFrameFeatureReader. Args: num_classes: a positive integer for the number of classes. feature_sizes: positive integer(s) for the feature dimensions as a list. feature_names: the feature name(s) in the tensorflow record as a list. max_frames: the maximum number of frames to process. 
""" assert len(feature_names) == len(feature_sizes), \ "length of feature_names (={}) != length of feature_sizes (={})".format( \ len(feature_names), len(feature_sizes)) self.num_classes = num_classes self.feature_sizes = feature_sizes self.feature_names = feature_names self.max_frames = max_frames def get_video_matrix(self, features, feature_size, max_frames, max_quantized_value, min_quantized_value): """Decodes features from an input string and quantizes it. Args: features: raw feature values feature_size: length of each frame feature vector max_frames: number of frames (rows) in the output feature_matrix max_quantized_value: the maximum of the quantized value. min_quantized_value: the minimum of the quantized value. Returns: feature_matrix: matrix of all frame-features num_frames: number of frames in the sequence """ decoded_features = tf.reshape( tf.cast(tf.decode_raw(features, tf.uint8), tf.float32), [-1, feature_size]) num_frames = tf.minimum(tf.shape(decoded_features)[0], max_frames) feature_matrix = utils.Dequantize(decoded_features, max_quantized_value, min_quantized_value) feature_matrix = resize_axis(feature_matrix, 0, max_frames) return feature_matrix, num_frames def prepare_reader(self, filename_queue, max_quantized_value=2, min_quantized_value=-2): """Creates a single reader thread for YouTube8M SequenceExamples. Args: filename_queue: A tensorflow queue of filename locations. max_quantized_value: the maximum of the quantized value. min_quantized_value: the minimum of the quantized value. Returns: A tuple of video indexes, video features, labels, and padding data. 
""" reader = tf.TFRecordReader() _, serialized_example = reader.read(filename_queue) return self.prepare_serialized_examples(serialized_example, max_quantized_value, min_quantized_value) def prepare_serialized_examples(self, serialized_example, max_quantized_value=2, min_quantized_value=-2): contexts, features = tf.parse_single_sequence_example( serialized_example, context_features={"video_id": tf.FixedLenFeature( [], tf.string), "labels": tf.VarLenFeature(tf.int64)}, sequence_features={ feature_name : tf.FixedLenSequenceFeature([], dtype=tf.string) for feature_name in self.feature_names }) # read ground truth labels labels = (tf.cast( tf.sparse_to_dense(contexts["labels"].values, (self.num_classes,), 1, validate_indices=False), tf.bool)) # loads (potentially) different types of features and concatenates them num_features = len(self.feature_names) assert num_features > 0, "No feature selected: feature_names is empty!" assert len(self.feature_names) == len(self.feature_sizes), \ "length of feature_names (={}) != length of feature_sizes (={})".format( \ len(self.feature_names), len(self.feature_sizes)) num_frames = -1 # the number of frames in the video feature_matrices = [None] * num_features # an array of different features for feature_index in range(num_features): feature_matrix, num_frames_in_this_feature = self.get_video_matrix( features[self.feature_names[feature_index]], self.feature_sizes[feature_index], self.max_frames, max_quantized_value, min_quantized_value) if num_frames == -1: num_frames = num_frames_in_this_feature else: tf.assert_equal(num_frames, num_frames_in_this_feature) feature_matrices[feature_index] = feature_matrix # cap the number of frames at self.max_frames num_frames = tf.minimum(num_frames, self.max_frames) # concatenate different features video_matrix = tf.concat(feature_matrices, 1) # convert to batch format. # TODO: Do proper batch reads to remove the IO bottleneck. 
batch_video_ids = tf.expand_dims(contexts["video_id"], 0) batch_video_matrix = tf.expand_dims(video_matrix, 0) batch_labels = tf.expand_dims(labels, 0) batch_frames = tf.expand_dims(num_frames, 0) return batch_video_ids, batch_video_matrix, batch_labels, batch_frames
OnRampOrg/onramp
refs/heads/master
server/ui/admin/users/views.py
2
import json from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.http import HttpResponse from django.template import Context from django.template.loader import get_template from ui.admin.models import job, user_to_workspace @login_required def main(request): """ Renders the main Admin dashboard on login URL: /admin/Dashboard :param request: :return: """ template = get_template('admin_users.html') return HttpResponse(template.render({}, request)) # @login_required def get_all_users(request): """ Retrieve all OnRamp users URL: /admin/Dashboard/GetUsers/ :param request: :return: """ users = User.objects.all().values('id', 'first_name', 'last_name', 'username', 'email', 'is_superuser', 'is_active') data = [] for user in users: data.append({ 'user_id':user['id'], 'username':user['username'], 'first_name':user['first_name'], 'last_name':user['last_name'], 'is_enabled':user['is_active'], 'email':user['email'], 'is_admin':user['is_superuser'] }) response = { 'status':0, 'status_message':'Success', 'users':data } return HttpResponse(json.dumps(response)) # @login_required def update_user(request): """ Update a specific user with new settings URL: /admin/Users/Update/ :param request: :return: """ post = request.POST.dict() user = post.get('user_id') if user is None: response = {'status':-1, 'status_message':'No user_id specified'} return HttpResponse(json.dumps(response)) try: user_obj = User.objects.get(id=user) except User.DoesNotExist: response = {'status':-1, 'status_message':'Invalid user_id: {}'.format(user)} return HttpResponse(json.dumps(response)) user_obj.first_name = post.get('first_name') user_obj.last_name = post.get('last_name') password = post.get('password') if password and password != "**********": # update the password user_obj.set_password(password) if post.get('username'): user_obj.username = post['username'] user_obj.email = post.get('email') user_obj.is_superuser = 
json.loads(post.get('is_admin', 'false')) user_obj.is_active = json.loads(post.get('is_enabled', 'false')) user_obj.save() response = {'status': 1, 'status_message': 'Success'} return HttpResponse(json.dumps(response)) # @login_required def create_user(request): """ Creates a new user with specified settings URL: /admin/Users/Create/ :param request: :return: """ post = request.POST.dict() username = post.get('username') if username is None: response = {'status':-1, 'status_message':'No username specified.'} return HttpResponse(json.dumps(response)) password = post.get('password') if password is None: response = {'status': -1, 'status_message': 'No password specified.'} return HttpResponse(json.dumps(response)) user_obj = User( username=username, first_name=post.get('first_name'), last_name=post.get('last_name'), email=post.get('email'), is_superuser=json.loads(post.get('is_admin', 'false')), is_active=json.loads(post.get('is_enabled', 'false')) ) user_obj.set_password(password) user_obj.save() response = {'status':1, 'status_message':'Success'} return HttpResponse(json.dumps(response)) # @login_required def disable_user(request): """ Disable a specific enabled user They will no longer be able to login URL: /admin/Users/Disable :param request: :return: """ user_id = request.POST.get('user_id') if user_id is None: response = {'status':-1, 'status_message':'No user_id specified.'} return HttpResponse(json.dumps(response)) try: user_obj = User.objects.get(id=user_id) except User.DoesNotExist: response = {'status':-1, 'status_message':'No user with id {} exists'.format(user_id)} return HttpResponse(json.dumps(response)) user_obj.is_active = False user_obj.save() response = {'status':1, 'status_message':'Success'} return HttpResponse(json.dumps(response)) # @login_required def enable_user(request): """ Enable a specific disabled user They will be able to login again URL: /admin/Users/Enable :param request: :return: """ user_id = request.POST.get('user_id') if user_id is 
None: response = {'status': -1, 'status_message': 'No user with id {} exists'.format(user_id)} return HttpResponse(json.dumps(response)) try: user_obj = User.objects.get(id=user_id) except User.DoesNotExist: response = {'status': -1, 'status_message': 'No user with id {} exists'.format(user_id)} return HttpResponse(json.dumps(response)) user_obj.is_active = True user_obj.save() response = {'status': 1, 'status_message': 'Success'} return HttpResponse(json.dumps(response)) # @login_required def delete_user(request): """ Remove a specific user URL: /admin/Users/Delete :param request: :return: """ user_id = request.POST.get('user_id') User.objects.filter(id=user_id).delete() response = {'status': 1, 'status_message': 'Success'} return HttpResponse(json.dumps(response)) # @login_required def get_user_jobs(request): """ Retrieve all jobs run by a specific user URL: /admin/Users/Jobs :param request: :return: """ post = request.POST.dict() user = post.get('user_id') if not user: response = {'status':-1, 'status_message':'No user supplied'} return HttpResponse(json.dumps(response)) response = { 'status':1, 'status_message':'Success', 'jobs': list(job.objects.filter(user_id=user).defer('output_file')) } return HttpResponse(json.dumps(response)) # @login_required def get_user_workspaces(request): """ Retrieve all workspaces for a specific user URL: /admin/Users/Workspaces :param request: :return: """ post = request.POST.dict() user = post.get('user_id') if not user: response = {'status': -1, 'status_message': 'No user supplied'} return HttpResponse(json.dumps(response)) response = { 'status': 1, 'status_message': 'Success', 'workspaces':[] } qs = user_to_workspace.objects.filter(user_id=user) for row in qs: response['workspaces'].append({ 'workspace_id':row.workspace_id.workspace_id, 'workspace_name':row.workspace_id.workspace_name, 'description':row.workspace_id.description }) return HttpResponse(json.dumps(response))
gregdek/ansible
refs/heads/devel
lib/ansible/modules/cloud/oneandone/oneandone_monitoring_policy.py
93
#!/usr/bin/python # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: oneandone_monitoring_policy short_description: Configure 1&1 monitoring policy. description: - Create, remove, update monitoring policies (and add/remove ports, processes, and servers). This module has a dependency on 1and1 >= 1.0 version_added: "2.5" options: state: description: - Define a monitoring policy's state to create, remove, update. required: false default: present choices: [ "present", "absent", "update" ] auth_token: description: - Authenticating API token provided by 1&1. required: true api_url: description: - Custom API URL. Overrides the ONEANDONE_API_URL environement variable. required: false name: description: - Monitoring policy name used with present state. Used as identifier (id or name) when used with absent state. maxLength=128 required: true monitoring_policy: description: - The identifier (id or name) of the monitoring policy used with update state. required: true agent: description: - Set true for using agent. required: true email: description: - User's email. maxLength=128 required: true description: description: - Monitoring policy description. 
maxLength=256 required: false thresholds: description: - Monitoring policy thresholds. Each of the suboptions have warning and critical, which both have alert and value suboptions. Warning is used to set limits for warning alerts, critical is used to set critical alerts. alert enables alert, and value is used to advise when the value is exceeded. required: true suboptions: cpu: description: - Consumption limits of CPU. required: true ram: description: - Consumption limits of RAM. required: true disk: description: - Consumption limits of hard disk. required: true internal_ping: description: - Response limits of internal ping. required: true transfer: description: - Consumption limits for transfer. required: true ports: description: - Array of ports that will be monitoring. required: true suboptions: protocol: description: - Internet protocol. choices: [ "TCP", "UDP" ] required: true port: description: - Port number. minimum=1, maximum=65535 required: true alert_if: description: - Case of alert. choices: [ "RESPONDING", "NOT_RESPONDING" ] required: true email_notification: description: - Set true for sending e-mail notifications. required: true processes: description: - Array of processes that will be monitoring. required: true suboptions: process: description: - Name of the process. maxLength=50 required: true alert_if: description: - Case of alert. choices: [ "RUNNING", "NOT_RUNNING" ] required: true add_ports: description: - Ports to add to the monitoring policy. required: false add_processes: description: - Processes to add to the monitoring policy. required: false add_servers: description: - Servers to add to the monitoring policy. required: false remove_ports: description: - Ports to remove from the monitoring policy. required: false remove_processes: description: - Processes to remove from the monitoring policy. required: false remove_servers: description: - Servers to remove from the monitoring policy. 
required: false update_ports: description: - Ports to be updated on the monitoring policy. required: false update_processes: description: - Processes to be updated on the monitoring policy. required: false wait: description: - wait for the instance to be in state 'running' before returning required: false default: "yes" type: bool wait_timeout: description: - how long before wait gives up, in seconds default: 600 wait_interval: description: - Defines the number of seconds to wait when using the _wait_for methods default: 5 requirements: - "1and1" - "python >= 2.6" author: - "Amel Ajdinovic (@aajdinov)" - "Ethan Devenport (@edevenport)" ''' EXAMPLES = ''' # Provisioning example. Create and destroy a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key name: ansible monitoring policy description: Testing creation of a monitoring policy with ansible email: your@emailaddress.com agent: true thresholds: - cpu: warning: value: 80 alert: false critical: value: 92 alert: false - ram: warning: value: 80 alert: false critical: value: 90 alert: false - disk: warning: value: 80 alert: false critical: value: 90 alert: false - internal_ping: warning: value: 50 alert: false critical: value: 100 alert: false - transfer: warning: value: 1000 alert: false critical: value: 2000 alert: false ports: - protocol: TCP port: 22 alert_if: RESPONDING email_notification: false processes: - process: test alert_if: NOT_RUNNING email_notification: false wait: true - oneandone_moitoring_policy: auth_token: oneandone_private_api_key state: absent name: ansible monitoring policy # Update a monitoring policy. 
- oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy name: ansible monitoring policy updated description: Testing creation of a monitoring policy with ansible updated email: another@emailaddress.com thresholds: - cpu: warning: value: 70 alert: false critical: value: 90 alert: false - ram: warning: value: 70 alert: false critical: value: 80 alert: false - disk: warning: value: 70 alert: false critical: value: 80 alert: false - internal_ping: warning: value: 60 alert: false critical: value: 90 alert: false - transfer: warning: value: 900 alert: false critical: value: 1900 alert: false wait: true state: update # Add a port to a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated add_ports: - protocol: TCP port: 33 alert_if: RESPONDING email_notification: false wait: true state: update # Update existing ports of a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated update_ports: - id: existing_port_id protocol: TCP port: 34 alert_if: RESPONDING email_notification: false - id: existing_port_id protocol: TCP port: 23 alert_if: RESPONDING email_notification: false wait: true state: update # Remove a port from a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated remove_ports: - port_id state: update # Add a process to a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated add_processes: - process: test_2 alert_if: NOT_RUNNING email_notification: false wait: true state: update # Update existing processes of a monitoring policy. 
- oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated update_processes: - id: process_id process: test_1 alert_if: NOT_RUNNING email_notification: false - id: process_id process: test_3 alert_if: NOT_RUNNING email_notification: false wait: true state: update # Remove a process from a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated remove_processes: - process_id wait: true state: update # Add server to a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated add_servers: - server id or name wait: true state: update # Remove server from a monitoring policy. - oneandone_moitoring_policy: auth_token: oneandone_private_api_key monitoring_policy: ansible monitoring policy updated remove_servers: - server01 wait: true state: update ''' RETURN = ''' monitoring_policy: description: Information about the monitoring policy that was processed type: dict sample: '{"id": "92B74394A397ECC3359825C1656D67A6", "name": "Default Policy"}' returned: always ''' import os from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.oneandone import ( get_monitoring_policy, get_server, OneAndOneResources, wait_for_resource_creation_completion ) HAS_ONEANDONE_SDK = True try: import oneandone.client except ImportError: HAS_ONEANDONE_SDK = False def _check_mode(module, result): if module.check_mode: module.exit_json( changed=result ) def _add_ports(module, oneandone_conn, monitoring_policy_id, ports): """ Adds new ports to a monitoring policy. 
""" try: monitoring_policy_ports = [] for _port in ports: monitoring_policy_port = oneandone.client.Port( protocol=_port['protocol'], port=_port['port'], alert_if=_port['alert_if'], email_notification=_port['email_notification'] ) monitoring_policy_ports.append(monitoring_policy_port) if module.check_mode: if monitoring_policy_ports: return True return False monitoring_policy = oneandone_conn.add_port( monitoring_policy_id=monitoring_policy_id, ports=monitoring_policy_ports) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _delete_monitoring_policy_port(module, oneandone_conn, monitoring_policy_id, port_id): """ Removes a port from a monitoring policy. """ try: if module.check_mode: monitoring_policy = oneandone_conn.delete_monitoring_policy_port( monitoring_policy_id=monitoring_policy_id, port_id=port_id) if monitoring_policy: return True return False monitoring_policy = oneandone_conn.delete_monitoring_policy_port( monitoring_policy_id=monitoring_policy_id, port_id=port_id) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _modify_port(module, oneandone_conn, monitoring_policy_id, port_id, port): """ Modifies a monitoring policy port. """ try: if module.check_mode: cm_port = oneandone_conn.get_monitoring_policy_port( monitoring_policy_id=monitoring_policy_id, port_id=port_id) if cm_port: return True return False monitoring_policy_port = oneandone.client.Port( protocol=port['protocol'], port=port['port'], alert_if=port['alert_if'], email_notification=port['email_notification'] ) monitoring_policy = oneandone_conn.modify_port( monitoring_policy_id=monitoring_policy_id, port_id=port_id, port=monitoring_policy_port) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _add_processes(module, oneandone_conn, monitoring_policy_id, processes): """ Adds new processes to a monitoring policy. 
""" try: monitoring_policy_processes = [] for _process in processes: monitoring_policy_process = oneandone.client.Process( process=_process['process'], alert_if=_process['alert_if'], email_notification=_process['email_notification'] ) monitoring_policy_processes.append(monitoring_policy_process) if module.check_mode: mp_id = get_monitoring_policy(oneandone_conn, monitoring_policy_id) if (monitoring_policy_processes and mp_id): return True return False monitoring_policy = oneandone_conn.add_process( monitoring_policy_id=monitoring_policy_id, processes=monitoring_policy_processes) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _delete_monitoring_policy_process(module, oneandone_conn, monitoring_policy_id, process_id): """ Removes a process from a monitoring policy. """ try: if module.check_mode: process = oneandone_conn.get_monitoring_policy_process( monitoring_policy_id=monitoring_policy_id, process_id=process_id ) if process: return True return False monitoring_policy = oneandone_conn.delete_monitoring_policy_process( monitoring_policy_id=monitoring_policy_id, process_id=process_id) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _modify_process(module, oneandone_conn, monitoring_policy_id, process_id, process): """ Modifies a monitoring policy process. 
""" try: if module.check_mode: cm_process = oneandone_conn.get_monitoring_policy_process( monitoring_policy_id=monitoring_policy_id, process_id=process_id) if cm_process: return True return False monitoring_policy_process = oneandone.client.Process( process=process['process'], alert_if=process['alert_if'], email_notification=process['email_notification'] ) monitoring_policy = oneandone_conn.modify_process( monitoring_policy_id=monitoring_policy_id, process_id=process_id, process=monitoring_policy_process) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _attach_monitoring_policy_server(module, oneandone_conn, monitoring_policy_id, servers): """ Attaches servers to a monitoring policy. """ try: attach_servers = [] for _server_id in servers: server_id = get_server(oneandone_conn, _server_id) attach_server = oneandone.client.AttachServer( server_id=server_id ) attach_servers.append(attach_server) if module.check_mode: if attach_servers: return True return False monitoring_policy = oneandone_conn.attach_monitoring_policy_server( monitoring_policy_id=monitoring_policy_id, servers=attach_servers) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def _detach_monitoring_policy_server(module, oneandone_conn, monitoring_policy_id, server_id): """ Detaches a server from a monitoring policy. """ try: if module.check_mode: mp_server = oneandone_conn.get_monitoring_policy_server( monitoring_policy_id=monitoring_policy_id, server_id=server_id) if mp_server: return True return False monitoring_policy = oneandone_conn.detach_monitoring_policy_server( monitoring_policy_id=monitoring_policy_id, server_id=server_id) return monitoring_policy except Exception as ex: module.fail_json(msg=str(ex)) def update_monitoring_policy(module, oneandone_conn): """ Updates a monitoring_policy based on input arguments. Monitoring policy ports, processes and servers can be added/removed to/from a monitoring policy. 
Monitoring policy name, description, email, thresholds for cpu, ram, disk, transfer and internal_ping can be updated as well. module : AnsibleModule object oneandone_conn: authenticated oneandone object """ try: monitoring_policy_id = module.params.get('monitoring_policy') name = module.params.get('name') description = module.params.get('description') email = module.params.get('email') thresholds = module.params.get('thresholds') add_ports = module.params.get('add_ports') update_ports = module.params.get('update_ports') remove_ports = module.params.get('remove_ports') add_processes = module.params.get('add_processes') update_processes = module.params.get('update_processes') remove_processes = module.params.get('remove_processes') add_servers = module.params.get('add_servers') remove_servers = module.params.get('remove_servers') changed = False monitoring_policy = get_monitoring_policy(oneandone_conn, monitoring_policy_id, True) if monitoring_policy is None: _check_mode(module, False) _monitoring_policy = oneandone.client.MonitoringPolicy( name=name, description=description, email=email ) _thresholds = None if thresholds: threshold_entities = ['cpu', 'ram', 'disk', 'internal_ping', 'transfer'] _thresholds = [] for treshold in thresholds: key = treshold.keys()[0] if key in threshold_entities: _threshold = oneandone.client.Threshold( entity=key, warning_value=treshold[key]['warning']['value'], warning_alert=str(treshold[key]['warning']['alert']).lower(), critical_value=treshold[key]['critical']['value'], critical_alert=str(treshold[key]['critical']['alert']).lower()) _thresholds.append(_threshold) if name or description or email or thresholds: _check_mode(module, True) monitoring_policy = oneandone_conn.modify_monitoring_policy( monitoring_policy_id=monitoring_policy['id'], monitoring_policy=_monitoring_policy, thresholds=_thresholds) changed = True if add_ports: if module.check_mode: _check_mode(module, _add_ports(module, oneandone_conn, monitoring_policy['id'], 
add_ports)) monitoring_policy = _add_ports(module, oneandone_conn, monitoring_policy['id'], add_ports) changed = True if update_ports: chk_changed = False for update_port in update_ports: if module.check_mode: chk_changed |= _modify_port(module, oneandone_conn, monitoring_policy['id'], update_port['id'], update_port) _modify_port(module, oneandone_conn, monitoring_policy['id'], update_port['id'], update_port) monitoring_policy = get_monitoring_policy(oneandone_conn, monitoring_policy['id'], True) changed = True if remove_ports: chk_changed = False for port_id in remove_ports: if module.check_mode: chk_changed |= _delete_monitoring_policy_port(module, oneandone_conn, monitoring_policy['id'], port_id) _delete_monitoring_policy_port(module, oneandone_conn, monitoring_policy['id'], port_id) _check_mode(module, chk_changed) monitoring_policy = get_monitoring_policy(oneandone_conn, monitoring_policy['id'], True) changed = True if add_processes: monitoring_policy = _add_processes(module, oneandone_conn, monitoring_policy['id'], add_processes) _check_mode(module, monitoring_policy) changed = True if update_processes: chk_changed = False for update_process in update_processes: if module.check_mode: chk_changed |= _modify_process(module, oneandone_conn, monitoring_policy['id'], update_process['id'], update_process) _modify_process(module, oneandone_conn, monitoring_policy['id'], update_process['id'], update_process) _check_mode(module, chk_changed) monitoring_policy = get_monitoring_policy(oneandone_conn, monitoring_policy['id'], True) changed = True if remove_processes: chk_changed = False for process_id in remove_processes: if module.check_mode: chk_changed |= _delete_monitoring_policy_process(module, oneandone_conn, monitoring_policy['id'], process_id) _delete_monitoring_policy_process(module, oneandone_conn, monitoring_policy['id'], process_id) _check_mode(module, chk_changed) monitoring_policy = get_monitoring_policy(oneandone_conn, monitoring_policy['id'], True) 
changed = True if add_servers: monitoring_policy = _attach_monitoring_policy_server(module, oneandone_conn, monitoring_policy['id'], add_servers) _check_mode(module, monitoring_policy) changed = True if remove_servers: chk_changed = False for _server_id in remove_servers: server_id = get_server(oneandone_conn, _server_id) if module.check_mode: chk_changed |= _detach_monitoring_policy_server(module, oneandone_conn, monitoring_policy['id'], server_id) _detach_monitoring_policy_server(module, oneandone_conn, monitoring_policy['id'], server_id) _check_mode(module, chk_changed) monitoring_policy = get_monitoring_policy(oneandone_conn, monitoring_policy['id'], True) changed = True return (changed, monitoring_policy) except Exception as ex: module.fail_json(msg=str(ex)) def create_monitoring_policy(module, oneandone_conn): """ Creates a new monitoring policy. module : AnsibleModule object oneandone_conn: authenticated oneandone object """ try: name = module.params.get('name') description = module.params.get('description') email = module.params.get('email') agent = module.params.get('agent') thresholds = module.params.get('thresholds') ports = module.params.get('ports') processes = module.params.get('processes') wait = module.params.get('wait') wait_timeout = module.params.get('wait_timeout') wait_interval = module.params.get('wait_interval') _monitoring_policy = oneandone.client.MonitoringPolicy(name, description, email, agent, ) _monitoring_policy.specs['agent'] = str(_monitoring_policy.specs['agent']).lower() threshold_entities = ['cpu', 'ram', 'disk', 'internal_ping', 'transfer'] _thresholds = [] for treshold in thresholds: key = treshold.keys()[0] if key in threshold_entities: _threshold = oneandone.client.Threshold( entity=key, warning_value=treshold[key]['warning']['value'], warning_alert=str(treshold[key]['warning']['alert']).lower(), critical_value=treshold[key]['critical']['value'], critical_alert=str(treshold[key]['critical']['alert']).lower()) 
_thresholds.append(_threshold) _ports = [] for port in ports: _port = oneandone.client.Port( protocol=port['protocol'], port=port['port'], alert_if=port['alert_if'], email_notification=str(port['email_notification']).lower()) _ports.append(_port) _processes = [] for process in processes: _process = oneandone.client.Process( process=process['process'], alert_if=process['alert_if'], email_notification=str(process['email_notification']).lower()) _processes.append(_process) _check_mode(module, True) monitoring_policy = oneandone_conn.create_monitoring_policy( monitoring_policy=_monitoring_policy, thresholds=_thresholds, ports=_ports, processes=_processes ) if wait: wait_for_resource_creation_completion( oneandone_conn, OneAndOneResources.monitoring_policy, monitoring_policy['id'], wait_timeout, wait_interval) changed = True if monitoring_policy else False _check_mode(module, False) return (changed, monitoring_policy) except Exception as ex: module.fail_json(msg=str(ex)) def remove_monitoring_policy(module, oneandone_conn): """ Removes a monitoring policy. 
module : AnsibleModule object oneandone_conn: authenticated oneandone object """ try: mp_id = module.params.get('name') monitoring_policy_id = get_monitoring_policy(oneandone_conn, mp_id) if module.check_mode: if monitoring_policy_id is None: _check_mode(module, False) _check_mode(module, True) monitoring_policy = oneandone_conn.delete_monitoring_policy(monitoring_policy_id) changed = True if monitoring_policy else False return (changed, { 'id': monitoring_policy['id'], 'name': monitoring_policy['name'] }) except Exception as ex: module.fail_json(msg=str(ex)) def main(): module = AnsibleModule( argument_spec=dict( auth_token=dict( type='str', default=os.environ.get('ONEANDONE_AUTH_TOKEN')), api_url=dict( type='str', default=os.environ.get('ONEANDONE_API_URL')), name=dict(type='str'), monitoring_policy=dict(type='str'), agent=dict(type='str'), email=dict(type='str'), description=dict(type='str'), thresholds=dict(type='list', default=[]), ports=dict(type='list', default=[]), processes=dict(type='list', default=[]), add_ports=dict(type='list', default=[]), update_ports=dict(type='list', default=[]), remove_ports=dict(type='list', default=[]), add_processes=dict(type='list', default=[]), update_processes=dict(type='list', default=[]), remove_processes=dict(type='list', default=[]), add_servers=dict(type='list', default=[]), remove_servers=dict(type='list', default=[]), wait=dict(type='bool', default=True), wait_timeout=dict(type='int', default=600), wait_interval=dict(type='int', default=5), state=dict(type='str', default='present', choices=['present', 'absent', 'update']), ), supports_check_mode=True ) if not HAS_ONEANDONE_SDK: module.fail_json(msg='1and1 required for this module') if not module.params.get('auth_token'): module.fail_json( msg='auth_token parameter is required.') if not module.params.get('api_url'): oneandone_conn = oneandone.client.OneAndOneService( api_token=module.params.get('auth_token')) else: oneandone_conn = oneandone.client.OneAndOneService( 
api_token=module.params.get('auth_token'), api_url=module.params.get('api_url')) state = module.params.get('state') if state == 'absent': if not module.params.get('name'): module.fail_json( msg="'name' parameter is required to delete a monitoring policy.") try: (changed, monitoring_policy) = remove_monitoring_policy(module, oneandone_conn) except Exception as ex: module.fail_json(msg=str(ex)) elif state == 'update': if not module.params.get('monitoring_policy'): module.fail_json( msg="'monitoring_policy' parameter is required to update a monitoring policy.") try: (changed, monitoring_policy) = update_monitoring_policy(module, oneandone_conn) except Exception as ex: module.fail_json(msg=str(ex)) elif state == 'present': for param in ('name', 'agent', 'email', 'thresholds', 'ports', 'processes'): if not module.params.get(param): module.fail_json( msg="%s parameter is required for a new monitoring policy." % param) try: (changed, monitoring_policy) = create_monitoring_policy(module, oneandone_conn) except Exception as ex: module.fail_json(msg=str(ex)) module.exit_json(changed=changed, monitoring_policy=monitoring_policy) if __name__ == '__main__': main()
schreiberx/sweet
refs/heads/master
benchmarks_sphere/galewsky/galewski_rk4_robert_nonlinear_T128/pp_plot_csv.py
1
#! /usr/bin/python3 import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy as np import sys first = True s = 2e-5 vort_contour_levels = np.append(np.arange(-1e-4, 0, s), np.arange(s, 1e-4, s)) zoom_lat = True zoom_lat = False zoom_lat = 'vort' in sys.argv[1] fontsize=8 figsize=(9, 3) for filename in sys.argv[1:]: print(filename) data = np.loadtxt(filename, skiprows=3) labelsx = data[0,1:] labelsy = data[1:,0] data = data[1:,1:] if zoom_lat: while labelsy[1] < 10: labelsy = labelsy[1:] data = data[1:] while labelsy[-2] > 80: labelsy = labelsy[0:-2] data = data[0:-2] # while labelsx[1] < 90: # tmplabelsx = labelsx[0] # labelsx[0:-1] = labelsx[1:] # labelsx[-1] = tmplabelsx # # tmpdata = data[:,0] # data[:,0:-1] = data[:,1:] # data[:,-1] = tmpdata if first: lon_min = labelsx[0] lon_max = labelsx[-1] lat_min = labelsy[0] lat_max = labelsy[-1] new_labelsx = np.linspace(lon_min, lon_max, 7) new_labelsy = np.linspace(lat_min, lat_max, 7) labelsx = np.interp(new_labelsx, labelsx, labelsx) labelsy = np.interp(new_labelsy, labelsy, labelsy) if first: cmin = np.amin(data) cmax = np.amax(data) if 'vort' in filename: cmin *= 1.2 cmax *= 1.2 extent = (labelsx[0], labelsx[-1], labelsy[0], labelsy[-1]) plt.figure(figsize=figsize) plt.imshow(data, interpolation='nearest', extent=extent, origin='lower', aspect='auto') plt.clim(cmin, cmax) cbar = plt.colorbar() cbar.ax.tick_params(labelsize=fontsize) plt.title(filename, fontsize=fontsize) if 'vort' in filename: plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=vort_contour_levels, linewidths=0.5) else: if cmin != cmax: plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, linewidths=0.5) ax = plt.gca() ax.xaxis.set_label_coords(0.5, -0.075) plt.xticks(labelsx, fontsize=fontsize) plt.xlabel("Longitude", fontsize=fontsize) plt.yticks(labelsy, fontsize=fontsize) plt.ylabel("Latitude", fontsize=fontsize) #plt.show() outfilename = 
filename.replace('.csv', '.png') print(outfilename) plt.savefig(outfilename, dpi=200) plt.close() first = False
archen/django
refs/heads/master
tests/template_tests/templatetags/subpackage/__init__.py
12133432
HyperBaton/ansible
refs/heads/devel
test/units/plugins/callback/__init__.py
12133432
mjumbewu/jokosher
refs/heads/master
extensions/eggs/ExtensionConsole/setup.py
2
from setuptools import setup import sys, os from shutil import copy version = "1.1" setup(name="ExtensionConsole", version=version, author='Laszlo Pandy', author_email='laszlok2@gmail.com', maintainer='Laszlo Pandy', maintainer_email='laszlok2@gmail.com', description='A powerful python console for Jokosher.', long_description="A fully functional python console with access to the extension API and the Jokosher internals", license='GNU GPL', platforms='linux', packages=["ExtensionConsole"], package_dir={"ExtensionConsole":"src"}, package_data={"":["pyconsole.py", "ConsoleDialog.ui", "SearchDialog.ui"]}, entry_points=""" [jokosher.extensions] extension = ExtensionConsole:ExtensionConsole """ ) # copy egg file to the deployment directory in the svn structure copy("dist/ExtensionConsole-%s-py%d.%d.egg" % (version, sys.version_info[0], sys.version_info[1]), "../../")
keisuke-umezawa/chainer
refs/heads/master
tests/chainer_tests/functions_tests/loss_tests/test_cross_covariance.py
11
import unittest import numpy import six import chainer from chainer.backends import cuda from chainer import functions from chainer import gradient_check from chainer import testing from chainer.testing import attr def _cross_covariance(y, z, dtype): row = y.shape[1] col = z.shape[1] y, z = cuda.to_cpu(y), cuda.to_cpu(z) y_mean = y.mean(axis=0) z_mean = z.mean(axis=0) N = y.shape[0] loss_expect = numpy.zeros((row, col), dtype=dtype) for i in six.moves.xrange(row): for j in six.moves.xrange(col): for n in six.moves.xrange(N): loss_expect[i, j] += (y[n, i] - y_mean[i]) * ( z[n, j] - z_mean[j]) loss_expect /= N return loss_expect @testing.parameterize(*testing.product_dict( [{'dtype': numpy.float16, 'forward_options': {'rtol': 1e-3, 'atol': 1e-3}, 'backward_options': {'rtol': 3e-2, 'atol': 3e-2}, 'double_backward_options': {'rtol': 5e-1, 'atol': 5e-1}}, {'dtype': numpy.float32, 'forward_options': {'rtol': 1e-4, 'atol': 1e-4}, 'backward_options': {'rtol': 1e-4, 'atol': 1e-4}, 'double_backward_options': {'rtol': 1e-4, 'atol': 1e-4}}, {'dtype': numpy.float64, 'forward_options': {'rtol': 1e-4, 'atol': 1e-4}, 'backward_options': {'rtol': 1e-4, 'atol': 1e-4}, 'double_backward_options': {'rtol': 1e-4, 'atol': 1e-4}}, ], [{'reduce': 'half_squared_sum'}, {'reduce': 'no'}, ] )) class TestCrossCovariance(unittest.TestCase): def setUp(self): self.y = numpy.random.uniform(-1, 1, (4, 3)).astype(self.dtype) self.z = numpy.random.uniform(-1, 1, (4, 2)).astype(self.dtype) if self.reduce == 'half_squared_sum': gloss_shape = () else: gloss_shape = (3, 2) self.gloss = numpy.random.uniform( -1, 1, gloss_shape).astype(self.dtype) self.ggy = numpy.random.uniform(-1, 1, (4, 3)).astype(self.dtype) self.ggz = numpy.random.uniform(-1, 1, (4, 2)).astype(self.dtype) def check_forward(self, y_data, z_data): y = chainer.Variable(y_data) z = chainer.Variable(z_data) loss = functions.cross_covariance(y, z, self.reduce) self.assertEqual(loss.shape, self.gloss.shape) self.assertEqual(loss.data.dtype, 
self.dtype) loss_value = cuda.to_cpu(loss.data) # Compute expected value loss_expect = _cross_covariance(y_data, z_data, dtype=self.dtype) if self.reduce == 'half_squared_sum': loss_expect = numpy.sum(loss_expect ** 2) * 0.5 numpy.testing.assert_allclose( loss_expect, loss_value, **self.forward_options) def test_forward_cpu(self): self.check_forward(self.y, self.z) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.y), cuda.to_gpu(self.z)) def check_backward(self, y_data, z_data, gloss_data): def f(y, z): return functions.cross_covariance(y, z, self.reduce) gradient_check.check_backward( f, (y_data, z_data), gloss_data, eps=0.02, **self.backward_options) def test_backward_cpu(self): self.check_backward(self.y, self.z, self.gloss) @attr.gpu def test_backward_gpu(self): self.check_backward(cuda.to_gpu(self.y), cuda.to_gpu(self.z), cuda.to_gpu(self.gloss)) def check_type(self, y_data, z_data, gloss_data): y = chainer.Variable(y_data) z = chainer.Variable(z_data) loss = functions.cross_covariance(y, z, self.reduce) loss.grad = gloss_data loss.backward() self.assertEqual(y_data.dtype, y.grad.dtype) self.assertEqual(z_data.dtype, z.grad.dtype) def test_backward_type_cpu(self): self.check_type(self.y, self.z, self.gloss) @attr.gpu def test_backward_type_gpu(self): self.check_type(cuda.to_gpu(self.y), cuda.to_gpu(self.z), cuda.to_gpu(self.gloss)) def check_double_backward(self, y_data, z_data, gloss_data, ggy_data, ggz_data): def f(y, z): return functions.cross_covariance(y, z, self.reduce) gradient_check.check_double_backward( f, (y_data, z_data), gloss_data, (ggy_data, ggz_data), **self.double_backward_options) def test_double_backward_cpu(self): self.check_double_backward( self.y, self.z, self.gloss, self.ggy, self.ggz) @attr.gpu def test_double_backward_gpu(self): self.check_double_backward( cuda.to_gpu(self.y), cuda.to_gpu(self.z), cuda.to_gpu(self.gloss), cuda.to_gpu(self.ggy), cuda.to_gpu(self.ggz)) class 
TestCrossCovarianceInvalidReductionOption(unittest.TestCase): def setUp(self): self.y = numpy.random.uniform(-1, 1, (4, 3)).astype(numpy.float32) self.z = numpy.random.uniform(-1, 1, (4, 2)).astype(numpy.float32) def check_invalid_option(self, xp): y = xp.asarray(self.y) z = xp.asarray(self.z) with self.assertRaises(ValueError): functions.cross_covariance(y, z, 'invalid_option') def test_invalid_option_cpu(self): self.check_invalid_option(numpy) @attr.gpu def test_invalid_option_gpu(self): self.check_invalid_option(cuda.cupy) testing.run_module(__name__, __file__)
fedosov/django-generic-ratings
refs/heads/master
ratings/management/commands/__init__.py
12133432
e-gob/plataforma-kioscos-autoatencion
refs/heads/master
scripts/ansible-play/.venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/gb2312freq.py
3131
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # GB2312 most frequently used character table # # Char to FreqOrder table , from hz6763 # 512 --> 0.79 -- 0.79 # 1024 --> 0.92 -- 0.13 # 2048 --> 0.98 -- 0.06 # 6768 --> 1.00 -- 0.02 # # Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 # Random Distribution Ration = 512 / (3755 - 512) = 0.157 # # Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 GB2312_TABLE_SIZE = 3760 GB2312CharToFreqOrder = ( 1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, 2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, 2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, 1636,3112, 889,1286, 953, 
556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, 1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, 1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, 2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, 3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, 1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, 2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, 2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, 1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, 3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, 1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, 2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, 1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, 3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, 1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, 2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, 1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, 3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, 3678, 126, 
770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, 3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, 1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, 3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, 2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, 1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, 1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, 4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, 3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, 3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, 1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, 2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, 1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, 1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, 3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, 3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, 4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, 3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, 
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, 1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, 4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, 3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, 1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, 1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, 2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, 3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, 4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, 3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, 2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, 2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, 2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, 2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, 
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, 2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, 2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, 1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, 2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, 1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, 1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, 1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, 2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, 3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, 2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, 2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, 2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, 3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, 1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, 1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, 2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, 1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, 3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, 1228,2115,2219, 347,2223,2733, 
735,1518,3003,2355,3134,1764,3948,3329,1888,2424, 1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, 3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, 2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, 1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, 4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, 1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, 1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, 3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, 1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, 1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, 1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, 1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, 3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, 4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, 3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, 2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, 2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, 1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, 3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, 
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, 1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, 1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, 2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, 2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, 3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, 4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, 3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, 3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, 2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, 1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, 3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, 4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, 2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, 1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, 1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, 1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, 3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 
995,1569, 769, 1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, 1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, 2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, 2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, 2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, 1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, 1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, 2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, 1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, 1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, 2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, 2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, 3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, 1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, 4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, 3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, 1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, 3007,2474, 156, 902,1338,1740,2574, 537,2518, 
973,2282,2216,2433,1928, 138,2903, 1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, 4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, 1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, 2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, 1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, 1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, 3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, 2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, 1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, 1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, 1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, 3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, 2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, 3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, 3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, 3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, 2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, 2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, 1129, 
875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, 1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, 3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, 3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, 1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, 1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, 3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, 2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, 2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, 1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, 3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, 4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, 1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, 2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, 3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, 3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, 1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, 2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, 1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, 
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, 1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, 1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, 1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, 1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, 1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512 #Everything below is of no interest for detection purpose 5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636, 5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874, 5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278, 3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806, 4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827, 5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512, 5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578, 4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828, 4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105, 4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189, 4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561, 3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226, 6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778, 4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039, 6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404, 
4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213, 4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739, 4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328, 5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592, 3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424, 4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270, 3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232, 4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456, 4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121, 6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971, 6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409, 5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519, 4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367, 6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834, 4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460, 5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464, 5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709, 5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906, 6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530, 3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262, 6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920, 4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190, 5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318, 6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538, 
6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697, 4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544, 5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016, 4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638, 5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006, 5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071, 4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552, 4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556, 5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432, 4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632, 4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885, 5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336, 4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729, 4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854, 4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332, 5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004, 5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419, 4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293, 3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580, 4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339, 6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341, 5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493, 5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046, 4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904, 
6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728, 5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350, 6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233, 4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944, 5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413, 5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700, 3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999, 5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694, 6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571, 4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359, 6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178, 4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421, 4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330, 6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855, 3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587, 6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803, 4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791, 3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304, 3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445, 3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506, 4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856, 2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057, 5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777, 4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369, 
5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028, 5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914, 5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175, 4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681, 5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534, 4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912, 5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054, 1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336, 3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666, 4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375, 4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113, 6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614, 4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173, 5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197, 3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271, 5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423, 5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529, 5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921, 3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837, 5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922, 5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187, 3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382, 5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628, 5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683, 
5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053, 6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928, 4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662, 6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663, 4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554, 3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191, 4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013, 5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932, 5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055, 5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829, 3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096, 3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660, 6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199, 6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748, 5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402, 6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957, 6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668, 6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763, 6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407, 6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051, 5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429, 6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791, 6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028, 3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305, 
3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159, 4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683, 4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372, 3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514, 5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544, 5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472, 5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716, 5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905, 5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327, 4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030, 5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281, 6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224, 5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327, 4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062, 4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354, 6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065, 3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953, 4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681, 4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708, 5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442, 6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387, 6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237, 4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713, 6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547, 
5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957, 5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337, 5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074, 5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685, 5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455, 4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722, 5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615, 5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093, 5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989, 5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094, 6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212, 4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967, 5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733, 4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260, 4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864, 6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353, 4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095, 6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287, 3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504, 5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539, 6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750, 6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864, 6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213, 5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573, 
6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252, 6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970, 3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703, 5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978, 4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767) # flake8: noqa
BjornW/autokey
refs/heads/master
src/lib/__init__.py
12133432
artschwagerb/axolotl
refs/heads/master
tv/urls.py
1
from django.conf.urls import patterns, url from django.views.generic import DetailView from tv import views urlpatterns = patterns('tv.views', url(r'^$', views.index, name='tv-index'), url(r'^admin/dashboard', views.admin_dashboard, name='tv-admin-dashboard'), url(r'^show/(?P<pk>\d+)/$', views.show, name='tv-show'), url(r'^show/(?P<pk>\d+)/favorite/$', views.show_favorite, name='tv-show-favorite'), url(r'^season/(?P<pk>\d+)/$', views.season, name='tv-season'), url(r'^episode/(?P<pk>\d+)/$', views.episode, name='tv-episode'), url(r'^episode/(?P<pk>\d+)/update/$', views.episode_update, name='tv-episode-update'), url(r'^search', views.search, name='tv-search'), url(r'^favorites', views.show_favorites, name='tv-favorites'), )
edx/lettuce
refs/heads/master
tests/integration/lib/Django-1.2.5/tests/regressiontests/admin_scripts/complex_app/models/foo.py
102
from django.db import models class Foo(models.Model): name = models.CharField(max_length=5) class Meta: app_label = 'complex_app'
alsandeep/kernel-4.4
refs/heads/master
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
12980
# SchedGui.py - Python extension for perf script, basic GUI code for # traces drawing and overview. # # Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com> # # This software is distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. try: import wx except ImportError: raise ImportError, "You need to install the wxpython lib for this script" class RootFrame(wx.Frame): Y_OFFSET = 100 RECT_HEIGHT = 100 RECT_SPACE = 50 EVENT_MARKING_WIDTH = 5 def __init__(self, sched_tracer, title, parent = None, id = -1): wx.Frame.__init__(self, parent, id, title) (self.screen_width, self.screen_height) = wx.GetDisplaySize() self.screen_width -= 10 self.screen_height -= 10 self.zoom = 0.5 self.scroll_scale = 20 self.sched_tracer = sched_tracer self.sched_tracer.set_root_win(self) (self.ts_start, self.ts_end) = sched_tracer.interval() self.update_width_virtual() self.nr_rects = sched_tracer.nr_rectangles() + 1 self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) # whole window panel self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height)) # scrollable container self.scroll = wx.ScrolledWindow(self.panel) self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale) self.scroll.EnableScrolling(True, True) self.scroll.SetFocus() # scrollable drawing area self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2)) self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint) self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Bind(wx.EVT_PAINT, self.on_paint) self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Fit() self.Fit() self.scroll_panel.SetDimensions(-1, -1, 
self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING) self.txt = None self.Show(True) def us_to_px(self, val): return val / (10 ** 3) * self.zoom def px_to_us(self, val): return (val / self.zoom) * (10 ** 3) def scroll_start(self): (x, y) = self.scroll.GetViewStart() return (x * self.scroll_scale, y * self.scroll_scale) def scroll_start_us(self): (x, y) = self.scroll_start() return self.px_to_us(x) def paint_rectangle_zone(self, nr, color, top_color, start, end): offset_px = self.us_to_px(start - self.ts_start) width_px = self.us_to_px(end - self.ts_start) offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) width_py = RootFrame.RECT_HEIGHT dc = self.dc if top_color is not None: (r, g, b) = top_color top_color = wx.Colour(r, g, b) brush = wx.Brush(top_color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH) width_py -= RootFrame.EVENT_MARKING_WIDTH offset_py += RootFrame.EVENT_MARKING_WIDTH (r ,g, b) = color color = wx.Colour(r, g, b) brush = wx.Brush(color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, width_py) def update_rectangles(self, dc, start, end): start += self.ts_start end += self.ts_start self.sched_tracer.fill_zone(start, end) def on_paint(self, event): dc = wx.PaintDC(self.scroll_panel) self.dc = dc width = min(self.width_virtual, self.screen_width) (x, y) = self.scroll_start() start = self.px_to_us(x) end = self.px_to_us(x + width) self.update_rectangles(dc, start, end) def rect_from_ypixel(self, y): y -= RootFrame.Y_OFFSET rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT: return -1 return rect def update_summary(self, txt): if self.txt: self.txt.Destroy() self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50)) def on_mouse_down(self, event): (x, y) = 
event.GetPositionTuple() rect = self.rect_from_ypixel(y) if rect == -1: return t = self.px_to_us(x) + self.ts_start self.sched_tracer.mouse_down(rect, t) def update_width_virtual(self): self.width_virtual = self.us_to_px(self.ts_end - self.ts_start) def __zoom(self, x): self.update_width_virtual() (xpos, ypos) = self.scroll.GetViewStart() xpos = self.us_to_px(x) / self.scroll_scale self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos) self.Refresh() def zoom_in(self): x = self.scroll_start_us() self.zoom *= 2 self.__zoom(x) def zoom_out(self): x = self.scroll_start_us() self.zoom /= 2 self.__zoom(x) def on_key_press(self, event): key = event.GetRawKeyCode() if key == ord("+"): self.zoom_in() return if key == ord("-"): self.zoom_out() return key = event.GetKeyCode() (x, y) = self.scroll.GetViewStart() if key == wx.WXK_RIGHT: self.scroll.Scroll(x + 1, y) elif key == wx.WXK_LEFT: self.scroll.Scroll(x - 1, y) elif key == wx.WXK_DOWN: self.scroll.Scroll(x, y + 1) elif key == wx.WXK_UP: self.scroll.Scroll(x, y - 1)
abilian/abilian-sbe
refs/heads/master
conftest.py
1
"""Configuration and injectable fixtures for Pytest. Reuses fixtures defined in abilian-core. """ import logging import os from pytest import fixture from abilian.sbe.app import create_app from abilian.testing.fixtures import TestConfig pytest_plugins = [ "abilian.testing.fixtures", "abilian.sbe.apps.communities.tests.fixtures", ] if os.environ.get("COLLECT_ANNOTATIONS"): def pytest_collection_finish(session): """Handle the pytest collection finish hook: configure pyannotate. Explicitly delay importing `collect_types` until all tests have been collected. This gives gevent a chance to monkey patch the world before importing pyannotate. """ from pyannotate_runtime import collect_types collect_types.init_types_collection() @fixture(autouse=True) def collect_types_fixture(): from pyannotate_runtime import collect_types collect_types.resume() yield collect_types.pause() def pytest_sessionfinish(session, exitstatus): from pyannotate_runtime import collect_types collect_types.dump_stats("type_info.json") class NoCsrfTestConfig(TestConfig): WTF_CSRF_ENABLED = False @fixture def config(): return NoCsrfTestConfig @fixture def app(config): """Return an App configured with config=TestConfig.""" return create_app(config=config) @fixture def req_ctx(app, request_ctx): """Simple alias (TBR)""" return request_ctx
pinkflozd/android_kernel_motorola_falcon
refs/heads/lolipop
tools/perf/scripts/python/net_dropmonitor.py
4235
# Monitor the system for dropped packets and proudce a report of drop locations and counts import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * drop_log = {} kallsyms = [] def get_kallsyms_table(): global kallsyms try: f = open("/proc/kallsyms", "r") linecount = 0 for line in f: linecount = linecount+1 f.seek(0) except: return j = 0 for line in f: loc = int(line.split()[0], 16) name = line.split()[2] j = j +1 if ((j % 100) == 0): print "\r" + str(j) + "/" + str(linecount), kallsyms.append({ 'loc': loc, 'name' : name}) print "\r" + str(j) + "/" + str(linecount) kallsyms.sort() return def get_sym(sloc): loc = int(sloc) for i in kallsyms: if (i['loc'] >= loc): return (i['name'], i['loc']-loc) return (None, 0) def print_drop_table(): print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT") for i in drop_log.keys(): (sym, off) = get_sym(i) if sym == None: sym = i print "%25s %25s %25s" % (sym, off, drop_log[i]) def trace_begin(): print "Starting trace (Ctrl-C to dump results)" def trace_end(): print "Gathering kallsyms data" get_kallsyms_table() print_drop_table() # called from perf, when it finds a correspoinding event def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): slocation = str(location) try: drop_log[slocation] = drop_log[slocation] + 1 except: drop_log[slocation] = 1
norayr/unisubs
refs/heads/staging
apps/caching/templatetags/__init__.py
12133432
RedMadRobot/rmr_django
refs/heads/master
rmr/middleware/__init__.py
12133432
divio/django
refs/heads/master
tests/admin_scripts/app_with_import/__init__.py
12133432
windyuuy/opera
refs/heads/master
chromium/src/third_party/scons-2.0.1/engine/SCons/Tool/intelc.py
61
"""SCons.Tool.icl Tool-specific initialization for the Intel C/C++ compiler. Supports Linux and Windows compilers, v7 and up. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
from __future__ import division __revision__ = "src/engine/SCons/Tool/intelc.py 5134 2010/08/16 23:02:40 bdeegan" import math, sys, os.path, glob, string, re is_windows = sys.platform == 'win32' is_win64 = is_windows and (os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64' or ('PROCESSOR_ARCHITEW6432' in os.environ and os.environ['PROCESSOR_ARCHITEW6432'] == 'AMD64')) is_linux = sys.platform == 'linux2' is_mac = sys.platform == 'darwin' if is_windows: import SCons.Tool.msvc elif is_linux: import SCons.Tool.gcc elif is_mac: import SCons.Tool.gcc import SCons.Util import SCons.Warnings # Exceptions for this tool class IntelCError(SCons.Errors.InternalError): pass class MissingRegistryError(IntelCError): # missing registry entry pass class MissingDirError(IntelCError): # dir not found pass class NoRegistryModuleError(IntelCError): # can't read registry at all pass def uniquify(s): """Return a sequence containing only one copy of each unique element from input sequence s. Does not preserve order. Input sequence must be hashable (i.e. must be usable as a dictionary key).""" u = {} for x in s: u[x] = 1 return list(u.keys()) def linux_ver_normalize(vstr): """Normalize a Linux compiler version number. Intel changed from "80" to "9.0" in 2005, so we assume if the number is greater than 60 it's an old-style number and otherwise new-style. Always returns an old-style float like 80 or 90 for compatibility with Windows. Shades of Y2K!""" # Check for version number like 9.1.026: return 91.026 m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr) if m: vmaj,vmin,build = m.groups() return float(vmaj) * 10. 
+ float(vmin) + float(build) / 1000.; else: f = float(vstr) if is_windows: return f else: if f < 60: return f * 10.0 else: return f def check_abi(abi): """Check for valid ABI (application binary interface) name, and map into canonical one""" if not abi: return None abi = abi.lower() # valid_abis maps input name to canonical name if is_windows: valid_abis = {'ia32' : 'ia32', 'x86' : 'ia32', 'ia64' : 'ia64', 'em64t' : 'em64t', 'amd64' : 'em64t'} if is_linux: valid_abis = {'ia32' : 'ia32', 'x86' : 'ia32', 'x86_64' : 'x86_64', 'em64t' : 'x86_64', 'amd64' : 'x86_64'} if is_mac: valid_abis = {'ia32' : 'ia32', 'x86' : 'ia32', 'x86_64' : 'x86_64', 'em64t' : 'x86_64'} try: abi = valid_abis[abi] except KeyError: raise SCons.Errors.UserError("Intel compiler: Invalid ABI %s, valid values are %s"% \ (abi, list(valid_abis.keys()))) return abi def vercmp(a, b): """Compare strings as floats, but Intel changed Linux naming convention at 9.0""" return cmp(linux_ver_normalize(b), linux_ver_normalize(a)) def get_version_from_list(v, vlist): """See if we can match v (string) in vlist (list of strings) Linux has to match in a fuzzy way.""" if is_windows: # Simple case, just find it in the list if v in vlist: return v else: return None else: # Fuzzy match: normalize version number first, but still return # original non-normalized form. fuzz = 0.001 for vi in vlist: if math.fabs(linux_ver_normalize(vi) - linux_ver_normalize(v)) < fuzz: return vi # Not found return None def get_intel_registry_value(valuename, version=None, abi=None): """ Return a value from the Intel compiler registry tree. 
(Windows only) """ # Open the key: if is_win64: K = 'Software\\Wow6432Node\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() else: K = 'Software\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper() try: k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) except SCons.Util.RegError: raise MissingRegistryError("%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi)) # Get the value: try: v = SCons.Util.RegQueryValueEx(k, valuename)[0] return v # or v.encode('iso-8859-1', 'replace') to remove unicode? except SCons.Util.RegError: raise MissingRegistryError("%s\\%s was not found in the registry."%(K, valuename)) def get_all_compiler_versions(): """Returns a sorted list of strings, like "70" or "80" or "9.0" with most recent compiler version first. """ versions=[] if is_windows: if is_win64: keyname = 'Software\\WoW6432Node\\Intel\\Compilers\\C++' else: keyname = 'Software\\Intel\\Compilers\\C++' try: k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, keyname) except WindowsError: return [] i = 0 versions = [] try: while i < 100: subkey = SCons.Util.RegEnumKey(k, i) # raises EnvironmentError # Check that this refers to an existing dir. # This is not 100% perfect but should catch common # installation issues like when the compiler was installed # and then the install directory deleted or moved (rather # than uninstalling properly), so the registry values # are still there. ok = False for try_abi in ('IA32', 'IA32e', 'IA64', 'EM64T'): try: d = get_intel_registry_value('ProductDir', subkey, try_abi) except MissingRegistryError: continue # not found in reg, keep going if os.path.exists(d): ok = True if ok: versions.append(subkey) else: try: # Registry points to nonexistent dir. Ignore this # version. value = get_intel_registry_value('ProductDir', subkey, 'IA32') except MissingRegistryError, e: # Registry key is left dangling (potentially # after uninstalling). 
print \ "scons: *** Ignoring the registry key for the Intel compiler version %s.\n" \ "scons: *** It seems that the compiler was uninstalled and that the registry\n" \ "scons: *** was not cleaned up properly.\n" % subkey else: print "scons: *** Ignoring "+str(value) i = i + 1 except EnvironmentError: # no more subkeys pass elif is_linux: for d in glob.glob('/opt/intel_cc_*'): # Typical dir here is /opt/intel_cc_80. m = re.search(r'cc_(.*)$', d) if m: versions.append(m.group(1)) for d in glob.glob('/opt/intel/cc*/*'): # Typical dir here is /opt/intel/cc/9.0 for IA32, # /opt/intel/cce/9.0 for EMT64 (AMD64) m = re.search(r'([0-9.]+)$', d) if m: versions.append(m.group(1)) elif is_mac: for d in glob.glob('/opt/intel/cc*/*'): # Typical dir here is /opt/intel/cc/9.0 for IA32, # /opt/intel/cce/9.0 for EMT64 (AMD64) m = re.search(r'([0-9.]+)$', d) if m: versions.append(m.group(1)) return sorted(uniquify(versions)) # remove dups def get_intel_compiler_top(version, abi): """ Return the main path to the top-level dir of the Intel compiler, using the given version. The compiler will be in <top>/bin/icl.exe (icc on linux), the include dir is <top>/include, etc. """ if is_windows: if not SCons.Util.can_read_reg: raise NoRegistryModuleError("No Windows registry module was found") top = get_intel_registry_value('ProductDir', version, abi) # pre-11, icl was in Bin. 11 and later, it's in Bin/<abi> apparently. if not os.path.exists(os.path.join(top, "Bin", "icl.exe")) \ and not os.path.exists(os.path.join(top, "Bin", abi, "icl.exe")): raise MissingDirError("Can't find Intel compiler in %s"%(top)) elif is_mac or is_linux: # first dir is new (>=9.0) style, second is old (8.0) style. 
dirs=('/opt/intel/cc/%s', '/opt/intel_cc_%s') if abi == 'x86_64': dirs=('/opt/intel/cce/%s',) # 'e' stands for 'em64t', aka x86_64 aka amd64 top=None for d in dirs: if os.path.exists(os.path.join(d%version, "bin", "icc")): top = d%version break if not top: raise MissingDirError("Can't find version %s Intel compiler in %s (abi='%s')"%(version,top, abi)) return top def generate(env, version=None, abi=None, topdir=None, verbose=0): """Add Builders and construction variables for Intel C/C++ compiler to an Environment. args: version: (string) compiler version to use, like "80" abi: (string) 'win32' or whatever Itanium version wants topdir: (string) compiler top dir, like "c:\Program Files\Intel\Compiler70" If topdir is used, version and abi are ignored. verbose: (int) if >0, prints compiler version used. """ if not (is_mac or is_linux or is_windows): # can't handle this platform return if is_windows: SCons.Tool.msvc.generate(env) elif is_linux: SCons.Tool.gcc.generate(env) elif is_mac: SCons.Tool.gcc.generate(env) # if version is unspecified, use latest vlist = get_all_compiler_versions() if not version: if vlist: version = vlist[0] else: # User may have specified '90' but we need to get actual dirname '9.0'. # get_version_from_list does that mapping. v = get_version_from_list(version, vlist) if not v: raise SCons.Errors.UserError("Invalid Intel compiler version %s: "%version + \ "installed versions are %s"%(', '.join(vlist))) version = v # if abi is unspecified, use ia32 # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here) abi = check_abi(abi) if abi is None: if is_mac or is_linux: # Check if we are on 64-bit linux, default to 64 then. 
uname_m = os.uname()[4] if uname_m == 'x86_64': abi = 'x86_64' else: abi = 'ia32' else: if is_win64: abi = 'em64t' else: abi = 'ia32' if version and not topdir: try: topdir = get_intel_compiler_top(version, abi) except (SCons.Util.RegError, IntelCError): topdir = None if not topdir: # Normally this is an error, but it might not be if the compiler is # on $PATH and the user is importing their env. class ICLTopDirWarning(SCons.Warnings.Warning): pass if (is_mac or is_linux) and not env.Detect('icc') or \ is_windows and not env.Detect('icl'): SCons.Warnings.enableWarningClass(ICLTopDirWarning) SCons.Warnings.warn(ICLTopDirWarning, "Failed to find Intel compiler for version='%s', abi='%s'"% (str(version), str(abi))) else: # should be cleaned up to say what this other version is # since in this case we have some other Intel compiler installed SCons.Warnings.enableWarningClass(ICLTopDirWarning) SCons.Warnings.warn(ICLTopDirWarning, "Can't find Intel compiler top dir for version='%s', abi='%s'"% (str(version), str(abi))) if topdir: if verbose: print "Intel C compiler: using version %s (%g), abi %s, in '%s'"%\ (repr(version), linux_ver_normalize(version),abi,topdir) if is_linux: # Show the actual compiler version by running the compiler. os.system('%s/bin/icc --version'%topdir) if is_mac: # Show the actual compiler version by running the compiler. 
os.system('%s/bin/icc --version'%topdir) env['INTEL_C_COMPILER_TOP'] = topdir if is_linux: paths={'INCLUDE' : 'include', 'LIB' : 'lib', 'PATH' : 'bin', 'LD_LIBRARY_PATH' : 'lib'} for p in paths.keys(): env.PrependENVPath(p, os.path.join(topdir, paths[p])) if is_mac: paths={'INCLUDE' : 'include', 'LIB' : 'lib', 'PATH' : 'bin', 'LD_LIBRARY_PATH' : 'lib'} for p in paths.keys(): env.PrependENVPath(p, os.path.join(topdir, paths[p])) if is_windows: # env key reg valname default subdir of top paths=(('INCLUDE', 'IncludeDir', 'Include'), ('LIB' , 'LibDir', 'Lib'), ('PATH' , 'BinDir', 'Bin')) # We are supposed to ignore version if topdir is set, so set # it to the emptry string if it's not already set. if version is None: version = '' # Each path has a registry entry, use that or default to subdir for p in paths: try: path=get_intel_registry_value(p[1], version, abi) # These paths may have $(ICInstallDir) # which needs to be substituted with the topdir. path=path.replace('$(ICInstallDir)', topdir + os.sep) except IntelCError: # Couldn't get it from registry: use default subdir of topdir env.PrependENVPath(p[0], os.path.join(topdir, p[2])) else: env.PrependENVPath(p[0], path.split(os.pathsep)) # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]])) if is_windows: env['CC'] = 'icl' env['CXX'] = 'icl' env['LINK'] = 'xilink' else: env['CC'] = 'icc' env['CXX'] = 'icpc' # Don't reset LINK here; # use smart_link which should already be here from link.py. #env['LINK'] = '$CC' env['AR'] = 'xiar' env['LD'] = 'xild' # not used by default # This is not the exact (detailed) compiler version, # just the major version as determined above or specified # by the user. It is a float like 80 or 90, in normalized form for Linux # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0) if version: env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version) if is_windows: # Look for license file dir # in system environment, registry, and default location. 
envlicdir = os.environ.get("INTEL_LICENSE_FILE", '') K = ('SOFTWARE\Intel\Licenses') try: k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K) reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0] except (AttributeError, SCons.Util.RegError): reglicdir = "" defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses' licdir = None for ld in [envlicdir, reglicdir]: # If the string contains an '@', then assume it's a network # license (port@system) and good by definition. if ld and (ld.find('@') != -1 or os.path.exists(ld)): licdir = ld break if not licdir: licdir = defaultlicdir if not os.path.exists(licdir): class ICLLicenseDirWarning(SCons.Warnings.Warning): pass SCons.Warnings.enableWarningClass(ICLLicenseDirWarning) SCons.Warnings.warn(ICLLicenseDirWarning, "Intel license dir was not found." " Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)." " Using the default path as a last resort." % (envlicdir, reglicdir, defaultlicdir)) env['ENV']['INTEL_LICENSE_FILE'] = licdir def exists(env): if not (is_mac or is_linux or is_windows): # can't handle this platform return 0 try: versions = get_all_compiler_versions() except (SCons.Util.RegError, IntelCError): versions = None detected = versions is not None and len(versions) > 0 if not detected: # try env.Detect, maybe that will work if is_windows: return env.Detect('icl') elif is_linux: return env.Detect('icc') elif is_mac: return env.Detect('icc') return detected # end of file # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
meabsence/python-for-android
refs/heads/master
python-modules/zope/zope/interface/common/idatetime.py
50
############################################################################## # Copyright (c) 2002 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. ############################################################################## """Datetime interfaces. This module is called idatetime because if it were called datetime the import of the real datetime would fail. $Id: idatetime.py 110736 2010-04-11 10:59:30Z regebro $ """ from zope.interface import Interface, Attribute from zope.interface import classImplements from datetime import timedelta, date, datetime, time, tzinfo class ITimeDeltaClass(Interface): """This is the timedelta class interface.""" min = Attribute("The most negative timedelta object") max = Attribute("The most positive timedelta object") resolution = Attribute( "The smallest difference between non-equal timedelta objects") class ITimeDelta(ITimeDeltaClass): """Represent the difference between two datetime objects. Supported operators: - add, subtract timedelta - unary plus, minus, abs - compare to timedelta - multiply, divide by int/long In addition, datetime supports subtraction of two datetime objects returning a timedelta, and addition or subtraction of a datetime and a timedelta giving a datetime. Representation: (days, seconds, microseconds). 
""" days = Attribute("Days between -999999999 and 999999999 inclusive") seconds = Attribute("Seconds between 0 and 86399 inclusive") microseconds = Attribute("Microseconds between 0 and 999999 inclusive") class IDateClass(Interface): """This is the date class interface.""" min = Attribute("The earliest representable date") max = Attribute("The latest representable date") resolution = Attribute( "The smallest difference between non-equal date objects") def today(): """Return the current local time. This is equivalent to date.fromtimestamp(time.time())""" def fromtimestamp(timestamp): """Return the local date from a POSIX timestamp (like time.time()) This may raise ValueError, if the timestamp is out of the range of values supported by the platform C localtime() function. It's common for this to be restricted to years from 1970 through 2038. Note that on non-POSIX systems that include leap seconds in their notion of a timestamp, leap seconds are ignored by fromtimestamp(). """ def fromordinal(ordinal): """Return the date corresponding to the proleptic Gregorian ordinal. January 1 of year 1 has ordinal 1. ValueError is raised unless 1 <= ordinal <= date.max.toordinal(). For any date d, date.fromordinal(d.toordinal()) == d. """ class IDate(IDateClass): """Represents a date (year, month and day) in an idealized calendar. Operators: __repr__, __str__ __cmp__, __hash__ __add__, __radd__, __sub__ (add/radd only with timedelta arg) """ year = Attribute("Between MINYEAR and MAXYEAR inclusive.") month = Attribute("Between 1 and 12 inclusive") day = Attribute( "Between 1 and the number of days in the given month of the given year.") def replace(year, month, day): """Return a date with the same value. Except for those members given new values by whichever keyword arguments are specified. For example, if d == date(2002, 12, 31), then d.replace(day=26) == date(2000, 12, 26). """ def timetuple(): """Return a 9-element tuple of the form returned by time.localtime(). 
The hours, minutes and seconds are 0, and the DST flag is -1. d.timetuple() is equivalent to (d.year, d.month, d.day, 0, 0, 0, d.weekday(), d.toordinal() - date(d.year, 1, 1).toordinal() + 1, -1) """ def toordinal(): """Return the proleptic Gregorian ordinal of the date January 1 of year 1 has ordinal 1. For any date object d, date.fromordinal(d.toordinal()) == d. """ def weekday(): """Return the day of the week as an integer. Monday is 0 and Sunday is 6. For example, date(2002, 12, 4).weekday() == 2, a Wednesday. See also isoweekday(). """ def isoweekday(): """Return the day of the week as an integer. Monday is 1 and Sunday is 7. For example, date(2002, 12, 4).isoweekday() == 3, a Wednesday. See also weekday(), isocalendar(). """ def isocalendar(): """Return a 3-tuple, (ISO year, ISO week number, ISO weekday). The ISO calendar is a widely used variant of the Gregorian calendar. See http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm for a good explanation. The ISO year consists of 52 or 53 full weeks, and where a week starts on a Monday and ends on a Sunday. The first week of an ISO year is the first (Gregorian) calendar week of a year containing a Thursday. This is called week number 1, and the ISO year of that Thursday is the same as its Gregorian year. For example, 2004 begins on a Thursday, so the first week of ISO year 2004 begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004, so that date(2003, 12, 29).isocalendar() == (2004, 1, 1) and date(2004, 1, 4).isocalendar() == (2004, 1, 7). """ def isoformat(): """Return a string representing the date in ISO 8601 format. This is 'YYYY-MM-DD'. For example, date(2002, 12, 4).isoformat() == '2002-12-04'. """ def __str__(): """For a date d, str(d) is equivalent to d.isoformat().""" def ctime(): """Return a string representing the date. For example date(2002, 12, 4).ctime() == 'Wed Dec 4 00:00:00 2002'. 
d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple())) on platforms where the native C ctime() function (which time.ctime() invokes, but which date.ctime() does not invoke) conforms to the C standard. """ def strftime(format): """Return a string representing the date. Controlled by an explicit format string. Format codes referring to hours, minutes or seconds will see 0 values. """ class IDateTimeClass(Interface): """This is the datetime class interface.""" min = Attribute("The earliest representable datetime") max = Attribute("The latest representable datetime") resolution = Attribute( "The smallest possible difference between non-equal datetime objects") def today(): """Return the current local datetime, with tzinfo None. This is equivalent to datetime.fromtimestamp(time.time()). See also now(), fromtimestamp(). """ def now(tz=None): """Return the current local date and time. If optional argument tz is None or not specified, this is like today(), but, if possible, supplies more precision than can be gotten from going through a time.time() timestamp (for example, this may be possible on platforms supplying the C gettimeofday() function). Else tz must be an instance of a class tzinfo subclass, and the current date and time are converted to tz's time zone. In this case the result is equivalent to tz.fromutc(datetime.utcnow().replace(tzinfo=tz)). See also today(), utcnow(). """ def utcnow(): """Return the current UTC date and time, with tzinfo None. This is like now(), but returns the current UTC date and time, as a naive datetime object. See also now(). """ def fromtimestamp(timestamp, tz=None): """Return the local date and time corresponding to the POSIX timestamp. Same as is returned by time.time(). If optional argument tz is None or not specified, the timestamp is converted to the platform's local date and time, and the returned datetime object is naive. 
Else tz must be an instance of a class tzinfo subclass, and the timestamp is converted to tz's time zone. In this case the result is equivalent to tz.fromutc(datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz)). fromtimestamp() may raise ValueError, if the timestamp is out of the range of values supported by the platform C localtime() or gmtime() functions. It's common for this to be restricted to years in 1970 through 2038. Note that on non-POSIX systems that include leap seconds in their notion of a timestamp, leap seconds are ignored by fromtimestamp(), and then it's possible to have two timestamps differing by a second that yield identical datetime objects. See also utcfromtimestamp(). """ def utcfromtimestamp(timestamp): """Return the UTC datetime from the POSIX timestamp with tzinfo None. This may raise ValueError, if the timestamp is out of the range of values supported by the platform C gmtime() function. It's common for this to be restricted to years in 1970 through 2038. See also fromtimestamp(). """ def fromordinal(ordinal): """Return the datetime from the proleptic Gregorian ordinal. January 1 of year 1 has ordinal 1. ValueError is raised unless 1 <= ordinal <= datetime.max.toordinal(). The hour, minute, second and microsecond of the result are all 0, and tzinfo is None. """ def combine(date, time): """Return a new datetime object. Its date members are equal to the given date object's, and whose time and tzinfo members are equal to the given time object's. For any datetime object d, d == datetime.combine(d.date(), d.timetz()). If date is a datetime object, its time and tzinfo members are ignored. """ class IDateTime(IDate, IDateTimeClass): """Object contains all the information from a date object and a time object. 
""" year = Attribute("Year between MINYEAR and MAXYEAR inclusive") month = Attribute("Month between 1 and 12 inclusive") day = Attribute( "Day between 1 and the number of days in the given month of the year") hour = Attribute("Hour in range(24)") minute = Attribute("Minute in range(60)") second = Attribute("Second in range(60)") microsecond = Attribute("Microsecond in range(1000000)") tzinfo = Attribute( """The object passed as the tzinfo argument to the datetime constructor or None if none was passed""") def date(): """Return date object with same year, month and day.""" def time(): """Return time object with same hour, minute, second, microsecond. tzinfo is None. See also method timetz(). """ def timetz(): """Return time object with same hour, minute, second, microsecond, and tzinfo. See also method time(). """ def replace(year, month, day, hour, minute, second, microsecond, tzinfo): """Return a datetime with the same members, except for those members given new values by whichever keyword arguments are specified. Note that tzinfo=None can be specified to create a naive datetime from an aware datetime with no conversion of date and time members. """ def astimezone(tz): """Return a datetime object with new tzinfo member tz, adjusting the date and time members so the result is the same UTC time as self, but in tz's local time. tz must be an instance of a tzinfo subclass, and its utcoffset() and dst() methods must not return None. self must be aware (self.tzinfo must not be None, and self.utcoffset() must not return None). If self.tzinfo is tz, self.astimezone(tz) is equal to self: no adjustment of date or time members is performed. Else the result is local time in time zone tz, representing the same UTC time as self: after astz = dt.astimezone(tz), astz - astz.utcoffset() will usually have the same date and time members as dt - dt.utcoffset(). 
The discussion of class tzinfo explains the cases at Daylight Saving Time transition boundaries where this cannot be achieved (an issue only if tz models both standard and daylight time). If you merely want to attach a time zone object tz to a datetime dt without adjustment of date and time members, use dt.replace(tzinfo=tz). If you merely want to remove the time zone object from an aware datetime dt without conversion of date and time members, use dt.replace(tzinfo=None). Note that the default tzinfo.fromutc() method can be overridden in a tzinfo subclass to effect the result returned by astimezone(). """ def utcoffset(): """Return the timezone offset in minutes east of UTC (negative west of UTC).""" def dst(): """Return 0 if DST is not in effect, or the DST offset (in minutes eastward) if DST is in effect. """ def tzname(): """Return the timezone name.""" def timetuple(): """Return a 9-element tuple of the form returned by time.localtime().""" def utctimetuple(): """Return UTC time tuple compatilble with time.gmtimr().""" def toordinal(): """Return the proleptic Gregorian ordinal of the date. The same as self.date().toordinal(). """ def weekday(): """Return the day of the week as an integer. Monday is 0 and Sunday is 6. The same as self.date().weekday(). See also isoweekday(). """ def isoweekday(): """Return the day of the week as an integer. Monday is 1 and Sunday is 7. The same as self.date().isoweekday. See also weekday(), isocalendar(). """ def isocalendar(): """Return a 3-tuple, (ISO year, ISO week number, ISO weekday). The same as self.date().isocalendar(). """ def isoformat(sep='T'): """Return a string representing the date and time in ISO 8601 format. YYYY-MM-DDTHH:MM:SS.mmmmmm or YYYY-MM-DDTHH:MM:SS if microsecond is 0 If utcoffset() does not return None, a 6-character string is appended, giving the UTC offset in (signed) hours and minutes: YYYY-MM-DDTHH:MM:SS.mmmmmm+HH:MM or YYYY-MM-DDTHH:MM:SS+HH:MM if microsecond is 0. 
The optional argument sep (default 'T') is a one-character separator, placed between the date and time portions of the result. """ def __str__(): """For a datetime instance d, str(d) is equivalent to d.isoformat(' '). """ def ctime(): """Return a string representing the date and time. datetime(2002, 12, 4, 20, 30, 40).ctime() == 'Wed Dec 4 20:30:40 2002'. d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple())) on platforms where the native C ctime() function (which time.ctime() invokes, but which datetime.ctime() does not invoke) conforms to the C standard. """ def strftime(format): """Return a string representing the date and time. This is controlled by an explicit format string. """ class ITimeClass(Interface): """This is the time class interface.""" min = Attribute("The earliest representable time") max = Attribute("The latest representable time") resolution = Attribute( "The smallest possible difference between non-equal time objects") class ITime(ITimeClass): """Represent time with time zone. Operators: __repr__, __str__ __cmp__, __hash__ """ hour = Attribute("Hour in range(24)") minute = Attribute("Minute in range(60)") second = Attribute("Second in range(60)") microsecond = Attribute("Microsecond in range(1000000)") tzinfo = Attribute( """The object passed as the tzinfo argument to the time constructor or None if none was passed.""") def replace(hour, minute, second, microsecond, tzinfo): """Return a time with the same value. Except for those members given new values by whichever keyword arguments are specified. Note that tzinfo=None can be specified to create a naive time from an aware time, without conversion of the time members. """ def isoformat(): """Return a string representing the time in ISO 8601 format. 
That is HH:MM:SS.mmmmmm or, if self.microsecond is 0, HH:MM:SS If utcoffset() does not return None, a 6-character string is appended, giving the UTC offset in (signed) hours and minutes: HH:MM:SS.mmmmmm+HH:MM or, if self.microsecond is 0, HH:MM:SS+HH:MM """ def __str__(): """For a time t, str(t) is equivalent to t.isoformat().""" def strftime(format): """Return a string representing the time. This is controlled by an explicit format string. """ def utcoffset(): """Return the timezone offset in minutes east of UTC (negative west of UTC). If tzinfo is None, returns None, else returns self.tzinfo.utcoffset(None), and raises an exception if the latter doesn't return None or a timedelta object representing a whole number of minutes with magnitude less than one day. """ def dst(): """Return 0 if DST is not in effect, or the DST offset (in minutes eastward) if DST is in effect. If tzinfo is None, returns None, else returns self.tzinfo.dst(None), and raises an exception if the latter doesn't return None, or a timedelta object representing a whole number of minutes with magnitude less than one day. """ def tzname(): """Return the timezone name. If tzinfo is None, returns None, else returns self.tzinfo.tzname(None), or raises an exception if the latter doesn't return None or a string object. """ class ITZInfo(Interface): """Time zone info class. """ def utcoffset(dt): """Return offset of local time from UTC, in minutes east of UTC. If local time is west of UTC, this should be negative. Note that this is intended to be the total offset from UTC; for example, if a tzinfo object represents both time zone and DST adjustments, utcoffset() should return their sum. If the UTC offset isn't known, return None. Else the value returned must be a timedelta object specifying a whole number of minutes in the range -1439 to 1439 inclusive (1440 = 24*60; the magnitude of the offset must be less than one day). 
""" def dst(dt): """Return the daylight saving time (DST) adjustment, in minutes east of UTC, or None if DST information isn't known. """ def tzname(dt): """Return the time zone name corresponding to the datetime object as a string. """ def fromutc(dt): """Return an equivalent datetime in self's local time.""" classImplements(timedelta, ITimeDelta) classImplements(date, IDate) classImplements(datetime, IDateTime) classImplements(time, ITime) classImplements(tzinfo, ITZInfo) ## directlyProvides(timedelta, ITimeDeltaClass) ## directlyProvides(date, IDateClass) ## directlyProvides(datetime, IDateTimeClass) ## directlyProvides(time, ITimeClass)
centricular/meson
refs/heads/gst-msvc
test cases/python3/2 extmodule/blaster.py
17
#!/usr/bin/env python3 import tachyon import sys result = tachyon.phaserize('shoot') if not isinstance(result, int): print('Returned result not an integer.') sys.exit(1) if result != 1: print('Returned result {} is not 1.'.format(result)) sys.exit(1)
silveregg/moto
refs/heads/master
tests/test_rds/test_rds.py
2
from __future__ import unicode_literals import boto.rds import boto.vpc from boto.exception import BotoServerError import sure # noqa from moto import mock_ec2, mock_rds from tests.helpers import disable_on_py3 @disable_on_py3() @mock_rds def test_create_database(): conn = boto.rds.connect_to_region("us-west-2") database = conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2', security_groups=["my_sg"]) database.status.should.equal('available') database.id.should.equal("db-master-1") database.allocated_storage.should.equal(10) database.instance_class.should.equal("db.m1.small") database.master_username.should.equal("root") database.endpoint.should.equal(('db-master-1.aaaaaaaaaa.us-west-2.rds.amazonaws.com', 3306)) database.security_groups[0].name.should.equal('my_sg') @disable_on_py3() @mock_rds def test_get_databases(): conn = boto.rds.connect_to_region("us-west-2") list(conn.get_all_dbinstances()).should.have.length_of(0) conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2') conn.create_dbinstance("db-master-2", 10, 'db.m1.small', 'root', 'hunter2') list(conn.get_all_dbinstances()).should.have.length_of(2) databases = conn.get_all_dbinstances("db-master-1") list(databases).should.have.length_of(1) databases[0].id.should.equal("db-master-1") @mock_rds def test_describe_non_existant_database(): conn = boto.rds.connect_to_region("us-west-2") conn.get_all_dbinstances.when.called_with("not-a-db").should.throw(BotoServerError) @disable_on_py3() @mock_rds def test_delete_database(): conn = boto.rds.connect_to_region("us-west-2") list(conn.get_all_dbinstances()).should.have.length_of(0) conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2') list(conn.get_all_dbinstances()).should.have.length_of(1) conn.delete_dbinstance("db-master-1") list(conn.get_all_dbinstances()).should.have.length_of(0) @mock_rds def test_delete_non_existant_database(): conn = boto.rds.connect_to_region("us-west-2") 
conn.delete_dbinstance.when.called_with("not-a-db").should.throw(BotoServerError) @mock_rds def test_create_database_security_group(): conn = boto.rds.connect_to_region("us-west-2") security_group = conn.create_dbsecurity_group('db_sg', 'DB Security Group') security_group.name.should.equal('db_sg') security_group.description.should.equal("DB Security Group") list(security_group.ip_ranges).should.equal([]) @mock_rds def test_get_security_groups(): conn = boto.rds.connect_to_region("us-west-2") list(conn.get_all_dbsecurity_groups()).should.have.length_of(0) conn.create_dbsecurity_group('db_sg1', 'DB Security Group') conn.create_dbsecurity_group('db_sg2', 'DB Security Group') list(conn.get_all_dbsecurity_groups()).should.have.length_of(2) databases = conn.get_all_dbsecurity_groups("db_sg1") list(databases).should.have.length_of(1) databases[0].name.should.equal("db_sg1") @mock_rds def test_get_non_existant_security_group(): conn = boto.rds.connect_to_region("us-west-2") conn.get_all_dbsecurity_groups.when.called_with("not-a-sg").should.throw(BotoServerError) @mock_rds def test_delete_database_security_group(): conn = boto.rds.connect_to_region("us-west-2") conn.create_dbsecurity_group('db_sg', 'DB Security Group') list(conn.get_all_dbsecurity_groups()).should.have.length_of(1) conn.delete_dbsecurity_group("db_sg") list(conn.get_all_dbsecurity_groups()).should.have.length_of(0) @mock_rds def test_delete_non_existant_security_group(): conn = boto.rds.connect_to_region("us-west-2") conn.delete_dbsecurity_group.when.called_with("not-a-db").should.throw(BotoServerError) @disable_on_py3() @mock_rds def test_security_group_authorize(): conn = boto.rds.connect_to_region("us-west-2") security_group = conn.create_dbsecurity_group('db_sg', 'DB Security Group') list(security_group.ip_ranges).should.equal([]) security_group.authorize(cidr_ip='10.3.2.45/32') security_group = conn.get_all_dbsecurity_groups()[0] list(security_group.ip_ranges).should.have.length_of(1) 
security_group.ip_ranges[0].cidr_ip.should.equal('10.3.2.45/32') @disable_on_py3() @mock_rds def test_add_security_group_to_database(): conn = boto.rds.connect_to_region("us-west-2") database = conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2') security_group = conn.create_dbsecurity_group('db_sg', 'DB Security Group') database.modify(security_groups=[security_group]) database = conn.get_all_dbinstances()[0] list(database.security_groups).should.have.length_of(1) database.security_groups[0].name.should.equal("db_sg") @mock_ec2 @mock_rds def test_add_database_subnet_group(): vpc_conn = boto.vpc.connect_to_region("us-west-2") vpc = vpc_conn.create_vpc("10.0.0.0/16") subnet1 = vpc_conn.create_subnet(vpc.id, "10.1.0.0/24") subnet2 = vpc_conn.create_subnet(vpc.id, "10.2.0.0/24") subnet_ids = [subnet1.id, subnet2.id] conn = boto.rds.connect_to_region("us-west-2") subnet_group = conn.create_db_subnet_group("db_subnet", "my db subnet", subnet_ids) subnet_group.name.should.equal('db_subnet') subnet_group.description.should.equal("my db subnet") list(subnet_group.subnet_ids).should.equal(subnet_ids) @mock_ec2 @mock_rds def test_describe_database_subnet_group(): vpc_conn = boto.vpc.connect_to_region("us-west-2") vpc = vpc_conn.create_vpc("10.0.0.0/16") subnet = vpc_conn.create_subnet(vpc.id, "10.1.0.0/24") conn = boto.rds.connect_to_region("us-west-2") conn.create_db_subnet_group("db_subnet1", "my db subnet", [subnet.id]) conn.create_db_subnet_group("db_subnet2", "my db subnet", [subnet.id]) list(conn.get_all_db_subnet_groups()).should.have.length_of(2) list(conn.get_all_db_subnet_groups("db_subnet1")).should.have.length_of(1) conn.get_all_db_subnet_groups.when.called_with("not-a-subnet").should.throw(BotoServerError) @mock_ec2 @mock_rds def test_delete_database_subnet_group(): vpc_conn = boto.vpc.connect_to_region("us-west-2") vpc = vpc_conn.create_vpc("10.0.0.0/16") subnet = vpc_conn.create_subnet(vpc.id, "10.1.0.0/24") conn = 
boto.rds.connect_to_region("us-west-2") conn.create_db_subnet_group("db_subnet1", "my db subnet", [subnet.id]) list(conn.get_all_db_subnet_groups()).should.have.length_of(1) conn.delete_db_subnet_group("db_subnet1") list(conn.get_all_db_subnet_groups()).should.have.length_of(0) conn.delete_db_subnet_group.when.called_with("db_subnet1").should.throw(BotoServerError) @disable_on_py3() @mock_ec2 @mock_rds def test_create_database_in_subnet_group(): vpc_conn = boto.vpc.connect_to_region("us-west-2") vpc = vpc_conn.create_vpc("10.0.0.0/16") subnet = vpc_conn.create_subnet(vpc.id, "10.1.0.0/24") conn = boto.rds.connect_to_region("us-west-2") conn.create_db_subnet_group("db_subnet1", "my db subnet", [subnet.id]) database = conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2', db_subnet_group_name="db_subnet1") database = conn.get_all_dbinstances("db-master-1")[0] database.subnet_group.name.should.equal("db_subnet1") @disable_on_py3() @mock_rds def test_create_database_replica(): conn = boto.rds.connect_to_region("us-west-2") primary = conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2') replica = conn.create_dbinstance_read_replica("replica", "db-master-1", "db.m1.small") replica.id.should.equal("replica") replica.instance_class.should.equal("db.m1.small") status_info = replica.status_infos[0] status_info.normal.should.equal(True) status_info.status_type.should.equal('read replication') status_info.status.should.equal('replicating') primary = conn.get_all_dbinstances("db-master-1")[0] primary.read_replica_dbinstance_identifiers[0].should.equal("replica") conn.delete_dbinstance("replica") primary = conn.get_all_dbinstances("db-master-1")[0] list(primary.read_replica_dbinstance_identifiers).should.have.length_of(0) @disable_on_py3() @mock_rds def test_create_cross_region_database_replica(): west_1_conn = boto.rds.connect_to_region("us-west-1") west_2_conn = boto.rds.connect_to_region("us-west-2") primary = 
west_1_conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2') primary_arn = "arn:aws:rds:us-west-1:1234567890:db:db-master-1" replica = west_2_conn.create_dbinstance_read_replica( "replica", primary_arn, "db.m1.small", ) primary = west_1_conn.get_all_dbinstances("db-master-1")[0] primary.read_replica_dbinstance_identifiers[0].should.equal("replica") replica = west_2_conn.get_all_dbinstances("replica")[0] replica.instance_class.should.equal("db.m1.small") west_2_conn.delete_dbinstance("replica") primary = west_1_conn.get_all_dbinstances("db-master-1")[0] list(primary.read_replica_dbinstance_identifiers).should.have.length_of(0) @disable_on_py3() @mock_rds def test_connecting_to_us_east_1(): # boto does not use us-east-1 in the URL for RDS, # and that broke moto in the past: # https://github.com/boto/boto/blob/e271ff09364ea18d9d8b6f4d63d6b0ac6cbc9b75/boto/endpoints.json#L285 conn = boto.rds.connect_to_region("us-east-1") database = conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2', security_groups=["my_sg"]) database.status.should.equal('available') database.id.should.equal("db-master-1") database.allocated_storage.should.equal(10) database.instance_class.should.equal("db.m1.small") database.master_username.should.equal("root") database.endpoint.should.equal(('db-master-1.aaaaaaaaaa.us-east-1.rds.amazonaws.com', 3306)) database.security_groups[0].name.should.equal('my_sg') @disable_on_py3() @mock_rds def test_create_database_with_iops(): conn = boto.rds.connect_to_region("us-west-2") database = conn.create_dbinstance("db-master-1", 10, 'db.m1.small', 'root', 'hunter2', iops=6000) database.status.should.equal('available') database.iops.should.equal(6000) # boto>2.36.0 may change the following property name to `storage_type` database.StorageType.should.equal('io1')
kaiweifan/horizon
refs/heads/vip2
openstack_dashboard/dashboards/settings/password/__init__.py
12133432
Sofokus/formtags
refs/heads/master
formtags/templatetags/__init__.py
12133432
xia2/xia2
refs/heads/main
src/xia2/Wrappers/__init__.py
12133432